blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
listlengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1368e7d59993e242bd8df5f4001e611ee15a8ea1
|
1e9c9f2a9639db7cdb032aae69cb4d99aef1d3a5
|
/others/language/python/flask/projects/flask-api-swagger-doc/app.py
|
cde3f48bff736128aa64aada4524a75af42aa4b0
|
[
"MIT"
] |
permissive
|
sagarnikam123/learnNPractice
|
f0da3f8acf653e56c591353ab342765a6831698c
|
1b3b0cb2cff2f478006626a4c37a99102acbb628
|
refs/heads/master
| 2023-02-04T11:21:18.211654 | 2023-01-24T14:47:52 | 2023-01-24T14:47:52 | 61,184,927 | 2 | 1 |
MIT
| 2022-03-06T11:07:18 | 2016-06-15T06:57:19 |
Python
|
UTF-8
|
Python
| false | false | 1,853 |
py
|
from apispec import APISpec
from apispec.ext.marshmallow import MarshmallowPlugin
from apispec_webframeworks.flask import FlaskPlugin
from flask import Flask, jsonify, render_template, send_from_directory
from marshmallow import Schema, fields
# Flask application; Jinja templates are loaded from the Swagger UI folder.
app = Flask(__name__, template_folder='swagger/templates')
@app.route('/')
def hello_world():
    """Root endpoint: return a plain-text greeting (simple liveness check)."""
    greeting = 'Hello World!'
    return greeting
# OpenAPI 3 specification object. FlaskPlugin introspects registered routes
# and their docstrings; MarshmallowPlugin resolves schema references.
spec = APISpec(
    title='flask-api-swagger-doc',
    version='1.0.0',
    openapi_version='3.0.2',
    plugins=[FlaskPlugin(), MarshmallowPlugin()]
)
@app.route('/api/swagger.json')
def create_swagger_spec():
    """Serve the generated OpenAPI document as JSON."""
    document = spec.to_dict()
    return jsonify(document)
class ToDoResponseSchema(Schema):
    """Marshmallow schema describing a single todo item."""
    id = fields.Int()          # numeric identifier of the task
    title = fields.Str()       # human-readable task description
    status = fields.Boolean()  # completion flag
class TodoListResponseSchema(Schema):
    """Envelope schema wrapping a list of todo items under 'todo_list'."""
    todo_list = fields.List(fields.Nested(ToDoResponseSchema))
@app.route('/todo')
def todo():
    """Get List of Todo
    ---
    get:
      description: Get List of Todos
      responses:
        200:
          description: Return a todo list
          content:
            application/json:
              schema: TodoListResponseSchema
    """
    # NOTE: the YAML block in the docstring above is parsed by apispec
    # (via spec.path below) to build this route's OpenAPI entry -- do not
    # reformat it.
    # Hard-coded sample data standing in for a real data store.
    dummy_data = [{
        'id': 1,
        'title': 'Finish this task',
        'status': False
    },
        {
            'id': 2,
            'title': 'Finish that task',
            'status': True
        }
    ]
    # Serialize through the schema so the response matches the documented shape.
    return TodoListResponseSchema().dump({'todo_list': dummy_data})
# Register the /todo view with the spec; a request context is needed so the
# FlaskPlugin can introspect the route and its docstring.
with app.test_request_context():
    spec.path(view=todo)
@app.route('/docs')
@app.route('/docs/<path:path>')
def swagger_docs(path=None):
    """Serve the Swagger UI: the index template, or a static asset by path."""
    if path and path != 'index.html':
        # Any other path is a static asset (JS/CSS) of the Swagger UI bundle.
        return send_from_directory('./swagger/static', path)
    return render_template('index.html', base_url='/docs')
# Development entry point only; use a WSGI server in production.
if __name__ == '__main__':
    app.run(debug=True)
|
[
"[email protected]"
] | |
71c207ff2c246376bcee9e9845a3679638b40e55
|
7b5828edda7751700ca7002b40a214e39e5f48a8
|
/EA/base/lib/shlex.py
|
f8196f4308aac174c8527a20cb7eb897ddf14814
|
[] |
no_license
|
daniela-venuta/Sims-4-Python-Script-Workspace
|
54c33dac02f84daed66f46b7307f222fede0fa62
|
f408b28fb34626b2e3b2953152343d591a328d66
|
refs/heads/main
| 2023-03-29T18:08:39.202803 | 2021-03-30T19:00:42 | 2021-03-30T19:00:42 | 353,111,243 | 1 | 0 | null | null | null | null |
WINDOWS-1252
|
Python
| false | false | 66,133 |
py
|
import os
import re
import sys
from collections import deque
from io import StringIO
__all__ = ['shlex', 'split', 'quote']
class shlex:
    """A lexical analyzer class for simple shell-like syntaxes.

    Reads tokens from *instream* (a file-like object, a string, or stdin),
    honouring quoting, backslash escaping, comments, optional punctuation
    tokens and optional "source" style file inclusion, in either POSIX mode
    (``posix=True``) or the legacy compatibility mode.
    """

    def __init__(self, instream=None, infile=None, posix=False, punctuation_chars=False):
        # Accept a plain string as input by wrapping it in a StringIO.
        if isinstance(instream, str):
            instream = StringIO(instream)
        if instream is not None:
            self.instream = instream
            self.infile = infile
        else:
            self.instream = sys.stdin
            self.infile = None
        self.posix = posix
        # POSIX mode signals EOF with None; legacy mode uses the empty string.
        if posix:
            self.eof = None
        else:
            self.eof = ''
        self.commenters = '#'
        self.wordchars = 'abcdfeghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_'
        if self.posix:
            # Accept Latin-1 accented letters as word characters in POSIX mode.
            self.wordchars += 'ßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞ'
        self.whitespace = ' \t\r\n'
        self.whitespace_split = False
        self.quotes = '\'"'
        self.escape = '\\'
        # Only inside these quote characters does the escape char stay special.
        self.escapedquotes = '"'
        self.state = ' '          # current lexer state (see read_token)
        self.pushback = deque()   # tokens pushed back via push_token
        self.lineno = 1
        self.debug = 0
        self.token = ''
        self.filestack = deque()  # saved (infile, instream, lineno) frames
        self.source = None        # set to e.g. 'source' to enable inclusion
        if not punctuation_chars:
            punctuation_chars = ''
        elif punctuation_chars is True:
            punctuation_chars = '();<>|&'
        self.punctuation_chars = punctuation_chars
        if punctuation_chars:
            # Characters pushed back at the character (not token) level.
            self._pushback_chars = deque()
            # These are commonly used in file names and command arguments.
            self.wordchars += '~-./*?='
            # Punctuation characters must not also count as word characters.
            t = self.wordchars.maketrans(dict.fromkeys(punctuation_chars))
            self.wordchars = self.wordchars.translate(t)

    def push_token(self, tok):
        """Push *tok* back; the next get_token() call will return it."""
        if self.debug >= 1:
            print('shlex: pushing token ' + repr(tok))
        self.pushback.appendleft(tok)

    def push_source(self, newstream, newfile=None):
        """Push an input source onto the input stack, saving the current one."""
        if isinstance(newstream, str):
            newstream = StringIO(newstream)
        self.filestack.appendleft((self.infile, self.instream, self.lineno))
        self.infile = newfile
        self.instream = newstream
        self.lineno = 1
        if self.debug:
            if newfile is not None:
                print('shlex: pushing to file %s' % (self.infile,))
            else:
                print('shlex: pushing to stream %s' % (self.instream,))

    def pop_source(self):
        """Discard the current input source and resume the previous one."""
        self.instream.close()
        (self.infile, self.instream, self.lineno) = self.filestack.popleft()
        if self.debug:
            print('shlex: popping to %s, line %d' % (self.instream, self.lineno))
        self.state = ' '

    def get_token(self):
        """Return the next token, honouring pushback, source hooks and the file stack."""
        if self.pushback:
            tok = self.pushback.popleft()
            if self.debug >= 1:
                print('shlex: popping token ' + repr(tok))
            return tok
        raw = self.read_token()
        # Handle inclusions ('source' style) when enabled.
        if self.source is not None:
            while raw == self.source:
                spec = self.sourcehook(self.read_token())
                if spec:
                    (newfile, newstream) = spec
                    self.push_source(newstream, newfile)
                raw = self.get_token()
        # On EOF, pop back to a previous input source if one is stacked.
        if raw == self.eof:
            if not self.filestack:
                return self.eof
            self.pop_source()
            raw = self.get_token()
        if self.debug >= 1:
            if raw != self.eof:
                print('shlex: token=' + repr(raw))
            else:
                print('shlex: token=EOF')
        return raw

    def read_token(self):
        """Read one raw token, applying quoting/escaping rules but no source hooks.

        Returns '' (legacy) or None (POSIX) at end of file. Raises ValueError
        on an unterminated quote or a trailing escape character.
        """
        quoted = False
        escapedstate = ' '
        while True:
            # Character-level pushback takes priority when punctuation_chars is on.
            if self.punctuation_chars and self._pushback_chars:
                nextchar = self._pushback_chars.pop()
            else:
                nextchar = self.instream.read(1)
            if nextchar == '\n':
                self.lineno += 1
            if self.debug >= 3:
                print('shlex: in state %r I see character: %r' % (self.state, nextchar))
            if self.state is None:
                # Past end of file.
                self.token = ''
                break
            elif self.state == ' ':
                # Between tokens: skip whitespace and classify the next token.
                if not nextchar:
                    self.state = None  # end of file
                    break
                elif nextchar in self.whitespace:
                    if self.debug >= 2:
                        print('shlex: I see whitespace in whitespace state')
                    if self.token or (self.posix and quoted):
                        break  # emit current token
                    else:
                        continue
                elif nextchar in self.commenters:
                    # Consume the remainder of the comment line.
                    self.instream.readline()
                    self.lineno += 1
                elif self.posix and nextchar in self.escape:
                    escapedstate = 'a'
                    self.state = nextchar
                elif nextchar in self.wordchars:
                    self.token = nextchar
                    self.state = 'a'
                elif nextchar in self.punctuation_chars:
                    self.token = nextchar
                    self.state = 'c'
                elif nextchar in self.quotes:
                    if not self.posix:
                        self.token = nextchar
                    self.state = nextchar
                elif self.whitespace_split:
                    self.token = nextchar
                    self.state = 'a'
                else:
                    # Single punctuation character token (legacy mode).
                    self.token = nextchar
                    if self.token or (self.posix and quoted):
                        break  # emit current token
                    else:
                        continue
            elif self.state in self.quotes:
                quoted = True
                if not nextchar:
                    if self.debug >= 2:
                        print('shlex: I see EOF in quotes state')
                    raise ValueError('No closing quotation')
                if nextchar == self.state:
                    if not self.posix:
                        self.token += nextchar
                        self.state = ' '
                        break
                    else:
                        self.state = 'a'
                elif self.posix and nextchar in self.escape and self.state in self.escapedquotes:
                    escapedstate = self.state
                    self.state = nextchar
                else:
                    self.token += nextchar
            elif self.state in self.escape:
                if not nextchar:
                    if self.debug >= 2:
                        print('shlex: I see EOF in escape state')
                    raise ValueError('No escaped character')
                # In POSIX shells, only the quote itself or the escape character
                # may be escaped inside quotes; any other backslash is literal.
                if escapedstate in self.quotes and nextchar != self.state and nextchar != escapedstate:
                    self.token += self.state
                self.token += nextchar
                self.state = escapedstate
            elif self.state in ('a', 'c'):
                # Inside a word ('a') or a run of punctuation chars ('c').
                if not nextchar:
                    self.state = None  # end of file
                    break
                elif nextchar in self.whitespace:
                    if self.debug >= 2:
                        print('shlex: I see whitespace in word state')
                    self.state = ' '
                    if self.token or (self.posix and quoted):
                        break  # emit current token
                    else:
                        continue
                elif nextchar in self.commenters:
                    self.instream.readline()
                    self.lineno += 1
                    if self.posix:
                        self.state = ' '
                        if self.token or (self.posix and quoted):
                            break  # emit current token
                        else:
                            continue
                elif self.state == 'c':
                    if nextchar in self.punctuation_chars:
                        self.token += nextchar
                    else:
                        # Not punctuation: push the char back, emit the run.
                        if nextchar not in self.whitespace:
                            self._pushback_chars.append(nextchar)
                        self.state = ' '
                        break
                elif self.posix and nextchar in self.quotes:
                    self.state = nextchar
                elif self.posix and nextchar in self.escape:
                    escapedstate = 'a'
                    self.state = nextchar
                elif nextchar in self.wordchars or nextchar in self.quotes or self.whitespace_split:
                    self.token += nextchar
                else:
                    if self.punctuation_chars:
                        self._pushback_chars.append(nextchar)
                    else:
                        self.pushback.appendleft(nextchar)
                    if self.debug >= 2:
                        print('shlex: I see punctuation in word state')
                    self.state = ' '
                    if self.token or (self.posix and quoted):
                        break  # emit current token
        result = self.token
        self.token = ''
        # Only in POSIX mode does an empty *unquoted* token mean end of file;
        # a quoted empty string ('') is a legitimate token.
        if self.posix and not quoted and result == '':
            result = None
        if self.debug > 1:
            if result:
                print('shlex: raw token=' + repr(result))
            else:
                print('shlex: raw token=EOF')
        return result

    def sourcehook(self, newfile):
        """Hook called on a filename to be sourced; returns (filename, stream)."""
        if newfile[0] == '"':
            newfile = newfile[1:-1]
        # cpp-like semantics: resolve relative paths against the including file,
        # but only when we actually know the including file's name.
        if isinstance(self.infile, str) and not os.path.isabs(newfile):
            newfile = os.path.join(os.path.dirname(self.infile), newfile)
        return (newfile, open(newfile, 'r'))

    def error_leader(self, infile=None, lineno=None):
        """Return an error-message prefix in the format of a Unix C compiler."""
        if infile is None:
            infile = self.infile
        if lineno is None:
            lineno = self.lineno
        return '"%s", line %d: ' % (infile, lineno)

    def __iter__(self):
        return self

    def __next__(self):
        """Iterate tokens until EOF, then raise StopIteration."""
        token = self.get_token()
        if token == self.eof:
            raise StopIteration
        return token
def split(s, comments=False, posix=True):
    """Split the string *s* using shell-like syntax into a list of tokens.

    When *comments* is false (the default), '#' comments are disabled so
    '#' is treated as an ordinary character.
    """
    lexer = shlex(s, posix=posix)
    lexer.whitespace_split = True
    if not comments:
        lexer.commenters = ''
    return list(lexer)
# Any character outside this safe set forces the string to be quoted.
_find_unsafe = re.compile(r'[^\w@%+=:,./-]', re.ASCII).search

def quote(s):
    """Return a shell-escaped version of the string *s*."""
    if not s:
        return "''"
    if _find_unsafe(s) is None:
        # Nothing dangerous: safe to use unquoted.
        return s
    # Wrap in single quotes; each embedded single quote becomes '"'"'
    escaped = s.replace("'", "'\"'\"'")
    return "'" + escaped + "'"
def _print_tokens(lexer):
while True:
tt = lexer.get_token()
if not tt:
break
print('Token: ' + repr(tt))
# Debug entry point: lex standard input, or the file named by the first
# command-line argument, and print each token.
if __name__ == '__main__':
    if len(sys.argv) == 1:
        _print_tokens(shlex())
    else:
        fn = sys.argv[1]
        with open(fn) as f:
            _print_tokens(shlex(f, fn))
|
[
"[email protected]"
] | |
1e34501c813604e30eaf8cac50b7813ecb41e56c
|
755c0476da2bde006303b355371270132541c63c
|
/data_structures/tree/preorder.py
|
f9422a08eba0279ca625225241e27470c0af6ef3
|
[] |
no_license
|
diegoami/hackerrank-exercises
|
d411164eb32f4a5ac36df1ca81fa40cc51ae6fab
|
4daaa81273ec27278e530ab882445c040041cbd7
|
refs/heads/master
| 2021-03-24T13:04:26.105170 | 2017-08-27T20:07:25 | 2017-08-27T20:07:25 | 90,566,740 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 295 |
py
|
"""
Node is defined as
self.left (the left child of the node)
self.right (the right child of the node)
self.data (the value of the node)
"""
def scan(node):
    """Return the preorder (root, left, right) listing of *node*'s subtree as strings."""
    if not node:
        return []
    return [str(node.data)] + scan(node.left) + scan(node.right)

def preOrder(root):
    """Print the preorder traversal of *root*, space-separated."""
    print(" ".join(scan(root)))
|
[
"[email protected]"
] | |
48dc96d39678bff752cc24ce6348a458d01041fd
|
0ba65f7c0e36c6eacdc559ecf6df221a6be89365
|
/myshop/__init__.py
|
37b137633c6d9184212ac2616855685c3a232ec3
|
[] |
no_license
|
maciejbihun9/django_tutorial_shop
|
a9811352dbf3ae20bb88972e4d593e6b7b4c2b4f
|
f2c0154ec47ba81a9ce5901c82aca50968380aa2
|
refs/heads/master
| 2021-08-31T18:49:44.787321 | 2017-12-22T12:25:03 | 2017-12-22T12:25:03 | 115,113,250 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 207 |
py
|
# Import celery.
# The celery module must be imported in the project's __init__.py
# to make sure it is loaded when Django starts.
from .celery import app as celery_app
|
[
"[email protected]"
] | |
73da36fb856c2e81d8b4903cbe62c6b5394362b7
|
b2b79cc61101ddf54959b15cf7d0887d114fb4e5
|
/web/pgadmin/tools/debugger/tests/test_debugger_poll_result.py
|
a19a968f21c3e24f8698a1fbe2cee3a5e4d98c57
|
[
"PostgreSQL"
] |
permissive
|
99Percent/pgadmin4
|
8afe737eb2ec1400ab034ad1d8a4f7c4ba4c35c8
|
5e0c113c7bc4ffefbec569e7ca5416d9acf9dd8a
|
refs/heads/master
| 2021-10-10T20:08:48.321551 | 2021-09-30T12:51:43 | 2021-09-30T12:51:43 | 165,702,958 | 0 | 0 |
NOASSERTION
| 2019-01-14T17:18:40 | 2019-01-14T17:18:39 | null |
UTF-8
|
Python
| false | false | 3,016 |
py
|
##########################################################################
#
# pgAdmin 4 - PostgreSQL Tools
#
# Copyright (C) 2013 - 2021, The pgAdmin Development Team
# This software is released under the PostgreSQL Licence
#
##########################################################################
import json
from pgadmin.utils.route import BaseTestGenerator
from regression.python_test_utils import test_utils as utils
from . import utils as debugger_utils
from unittest.mock import patch
from regression import parent_node_dict
from pgadmin.browser.server_groups.servers.databases.schemas.functions \
.tests import utils as funcs_utils
from pgadmin.browser.server_groups.servers.databases.tests import \
utils as db_utils
class DebuggerPollResult(BaseTestGenerator):
    """Scenario-driven test: poll the debugger endpoint for query results."""
    # One test scenario per entry in debugger_utils.test_cases['poll_result'].
    scenarios = utils.generate_scenarios('poll_result',
                                         debugger_utils.test_cases)
    def setUp(self):
        # Build the full fixture chain: schema -> function -> debugger
        # session, so that runTest() has a transaction id to poll.
        super(DebuggerPollResult, self).setUp()
        self.schema_data = parent_node_dict['schema'][-1]
        self.server_id = self.schema_data['server_id']
        self.db_id = self.schema_data['db_id']
        self.schema_id = self.schema_data['schema_id']
        # Return value is unused; set_up() mutates `self` in place.
        local_self = funcs_utils.set_up(self)
        self.test_data['funcowner'] = self.server["username"]
        function_info = debugger_utils.create_function(self, utils)
        self.func_id = json.loads(function_info.data)['node']['_id']
        if self.add_extension:
            # pldbgapi extension is needed before a debug session can start.
            debugger_utils.add_extension(self, utils, db_utils=db_utils)
        init_debugger = debugger_utils.init_debugger_function(self)
        self.trans_id = json.loads(init_debugger.data)['data']['trans_id']
        if self.init_target:
            debugger_utils.initialize_target(self, utils)
        debugger_utils.start_listener(self, utils, db_utils)
    def execute_query(self):
        # GET <url>/<trans_id>/ polls the debugger for the query result.
        return self.tester.get(
            self.url + str(self.trans_id) + '/',
            content_type='application/json')
    def runTest(self):
        """
        Poll the debugger for query results and verify the HTTP status code.
        """
        if self.is_positive_test:
            response = self.execute_query()
        else:
            # Negative scenarios may mock an internal call to force a failure.
            if self.mocking_required:
                with patch(self.mock_data["function_name"],
                           return_value=eval(self.mock_data["return_value"])):
                    response = self.execute_query()
            else:
                response = self.execute_query()
        actual_response_code = response.status_code
        expected_response_code = self.expected_data['status_code']
        self.assertEqual(actual_response_code, expected_response_code)
    def tearDown(self):
        """This function delete the server from SQLite """
        # Undo everything setUp created, in reverse order.
        debugger_utils.close_debugger(self)
        debugger_utils.delete_function(self, utils)
        db_utils.disconnect_database(self, self.server_id, self.db_id)
|
[
"[email protected]"
] | |
8de46a477f2b4ba26e46c99e2c0cf70696d64f32
|
1310ca784c1b0b9238f2407eb59d0704b8ae5a08
|
/NextGen/circuitpython/adafruit-circuitpython-bundle-6.x-mpy-20201114/examples/gps_echotest.py
|
326255efd4551edee0ab37c713ecdbe28cbe8600
|
[] |
no_license
|
RyannDaGreat/LightWave
|
6b89838bfd48dba010eb5229b84b206be4e8ccbb
|
d055b0c01b01b3795d9e6c28b6b70f969893ed97
|
refs/heads/master
| 2023-07-20T08:23:47.526629 | 2023-07-18T00:25:02 | 2023-07-18T00:25:02 | 123,113,725 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,712 |
py
|
# Simple GPS module demonstration.
# Will print NMEA sentences received from the GPS, great for testing connection
# Uses the GPS to send some commands, then reads directly from the GPS
import time
import board
import busio
import adafruit_gps
# Create a serial connection for the GPS connection using default speed and
# a slightly higher timeout (GPS modules typically update once a second).
# These are the defaults you should use for the GPS FeatherWing.
# For other boards set RX = GPS module TX, and TX = GPS module RX pins.
uart = busio.UART(board.TX, board.RX, baudrate=9600, timeout=10)
# for a computer, use the pyserial library for uart access
# import serial
# uart = serial.Serial("/dev/ttyUSB0", baudrate=9600, timeout=10)
# If using I2C, we'll create an I2C interface to talk to using default pins
# i2c = board.I2C()
# Create a GPS module instance.
gps = adafruit_gps.GPS(uart)  # Use UART/pyserial
# gps = adafruit_gps.GPS_GtopI2C(i2c)  # Use I2C interface
# Initialize the GPS module by changing what data it sends and at what rate.
# These are NMEA extensions for PMTK_314_SET_NMEA_OUTPUT and
# PMTK_220_SET_NMEA_UPDATERATE but you can send anything from here to adjust
# the GPS module behavior:
#   https://cdn-shop.adafruit.com/datasheets/PMTK_A11.pdf
# Turn on the basic GGA and RMC info (what you typically want)
gps.send_command(b"PMTK314,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0")
# Turn on just minimum info (RMC only, location):
# gps.send_command(b'PMTK314,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0')
# Turn off everything:
# gps.send_command(b'PMTK314,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0')
# Turn on everything (not all of it is parsed!)
# gps.send_command(b'PMTK314,1,1,1,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0')
# Set update rate to once a second (1hz) which is what you typically want.
gps.send_command(b"PMTK220,1000")
# Or decrease to once every two seconds by doubling the millisecond value.
# Be sure to also increase your UART timeout above!
# gps.send_command(b'PMTK220,2000')
# You can also speed up the rate, but don't go too fast or else you can lose
# data during parsing.  This would be twice a second (2hz, 500ms delay):
# gps.send_command(b'PMTK220,500')
# Main loop runs forever printing data as it comes in
timestamp = time.monotonic()
while True:
    data = gps.read(32)  # read up to 32 bytes
    # print(data)  # this is a bytearray type
    if data is not None:
        # convert bytearray to string
        data_string = "".join([chr(b) for b in data])
        # end="" because NMEA sentences already carry their own newlines.
        print(data_string, end="")
    if time.monotonic() - timestamp > 5:
        # every 5 seconds...
        gps.send_command(b"PMTK605")  # request firmware version
        timestamp = time.monotonic()
|
[
"[email protected]"
] | |
a02e37cb873e78b9f8c832b6479e5964babeccbb
|
567123af2df15856e443f0202a1a44479bf77544
|
/claf/model/multi_task/bert.py
|
1245e11dd871a8db18c23ac61b7d1e7805677811
|
[
"MIT",
"Apache-2.0",
"BSD-3-Clause"
] |
permissive
|
y1027/claf
|
36d0d902dd06cbde868b8cdd9ebd9c7b4ff5d807
|
56422a8b0a91b9626425deeee5a57110b7b45605
|
refs/heads/master
| 2020-08-06T02:31:49.301180 | 2019-10-03T17:12:10 | 2019-10-03T17:12:10 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 7,776 |
py
|
from overrides import overrides
from pytorch_transformers import BertModel
import torch.nn as nn
from claf.data.data_handler import CachePath
from claf.decorator import register
from claf.model.base import ModelWithoutTokenEmbedder
from claf.model.multi_task.category import TaskCategory
from claf.model.multi_task.mixin import MultiTask
@register("model:bert_for_multi")
class BertForMultiTask(MultiTask, ModelWithoutTokenEmbedder):
    """
    Multi-task model: a shared BERT encoder with one task-specific head
    (dropout + linear) per task, based on
    BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding
    (https://arxiv.org/abs/1810.04805)
    * Args:
        token_makers: used to embed the sequence
        tasks: list of task configs; each provides at least "category",
            "num_label" and "name"
    * Kwargs:
        pretrained_model_name: the name of a pre-trained model
        dropouts: per-task dropout probability for the classification layer
    """
    def __init__(self, token_makers, tasks, pretrained_model_name=None, dropouts=None):
        super(BertForMultiTask, self).__init__(token_makers)
        self.use_pytorch_transformers = True  # for optimizer's model parameters
        self.tasks = tasks
        # Exactly one dropout value is required per task head.
        assert len(tasks) == len(dropouts)
        # Set per-batch in forward(); consumed by the MultiTask mixin.
        self.curr_task_category = None
        self.curr_dataset = None
        # Shared encoder, loaded from pre-trained weights.
        self.shared_layers = BertModel.from_pretrained(
            pretrained_model_name, cache_dir=str(CachePath.ROOT)
        )
        # One independent (dropout + linear) head per task.
        self.task_specific_layers = nn.ModuleList()
        for task, dropout in zip(tasks, dropouts):
            task_layer = nn.Sequential(
                nn.Dropout(dropout),
                nn.Linear(self.shared_layers.config.hidden_size, task["num_label"])
            )
            self.task_specific_layers.append(task_layer)
        self._init_criterions(tasks)
    def _init_criterions(self, tasks):
        # Build one loss function per task, keyed by task index.
        self.criterions = {}
        for task_index, task in enumerate(tasks):
            task_category = task["category"]
            criterion = None
            if task_category == TaskCategory.SEQUENCE_CLASSIFICATION or task_category == TaskCategory.READING_COMPREHENSION:
                criterion = nn.CrossEntropyLoss()
            elif task_category == TaskCategory.TOKEN_CLASSIFICATION:
                # Ignored/padding tags must not contribute to the loss.
                ignore_tag_idx = task.get("ignore_tag_idx", 0)
                criterion = nn.CrossEntropyLoss(ignore_index=ignore_tag_idx)
            elif task_category == TaskCategory.REGRESSION:
                criterion = nn.MSELoss()
            else:
                raise ValueError("Check task_category.")
            self.criterions[task_index] = criterion
    @overrides
    def forward(self, features, labels=None):
        """
        * Args:
            features: feature dictionary like below.
            {
                "bert_input": {
                    "feature": [
                        [3, 4, 1, 0, 0, 0, ...],
                        ...,
                    ]
                },
                "token_type": {
                    "feature": [
                        [0, 0, 0, 0, 0, 0, ...],
                        ...,
                    ],
                }
            }
        * Kwargs:
            label: label dictionary like below.
            {
                "class_idx": [2, 1, 0, 4, 5, ...]
                "data_idx": [2, 4, 5, 7, 2, 1, ...]
            }
            Do not calculate loss when there is no label. (inference/predict mode)
        * Returns: output_dict (dict) consisting of
            - sequence_embed: embedding vector of the sequence
            - logits: representing unnormalized log probabilities
            - class_idx: target class idx
            - data_idx: data idx
            - loss: a scalar loss to be optimized
        """
        bert_inputs = features["bert_input"]["feature"]
        token_type_ids = features["token_type"]["feature"]
        # Non-zero token ids are real tokens; zeros are padding.
        attention_mask = (bert_inputs > 0).long()
        outputs = self.shared_layers(
            bert_inputs, token_type_ids=token_type_ids, attention_mask=attention_mask
        )
        # outputs[1] is BERT's pooled representation of the sequence.
        pooled_output = outputs[1]
        task_index = features["task_index"]
        self.curr_task_category = self.tasks[task_index]["category"]
        self.curr_dataset = self._dataset.task_datasets[task_index]
        # TODO: add ReadingComprehension and TokenClassification forward
        task_specific_layer = self.task_specific_layers[task_index]
        logits = task_specific_layer(pooled_output)
        output_dict = {
            "task_index": task_index,
            "sequence_embed": pooled_output,
            "logits": logits,
        }
        if labels:
            label_key = None
            if self.curr_task_category == TaskCategory.SEQUENCE_CLASSIFICATION:
                label_key = "class_idx"
            elif self.curr_task_category == TaskCategory.REGRESSION:
                label_key = "score"
            else:
                raise ValueError("task category error.")
            label_value = labels[label_key]
            data_idx = labels["data_idx"]
            output_dict[label_key] = label_value
            output_dict["data_idx"] = data_idx
            # Loss
            num_label = self.tasks[task_index]["num_label"]
            criterion = self.criterions[task_index.item()]
            logits = logits.view(-1, num_label)
            if num_label == 1:
                # Regression head: match the (batch, 1) logits shape.
                label_value = label_value.view(-1, 1)
            loss = criterion(logits, label_value)
            output_dict["loss"] = loss.unsqueeze(0)  # NOTE: DataParallel concat Error
        return output_dict
    @overrides
    def print_examples(self, index, inputs, predictions):
        """
        Print evaluation examples
        * Args:
            index: data index
            inputs: mini-batch inputs
            predictions: prediction dictionary consisting of
                - key: 'id' (sequence id)
                - value: dictionary consisting of
                    - class_idx
        * Returns:
            print(Sequence, Sequence Tokens, Target Class, Predicted Class)
        """
        task_index = inputs["features"]["task_index"]
        task_dataset = self._dataset.task_datasets[task_index]
        task_category = self.tasks[task_index]["category"]
        data_idx = inputs["labels"]["data_idx"][index].item()
        data_id = task_dataset.get_id(data_idx)
        helper = task_dataset.helper
        sequence_a = helper["examples"][data_id]["sequence_a"]
        sequence_a_tokens = helper["examples"][data_id]["sequence_a_tokens"]
        sequence_b = helper["examples"][data_id]["sequence_b"]
        sequence_b_tokens = helper["examples"][data_id]["sequence_b_tokens"]
        print()
        print("Task(Dataset) name:", self.tasks[task_index]["name"])
        print()
        print("- Sequence a:", sequence_a)
        print("- Sequence a Tokens:", sequence_a_tokens)
        if sequence_b:
            # Sentence-pair tasks only.
            print("- Sequence b:", sequence_b)
            print("- Sequence b Tokens:", sequence_b_tokens)
        if task_category == TaskCategory.SEQUENCE_CLASSIFICATION:
            target_class_text = helper["examples"][data_id]["class_text"]
            pred_class_idx = predictions[data_id]["class_idx"]
            pred_class_text = task_dataset.get_class_text_with_idx(pred_class_idx)
            print("- Target:")
            print(" Class:", target_class_text)
            print("- Predict:")
            print(" Class:", pred_class_text)
        elif task_category == TaskCategory.REGRESSION:
            target_score = helper["examples"][data_id]["score"]
            pred_score = predictions[data_id]["score"]
            print("- Target:")
            print(" Score:", target_score)
            print("- Predict:")
            print(" Score:", pred_score)
        print()
|
[
"[email protected]"
] | |
d97e5381d0709cc8ce03fd994e5b6cbcae03fc18
|
dfc2c18053b8e7576f88e7b2524d7ca3a8f47282
|
/ch01/section1/37.py
|
c8e30da41f75a18dde4404f0de40fdaed2599207
|
[] |
no_license
|
Xoozi/tchomework
|
a6eed3bbf697ff12af8d42249ec58a139aed0c4c
|
627c98b0b652ef20fd93025a17341bba76fbfce6
|
refs/heads/master
| 2021-01-23T21:18:15.793703 | 2018-10-21T11:05:55 | 2018-10-21T11:05:55 | 57,583,655 | 1 | 2 | null | null | null | null |
UTF-8
|
Python
| false | false | 762 |
py
|
#Before grinding an engine cylinder down to a cross-sectional area of 9 square
#inches, we need to know how much it may deviate from the standard bore
#diameter x0 = 3.385 inches. The required 9 in^2 area is allowed an error of at
#most 0.01 in^2. To find the tolerable deviation, let A(x) = pi*(x/2)**2
#and find the interval on which every x satisfies |A(x) - 9| <= 0.01
#
#   |A(x) - 9| <= 0.01
#   => -0.01 <= A(x) - 9 <= 0.01
#   => 8.99 <= pi*(x/2)**2 <= 9.01
#   => 2*sqrt(8.99/pi) <= x <= 2*sqrt(9.01/pi)
from math import pi, sqrt  # BUGFIX: sqrt and pi were used but never imported


def g(x):
    """Return the bore diameter whose circular cross-section has area x (in^2)."""
    return 2 * sqrt(x / pi)


# print() form runs under both Python 2 and 3 (the original print statement
# was Python 2 only, and crashed anyway because of the missing math import).
print("x ∈ [%f, %f]" % (g(8.99), g(9.01)))
# The resulting answer is x ∈ [3.383256, 3.387018]
# xoozi: an important practical point -- to be safe, machinists round the
# low end of the interval up and the high end down, giving x ∈ [3.384, 3.387]
|
[
"[email protected]"
] | |
716aba24b7c3bfb62f58316d5ba3598a3923dbdf
|
8afb5afd38548c631f6f9536846039ef6cb297b9
|
/MY_REPOS/DATA_STRUC_PYTHON_NOTES/python-prac/Overflow/_Learning/problems/problem_01_while_loop.py
|
81d830d837ce843244290f0fe32399965dad1a0f
|
[
"MIT",
"Python-2.0"
] |
permissive
|
bgoonz/UsefulResourceRepo2.0
|
d87588ffd668bb498f7787b896cc7b20d83ce0ad
|
2cb4b45dd14a230aa0e800042e893f8dfb23beda
|
refs/heads/master
| 2023-03-17T01:22:05.254751 | 2022-08-11T03:18:22 | 2022-08-11T03:18:22 | 382,628,698 | 10 | 12 |
MIT
| 2022-10-10T14:13:54 | 2021-07-03T13:58:52 | null |
UTF-8
|
Python
| false | false | 735 |
py
|
# WHILE LOOP
#
# In this problem, write a function named "my_while_loop" that accepts an
# iterable of strings as a parameter and returns a new list with strings from
# the original list that are longer than five characters. The function must use
# a while loop in its implementation. The order of the strings in the new list
# must be in the same order they were in the old list.
#
# There are two sample data calls for you to use.
# WRITE YOUR FUNCTION HERE
def my_while_loop(strings):
    """Return the strings longer than five characters, preserving order.

    Implemented with a while loop, as the exercise requires.
    (BUGFIX: the file previously contained a bare `def`, a syntax error.)
    """
    longer = []
    i = 0
    while i < len(strings):
        if len(strings[i]) > 5:
            longer.append(strings[i])
        i += 1
    return longer


# TEST DATA
test = ["nope", "yes this one", "not", "uhuh", "here's one", "narp"]
print(my_while_loop(test))  # > ["yes this one", "here's one"]
test = ["plop", "", "drop", "zop", "stop"]
print(my_while_loop(test))  # > []
test = []
print(my_while_loop(test))  # > []
|
[
"[email protected]"
] | |
2064722d83023f32a337a5d533cd2a04636219f4
|
163bbb4e0920dedd5941e3edfb2d8706ba75627d
|
/Code/CodeRecords/2603/61106/286389.py
|
35e5a9b32b98fc8534ed27e662e9aad73c7de7b4
|
[] |
no_license
|
AdamZhouSE/pythonHomework
|
a25c120b03a158d60aaa9fdc5fb203b1bb377a19
|
ffc5606817a666aa6241cfab27364326f5c066ff
|
refs/heads/master
| 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 311 |
py
|
# Read a bracketed sequence such as "[1,3,6]" and keep the characters at
# odd string indices (assumes every number is a single digit — TODO confirm
# against the judge's input format).
ori=input()
nums=[ori[i] for i in range(1,len(ori),2)]
k=int(input())
if nums!=[]:
    # Convert the digit characters to ints and sort ascending.
    for i in range(len(nums)):
        nums[i]=int(nums[i])
    nums.sort()
    # Collect every pairwise difference nums[n] - nums[i] for n > i;
    # all differences are non-negative because the list is sorted.
    result=[]
    for i in range(len(nums)):
        n=i+1
        while n<len(nums):
            result.append(nums[n]-nums[i])
            n += 1
    result.sort()
    # Print the k-th smallest pairwise difference (k is 1-based).
    print(result[k-1])
|
[
"[email protected]"
] | |
c678dfd352b74976c8f6fa986563092b5def9b0c
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03327/s297227585.py
|
a516928e563406e47dec9baa926559fd14bc992f
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 204 |
py
|
import sys
# Deliberately shadows the builtin input() with a faster raw stdin read
# (a common competitive-programming idiom).
def input(): return sys.stdin.readline().rstrip()
def main():
    # Presumably contest-series naming: rounds below 1000 are "ABC",
    # 1000 and above are "ABD" — verify against the problem statement.
    N = int(input())
    if N >= 1000:
        print('ABD')
    else:
        print('ABC')
if __name__ == '__main__':
    main()
|
[
"[email protected]"
] | |
e644614e732ce14a452280dc39f0a321b105376b
|
05f9f4d091c1b79d524c3fc047b5209bbb27156c
|
/acoustics/standards/iso_9613_1_1993.py
|
0f84ca937a3406bd677d90ad7be901d809aeb104
|
[
"BSD-3-Clause",
"Python-2.0",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
miftanurfarid/python-acoustics
|
2c73b39c3e98690d313e07b7829561074cc76bbd
|
03aacc0f36c83e19d33c9989d91048af1ab752e6
|
refs/heads/master
| 2020-03-11T14:05:55.396964 | 2018-04-08T11:21:23 | 2018-04-08T11:21:23 | 130,044,011 | 1 | 0 | null | 2018-04-18T10:12:53 | 2018-04-18T10:12:53 | null |
UTF-8
|
Python
| false | false | 4,931 |
py
|
"""
ISO 9613-1:1993
===============
ISO 9613-1:1993 specifies an analytical method of calculating the attenuation of sound
as a result of atmospheric absorption for a variety of meteorological conditions.
"""
import numpy as np
SOUNDSPEED = 343.2
"""
Speed of sound in air (m/s) at the reference temperature.
"""
REFERENCE_TEMPERATURE = 293.15
"""
Reference temperature (K).
"""
REFERENCE_PRESSURE = 101.325
"""
International Standard Atmosphere pressure in kilopascal.
"""
TRIPLE_TEMPERATURE = 273.16
"""
Triple point isotherm temperature of water (K).
"""
def soundspeed(temperature, reference_temperature=REFERENCE_TEMPERATURE):
    """
    Speed of sound :math:`c`.
    :param temperature: Ambient temperature :math:`T`
    :param reference_temperature: Reference temperature :math:`T_0`
    The speed of sound is calculated using
    .. math:: c = 343.2 \\sqrt{\\frac{T}{T_0}}
    """
    # BUGFIX (docs): the docstring previously omitted the square root and
    # swapped the T / T_0 parameter labels; the code itself was correct.
    return 343.2 * np.sqrt(temperature / reference_temperature)
def saturation_pressure(temperature, reference_pressure=REFERENCE_PRESSURE, triple_temperature=TRIPLE_TEMPERATURE):
    """
    Saturation vapour pressure :math:`p_{sat}`.
    :param temperature: Ambient temperature :math:`T`
    :param reference_pressure: Reference pressure :math:`p_r`
    :param triple_temperature: Triple point temperature of water :math:`T_{01}`
    Computed as :math:`p_{sat} = 10^C \\cdot p_r` with the exponent
    :math:`C = -6.8346 (T_{01}/T)^{1.261} + 4.6151`.
    """
    exponent = -6.8346 * (triple_temperature / temperature)**(1.261) + 4.6151
    return reference_pressure * 10.0**exponent
def molar_concentration_water_vapour(relative_humidity, saturation_pressure, pressure):
    """
    Molar concentration of water vapour :math:`h`.
    :param relative_humidity: Relative humidity :math:`h_r`
    :param saturation_pressure: Saturation pressure :math:`p_{sat}`
    :param pressure: Ambient pressure :math:`p`
    Computed as :math:`h = h_r \\frac{p_{sat}}{p_a}`.
    """
    vapour_pressure = relative_humidity * saturation_pressure
    return vapour_pressure / pressure
def relaxation_frequency_oxygen(pressure, h, reference_pressure=REFERENCE_PRESSURE):
    """
    Relaxation frequency of oxygen :math:`f_{r,O}`.
    :param pressure: Ambient pressure :math:`p_a`
    :param h: Molar concentration of water vapour :math:`h`
    :param reference_pressure: Reference pressure :math:`p_r`
    Computed as
    :math:`f_{r,O} = \\frac{p_a}{p_r} (24 + 4.04 \\cdot 10^4 h \\frac{0.02 + h}{0.391 + h})`.
    """
    humidity_term = 4.04 * 10.0**4.0 * h * (0.02 + h) / (0.391 + h)
    return pressure / reference_pressure * (24.0 + humidity_term)
def relaxation_frequency_nitrogen(pressure, temperature, h, reference_pressure=REFERENCE_PRESSURE, reference_temperature=REFERENCE_TEMPERATURE):
    """
    Relaxation frequency of nitrogen :math:`f_{r,N}`.
    :param pressure: Ambient pressure :math:`p_a`
    :param temperature: Ambient temperature :math:`T`
    :param h: Molar concentration of water vapour :math:`h`
    :param reference_pressure: Reference pressure :math:`p_{ref}`
    :param reference_temperature: Reference temperature :math:`T_{ref}`
    Computed as
    :math:`f_{r,N} = \\frac{p_a}{p_r} (T/T_0)^{-1/2} (9 + 280 h e^{-4.170 [(T/T_0)^{-1/3} - 1]})`.
    """
    t_rel = temperature / reference_temperature
    exp_term = np.exp(-4.170 * (t_rel**(-1.0/3.0) - 1.0))
    return pressure / reference_pressure * t_rel**(-0.5) * (9.0 + 280.0 * h * exp_term)
def attenuation_coefficient(pressure, temperature, reference_pressure, reference_temperature, relaxation_frequency_nitrogen, relaxation_frequency_oxygen, frequency):
    """
    Attenuation coefficient :math:`\\alpha` describing atmospheric absorption in dB/m for the specified ``frequency``.
    :param pressure: Ambient pressure :math:`p_a`
    :param temperature: Ambient temperature :math:`T`
    :param reference_pressure: Reference pressure :math:`p_{ref}`
    :param reference_temperature: Reference temperature :math:`T_{ref}`
    :param relaxation_frequency_nitrogen: Relaxation frequency of nitrogen :math:`f_{r,N}`.
    :param relaxation_frequency_oxygen: Relaxation frequency of oxygen :math:`f_{r,O}`.
    :param frequency: Frequencies to calculate :math:`\\alpha` for.
    """
    t_rel = temperature / reference_temperature
    # Classical (viscous/thermal) absorption term.
    classical = 1.84 * 10.0**(-11.0) * (reference_pressure / pressure) * t_rel**(0.5)
    # Molecular relaxation contributions of oxygen and nitrogen.
    oxygen = 0.01275 * np.exp(-2239.1 / temperature) * (relaxation_frequency_oxygen + (frequency**2.0 / relaxation_frequency_oxygen))**(-1.0)
    nitrogen = 0.1068 * np.exp(-3352.0 / temperature) * (relaxation_frequency_nitrogen + (frequency**2.0 / relaxation_frequency_nitrogen))**(-1.0)
    return 8.686 * frequency**2.0 * (classical + t_rel**(-2.5) * (oxygen + nitrogen))
|
[
"[email protected]"
] | |
b94f13c430ff4e7bd671e0187dee59046813b3de
|
ae13e905feec06f2f94245481b31fcb605e485de
|
/practice/algorithms/implementation/the_bomberman_game.py
|
f731627a64180310dd3e39cc0f18d333133626d3
|
[] |
no_license
|
feadoor/hackerrank
|
e7a84bb20c01d420a3c37f0a7e5176ab0aac6604
|
8fa88b71d37ae83b0826a76499c9e69f947d0aeb
|
refs/heads/master
| 2021-05-04T17:28:27.089671 | 2019-02-21T17:25:34 | 2019-02-21T17:25:34 | 120,271,651 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,314 |
py
|
#!/usr/local/bin/pypy
BOMB = 'O'
EMPTY = '.'
def neighbours(x, y):
    """Return the four orthogonally adjacent cells of (x, y)."""
    return [(x + dx, y + dy) for dx, dy in ((1, 0), (-1, 0), (0, 1), (0, -1))]


def next_value(board, x, y):
    """Cell (x, y) one tick later: stays 0 if already exploded, becomes 0
    when any in-bounds neighbour is about to detonate (value 1), otherwise
    the countdown decreases by one."""
    if board[x][y] == 0:
        return 0
    for nx, ny in neighbours(x, y):
        if 0 <= nx < len(board) and 0 <= ny < len(board[nx]) and board[nx][ny] == 1:
            return 0
    return board[x][y] - 1
def add_bombs(board):
    """Plant a fresh bomb (countdown 3) in every empty cell; keep existing timers."""
    return [[cell if cell != 0 else 3 for cell in row] for row in board]
def do_tick(board):
    """Advance every cell's countdown by one tick."""
    return [[next_value(board, r, c) for c in range(len(board[r]))]
            for r in range(len(board))]
def run(board, ticks):
    """Simulate *ticks* seconds: countdowns tick every second and Bomberman
    plants new bombs after every second tick (odd tick indices)."""
    for second in range(ticks):
        board = do_tick(board)
        if second % 2 == 1:
            board = add_bombs(board)
    return board
def get_state(board, ticks):
    """Board after *ticks* seconds; large tick counts reuse the period-4
    cycle the board settles into after the first few seconds."""
    effective = ticks if ticks <= 5 else 4 + ticks % 4
    return run(board, effective)
def read_board(rows):
    # Parse `rows` stdin lines: 'O' (BOMB) -> countdown 3, anything else -> 0.
    return [[3 if c == BOMB else 0 for c in raw_input()] for _ in xrange(rows)]
def write_board(board):
    # Render positive countdowns as bombs ('O') and zeros as empty ('.').
    for row in board:
        print ''.join(BOMB if x > 0 else EMPTY for x in row)
def main():
    # Input line: rows, columns (unused) and number of seconds to simulate.
    rows, _, ticks = map(int, raw_input().strip().split(' '))
    write_board(get_state(read_board(rows), ticks))
if __name__ == "__main__":
    main()
|
[
"[email protected]"
] | |
2fb73694654fc6905a84a964a772f70523ec131d
|
ecb7e109a62f6a2a130e3320ed1fb580ba4fc2de
|
/reference-code/lambda/cm-premembers-setting/github-to-create_CodePipeline/action_fail_notification.py
|
d324d7ae420ac5b8b6c9cb52538ab48ac8bae923
|
[] |
no_license
|
nisheeth84/prjs_sample
|
df732bc1eb58bc4fd4da6e76e6d59a2e81f53204
|
3fb10823ca4c0eb3cd92bcd2d5d4abc8d59436d9
|
refs/heads/master
| 2022-12-25T22:44:14.767803 | 2020-10-07T14:55:52 | 2020-10-07T14:55:52 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,326 |
py
|
import logging
import os
import json
import boto3
logger = logging.getLogger()
log_level = os.getenv('LOG_LEVEL', default='INFO')
logger.setLevel(log_level)
lambda_client = None
def lambda_handler(event, context):
    """Entry point: forward a Slack notification when a CodePipeline
    CloudWatch event reports the FAILED state."""
    detail = event['detail']
    state = detail['state']
    logger.debug("Received event: " + json.dumps(event, indent=2))
    if state != 'FAILED':
        return
    send_result_message(detail)
def send_result_message(detail):
    """Build a Slack-style failure notice and invoke the push_slack Lambda."""
    global lambda_client
    # Create the boto3 client lazily and reuse it across warm invocations.
    if not lambda_client:
        lambda_client = boto3.client('lambda')
    service_name = os.environ['SERVICE_NAME']
    stage = os.environ['STAGE']
    # Deep link into the CodePipeline console for the failed pipeline.
    url_template = "https://ap-northeast-1.console.aws.amazon.com/codepipeline/home?region=ap-northeast-1#/view/{pipeline}"
    message = "CodePipelineの処理にてエラーが発生しています。\n"
    attachments_json = [{
        "title": "AWS CodePipeline Management Console ",
        "title_link": url_template.format(**detail),
        "color": "danger",
        "text": "詳細: {}".format(json.dumps(detail, indent=2))
    }]
    event = {
        'message': message,
        'attachments': attachments_json
    }
    # Synchronous invoke of "<service>-<stage>-push_slack", which performs
    # the actual Slack delivery.
    lambda_client.invoke(
        FunctionName='{0}-{1}-{2}'.format(service_name, stage, "push_slack"),
        InvocationType="RequestResponse",
        Payload=json.dumps(event))
|
[
"[email protected]"
] | |
3a80c05f6704a2de17aa722bded43b906b10f096
|
7f7213fe407f252b2323025c9b9e381a73474b7d
|
/analysis/summarize_results.py
|
45276c2dda5e706f9078f707749e49e0772a3185
|
[] |
no_license
|
ahy3nz/graphene_build
|
0ce62c2123b8c39248048d2cafbd0aafdd06ff9a
|
44590b8db799136929fc06e490151f450ad30029
|
refs/heads/master
| 2021-06-20T07:28:20.958807 | 2019-07-03T19:48:24 | 2019-07-03T19:48:24 | 135,504,561 | 0 | 1 | null | 2018-08-06T21:47:17 | 2018-05-30T22:40:17 |
Python
|
UTF-8
|
Python
| false | false | 1,104 |
py
|
import os
import numpy as np
import pandas as pd
import pdb
# Aggregate pulling results over trials for each (force constant, angle)
# pair; presumably GROMACS pull output — confirm against the workflow.
trials = ['a', 'b','c']
constants = ['50', '100', '125']
#constants = ['250', '500', '1000']
angles = ['0']
#angles = ['0', '15', '30', '45']
df = pd.DataFrame()
curr_dir = os.getcwd()
for k in constants:
    for a in angles:
        all_forces = []
        all_works = []
        for trial in trials:
            # Each run lives in its own directory, e.g. "k50_0_a".
            name = 'k{}_{}_{}'.format(k, a, trial)
            os.chdir(os.path.join(curr_dir, name))
            forces = np.loadtxt('pull_pullf.xvg', comments=['@', '#'])
            max_force = np.max(forces[:,1])
            work = np.loadtxt('work_profile.dat')
            # Plateau estimate: mean of the last 10% of the work profile.
            max_work = np.mean(work[int(0.9*work.shape[0]) : , 1])
            all_forces.append(max_force)
            all_works.append(max_work)
        # Mean and std over trials for this (k, angle) combination.
        to_add = {'k': [k], 'angle': [a],
                'max_force': [np.mean(all_forces)], 'max_force_std': [np.std(all_forces)],
                'max_work': [np.mean(all_works)], 'max_work_std': [np.std(all_works)]}
        # NOTE(review): DataFrame.append is deprecated/removed in newer
        # pandas; pd.concat would be the modern replacement.
        df = df.append(pd.DataFrame.from_dict(to_add))
os.chdir(curr_dir)
df.to_csv('summary_weak.csv')
|
[
"[email protected]"
] | |
048e94b3a99ff1def1070784c1b9e04fbe6136a7
|
46684dd05ac738cdf99a63ab30b1b30544564098
|
/math300/test.py
|
6c0d76af421b583c97d8cb75a956801886ba8523
|
[
"MIT"
] |
permissive
|
johnnydevriese/wsu_courses
|
ed0e0a4c98defaea1b4dc467395101504ff075fc
|
b55efd501c2d8f0651891f422a486e32533f5aa0
|
refs/heads/master
| 2020-12-02T22:23:03.263500 | 2017-07-03T15:16:24 | 2017-07-03T15:16:24 | 96,124,013 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 455 |
py
|
#done on 12.15.15 an interesting way to make a 21 x 2 matrix.
from numpy import *
#~
#~ a = 2 * ones(21)
#~
#~ b = arange(1,19 + 1, 2)
#~
#~ c = zeros(21)
#~
#~ for i in arange(len(b)):
#~ c[i] = b[i]
#~
#~ z = append(a,c).reshape(2,21)
#~
#~ z = transpose(z)
#~
#~ print z
#perhaps a more traditional method using slices.
# Build the 21 x 2 matrix with slice assignment: a constant first column
# of 2s, and the odd numbers 1..19 in the first ten rows of column 1
# (the remaining rows of column 1 stay 0).
a = zeros((21, 2))
a[:,0] = 2
b = arange(1,19+1,2)
for i in arange(len(b)):
    a[i,1] = b[i]
print a
|
[
"[email protected]"
] | |
312109917f757ebdf6b55ea0418859c583dbe428
|
8fc7fa4f10691d44563df09b093ff493b7d02e28
|
/eoxserver/services/opensearch/v11/search.py
|
6780066fa1764ba83bf0b0abd8ee36575312c4b6
|
[
"MIT",
"LicenseRef-scancode-warranty-disclaimer"
] |
permissive
|
constantinius/eoxserver_combined_merge_ours
|
f023126aefc83bd2e2dcb07bbc5028a2d3147628
|
0bc751fe4780d9095892b5be4baf11dcde2126a5
|
refs/heads/master
| 2021-01-17T06:39:10.461762 | 2016-04-15T14:32:11 | 2016-04-15T14:32:11 | 56,326,583 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,034 |
py
|
#-------------------------------------------------------------------------------
#
# Project: EOxServer <http://eoxserver.org>
# Authors: Fabian Schindler <[email protected]>
#
#-------------------------------------------------------------------------------
# Copyright (C) 2015 EOX IT Services GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies of this Software or works derived from this Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#-------------------------------------------------------------------------------
from collections import namedtuple
from django.http import Http404
from eoxserver.core import Component, ExtensionPoint
from eoxserver.core.decoders import kvp
from eoxserver.core.util.xmltools import NameSpaceMap
from eoxserver.resources.coverages import models
from eoxserver.services.opensearch.interfaces import (
SearchExtensionInterface, ResultFormatInterface
)
class SearchContext(namedtuple("SearchContext", [
        "total_count", "start_index", "page_size", "count",
        "parameters", "namespaces"
        ])):
    """Immutable description of one OpenSearch request and its result window."""

    @property
    def _per_page(self):
        # Effective page length: explicit page_size, falling back to count.
        return self.page_size or self.count

    @property
    def page_count(self):
        """Number of pages the full result set spans."""
        return self.total_count // self._per_page

    @property
    def current_page(self):
        """Zero-based index of the page addressed by start_index."""
        return self.start_index // self._per_page
class OpenSearch11SearchHandler(Component):
    """Runs an OpenSearch 1.1 query: filters collections (or one collection's
    EO objects) through the registered search extensions and encodes one
    result page with the requested result format."""
    search_extensions = ExtensionPoint(SearchExtensionInterface)
    result_formats = ExtensionPoint(ResultFormatInterface)
    def handle(self, request, collection_id=None, format_name=None):
        """Return an ``(encoded payload, mimetype)`` pair for the request."""
        decoder = OpenSearch11BaseDecoder(request.GET)
        # Search within one collection's objects, or across all collections.
        if collection_id:
            qs = models.Collection.objects.get(
                identifier=collection_id
            ).eo_objects.all()
        else:
            qs = models.Collection.objects.all()
        if decoder.search_terms:
            # TODO: search descriptions, summary etc once available
            qs = qs.filter(identifier__icontains=decoder.search_terms)
        namespaces = NameSpaceMap()
        all_parameters = {}
        for search_extension in self.search_extensions:
            # get all search extension related parameters and translate the name
            # to the actual parameter name
            params = dict(
                (parameter["type"], request.GET[parameter["name"]])
                for parameter in search_extension.get_schema()
                if parameter["name"] in request.GET
            )
            qs = search_extension.filter(qs, params)
            namespaces.add(search_extension.namespace)
            all_parameters[search_extension.namespace.prefix] = params
        # len() evaluates the queryset: unsliced hit count before paging.
        total_count = len(qs)
        # Apply the requested result window (startIndex / count).
        if decoder.start_index and not decoder.count:
            qs = qs[decoder.start_index:]
        elif decoder.start_index and decoder.count:
            qs = qs[decoder.start_index:decoder.start_index+decoder.count]
        elif decoder.count:
            qs = qs[:decoder.count]
        try:
            # Pick the first registered format whose name matches.
            result_format = next(
                result_format
                for result_format in self.result_formats
                if result_format.name == format_name
            )
        except StopIteration:
            raise Http404("No such result format '%s'." % format_name)
        search_context = SearchContext(
            total_count, decoder.start_index, decoder.count, len(qs),
            all_parameters, namespaces
        )
        return (
            result_format.encode(request, collection_id, qs, search_context),
            result_format.mimetype
        )
def pos_int_zero(raw):
    """Parse *raw* as a non-negative integer (>= 0) or raise ValueError."""
    parsed = int(raw)
    if parsed < 0:
        raise ValueError("Value is negative")
    return parsed
def pos_int(raw):
    """Parse *raw* as a strictly positive integer (>= 1) or raise ValueError."""
    parsed = int(raw)
    if parsed < 1:
        raise ValueError("Value is negative or zero")
    return parsed
class OpenSearch11BaseDecoder(kvp.Decoder):
    """Declarative KVP decoder for the core OpenSearch 1.1 parameters."""
    # Free-text search terms ("q"); optional.
    search_terms = kvp.Parameter("q", num="?")
    # Zero-based index of the first item to return; defaults to 0.
    start_index = kvp.Parameter("startIndex", pos_int_zero, num="?", default=0)
    # Maximum number of items to return; None means "no limit".
    count = kvp.Parameter("count", pos_int, num="?", default=None)
    output_encoding = kvp.Parameter("outputEncoding", num="?", default="UTF-8")
|
[
"[email protected]"
] | |
f6a5603118608acf248d8651056781609c4b02bb
|
b872ccff0c2f79886c0136b32da5f04cb8d3276c
|
/etce/testdirectory.py
|
b30f5feb64f4abd34cd4a1e23ebbf7b4ed9aa8a5
|
[] |
no_license
|
prj8121/python-etce
|
9c22b3a182f103f46b1d865d13ded277482e4a34
|
bbd74a65280a09f3edc05457961b8c51ec009165
|
refs/heads/master
| 2022-11-18T05:19:19.324966 | 2020-04-02T15:15:47 | 2020-04-02T15:15:47 | 276,674,792 | 0 | 0 | null | 2020-07-02T14:57:07 | 2020-07-02T14:57:06 | null |
UTF-8
|
Python
| false | false | 8,510 |
py
|
#
# Copyright (c) 2014-2018 - Adjacent Link LLC, Bridgewater, New Jersey
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Adjacent Link LLC nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import os
import re
import shutil
from etce.config import ConfigDictionary
from etce.configfiledoc import ConfigFileDoc
from etce.field import Field
from etce.testfiledoc import TestFileDoc
from etce.platform import Platform
from etce.templateutils import get_file_overlays
from etce.testdirectoryerror import TestDirectoryError
import etce.utils
class TestDirectory(object):
    """An ETCE test directory.

    Wraps the test definition (test.xml), the wrapper configuration
    (config.xml) and the per-node subdirectories that make up a test,
    and answers questions such as "which node is this host?".
    """

    # Well-known file/directory names inside a test directory.
    TESTFILENAME = 'test.xml'
    STEPSFILENAME = 'steps.xml'
    CONFIGFILENAME = 'config.xml'
    HOSTFILENAME = 'nodefile.txt'
    DOCSUBDIRNAME = 'doc'

    def __init__(self, rootdir, basedir_override):
        """Load the test and config documents found under *rootdir*.

        Args:
            rootdir: path of the test directory.
            basedir_override: when not None, replaces the base directory
                declared in the test file.

        Raises:
            TestDirectoryError: if a node required by the test is missing
                from the host file.
        """
        self._rootdir = rootdir
        self._platform = Platform()
        self._testdoc = TestFileDoc(
            os.path.join(self._rootdir,
                         TestDirectory.TESTFILENAME))
        # A directory that no longer references a base directory has
        # already been merged (base + overlay combined into one tree).
        self._merged = not self._testdoc.has_base_directory
        self._basedir = self._testdoc.base_directory
        if basedir_override is not None:
            self._basedir = basedir_override
        self._configfile = ConfigFileDoc(
            os.path.join(self._rootdir,
                         TestDirectory.CONFIGFILENAME))
        # Verify the host file (when present) names every node the test
        # requires before anything is copied out to those nodes.
        hostfile = os.path.join(self._rootdir,
                                TestDirectory.HOSTFILENAME)
        self._verified_nodes = []
        # isfile() implies existence; the original `exists(...) or
        # isfile(...)` was redundant and would also accept a directory.
        if os.path.isfile(hostfile):
            self._verified_nodes = self._verify_nodes_in_hostfile(hostfile)

    def hasconfig(self, wrappername, argname):
        """Return True when config.xml defines *argname* for *wrappername*."""
        return self._configfile.hasconfig(wrappername, argname)

    def getconfig(self, wrappername, argname, default):
        """Return the config.xml value for *wrappername*/*argname*, or *default*."""
        return self._configfile.getconfig(wrappername, argname, default)

    def location(self):
        """Return the test directory root path."""
        return self._rootdir

    def info(self):
        """Return a dict with the test's name and description."""
        return {'name': self.name(),
                'description': self.description()}

    def name(self):
        return self._testdoc.name

    def tags(self):
        return self._testdoc.tags

    def description(self):
        return self._testdoc.description

    def overlay_names(self):
        """Return the sorted tuple of overlay names used by the test files."""
        return self._find_overlay_names()

    def stepsfile(self):
        return TestDirectory.STEPSFILENAME

    def __str__(self):
        info = self.info()
        s = '-' * len(info['name']) + '\n'
        s += info['name'] + '\n'
        s += '-' * len(info['name']) + '\n'
        s += 'location:\n\t%s\n' % self._rootdir
        s += 'description:\n\t%s\n' % info['description']
        s += 'overlays:\n'
        for p in self.overlay_names():
            s += '\t%s\n' % p
        return s

    def determine_nodenames(self):
        """Determine the node names defined by the test files and templates.

        1. read the base directory and test directory and take any
           subdirectory that does not end with TEMPLATE_DIRECTORY_SUFFIX
           to be a nodename
        2. add all of the directory names that will be generated
           by template directories
        3. remove the doc subdirectory (ignored; a place for additional
           test documentation)
        """
        template_suffix = ConfigDictionary().get('etce', 'TEMPLATE_DIRECTORY_SUFFIX')
        hostnames = set([])
        # if this is already a merged test directory, ignore base directory
        # search
        if not self._merged:
            for entry in os.listdir(os.path.join(self.location(), self._basedir)):
                abs_entry = os.path.join(self.location(), self._basedir, entry)
                if os.path.isdir(abs_entry):
                    if entry.split('.')[-1] == template_suffix:
                        continue
                    hostnames.update([entry])
        for entry in os.listdir(self.location()):
            abs_entry = os.path.join(self.location(), entry)
            if os.path.isdir(abs_entry):
                if entry.split('.')[-1] == template_suffix:
                    continue
                hostnames.update([entry])
        formatted_dirnames = self._testdoc.formatted_directory_names
        hostnames.update(formatted_dirnames)
        # and the doc directory
        hostnames.difference_update([TestDirectory.DOCSUBDIRNAME])
        return list(hostnames)

    def _verify_nodes_in_hostfile(self, hostfile):
        """Check that every node required by the test appears in *hostfile*.

        Returns:
            The list of verified node names.

        Raises:
            TestDirectoryError: when a required node is missing.
        """
        field = Field(hostfile)
        hostnames = field.leaves()
        nodenames = self.determine_nodenames()
        for nodename in nodenames:
            if nodename not in hostnames:
                # BUG FIX: the original interpolated the undefined name
                # `nodefile` here, raising NameError instead of the
                # intended TestDirectoryError.
                errstr = 'Hostname "%s" required by test, but not ' \
                         'found in nodefile "%s". Quitting.' \
                         % (nodename, hostfile)
                raise TestDirectoryError(errstr)
        return nodenames

    def nodename_from_hostname(self, hostname):
        """Map *hostname* to the matching verified node name, or None."""
        if hostname in self._verified_nodes:
            return hostname
        # Fall back to matching by local IP address; only an unambiguous
        # (single) match is accepted.
        samehosts = self._find_this_host_names(self._verified_nodes)
        if len(samehosts) == 1:
            return samehosts[0]
        return None

    def nodename(self):
        """Return this host's node name, or None when it cannot be determined."""
        return self.nodename_from_hostname(self._platform.hostname())

    def nodeid(self):
        """Return the first integer embedded in this host's node name, or None."""
        nodename = self.nodename()
        if not nodename:
            return None
        match = re.search(r'(\d+)', nodename)
        if match:
            return int(match.group(1))
        return None

    def getfile(self, name):
        """Return the full path of *name* in this node's subdirectory, or None."""
        nodedir = os.path.join(self._rootdir, self.nodename())
        for entry in os.listdir(nodedir):
            if entry == name:
                fullpath = os.path.join(nodedir, entry)
                # BUG FIX: the original tested os.path.isfile(entry) on the
                # bare entry name, which resolves against the current working
                # directory and almost always fails; test the joined path.
                if os.path.isfile(fullpath):
                    return fullpath
        return None

    def _find_this_host_names(self, namelist):
        """Return the names in *namelist* that map to a local IP address."""
        return [other for other in namelist
                if self._platform.hostname_has_local_address(other)]

    def _find_overlay_names(self):
        """Collect overlay names from every file in the test (and base) tree."""
        overlays = set([])
        search_dirs = [self._rootdir]
        if not self._merged:
            # push the basedirectory if this directory is not already merged
            search_dirs.insert(0,
                               os.path.join(self.location(), self._basedir))
        for search_dir in search_dirs:
            for dirname, dirnames, filenames in os.walk(search_dir):
                # NOTE(review): splitting on '/' is POSIX-specific; use
                # os.sep if Windows support is ever needed.
                if TestDirectory.DOCSUBDIRNAME in dirname.split('/'):
                    # ignore doc sub directory
                    continue
                for filename in filenames:
                    overlays.update(
                        get_file_overlays(os.path.join(dirname, filename)))
        return tuple(sorted(overlays))
|
[
"[email protected]"
] | |
c1dd32cf1fe3d0358e1900c84b2d4c2300278ace
|
88e286474dd82c7e2e94dc4e9c7b729ef5a3a8ba
|
/票据打印配置修改.py
|
7f5f081e07b02c6bcb340b9f970bf27c26aa02df
|
[] |
no_license
|
FengZiQ/sp_gui
|
d02d06e94d9980c6f763039edc2de0272cb18a86
|
f9c60f26f9d0dccc5c363cb422179152456e1d01
|
refs/heads/master
| 2020-03-28T05:04:12.877598 | 2018-12-18T11:03:37 | 2018-12-18T11:03:37 | 147,754,055 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,097 |
py
|
# coding=utf-8
from gui_test_tool import *
from dataForTest import *
tool = GUITestTool()
def modify_receipt_config():
    """Drive the receipt-print configuration edit flow once per print
    type and assert that each save reports success.

    Relies on the module-level GUITestTool instance ``tool`` and on a
    config created by ``add_receipt_printer_config`` as a precondition.
    """
    # The three print types offered by the dropdown: default / append / overwrite.
    print_type = ['默认', '追加', '覆盖']
    # Precondition: create a configuration to modify.
    add_receipt_printer_config('票据打印配置modifyTest')
    # Click the payment-configuration management tab.
    tool.click_action(
        '//ul[@id="leftNav"]/li[4]',
        '支付配置管理标签'
    )
    # Click the receipt-print second-level tab.
    tool.click_action(
        '//a[@data-menucode="spReceiptPrint"]',
        '票据打印配置二级标签',
        response_time=5
    )
    for i in range(len(print_type)):
        # Click the "modify" icon.
        tool.click_action(
            '//a[@title="修改"]',
            '修改图标'
        )
        # Fill in the configuration-name input box.
        tool.fill_action(
            'configName',
            '打印配置_' + print_type[i],
            '配置名称输入框',
            locator=By.ID
        )
        # Open the print-type dropdown.
        tool.click_action(
            '//button[@data-id="printType"]',
            '打印类型下拉列表',
            response_time=1
        )
        # Select the i-th print type (XPath list items are 1-based).
        tool.click_action(
            '//form/div[4]/div/div/div/ul/li['+str(i+1)+']',
            '打印类型',
            response_time=1
        )
        # Click the save button.
        tool.click_action(
            'saveBtn',
            '保存按钮',
            locator=By.CLASS_NAME,
            response_time=1
        )
        # Assert the success message ("修改成功" = "modified successfully").
        tool.equal_text_assert(
            'msValue',
            '消息提示',
            '修改成功',
            end='@结束@',
            locator=By.CLASS_NAME
        )
        time.sleep(3)
if __name__ == '__main__':
    modify_receipt_config()
    tool.mark_status()
    tool.finished()
    # Clean up the test environment (best effort).
    try:
        config_id = get_receipt_printer_config_info('打印配置_覆盖').get('id')
        del_receipt_printer_config(config_id)
    except Exception:
        # The config may not exist if the test failed early; the bare
        # `except:` used previously also swallowed SystemExit and
        # KeyboardInterrupt — Exception is narrow enough here.
        pass
    unbind_device([d['id'] for d in device_info])
    delete_customer(customer_info['id'])
|
[
"[email protected]"
] | |
9cdfdaedfc70944b3269a00af13bd9deefe4deaf
|
9f9a9413e43d8c45f700b015cb6de664e5115c04
|
/0x04-python-more_data_structures/100-weight_average.py
|
b406ee9ee1dc8637f9719921cecce5dcc67e88f5
|
[] |
no_license
|
JennyHadir/holbertonschool-higher_level_programming
|
d2bfc733800bee7fcca10a408a2d744af40b0d4b
|
c826d364665e40173e453048dce1ea5cb97b4075
|
refs/heads/master
| 2023-04-26T19:29:17.370132 | 2021-05-17T23:04:32 | 2021-05-17T23:04:32 | 319,390,421 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 233 |
py
|
#!/usr/bin/python3
def weight_average(my_list=[]):
    """Return the weighted mean of (score, weight) pairs, or 0 when empty."""
    if not my_list:
        return 0
    weighted_sum = 0
    total_weight = 0
    for pair in my_list:
        weighted_sum += pair[0] * pair[1]
        total_weight += pair[1]
    return weighted_sum / total_weight
|
[
"[email protected]"
] | |
9af0273121518eccb7ee6a791785c8e7a6ea7a41
|
a74b7a424159638508e4083aee927e6fca1e31ad
|
/vlbdiffwave/impl.py
|
5bcf51d38183b552386c6a5e5e1dfb906d318bf7
|
[
"MIT"
] |
permissive
|
WN1695173791/jax-variational-diffwave
|
77f17c44e6df9086754d15ad314a4a6cc58aff8f
|
780f615d76cff6f6210d5db0b9790961fc716905
|
refs/heads/main
| 2023-07-15T14:58:27.550266 | 2021-09-06T15:07:22 | 2021-09-06T15:07:22 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,731 |
py
|
from typing import Optional, Tuple
import flax
import flax.linen as nn
import jax.numpy as jnp
from .config import Config
from .diffwave import DiffWave
from .logsnr import LogSNR
class VLBDiffWave:
    """Model definition of VLB-Diffwave.

    Bundles a DiffWave denoiser with a learned LogSNR noise schedule.
    """
    def __init__(self, config: Config):
        """Initializer.
        Args:
            config: model configuration.
        """
        self.diffwave = DiffWave(config=config)
        self.logsnr = LogSNR(internal=config.internal)

    def init(self,
             key: jnp.ndarray,
             signal: jnp.ndarray,
             aux: jnp.ndarray,
             mel: jnp.ndarray) -> flax.core.frozen_dict.FrozenDict:
        """Initialize model parameters.
        Args:
            key: PRNG key used to initialize both submodules.
            signal: [float32; [B, T]], noise signal.
            aux: [float32; [B]], timestep for logsnr, logSNR for diffwave.
            mel: [float32; [B, T // H, M]], mel-spectrogram.
        Returns:
            model parameters (frozen dict with 'diffwave' and 'logsnr' keys).
        """
        lparam = self.logsnr.init(key, aux)
        dparam = self.diffwave.init(key, signal, aux, mel)
        return flax.core.freeze({'diffwave': dparam, 'logsnr': lparam})

    def snr(self, param: flax.core.frozen_dict.FrozenDict, time: jnp.ndarray) -> \
            Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray, jnp.ndarray]:
        """Compute SNR and (squared) alpha, sigma.
        Args:
            param: parameters of LogSNR.
            time: [float32; [B]], current timestep.
        Returns:
            [float32; [B]], logSNR, normalized -logSNR, square of alpha and sigma.
        """
        # [B], [B]
        logsnr, norm_nlogsnr = self.logsnr.apply(param, time)
        # [B]; sigmoid(logsnr) + sigmoid(-logsnr) == 1, so these are
        # complementary signal/noise rates.
        alpha_sq, sigma_sq = nn.sigmoid(logsnr), nn.sigmoid(-logsnr)
        return logsnr, norm_nlogsnr, alpha_sq, sigma_sq

    def apply(self,
              param: flax.core.frozen_dict.FrozenDict,
              signal: jnp.ndarray,
              mel: jnp.ndarray,
              time: jnp.ndarray) -> Tuple[jnp.ndarray, Tuple[jnp.ndarray, jnp.ndarray]]:
        """Denoise signal w.r.t timestep on mel-condition.
        Args:
            param: model parameters.
            signal: [float32; [B, T]], noised signal.
            mel: [float32; [B, T // H, M]], mel-spectrogram.
            time: [float32; [B]], current timestep.
        Returns:
            noise: [float32; [B, T]], estimated noise.
            alpha_sq, sigma_sq: [float32; [B]], signal, noise rates.
        """
        # [B] x 4
        _, norm_nlogsnr, alpha_sq, sigma_sq = self.snr(param['logsnr'], time)
        # [B, T]; DiffWave conditions on the normalized -logSNR, not time.
        noise = self.diffwave.apply(param['diffwave'], signal, norm_nlogsnr, mel)
        return noise, (alpha_sq, sigma_sq)

    def denoise(self,
                param: flax.core.frozen_dict.FrozenDict,
                signal: jnp.ndarray,
                mel: jnp.ndarray,
                t: jnp.ndarray,
                s: jnp.ndarray) -> Tuple[jnp.ndarray, jnp.ndarray]:
        """Denoise process: parameters of q(z_s | z_t, x-hat).
        Args:
            param: model parameters.
            signal: [float32; [B, T]], input signal.
            mel: [float32; [B, T // H, M]], mel-spectrogram.
            t: [float32; [B]], target time in range[0, 1].
            s: [float32; [B]], start time in range[0, 1], s < t.
        Returns:
            mean: [float32; [B, T]], denoised signal mean.
            std: [float32; [B]], standard deviation.
        """
        # [B, T], [B], [B]
        noise, (alpha_sq_t, sigma_sq_t) = self.apply(param, signal, mel, t)
        # [B] x 2
        _, _, alpha_sq_s, sigma_sq_s = self.snr(param['logsnr'], s)
        # [B]; transition rates from s to t.
        alpha_sq_tbars = alpha_sq_t / alpha_sq_s
        sigma_sq_tbars = sigma_sq_t - alpha_sq_tbars * sigma_sq_s
        # [B]
        std = jnp.sqrt(sigma_sq_tbars * sigma_sq_s / sigma_sq_t)
        # [B, T]
        mean = 1 / jnp.sqrt(alpha_sq_tbars[:, None]) * (
            signal - sigma_sq_tbars[:, None] / jnp.sqrt(sigma_sq_t[:, None]) * noise)
        # [B, T], [B]
        return mean, std

    def diffusion(self,
                  param: flax.core.frozen_dict.FrozenDict,
                  signal: jnp.ndarray,
                  noise: jnp.ndarray,
                  s: jnp.ndarray,
                  t: Optional[jnp.ndarray] = None) -> \
            Tuple[jnp.ndarray, jnp.ndarray, jnp.ndarray]:
        """Add noise to signal.
        Args:
            param: model parameters.
            signal: [float32; [B, T]], input signal.
            noise: [float32; [B, T]], gaussian noise.
            s: [float32; [B]], start time in range[0, 1].
            t: [float32; [B]], target time in range[0, 1], s < t.
                if t is None, compute q(z_t|x), otherwise, q(z_t|z_s).
        Returns:
            alpha, sigma: [float32; [B]], signal, noise ratio.
            noised: [float32; [B, T]], noised signal.
        """
        # B
        bsize = s.shape[0]
        # [B']; batch s and t together so LogSNR runs once.
        time = s if t is None else jnp.concatenate([s, t], axis=0)
        # [B'] x 4
        _, _, alpha_sq, sigma_sq = self.snr(param['logsnr'], time)
        if t is not None:
            # [B]
            alpha_sq_s, alpha_sq_t = alpha_sq[:bsize], alpha_sq[bsize:]
            sigma_sq_s, sigma_sq_t = sigma_sq[:bsize], sigma_sq[bsize:]
            # [B]; transition rates from s to t.
            alpha_sq_tbars = alpha_sq_t / alpha_sq_s
            sigma_sq_tbars = sigma_sq_t - alpha_sq_tbars * sigma_sq_s
            # [B]
            alpha_sq, sigma_sq = alpha_sq_tbars, sigma_sq_tbars
        # [B]; clamp avoids sqrt of tiny/negative values from cancellation.
        alpha = jnp.sqrt(jnp.maximum(alpha_sq, 1e-5))
        sigma = jnp.sqrt(jnp.maximum(sigma_sq, 1e-5))
        # [B, T]
        noised = alpha[:, None] * signal + sigma[:, None] * noise
        # [B], [B], [B, T]
        return alpha, sigma, noised
|
[
"[email protected]"
] | |
5bdf7a1ae800e202382e3d7d0b820fccc7907387
|
9b9a02657812ea0cb47db0ae411196f0e81c5152
|
/repoData/docopt-docopts/allPythonContent.py
|
27cda325177a0b927e2fbf1d1044a93b41b333f8
|
[] |
no_license
|
aCoffeeYin/pyreco
|
cb42db94a3a5fc134356c9a2a738a063d0898572
|
0ac6653219c2701c13c508c5c4fc9bc3437eea06
|
refs/heads/master
| 2020-12-14T14:10:05.763693 | 2016-06-27T05:15:15 | 2016-06-27T05:15:15 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,806 |
py
|
__FILENAME__ = language_agnostic_tester  # NOTE(review): unquoted concatenation marker from the dataset dump; not valid at runtime
#! /usr/bin/env python2
# Cross-language docopt test runner (Python 2): feeds the shared
# testcases.docopt fixtures to an external testee executable and compares
# its JSON output against the expected JSON.
import sys, json, re, os
from subprocess import Popen, PIPE, STDOUT

fixtures = open(os.path.join(os.path.dirname(__file__), 'testcases.docopt'), 'r').read()

# remove comments
fixtures = re.sub('#.*$', '', fixtures, flags=re.M)

testee = (sys.argv[1] if len(sys.argv) >= 2 else
          exit('Usage: language_agnostic_tester.py ./path/to/executable/testee [ID ...]'))
# Optional case IDs on the command line restrict which cases run.
ids = [int(x) for x in sys.argv[2:]] if len(sys.argv) > 2 else None
summary = ''
index = 0
# Each fixture section is r"""<usage doc>""" followed by `$ prog <argv>`
# invocations, each with its expected JSON result on the next line(s).
for fixture in fixtures.split('r"""'):
    doc, _, body = fixture.partition('"""')
    for case in body.split('$')[1:]:
        index += 1
        if ids is not None and index not in ids:
            continue
        argv, _, expect = case.strip().partition('\n')
        prog, _, argv = argv.strip().partition(' ')
        assert prog == 'prog', repr(prog)
        # Run the testee with the usage doc on stdin and the argv appended.
        p = Popen(testee + ' ' + argv,
                  stdout=PIPE, stdin=PIPE, stderr=STDOUT, shell=True)
        result = p.communicate(input=doc)[0]
        try:
            py_result = json.loads(result)
            py_expect = json.loads(expect)
        except:
            # 'J' marks a case whose output was not parseable JSON.
            summary += 'J'
            print (' %d: BAD JSON ' % index).center(79, '=')
            print 'result>', result
            print 'expect>', expect
            continue
        if py_result == py_expect:
            summary += '.'
        else:
            print (' %d: FAILED ' % index).center(79, '=')
            print 'r"""%s"""' % doc
            print '$ prog %s\n' % argv
            print 'result>', result
            print 'expect>', expect
            summary += 'F'
# Final tally: passed / total.
print (' %d / %d ' % (summary.count('.'), len(summary))).center(79, '=')
print summary
########NEW FILE########
|
[
"[email protected]"
] | |
768adacbe51e4ff930528aa640c60e9bac3172af
|
eee480b75e1454832a07bd02ab4ff5058aa8033e
|
/db/models.py
|
8f68f729ef071c222eeabe329b133a153718b6b2
|
[] |
no_license
|
545314690/wechat-spider
|
92dac44f8559ab0347b681945541f0b42be994d5
|
fe5cbac4aaedf32d974a01ab5fbb3fa33332c91d
|
refs/heads/master
| 2021-01-01T17:45:21.451910 | 2017-07-27T09:40:44 | 2017-07-27T09:40:44 | 98,148,453 | 0 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,464 |
py
|
# -*-coding:utf-8 -*-
from sqlalchemy import Column, Integer, String, Boolean, INTEGER, DateTime
from db.basic_db import Base, engine
from db.tables import *
class LoginInfo(Base):
    # Login account table: login_info
    __tablename__ = 'login_info'
    id = Column(INTEGER, primary_key=True, autoincrement=True)
    username = Column(String(255), nullable=False)
    password = Column(String(50), nullable=False)
    status = Column(INTEGER)  # presumably a login-state code — semantics not shown here; confirm
    enable = Column(Boolean, default=1, server_default='1')  # soft on/off switch (1 = enabled)
class User(Base):
    # User (WeChat account) table: wechat_user
    __tablename__ = 'wechat_user'
    id = Column(Integer, primary_key=True, autoincrement=True)
    nickname = Column(String(100), default='', server_default='')
    alias = Column(String(100), default='', server_default='')
    service_type = Column("service_type", INTEGER, default=1, server_default='1')
    fakeid = Column("fakeid", String(100), default='', server_default='')
    description = Column("description", String(500), default='', server_default='')
    round_head_img = Column("round_head_img", String(500), default='', server_default='')
    is_crawled = Column("is_crawled", INTEGER, default=0, server_default='0')
    is_monitored = Column("is_monitored", INTEGER, default=0, server_default='0')
    enable = Column("enable", INTEGER, default=1, server_default='1')
    # Defaults must be set here; otherwise None may be stored, which could
    # trigger an uncaught exception downstream.
class KeyWords(Base):
    # Keyword-search table: keywords
    __tablename__ = 'keywords'
    id = Column(INTEGER, primary_key=True, autoincrement=True)
    keyword = Column("keyword", String(200), unique=True)
    enable = Column("enable", INTEGER, default=1, server_default='1')  # 1 = active
class WeChatData(Base):
    # WeChat article data table: wechat_data
    # (the original comment said "weibo_data" — a copy/paste leftover;
    # __tablename__ below is authoritative)
    __tablename__ = 'wechat_data'
    id = Column(Integer, primary_key=True, autoincrement=True)
    title = Column("title", String(100), default='', server_default='')
    content = Column("content", String(6000), default='', server_default='')
    like_num = Column("like_num", INTEGER, default=0, server_default='0')
    read_num = Column("read_num", INTEGER, default=0, server_default='0')
    uname = Column("uname", String(20))
    url = Column("url", String(300))
    head_img = Column("head_img", String(500), default='', server_default='')
    pub_time = Column("pub_time", DateTime)
Base.metadata.create_all(engine)  # create all tables declared above
#Base.metadata.drop_all(engine)  # drop all tables (kept commented for manual resets)
|
[
"“[email protected]”"
] | |
216a2b17f45760c653834956d1a4df19aa73e94e
|
f9aa857868a2027eb6738e16318e84e2320c2947
|
/cookiecutter_django_test/contrib/sites/migrations/0003_set_site_domain_and_name.py
|
a36c0ace8376e9f56496ffe0c5d8e0ca91082f3e
|
[
"MIT"
] |
permissive
|
imsure/cookiecutter-django-test
|
44ecda7cb9449dcf78dc878a73f8eb9ba9d89f1b
|
853a46e6410fc9814cadbef828987f2c5b24fe4d
|
refs/heads/master
| 2020-03-12T18:55:37.943646 | 2018-04-24T00:29:23 | 2018-04-24T00:29:23 | 130,773,172 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,027 |
py
|
"""
To understand why this file is here, please read:
http://cookiecutter-django.readthedocs.io/en/latest/faq.html#why-is-there-a-django-contrib-sites-directory-in-cookiecutter-django
"""
from django.conf import settings
from django.db import migrations
def update_site_forward(apps, schema_editor):
    """Set site domain and name."""
    site_model = apps.get_model("sites", "Site")
    site_model.objects.update_or_create(
        id=settings.SITE_ID,
        defaults={"domain": "www.metropia.com", "name": "Cookiecutter Django Test"},
    )
def update_site_backward(apps, schema_editor):
    """Revert site domain and name to default."""
    site_model = apps.get_model("sites", "Site")
    site_model.objects.update_or_create(
        id=settings.SITE_ID,
        defaults={"domain": "example.com", "name": "example.com"},
    )
class Migration(migrations.Migration):
    """Apply/revert the Site domain and name update."""
    dependencies = [("sites", "0002_alter_domain_unique")]
    operations = [migrations.RunPython(update_site_forward, update_site_backward)]
|
[
"[email protected]"
] | |
9929059b4cc7d12192264b8307b4f37220bc83da
|
a1728a475b8c4e64a3a629d9c1f43294999eb769
|
/oTree/__temp_migrations/slider_post/0011_auto_20210123_2211.py
|
e7115dbec75e88987bb50c95d1e413510375e0ba
|
[] |
no_license
|
Tslilon/experimental_instruments_labor_project
|
2fff71db7453b1d455c9f2984e899f6b3f92e5f2
|
489b85eacf145eec03999bd416c4394b4a4bbafa
|
refs/heads/master
| 2023-03-19T02:21:06.279317 | 2021-02-23T11:49:38 | 2021-02-23T11:49:38 | 325,325,614 | 0 | 0 | null | 2021-02-23T11:49:39 | 2020-12-29T15:46:15 |
Python
|
UTF-8
|
Python
| false | false | 538 |
py
|
# Generated by Django 2.2.12 on 2021-01-24 06:11
from django.db import migrations
import otree.db.models
class Migration(migrations.Migration):
    """Auto-generated (Django 2.2.12): alter slider_post player's self_rating field."""

    dependencies = [
        ('slider_post', '0010_auto_20210123_2210'),
    ]

    operations = [
        migrations.AlterField(
            model_name='player',
            name='self_rating',
            # NOTE(review): the default is a float on an IntegerField —
            # looks auto-captured from live data; confirm this is intended.
            field=otree.db.models.IntegerField(default=6.966836560555607, null=True, verbose_name='How well do you think you did in the task? (0 = Very Badly, 10 = Very Well)'),
        ),
    ]
|
[
"[email protected]"
] | |
82ae6ef5b855de4f23fc4565b954b99b47490386
|
26bc83ba9481257be8ec47ea032c554feb6cb2ba
|
/setup.py
|
99a83a781060b2cdaa43e2d333a0ee84779b7231
|
[] |
no_license
|
irachex/zkpython
|
e8121e20b63db07dcc6e5ed2fcde94a0a1213a9b
|
a9a87747df9d316c2b26035f928900b72b9deb41
|
refs/heads/master
| 2021-01-23T12:16:43.143916 | 2014-10-26T14:22:04 | 2014-10-26T14:22:04 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,287 |
py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from distutils.core import setup, Extension
# C extension wrapping the multi-threaded ZooKeeper client library
# (zookeeper_mt); header search paths cover the common install prefixes.
zookeepermodule = Extension("zookeeper",
                            sources=["zookeeper.c"],
                            include_dirs=["/usr/include/c-client-src", "/usr/local/include/c-client-src", "/usr/local/include/zookeeper"],
                            libraries=["zookeeper_mt"],
                            )

# Package metadata for the zkpython bindings.
setup( name="zkpython",
       version = "0.4",
       description = "ZooKeeper Python bindings",
       ext_modules=[zookeepermodule] )
|
[
"[email protected]"
] | |
6f9b320f93d5f90a21da0f426a8d2714b87c4352
|
70433b90af33ed71fc9ab6c7ba15fe1b1ec52a90
|
/tests/validation/operation/test_produces_validation.py
|
5c5b9effd418860017904483a3b07bee22867320
|
[
"MIT"
] |
permissive
|
dhilton/flex
|
f6b54ae2c4c28e760fdcc02e00b7a4df420fba99
|
e23e74e246f4ad89c8d8971b029dcdd2fa642526
|
refs/heads/master
| 2020-04-05T23:10:53.512712 | 2014-11-10T04:35:48 | 2014-11-10T04:35:48 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,083 |
py
|
import pytest
from flex.validation.operation import (
construct_operation_validators,
validate_operation,
)
from tests.factories import (
ResponseFactory,
SchemaFactory,
)
#
# produces mimetype validation.
#
def test_produces_validation_valid_mimetype_from_global_definition():
    """
    A response content type listed in the global api `produces`
    definition passes validation.
    """
    api_schema = SchemaFactory(
        produces=['application/json'],
        paths={
            '/get': {'get': {'responses': {200: {'description': 'Success'}}}},
        },
    )
    operation_validators = construct_operation_validators(
        '/get', api_schema['paths']['/get'], 'get', api_schema,
    )
    json_response = ResponseFactory(content_type='application/json')
    validate_operation(json_response, operation_validators)
def test_produces_validation_invalid_mimetype_from_global_definition():
    """
    A response content type missing from the global api `produces`
    definition is rejected.
    """
    from django.core.exceptions import ValidationError

    api_schema = SchemaFactory(
        produces=['application/xml'],
        paths={
            '/get': {'get': {'responses': {200: {'description': 'Success'}}}},
        },
    )
    operation_validators = construct_operation_validators(
        '/get', api_schema['paths']['/get'], 'get', api_schema,
    )
    json_response = ResponseFactory(content_type='application/json')
    with pytest.raises(ValidationError):
        validate_operation(json_response, operation_validators, inner=True)
def test_produces_validation_for_valid_mimetype_from_operation_definition():
    """
    An operation-level `produces` overrides the global definition; a
    content type allowed by the operation passes validation.
    """
    api_schema = SchemaFactory(
        produces=['application/xml'],
        paths={
            '/get': {'get': {
                'responses': {200: {'description': 'Success'}},
                'produces': ['application/json'],
            }},
        },
    )
    operation_validators = construct_operation_validators(
        '/get', api_schema['paths']['/get'], 'get', api_schema,
    )
    json_response = ResponseFactory(content_type='application/json')
    validate_operation(json_response, operation_validators)
def test_produces_validation_for_invalid_mimetype_from_operation_definition():
    """
    When the operation definition overrides the global `produces`, the
    local value is what gets enforced during validation.
    """
    from django.core.exceptions import ValidationError

    api_schema = SchemaFactory(
        produces=['application/xml'],
        paths={
            '/get': {'get': {
                'responses': {200: {'description': 'Success'}},
                'produces': ['application/json'],
            }},
        },
    )
    operation_validators = construct_operation_validators(
        '/get', api_schema['paths']['/get'], 'get', api_schema,
    )
    xml_response = ResponseFactory(content_type='application/xml')
    with pytest.raises(ValidationError):
        validate_operation(xml_response, operation_validators, inner=True)
|
[
"[email protected]"
] | |
02883ec23d9f5f21ace7da9c6f6be38f2867bfea
|
e5889e31869f5c00335b6fefb48da4e79bea1801
|
/week_4/01_02_delete_max_heap.py
|
1a584705e1ab3e6092ff5f929e884704a774cd48
|
[] |
no_license
|
HoChangSUNG/sparta_algorithm
|
2f4d9767eec1c9e3c17e420be0bc905713bbdb8c
|
1572027a6278c5ba6b96f426c904d8739ae3649d
|
refs/heads/main
| 2023-02-02T22:47:22.449648 | 2020-12-21T06:56:20 | 2020-12-21T06:56:20 | 323,255,246 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,767 |
py
|
class MaxHeap:
    """Array-backed binary max-heap.

    ``items[0]`` is an unused sentinel so that the node at index ``i``
    has its parent at ``i // 2`` and children at ``2*i`` / ``2*i + 1``.
    """

    def __init__(self):
        self.items = [None]

    def insert(self, value):
        """Append *value* and sift it up to restore the heap property."""
        self.items.append(value)
        cur_index = len(self.items) - 1
        while cur_index > 1:  # index 1 is the root; nothing above it
            parent_index = cur_index // 2
            if self.items[parent_index] < self.items[cur_index]:
                self.items[parent_index], self.items[cur_index] = self.items[cur_index], self.items[parent_index]
                cur_index = parent_index
            else:
                break

    def delete(self):
        """Remove and return the maximum element (the root).

        Raises:
            IndexError: if the heap is empty.
        """
        if len(self.items) < 2:
            # Previously an empty heap raised a cryptic IndexError from the
            # swap below; keep the same exception type but make it explicit.
            raise IndexError('delete from an empty heap')
        # Swap root with the last leaf, pop the old root, then sift down.
        self.items[1], self.items[-1] = self.items[-1], self.items[1]
        prev_max = self.items.pop()
        cur_index = 1
        while cur_index <= len(self.items) - 1:
            left_child_index = cur_index * 2
            right_child_index = cur_index * 2 + 1
            max_index = cur_index
            if left_child_index <= len(self.items) - 1 and self.items[left_child_index] > self.items[max_index]:
                max_index = left_child_index
            if right_child_index <= len(self.items) - 1 and self.items[right_child_index] > self.items[max_index]:
                max_index = right_child_index
            if max_index == cur_index:
                break  # heap property restored
            self.items[cur_index], self.items[max_index] = self.items[max_index], self.items[cur_index]
            cur_index = max_index
        return prev_max
# Demo: build a heap and verify insert/delete ordering by eye.
max_heap = MaxHeap()
max_heap.insert(8)
max_heap.insert(7)
max_heap.insert(6)
max_heap.insert(2)
max_heap.insert(5)
max_heap.insert(4)
print(max_heap.items)  # [None, 8, 7, 6, 2, 5, 4]
print(max_heap.delete())  # should return 8!
print(max_heap.items)  # [None, 7, 5, 6, 2, 4]
|
[
"[email protected]"
] | |
fb1adf613d3697bc017da6571a975c14f74e45d9
|
76a01339f7ca19536a07d66e18ff427762157a2a
|
/codeforces/Python/longestuncommensubsequence.py
|
7e25e9cb507cd7c7aef28c3e02f0708e98559392
|
[] |
no_license
|
shaarangg/CP-codes
|
75f99530921a380b93d8473a2f2a588dc35b0beb
|
94fc49d0f20c02da69f23c74e26c974dfe122b2f
|
refs/heads/main
| 2023-07-19T21:31:40.011853 | 2021-09-07T05:22:28 | 2021-09-07T05:22:28 | 332,644,437 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 100 |
py
|
# Longest uncommon subsequence of two strings: when they differ, the longer
# string itself qualifies; when they are equal, no such subsequence exists.
first = input()
second = input()
if first != second:
    print(max(len(first), len(second)))
else:
    print(-1)
|
[
"[email protected]"
] | |
c38888fe571c3fb10c56219cfac5fd6893db8fc7
|
5e9576c368e98927e2965bd2fb23bd35d9993d69
|
/featuretools/primitives/standard/aggregation/max_min_delta.py
|
74dfcaafb3e71b4d1466672426d6f42b7b5f9afa
|
[
"BSD-3-Clause"
] |
permissive
|
alteryx/featuretools
|
c6e319e063e8e84e7684bf232376f95dc5272160
|
c284c2d27a95b81e0bae913ac90df2b02c8f3b37
|
refs/heads/main
| 2023-08-25T12:21:33.945418 | 2023-08-23T16:30:25 | 2023-08-23T16:30:25 | 102,908,804 | 1,783 | 201 |
BSD-3-Clause
| 2023-09-07T18:53:19 | 2017-09-08T22:15:17 |
Python
|
UTF-8
|
Python
| false | false | 1,126 |
py
|
from woodwork.column_schema import ColumnSchema
from featuretools.primitives.base import AggregationPrimitive
class MaxMinDelta(AggregationPrimitive):
    """Determines the difference between the max and min value.

    Args:
        skipna (bool): Determines if to use NA/null values.
            Defaults to True to skip NA/null.

    Examples:
        >>> max_min_delta = MaxMinDelta()
        >>> max_min_delta([7, 2, 5, 3, 10])
        8

        You can optionally specify how to handle NaN values

        >>> max_min_delta_skipna = MaxMinDelta(skipna=False)
        >>> max_min_delta_skipna([7, 2, None, 3, 10])
        nan
    """

    name = "max_min_delta"
    input_types = [ColumnSchema(semantic_tags={"numeric"})]
    return_type = ColumnSchema(semantic_tags={"numeric"})
    stack_on_self = False
    default_value = 0

    def __init__(self, skipna=True):
        self.skipna = skipna

    def get_function(self):
        # Bind the flag once so the returned callable does not hold `self`.
        skipna = self.skipna

        def max_min_delta(x):
            return x.max(skipna=skipna) - x.min(skipna=skipna)

        return max_min_delta
|
[
"[email protected]"
] | |
245c4419bddb2b9c46b6e55d7a35a88d77a201d3
|
4c19eac6e53b2c1230257508370ad60c8d83d6a7
|
/dxm/lib/DxEngine/DxMaskingEngine.py
|
6311f914b659004df881526cf892913c889e0724
|
[
"Apache-2.0"
] |
permissive
|
rakesh-roshan/dxm-toolkit
|
2c7741c8a02952de1c23715eadb515d84fcaf954
|
2c6e6ebf8615526501767844edf06fb74d878f25
|
refs/heads/master
| 2020-04-27T19:05:11.293818 | 2019-03-01T13:49:34 | 2019-03-01T13:49:34 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 8,690 |
py
|
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Copyright (c) 2018 by Delphix. All rights reserved.
#
# Author : Edward de los Santos
# Author : Marcin Przepiorowski
# Date : Oct 2017
import logging
import requests
from packaging import version
from datetime import datetime, timedelta
#import pickle
from dxm.lib.DxEngine.DxConfig import DxConfig
from masking_apis.rest import ApiException
from masking_apis.configuration import Configuration
from masking_apis.api_client import ApiClient
from masking_apis.models.login import Login
from masking_apis.apis.login_api import LoginApi
from dxm.lib.DxLogging import print_error
from dxm.lib.DxLogging import print_message
from masking_apis.apis.application_api import ApplicationApi
from masking_apis.apis.system_information_api import SystemInformationApi
class DxMaskingEngine(object):
    """Connection handle for a single Delphix Masking engine.

    NOTE(review): every method, including ``__init__``, is decorated with
    ``@classmethod``, so ``self`` is actually the class object and all the
    "instance" state below is stored as class attributes — the class acts as
    a process-wide singleton for one engine connection.
    """

    # Class-level state (see note above): shared swagger client and the
    # connection parameters of the most recently constructed "instance".
    api_client = None
    __address = None
    __name = None
    __username = None
    __password = None
    __port = None
    __protocol = None
    __logger = None

    @classmethod
    def __init__(self, name, address, username, password, port=8282,
                 protocol="http"):
        """
        Constructor
        :param name: engine name used as the credential-store key
        :param address: Engine address
        :param username: username
        :param password: password
        :param port: masking port (default 8282)
        :param protocol: connection protocol (default http)
        :returns: this is a description of what is returned
        :raises keyError: raises an exception
        """
        self.__address = address
        self.__name = name
        self.__username = username
        self.__password = password
        self.__port = port
        self.__protocol = protocol
        self.__logger = logging.getLogger()
        self.__logger.debug("creating DxMaskingEngine object")
        self.__logger.debug(("parameters: %s %s %s %s %s"
                            % (address, username,
                               password, port, protocol)))
        # All REST calls go through the swagger-generated client rooted here.
        self.__base_url = self.__protocol + "://" + address + ":" \
            + str(self.__port) + "/masking/api"
        config = Configuration()
        config.host = self.__base_url
        config.debug = False
        # Mirror our debug level into the generated client's loggers and
        # drop their console handlers so messages are not duplicated.
        if self.__logger.getEffectiveLevel() == logging.DEBUG:
            for name, logger in config.logger.items():
                logger.setLevel(logging.DEBUG)
                logger.removeHandler(config.logger_stream_handler)

    @classmethod
    def get_username(self):
        # Accessor for the stored user name.
        return self.__username

    @classmethod
    def get_name(self):
        # Accessor for the configured engine name.
        return self.__name

    @classmethod
    def get_session(self):
        """
        Create a session with a Masking engine.
        Tries a cached authorization key first and only logs in again when
        the engine answers 401 for it.
        :return None on success, 1 on any login/connection failure
        """
        self.api_client = ApiClient()
        #set number of retries to one
        # set timeout on request level as it is overwritten anyway
        # to do
        # change all requests to add timeout
        self.api_client.rest_client.pool_manager.connection_pool_kw['retries'] = 0
        apikey = self.load()

        try:
            self.__logger.debug("Check if old session is valid")
            self.api_client.set_default_header(header_name='authorization',
                                               header_value=apikey)
            # Cheap authenticated call used purely to validate the old key.
            app = ApplicationApi(self.api_client)
            app.get_all_applications(_request_timeout=self.get_timeout())
        except ApiException as e:
            if e.status == 401:
                # Cached key rejected — perform a fresh login.
                self.__logger.debug("Logging into Delphix Masking")
                login_api = LoginApi(self.api_client)
                login = Login(self.__username, self.__password)
                try:
                    self.__logger.debug("sending a login request. "
                                        "Payload %s" % login)
                    login_response = login_api.login(
                        login,
                        _request_timeout=self.get_timeout())
                    self.__logger.debug("login response %s"
                                        % login_response)
                    self.api_client.set_default_header(
                                header_name='authorization',
                                header_value=login_response.authorization
                    )
                    # Persist the new key so the next run can reuse it.
                    self.save(login_response.authorization)
                    return None
                except ApiException as e:
                    print_error("Can't login to engine %s (IP: %s)"
                                % (self.__name, self.__address))
                    print_error(e.body)
                    return 1
            else:
                print_error("Something went wrong %s" % e)
                self.__logger.error("Something went wrong %s" % e)
                return 1
        except Exception as e:
            # if engine is down this one should kick off
            print_error("Can't login to engine %s (IP: %s)"
                        % (self.__name, self.__address))
            self.__logger.debug(str(e))
            return 1

    @classmethod
    def get_version(self):
        """
        Return version of engine
        return: version of engine as string. ex 5.3.0.0 or 5.2.0.0
        """
        try:
            si = SystemInformationApi(self.api_client)
            retobj = si.get_system_information()
            ret = retobj.version
        except ApiException:
            # Engines older than 5.3 have no system-information endpoint,
            # so failure is treated as the pre-5.3 baseline.
            ret = "5.2.0.0"
        return ret

    @classmethod
    def version_ge(self, version_engine):
        """
        Compare an input parameter with engine version.
        param1: version_engine: version number to compare ex. "5.3"
        return: True if engine has higher or equal version
        """
        engine_ver = self.get_version()
        return version.parse(engine_ver) >= version.parse(version_engine)

    @classmethod
    def save(self, apikey):
        """
        Save session to file or database
        param1: apikey: Authorization key
        """
        # binary_file = open('engine.bin', mode='wb')
        # pickle.dump(apikey, binary_file)
        # binary_file.close()
        config = DxConfig()
        config.set_key(self.__name, None, apikey)

    @classmethod
    def load(self):
        """
        Load session from file or database
        """
        # try:
        #     binary_file = open('engine.bin', mode='rb')
        #     apikey = pickle.load(binary_file)
        #     binary_file.close()
        #     return apikey
        # except IOError:
        #     print_error("Session file not found")
        #     self.__logger.error("Session file not found")
        config = DxConfig()
        auth_key = config.get_key(self.__name, None)
        return auth_key

    @classmethod
    def get_timeout(self):
        """
        Return timeout for query
        Tuple (connect_timeout, read_timeout)
        """
        return (5, 15)

    @classmethod
    def getlogs(self, outputlog):
        """
        Temporary procedure using GUI hack to download logs
        """
        # NOTE(review): this bypasses the REST API and scrapes the web GUI;
        # it is expected to break if the GUI endpoints change.
        base_url = self.__protocol + "://" + self.__address \
            + ":" + str(self.__port)
        loginurl = base_url + '/dmsuite/login.do'
        logsurl = base_url + '/dmsuite/logsReport.do'
        dataurl = base_url + '/dmsuite/logsReport.do?action=download'

        session = requests.session()

        req_headers = {
            'Content-Type': 'application/x-www-form-urlencoded'
        }

        formdata = {
            'userName': self.__username,
            'password': self.__password,
        }

        # Authenticate
        session.post(
            loginurl, data=formdata, headers=req_headers,
            allow_redirects=False)

        # First GET primes the report page; second GET downloads the data.
        session.get(logsurl)
        r2 = session.get(dataurl)

        try:
            outputlog.write(r2.text)
            outputlog.close()
            print_message("Log saved to file %s" % outputlog.name)
            return 0
        except Exception as e:
            print_error("Problem with file %s Error: %s" %
                        (outputlog.name, str(e)))
            return 1
# if datafound:
# print line
|
[
"[email protected]"
] | |
abc51ee3da4656a2513b1a39da9a2afc1f616f46
|
349893015430768ac3ad0f8ca6fa8007c2e88f65
|
/test_appium/cases/test_contact.py
|
4501706e8731a6ca095ba7bf88622cfeffc3cc02
|
[] |
no_license
|
github653224/HogwartsLG7
|
226aa5640d1862b26f2ddc938c84b50a9e49c6a3
|
17b20d72bab376ce85d76d7ee0cab03f4e176a01
|
refs/heads/main
| 2023-06-07T08:33:47.293126 | 2021-06-29T14:07:41 | 2021-06-29T14:07:41 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 879 |
py
|
"""
__author__ = 'hogwarts_xixi'
__time__ = '2021/4/16 8:48 下午'
"""
from time import sleep
import sys
sys.path.append('..')
from test_appium.pages.app import App
class TestContact:
    """End-to-end tests for adding members through the contact list UI."""

    def setup_class(self):
        # Launch the app once for the whole class; tests share the session.
        self.app = App()

    def setup(self):
        # Every test case begins on the main page.
        self.main = self.app.start().goto_main()

    def teardown_class(self):
        self.app.stop()

    def _add_member_and_verify(self, username, phonenum):
        # Shared flow: contact list -> add-member page -> manual entry ->
        # assert the member was created.
        add_page = self.main.goto_contactlist().goto_addmemberpage()
        add_page.addmember_bymenual().edit_member(username, phonenum).verify_ok()

    def test_addcontact(self):
        self._add_member_and_verify("hogwarts05", "13911111115")

    def test_addcontact1(self):
        self._add_member_and_verify("hogwarts04", "13911111114")
|
[
"[email protected]"
] | |
855f056f1316e0015b0e1b25a1ec3771972c8647
|
a46e23e8266f7569e059f4df0cc22f3752965c77
|
/slack/leetcode-challenge-alarm.py
|
1c5b4c28f593f1572146cae2130200e124d8e0ef
|
[] |
no_license
|
jhy979/all-solved-club
|
758ae94f8b144deb8af0eabfd8445445ea43f182
|
cc1494b85bc7600cf242cf6ab1a7f5e78698a126
|
refs/heads/main
| 2023-07-15T22:19:11.084766 | 2021-08-24T03:15:58 | 2021-08-24T03:15:58 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 314 |
py
|
import os
from slack_sdk.webhook import WebhookClient

# The webhook URL comes from the environment so the secret stays out of
# source control.
url = os.environ.get("SLACK_WEBHOOK_URL")
webhook = WebhookClient(url)
response = webhook.send(text="LeetCode에서 새로운 챌린지 문제가 출제되었습니다. - https://leetcode.com")

# `assert` statements are stripped when Python runs with -O, so check the
# delivery explicitly and fail loudly when Slack rejects the message.
if response.status_code != 200 or response.body != "ok":
    raise RuntimeError(
        "Slack webhook delivery failed: status=%s body=%s"
        % (response.status_code, response.body)
    )
|
[
"[email protected]"
] | |
16d50188284e895addc4ad7bb1afa19b64421511
|
b09ea1e78e1a1f5d053817af35599c9464e755fe
|
/filter_utterances.py
|
cce497ee940a03394c4e60d4ae149f586b515e8e
|
[] |
no_license
|
aeoling/reddit_tools
|
46f0e321bf72889c82b100eae5418ac7a71b9701
|
f7fa9f32e547ddb529d55017a7784d00542c3ca0
|
refs/heads/master
| 2021-06-13T17:57:40.606586 | 2017-03-10T01:34:43 | 2017-03-10T01:34:43 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,959 |
py
|
from argparse import ArgumentParser
from os import path, makedirs, listdir
from collections import defaultdict
import numpy as np
from nltk import word_tokenize
MIN_CONTENT_WORD_FREQUENCY = 10
def load_questions(in_stream):
    """Read one question per line, normalised to stripped lower-case.

    Returns a set, so duplicate questions collapse to one entry.
    """
    return {line.strip().lower() for line in in_stream}
def build_freq_dictionary(in_questions_set):
    """Count token occurrences across all questions.

    Tokenisation is delegated to nltk's word_tokenize; the result maps
    token -> total count and returns 0 for unseen tokens (defaultdict).
    """
    token_counts = defaultdict(lambda: 0)
    for question in in_questions_set:
        for token in word_tokenize(question):
            token_counts[token] += 1
    return token_counts
def filter_dictionary(in_freq_dict):
    """Drop words whose frequency is an outlier.

    Keeps only words whose count lies within two standard deviations of the
    mean frequency, with a hard floor of MIN_CONTENT_WORD_FREQUENCY.

    :param in_freq_dict: mapping word -> occurrence count
    :return: plain dict with the surviving word -> count pairs
    """
    # list() gives numpy a concrete sequence (a Python 3 dict view would
    # otherwise become a 0-d object array) and keeps the function working
    # on both Python 2 and 3.
    frequencies = list(in_freq_dict.values())
    mean, std_dev = np.mean(frequencies), np.std(frequencies)
    min_frequency = max(MIN_CONTENT_WORD_FREQUENCY, mean - 2 * std_dev)
    max_frequency = mean + 2 * std_dev
    # .items() instead of the Python-2-only .iteritems(), which raises
    # AttributeError on Python 3.
    filtered_dictionary = {
        word: frequency
        for word, frequency in in_freq_dict.items()
        if min_frequency <= frequency <= max_frequency
    }
    return filtered_dictionary
def build_argument_parser():
    """CLI definition: one positional argument, the question-files directory."""
    result = ArgumentParser()
    result.add_argument('src_root')
    return result
def main(in_text_root):
    """Collect questions from every file under *in_text_root* and keep only
    those containing at least one non-outlier content word.

    :param in_text_root: directory whose files each hold one question per line
    :return: list of surviving questions
    """
    all_questions = set([])
    for questions_file in listdir(in_text_root):
        with open(path.join(in_text_root, questions_file)) as questions_in:
            questions = load_questions(questions_in)
            all_questions.update(questions)
    freq_dict = build_freq_dictionary(all_questions)
    filtered_dictionary = filter_dictionary(freq_dict)
    filtered_questions = []
    for question in all_questions:
        # A single in-vocabulary token is enough to keep the question.
        if any(token in filtered_dictionary
               for token in word_tokenize(question)):
            filtered_questions.append(question)
    return filtered_questions


if __name__ == '__main__':
    parser = build_argument_parser()
    args = parser.parse_args()
    for question in main(args.src_root):
        # print() works on both Python 2 and 3; the original bare
        # `print question` statement is a SyntaxError on Python 3.
        print(question)
|
[
"[email protected]"
] | |
9884cd78a96910ea51c7b7436511c56069946ac6
|
df7f13ec34591fe1ce2d9aeebd5fd183e012711a
|
/hata/discord/message/attachment/fields.py
|
a18f011507b4f126fe1cfce5f33d927754dca5ce
|
[
"LicenseRef-scancode-warranty-disclaimer"
] |
permissive
|
HuyaneMatsu/hata
|
63e2f6a2d7a7539fd8f18498852d9d3fe5c41d2e
|
53f24fdb38459dc5a4fd04f11bdbfee8295b76a4
|
refs/heads/master
| 2023-08-20T15:58:09.343044 | 2023-08-20T13:09:03 | 2023-08-20T13:09:03 | 163,677,173 | 3 | 3 |
Apache-2.0
| 2019-12-18T03:46:12 | 2018-12-31T14:59:47 |
Python
|
UTF-8
|
Python
| false | false | 3,683 |
py
|
__all__ = ()
from ...field_parsers import (
bool_parser_factory, entity_id_parser_factory, flag_parser_factory, float_parser_factory,
force_string_parser_factory, int_parser_factory, nullable_string_parser_factory
)
from ...field_putters import (
bool_optional_putter_factory, entity_id_putter_factory, flag_optional_putter_factory, float_optional_putter_factory,
force_string_putter_factory, int_putter_factory, nullable_string_optional_putter_factory,
nullable_string_putter_factory, url_optional_putter_factory
)
from ...field_validators import (
bool_validator_factory, entity_id_validator_factory, flag_validator_factory, float_conditional_validator_factory,
force_string_validator_factory, int_conditional_validator_factory, nullable_string_validator_factory,
url_optional_validator_factory, url_required_validator_factory
)
from .constants import DESCRIPTION_LENGTH_MAX
from .flags import AttachmentFlag
# Each attachment field below gets a (parser, putter, validator) triple built
# from the shared field factories: parse_* reads the value out of raw payload
# data, put_*_into serialises it back into payload data, and validate_*
# checks user-supplied values. Keys passed to the factories are the raw
# Discord payload keys, which may differ from the attribute name (e.g. the
# `name` attribute is stored under `filename`, `temporary` under `ephemeral`).

# content_type
parse_content_type = nullable_string_parser_factory('content_type')
put_content_type_into = nullable_string_putter_factory('content_type')
validate_content_type = nullable_string_validator_factory('content_type', 0, 1024)

# description
parse_description = nullable_string_parser_factory('description')
put_description_into = nullable_string_putter_factory('description')
validate_description = nullable_string_validator_factory('description', 0, DESCRIPTION_LENGTH_MAX)

# duration
parse_duration = float_parser_factory('duration_sec', 0.0)
put_duration_into = float_optional_putter_factory('duration_sec', 0.0)
validate_duration = float_conditional_validator_factory(
    'duration',
    0.0,
    lambda duration : duration >= 0.0,
    '>= 0.0',
)

# flags
parse_flags = flag_parser_factory('flags', AttachmentFlag)
put_flags_into = flag_optional_putter_factory('flags', AttachmentFlag())
validate_flags = flag_validator_factory('flags', AttachmentFlag)

# height
parse_height = int_parser_factory('height', 0)
put_height_into = int_putter_factory('height')
validate_height = int_conditional_validator_factory(
    'height',
    0,
    lambda height : height >= 0,
    '>= 0',
)

# id
parse_id = entity_id_parser_factory('id')
put_id_into = entity_id_putter_factory('id')
validate_id = entity_id_validator_factory('id')

# name
parse_name = force_string_parser_factory('filename')
put_name_into = force_string_putter_factory('filename')
validate_name = force_string_validator_factory('name', 0, 1024)

# proxy_url
parse_proxy_url = nullable_string_parser_factory('proxy_url')
put_proxy_url_into = url_optional_putter_factory('proxy_url')
validate_proxy_url = url_optional_validator_factory('proxy_url')

# size
parse_size = int_parser_factory('size', 0)
put_size_into = int_putter_factory('size')
validate_size = int_conditional_validator_factory(
    'size',
    0,
    lambda size : size >= 0,
    '>= 0',
)

# temporary
parse_temporary = bool_parser_factory('ephemeral', False)
put_temporary_into = bool_optional_putter_factory('ephemeral', False)
validate_temporary = bool_validator_factory('temporary', False)

# url
parse_url = force_string_parser_factory('url')
put_url_into = url_optional_putter_factory('url')
validate_url = url_required_validator_factory('url')

# waveform
parse_waveform = nullable_string_parser_factory('waveform')
put_waveform_into = nullable_string_optional_putter_factory('waveform')
validate_waveform = nullable_string_validator_factory('waveform', 0, 4096)

# width
parse_width = int_parser_factory('width', 0)
put_width_into = int_putter_factory('width')
validate_width = int_conditional_validator_factory(
    'width',
    0,
    lambda width : width >= 0,
    '>= 0',
)
|
[
"[email protected]"
] | |
13f67ec412e8d337686f43866268cea66429a4fd
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03634/s109235916.py
|
458b74a4f790d735865b22558bf7c8d196e50401
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,326 |
py
|
from collections import deque
def bfs(start, g, visited):
    """Weighted BFS from `start`, tagging each node with its entry branch.

    Fills visited[v] in place with (accumulated edge distance from start,
    the neighbour of start through which v was first reached). Entries for
    unreached nodes stay (-1, -1).
    """
    queue = deque()
    visited[start] = (0, start)
    # Seed the queue with start's direct neighbours, each acting as its own
    # "root parent" branch label.
    for neighbour, weight in g[start]:
        queue.append((neighbour, neighbour))
        visited[neighbour] = (weight, neighbour)
    while queue:
        node, branch = queue.popleft()
        for nxt, weight in g[node]:
            if visited[nxt] != (-1, -1):
                continue
            visited[nxt] = (visited[node][0] + weight, branch)
            queue.append((nxt, branch))
def main():
    """Answer distance queries on a weighted tree with one BFS from K.

    Every query (x, y) asks for the length of the path x -> K -> y, which is
    always dist(x, K) + dist(K, y), so a single traversal from K suffices.
    Reads the tree, the query count and K, then the queries from stdin.
    """
    n = int(input())
    g = [[] for _ in range(n + 1)]
    for _ in range(n - 1):
        a, b, c = map(int, input().split())
        g[a].append((b, c))
        g[b].append((a, c))
    q, k = map(int, input().split())
    visited = [(-1, -1)] * (n + 1)  # (distance from k, root branch of k)
    bfs(k, g, visited)
    ansl = []
    for _ in range(q):
        x, y = map(int, input().split())
        xd, xp = visited[x]
        yd, yp = visited[y]
        # The original branched on xp == yp, but both branches computed the
        # same value (the author noted this too): the path always passes
        # through k, so the answer is simply the sum of the two distances.
        ansl.append(xd + yd)
    for a in ansl:
        print(a)


if __name__ == "__main__":
    main()
|
[
"[email protected]"
] | |
c2fc1b0031863d92f615f5df5b863e07c4964440
|
0b2e54f19875e51519950c06c6110a8144d85cb4
|
/python_textbook_questions/Graph/graph_dfs__adjList_defaultdictlist.py
|
71c4e40cbb2c117d9fd4200802711d1817811a91
|
[] |
no_license
|
hemangbehl/Data-Structures-Algorithms_practice
|
b1f5d5995d4e02de2d3807e18ac0639b900d35af
|
e125ebd42dd4083701b13a319c368f4a622ca669
|
refs/heads/master
| 2020-08-06T09:03:42.253388 | 2020-07-10T23:55:39 | 2020-07-10T23:55:39 | 212,916,872 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,029 |
py
|
from collections import defaultdict
class Graph:
    """Directed graph on integer vertices backed by adjacency lists."""

    def __init__(self, vertices):
        self.graph = defaultdict(list)  # vertex -> list of successors
        self.V = vertices               # declared vertex count

    def add_edge(self, src, dest):
        """Insert a directed edge src -> dest."""
        self.graph[src].append(dest)

    def printAllEdges(self):
        """Print every adjacency list, one vertex per line."""
        print("all edges:")
        for vertex in self.graph:
            print("vertex #", vertex, ": ", end='')
            for successor in self.graph[vertex]:
                print(successor, end='-> ')
            print("")

    def DFS(self, v):
        """Iterative depth-first traversal from v, printing visit order."""
        pending = [v]
        seen = {v}
        while pending:
            current = pending.pop()
            print(current, end=' ')
            # Mark neighbours when pushed, not when popped, so each vertex
            # enters the stack at most once.
            for neighbour in self.graph[current]:
                if neighbour not in seen:
                    seen.add(neighbour)
                    pending.append(neighbour)
        print("")
#driver code
# Build the sample directed graph, run a depth-first traversal from
# vertex 0, then dump all adjacency lists.
g = Graph(5)
g.add_edge(1, 0)
g.add_edge(2, 1)
g.add_edge(4, 0)
g.add_edge(3, 4)
g.add_edge(0, 3)
g.add_edge(0, 2)
g.DFS(0)
g.printAllEdges()
|
[
"[email protected]"
] | |
a1961d38a64e8fefe13f6f62bcd6d0ee1dd43572
|
101b0ae046501b9eb0990be2d86e5b5538aacd7a
|
/src/pcg/pcg/grids/square.py
|
f0f4c09fd1f7fa1e8b04ab8a2b3e8923c0ed3d3c
|
[
"BSD-3-Clause"
] |
permissive
|
Tiendil/pcg-python
|
ebd12dd7abba43d758c746dad8057130f61ac720
|
02b54932bdec4a13c179ebeee76fc8ed1b8eba34
|
refs/heads/master
| 2022-12-15T05:32:02.970053 | 2020-09-15T10:47:18 | 2020-09-15T10:47:18 | 263,430,688 | 2 | 0 | null | 2020-09-15T10:41:29 | 2020-05-12T19:20:30 |
Python
|
UTF-8
|
Python
| false | false | 3,575 |
py
|
import math
import dataclasses
from PIL import Image
from pcg import colors
from pcg import drawer
from pcg.topologies import BaseArea
from pcg.geometry import Point, BoundingBox
@dataclasses.dataclass(frozen=True, order=True)
class Cell:
    """Immutable grid coordinate with component-wise arithmetic."""

    __slots__ = ('x', 'y')

    x: int
    y: int

    def __add__(self, cell: 'Cell'):
        """Translate by another cell, component-wise."""
        return Cell(self.x + cell.x,
                    self.y + cell.y)

    def __sub__(self, cell: 'Cell'):
        """Component-wise difference.

        Fixes the original implementation, which subtracted ``cell.y`` from
        ``self.x`` instead of ``cell.x``.
        """
        return Cell(self.x - cell.x,
                    self.y - cell.y)

    def scale(self, scale: float):
        """Multiply both components by `scale` (may yield float coordinates)."""
        return Cell(self.x * scale,
                    self.y * scale)
def cells_rectangle(width, height):
    """Yield every Cell of a width x height grid, row by row from (0, 0)."""
    for row in range(height):
        for col in range(width):
            yield Cell(col, row)
def cell_center(cell):
    # Geometric centre of the unit square occupied by `cell`.
    return Point(cell.x + 0.5, cell.y + 0.5)
def cell_bounding_box(cell):
    # Axis-aligned unit box covering the cell: [x, x+1] x [y, y+1].
    return BoundingBox(x_min=cell.x,
                       y_min=cell.y,
                       x_max=cell.x + 1,
                       y_max=cell.y + 1)
def cells_bounding_box(cells):
    """Union bounding box of all cells (`cells` must be non-empty)."""
    total = cell_bounding_box(cells[0])
    for cell in cells[1:]:
        total = total + cell_bounding_box(cell)
    return total
def area_template(min_distance, max_distance, distance):
    """Offsets around the origin whose `distance` lies in the given band.

    Scans the square [-max_distance, max_distance]^2 and keeps every Cell
    with min_distance <= distance(cell) <= max_distance.
    """
    span = range(-max_distance, max_distance + 1)
    return [Cell(dx, dy)
            for dx in span
            for dy in span
            if min_distance <= distance(Cell(dx, dy)) <= max_distance]
def area(topology, distance, min_distance, max_distance):
    """Precompute, for every node of `topology`, the node indexes lying
    within [min_distance, max_distance] under the `distance` metric."""
    cache = [None] * topology.size()
    # One relative offset template is shared by every centre node.
    template = area_template(min_distance, max_distance, distance)
    for center, index in topology.indexes.items():
        points = [center + point for point in template]
        cache[index] = topology.area_indexes(points)
    return cache
class Euclidean(BaseArea):
    """Neighbourhoods measured with straight-line (L2) distance."""

    __slots__ = ()

    def connectome(self, topology, min_distance, max_distance):
        """Per-node neighbourhood indexes for the given distance band."""
        return area(topology, self.distance, min_distance, max_distance)

    def distance(self, a, b=Cell(0, 0)):
        dx = a.x - b.x
        dy = a.y - b.y
        return math.sqrt(dx ** 2 + dy ** 2)
class Manhattan(BaseArea):
    """Neighbourhoods measured with taxicab (L1) distance."""

    __slots__ = ()

    def connectome(self, topology, min_distance, max_distance):
        """Per-node neighbourhood indexes for the given distance band."""
        return area(topology, self.distance, min_distance, max_distance)

    def distance(self, a, b=Cell(0, 0)):
        dx = a.x - b.x
        dy = a.y - b.y
        return abs(dx) + abs(dy)
class Ring(BaseArea):
    """Neighbourhoods measured with Chebyshev (L-infinity) distance."""

    __slots__ = ()

    def connectome(self, topology, min_distance, max_distance):
        """Per-node neighbourhood indexes for the given distance band."""
        return area(topology, self.distance, min_distance, max_distance)

    def distance(self, a, b=Cell(0, 0)):
        dx = a.x - b.x
        dy = a.y - b.y
        return max(abs(dx), abs(dy))
################################
# drawers
################################
@dataclasses.dataclass
class Sprite:
    # Fill colour of the rendered tile.
    color: colors.Color = colors.BLACK
    # Rendered PIL image; built lazily by prepair() and excluded from
    # __init__ and comparisons.
    image: Image = dataclasses.field(default=None, init=False, compare=False)

    def prepair(self, cell_size):
        """Render the sprite as a solid-colour RGBA tile of `cell_size`."""
        self.image = Image.new('RGBA', cell_size.xy, self.color.ints)
class Drawer(drawer.Drawer):
    """Square-grid specialisation of the generic drawer."""

    __slots__ = ('cell_size',)

    def __init__(self, cell_size, **kwargs):
        super().__init__(**kwargs)
        # Point-like pixel size of one grid cell.
        self.cell_size = cell_size

    def prepair_sprite(self, sprite):
        # Render each sprite at this grid's cell size.
        sprite.prepair(self.cell_size)

    def node_position(self, node, canvas_size):
        # Pixel position of the node's sprite: cell centre in pixels minus
        # half a cell so the sprite is centred on the cell.
        return cell_center(node.coordinates) * self.cell_size - self.cell_size / 2

    def calculate_canvas_size(self, nodes):
        # Canvas must cover the bounding box of all node cells, in pixels.
        coordinates = [node.coordinates for node in nodes]
        return (cells_bounding_box(coordinates).size * self.cell_size).round_up()
|
[
"[email protected]"
] | |
4528d0bdfaac5df372637ad0bc181a7caac37261
|
d35296adaf3942bdd402a4fc146629c6cdf516c0
|
/pymatgen/command_line/__init__.py
|
c3958c2df13754bdc0753e8387e4ca97441be247
|
[
"MIT"
] |
permissive
|
leicheng/pymatgen
|
76de5d9f09a16a2e1163e363e5a5602ce1907e2c
|
76fb4f3f38a3dd54138f793afaf38bbadbf16aa3
|
refs/heads/master
| 2020-12-24T21:27:13.990717 | 2013-08-23T21:57:26 | 2013-08-23T21:57:26 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 127 |
py
|
"""
This package contains various command line interfaces to programs used in
pymatgen that do not have Python equivalents.
"""
|
[
"[email protected]"
] | |
b8d13be6eb295ce9d73ae66e5400dde099535a2c
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03946/s886596927.py
|
8ddaedd848e91074fcbb5e84cb91d83065d340d1
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 297 |
py
|
# For each position i the best achievable profit is ns[i] minus the minimum
# value seen strictly before i; print how many positions attain the overall
# maximum profit. The original also computed a suffix-maximum list (max_l)
# that was never used — dead code removed.
n, t = map(int, input().split())  # t is read but unused by this solution
ns = list(map(int, input().split()))

min_n = 10 ** 10  # sentinel: no earlier value yet, so l[0] is hugely negative
l = []
for i in range(n):
    l.append(ns[i] - min_n)
    min_n = min(min_n, ns[i])

print(l.count(max(l)))
|
[
"[email protected]"
] | |
9e4820dd94bd2272bd3c98ea0ed4ad3f032a4622
|
ac5e52a3fc52dde58d208746cddabef2e378119e
|
/exps-sblp-obt/sblp_ut=3.5_rd=0.8_rw=0.04_rn=4_u=0.075-0.325_p=harmonic-2/sched=RUN_trial=25/params.py
|
fff0f4ebe57c15d0241b34e8b819f2e1e8e6ba77
|
[] |
no_license
|
ricardobtxr/experiment-scripts
|
1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1
|
7bcebff7ac2f2822423f211f1162cd017a18babb
|
refs/heads/master
| 2023-04-09T02:37:41.466794 | 2021-04-25T03:27:16 | 2021-04-25T03:27:16 | 358,926,457 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 250 |
py
|
{'cpus': 4,
'duration': 30,
'final_util': '3.602405',
'max_util': '3.5',
'periods': 'harmonic-2',
'release_master': False,
'res_distr': '0.8',
'res_nmb': '4',
'res_weight': '0.04',
'scheduler': 'RUN',
'trial': 25,
'utils': 'uni-medium-3'}
|
[
"[email protected]"
] | |
11d569aa09d84966613f28d3980d5e2949f5b07c
|
6fcfb638fa725b6d21083ec54e3609fc1b287d9e
|
/python/justdark_dml/dml-master/dml/CLUSTER/kmeans.py
|
67e0ce8d2b50cfe1a715e5a08df442b1f2ffb503
|
[] |
no_license
|
LiuFang816/SALSTM_py_data
|
6db258e51858aeff14af38898fef715b46980ac1
|
d494b3041069d377d6a7a9c296a14334f2fa5acc
|
refs/heads/master
| 2022-12-25T06:39:52.222097 | 2019-12-12T08:49:07 | 2019-12-12T08:49:07 | 227,546,525 | 10 | 7 | null | 2022-12-19T02:53:01 | 2019-12-12T07:29:39 |
Python
|
UTF-8
|
Python
| false | false | 1,095 |
py
|
from __future__ import division
import numpy as np
import scipy as sp
import pylab as py
from scipy.cluster.vq import kmeans2,whiten
'''
the scipy contain a kmeans,so there is no need to write one
bu for the convenience of using,I pack it with my code
I don't know how to translate the input space to whitened space
so If you need please add white
'''
class KMEANSC:
    """Thin convenience wrapper around scipy's kmeans2.

    scipy already ships a k-means implementation; this class only packages
    it behind a small object interface. Clustering can optionally run in
    whitened space (pass white=True to train()); use bfWhiteCen() afterwards
    to recover centroids in the original space.
    """

    def __init__(self, X, K):
        self.X = np.array(X)   # observation matrix, one row per sample
        self.K = K             # number of clusters
        self.labels = []
        self.centroids = []

    def train(self, white=False):
        """(Re)cluster the data; every call recomputes centroids and labels.

        :param white: cluster in whitened space (scipy's recommendation)
        """
        if white:
            self.centroids, self.labels = kmeans2(
                whiten(self.X), self.K, minit='random', missing='warn')
        else:
            self.centroids, self.labels = kmeans2(
                self.X, self.K, minit='random', missing='warn')

    def result(self):
        """Return (centroids, labels) from the last train() call."""
        return self.centroids, self.labels

    def bfWhiteCen(self):
        """Map whitened centroids back to means of the raw observations.

        Only meaningful after training with white=True.
        """
        Wcentroid = self.centroids
        # print() works on both Python 2 and 3; the original bare
        # `print Wcentroid` statement is a SyntaxError on Python 3.
        print(Wcentroid)
        for i in range(self.K):
            Wcentroid[i] = np.sum(self.X[self.labels == i], axis=0) \
                / list(self.labels).count(i)
        return Wcentroid
|
[
"[email protected]"
] | |
2fc943c950de622134f2630688a4ea910d09ef57
|
918963bcb425328076ce17400966378bd66f5e7e
|
/python-openshift-release/openshift_release/models/com_github_vfreex_release_apiserver_pkg_apis_art_v1alpha1_build_list.py
|
9de3643f43abfbf8f53738b297933a91c21d3ece
|
[] |
no_license
|
vfreex/release-apiserver
|
884d5ce6c1ef936fc746b668884e8f97a27305d8
|
a6e1df25a38f808fc194397fcd63628a999fed95
|
refs/heads/master
| 2023-03-09T19:05:38.011434 | 2020-05-21T09:25:47 | 2020-05-21T09:25:47 | 247,671,819 | 2 | 0 | null | 2023-02-24T23:14:38 | 2020-03-16T10:20:42 |
Go
|
UTF-8
|
Python
| false | false | 7,822 |
py
|
# coding: utf-8
"""
Api
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from openshift_release.configuration import Configuration
class ComGithubVfreexReleaseApiserverPkgApisArtV1alpha1BuildList(object):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.
    """

    # Generated code: regenerate from the OpenAPI spec rather than editing
    # by hand; any manual change will be lost on the next generation run.

    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    openapi_types = {
        'api_version': 'str',
        'items': 'list[ComGithubVfreexReleaseApiserverPkgApisArtV1alpha1Build]',
        'kind': 'str',
        'metadata': 'IoK8sApimachineryPkgApisMetaV1ListMeta'
    }

    attribute_map = {
        'api_version': 'apiVersion',
        'items': 'items',
        'kind': 'kind',
        'metadata': 'metadata'
    }

    def __init__(self, api_version=None, items=None, kind=None, metadata=None, local_vars_configuration=None):  # noqa: E501
        """ComGithubVfreexReleaseApiserverPkgApisArtV1alpha1BuildList - a model defined in OpenAPI"""  # noqa: E501
        if local_vars_configuration is None:
            local_vars_configuration = Configuration()
        self.local_vars_configuration = local_vars_configuration

        self._api_version = None
        self._items = None
        self._kind = None
        self._metadata = None
        self.discriminator = None

        # `items` is required (its setter rejects None); the rest are optional.
        if api_version is not None:
            self.api_version = api_version
        self.items = items
        if kind is not None:
            self.kind = kind
        if metadata is not None:
            self.metadata = metadata

    @property
    def api_version(self):
        """Gets the api_version of this ComGithubVfreexReleaseApiserverPkgApisArtV1alpha1BuildList.  # noqa: E501

        APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources  # noqa: E501

        :return: The api_version of this ComGithubVfreexReleaseApiserverPkgApisArtV1alpha1BuildList.  # noqa: E501
        :rtype: str
        """
        return self._api_version

    @api_version.setter
    def api_version(self, api_version):
        """Sets the api_version of this ComGithubVfreexReleaseApiserverPkgApisArtV1alpha1BuildList.

        APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources  # noqa: E501

        :param api_version: The api_version of this ComGithubVfreexReleaseApiserverPkgApisArtV1alpha1BuildList.  # noqa: E501
        :type: str
        """

        self._api_version = api_version

    @property
    def items(self):
        """Gets the items of this ComGithubVfreexReleaseApiserverPkgApisArtV1alpha1BuildList.  # noqa: E501


        :return: The items of this ComGithubVfreexReleaseApiserverPkgApisArtV1alpha1BuildList.  # noqa: E501
        :rtype: list[ComGithubVfreexReleaseApiserverPkgApisArtV1alpha1Build]
        """
        return self._items

    @items.setter
    def items(self, items):
        """Sets the items of this ComGithubVfreexReleaseApiserverPkgApisArtV1alpha1BuildList.


        :param items: The items of this ComGithubVfreexReleaseApiserverPkgApisArtV1alpha1BuildList.  # noqa: E501
        :type: list[ComGithubVfreexReleaseApiserverPkgApisArtV1alpha1Build]
        """
        # Required field: reject None when client-side validation is on.
        if self.local_vars_configuration.client_side_validation and items is None:  # noqa: E501
            raise ValueError("Invalid value for `items`, must not be `None`")  # noqa: E501

        self._items = items

    @property
    def kind(self):
        """Gets the kind of this ComGithubVfreexReleaseApiserverPkgApisArtV1alpha1BuildList.  # noqa: E501

        Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds  # noqa: E501

        :return: The kind of this ComGithubVfreexReleaseApiserverPkgApisArtV1alpha1BuildList.  # noqa: E501
        :rtype: str
        """
        return self._kind

    @kind.setter
    def kind(self, kind):
        """Sets the kind of this ComGithubVfreexReleaseApiserverPkgApisArtV1alpha1BuildList.

        Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds  # noqa: E501

        :param kind: The kind of this ComGithubVfreexReleaseApiserverPkgApisArtV1alpha1BuildList.  # noqa: E501
        :type: str
        """

        self._kind = kind

    @property
    def metadata(self):
        """Gets the metadata of this ComGithubVfreexReleaseApiserverPkgApisArtV1alpha1BuildList.  # noqa: E501


        :return: The metadata of this ComGithubVfreexReleaseApiserverPkgApisArtV1alpha1BuildList.  # noqa: E501
        :rtype: IoK8sApimachineryPkgApisMetaV1ListMeta
        """
        return self._metadata

    @metadata.setter
    def metadata(self, metadata):
        """Sets the metadata of this ComGithubVfreexReleaseApiserverPkgApisArtV1alpha1BuildList.


        :param metadata: The metadata of this ComGithubVfreexReleaseApiserverPkgApisArtV1alpha1BuildList.  # noqa: E501
        :type: IoK8sApimachineryPkgApisMetaV1ListMeta
        """

        self._metadata = metadata

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        # Recursively convert nested models, lists of models, and dicts of
        # models into plain dicts via their own to_dict().
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, ComGithubVfreexReleaseApiserverPkgApisArtV1alpha1BuildList):
            return False

        return self.to_dict() == other.to_dict()

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        if not isinstance(other, ComGithubVfreexReleaseApiserverPkgApisArtV1alpha1BuildList):
            return True

        return self.to_dict() != other.to_dict()
|
[
"[email protected]"
] | |
5895a8dbb45ecb20dda14f898b76342ed79c1685
|
a73f6f964bd903ab75e1d2cf5010d641b86be520
|
/deeplearning/LSTM_OnlyPatch/visualization.py
|
4a7f22d50f7e5bd08b04e2f306cd03a1fbb9d080
|
[] |
no_license
|
UpCoder/MICCAI2018
|
457fd1aca1841b1b2e39c242af7b3a7945da4d39
|
a09a44def100ad0d991beb5265cdae1b7fecd297
|
refs/heads/master
| 2021-05-09T02:43:21.636074 | 2018-02-08T14:00:04 | 2018-02-08T14:00:04 | 119,219,492 | 2 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 9,387 |
py
|
# -*- coding=utf-8 -*-
import tensorflow as tf
import os
import sys
from load_liver_density import load_raw_liver_density
import numpy as np
from resnet import inference_small
from deeplearning.LSTM.Config import Config as net_config
from utils.Tools import shuffle_image_label, read_mhd_image, get_boundingbox, convert2depthlaster, calculate_acc_error
from PIL import Image
from glob import glob
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_integer('batch_size', net_config.BATCH_SIZE, "batch size")
features = []
def load_patch(patch_path, return_roi=False, parent_dir=None):
    """Load a lesion patch.

    :param patch_path: path of a stored patch ('.jpg' or '.npy').
    :param return_roi: when False, load the stored patch file directly;
        when True, rebuild the masked multi-phase ROI from the original
        .mhd slices located under ``parent_dir``.
    :param parent_dir: directory containing one sub-directory per case with
        '<phase>_Image*.mhd' and '<phase>_Registration.mhd' files; only used
        when ``return_roi`` is True.
    :return: a PIL Image (.jpg), a numpy array (.npy or rebuilt ROI), or
        None for an unrecognized extension.
    """
    if not return_roi:
        if patch_path.endswith('.jpg'):
            return Image.open(patch_path)
        if patch_path.endswith('.npy'):
            return np.load(patch_path)
        return None
    if not patch_path.endswith(('.jpg', '.npy')):
        return None
    # The two original branches were identical except for the intensity
    # re-windowing flag: .jpg patches were saved with windowing applied,
    # while .npy patches store raw values (no window/level adjustment).
    rejust = patch_path.endswith('.jpg')
    phasenames = ['NC', 'ART', 'PV']
    basename = os.path.basename(patch_path)
    # Strip the trailing '_<index>' suffix to recover the case directory name.
    basename = basename[: basename.rfind('_')]
    mhd_images = []
    for phasename in phasenames:
        image_path = glob(os.path.join(parent_dir, basename, phasename + '_Image*.mhd'))[0]
        mask_path = os.path.join(parent_dir, basename, phasename + '_Registration.mhd')
        mhd_image = read_mhd_image(image_path, rejust=rejust)
        mhd_image = np.squeeze(mhd_image)
        mask_image = read_mhd_image(mask_path)
        mask_image = np.squeeze(mask_image)
        # Crop both image and mask to the lesion bounding box, then zero out
        # everything outside the registered mask.
        [xmin, xmax, ymin, ymax] = get_boundingbox(mask_image)
        mask_image = mask_image[xmin: xmax, ymin: ymax]
        mhd_image = mhd_image[xmin: xmax, ymin: ymax]
        mhd_image[mask_image != 1] = 0
        mhd_images.append(mhd_image)
    # Stack the three phases along the last (depth/channel) axis.
    return convert2depthlaster(mhd_images)
def resize_images(images, size, rescale=True):
    """Randomly augment (rotate by a multiple of 90deg, flip) each image,
    resize it to (size, size, 3) and optionally rescale values to [-1, 1].

    :param images: iterable of HxWx3 arrays (uint8-compatible values).
    :param size: output spatial size.
    :param rescale: when True, map pixel values from [0, 255] to [-1, 1].
    :return: float32 array of shape (len(images), size, size, 3).
    """
    batch = np.zeros([len(images), size, size, 3], np.float32)
    for idx, image in enumerate(images):
        img = Image.fromarray(np.asarray(image, np.uint8))
        # Random rotation: 0, 90, 180 or 270 degrees.
        img = img.rotate(np.random.randint(0, 4) * 90)
        # Independent 50/50 horizontal and vertical flips.
        if np.random.randint(0, 2) == 1:
            img = img.transpose(Image.FLIP_LEFT_RIGHT)
        if np.random.randint(0, 2) == 1:
            img = img.transpose(Image.FLIP_TOP_BOTTOM)
        img = img.resize([size, size])
        if rescale:
            # [0, 255] -> [0, 1] -> [-0.5, 0.5] -> [-1, 1]
            batch[idx, :, :, :] = np.asarray(img, np.float32) / 255.0
            batch[idx, :, :, :] = (batch[idx, :, :, :] - 0.5) * 2.0
        else:
            batch[idx, :, :, :] = np.asarray(img, np.float32)
    return batch
def main(_):
    """Restore a trained multi-phase classifier, score the held-out patch set
    batch by batch, report accuracy, then visualize the learned features
    with 2D and 3D PCA scatter plots.

    NOTE(review): model/dataset paths, 4 classes and the phase names are
    hard-coded below — confirm before reuse.
    """
    # Two inputs per sample: the tight ROI patch and an expanded context patch.
    roi_images = tf.placeholder(
        shape=[
            None,
            net_config.ROI_SIZE_W,
            net_config.ROI_SIZE_H,
            net_config.IMAGE_CHANNEL
        ],
        dtype=np.float32,
        name='roi_input'
    )
    expand_roi_images = tf.placeholder(
        shape=[
            None,
            net_config.EXPAND_SIZE_W,
            net_config.EXPAND_SIZE_H,
            net_config.IMAGE_CHANNEL
        ],
        dtype=np.float32,
        name='expand_roi_input'
    )
    batch_size_tensor = tf.placeholder(dtype=tf.int32, shape=[])
    is_training_tensor = tf.placeholder(dtype=tf.bool, shape=[])
    # representor_tensor is the feature layer collected for the PCA plots.
    logits, _, _, representor_tensor = inference_small(
        roi_images,
        expand_roi_images,
        phase_names=['NC', 'ART', 'PV'],
        num_classes=4,
        is_training=is_training_tensor,
        batch_size=batch_size_tensor
    )
    model_path = '/home/give/PycharmProjects/MICCAI2018/deeplearning/LSTM/parameters/0/0.0001'
    # model_path = '/home/give/PycharmProjects/MedicalImage/Net/forpatch/cross_validation/model/multiscale/parallel/0/2200.0'
    predictions = tf.nn.softmax(logits)
    saver = tf.train.Saver(tf.all_variables())
    print predictions
    predicted_label_tensor = tf.argmax(predictions, axis=1)
    print predicted_label_tensor
    init = tf.initialize_all_variables()
    sess = tf.Session(config=tf.ConfigProto(log_device_placement=False))
    sess.run(init)
    tf.train.start_queue_runners(sess=sess)
    # Restore the most recent checkpoint; abort when none is found.
    latest = tf.train.latest_checkpoint(model_path)
    if not latest:
        print "No checkpoint to continue from in", model_path
        sys.exit(1)
    print "resume", latest
    saver.restore(sess, latest)
    data_dir = '/home/give/Documents/dataset/MICCAI2018/Patches/crossvalidation/0/test'
    slice_dir = '/home/give/Documents/dataset/MICCAI2018/Slices/crossvalidation/0/test'
    # Collect (path, label) pairs: one sub-directory per class id 0..3.
    labels = []
    paths = []
    for typeid in [0, 1, 2, 3]:
        cur_path = os.path.join(data_dir, str(typeid))
        names = os.listdir(cur_path)
        labels.extend([typeid] * len(names))
        paths.extend([os.path.join(cur_path, name) for name in names])
    paths, labels = shuffle_image_label(paths, labels)
    start_index = 0
    predicted_labels = []
    liver_density = load_raw_liver_density()
    # Score the whole test set in fixed-size batches.
    while True:
        if start_index >= len(paths):
            break
        print start_index, len(paths)
        end_index = start_index + net_config.BATCH_SIZE
        cur_paths = paths[start_index: end_index]
        cur_roi_images = [np.asarray(load_patch(path)) for path in cur_paths]
        cur_expand_roi_images = [
            np.asarray(load_patch(path, return_roi=True, parent_dir=slice_dir)) for path in
            cur_paths]
        cur_roi_images = resize_images(cur_roi_images, net_config.ROI_SIZE_W, True)
        cur_expand_roi_images = resize_images(cur_expand_roi_images, net_config.EXPAND_SIZE_W, True)
        # Liver-density normalization kept disabled (see commented code below).
        # cur_liver_densitys = [liver_density[os.path.basename(path)[:os.path.basename(path).rfind('_')]] for
        #                       path in cur_paths]
        # for i in range(len(cur_roi_images)):
        #     for j in range(3):
        #         cur_roi_images[i, :, :, j] = (1.0 * cur_roi_images[i, :, :, j]) / (1.0 * cur_liver_densitys[i][j])
        #         cur_expand_roi_images[i, :, :, j] = (1.0 * cur_expand_roi_images[i, :, :, j]) / (
        #             1.0 * cur_liver_densitys[i][j])
        predicted_batch_labels, representor_value, logits_value = sess.run([predicted_label_tensor, representor_tensor, logits], feed_dict={
            roi_images: cur_roi_images,
            expand_roi_images: cur_expand_roi_images,
            is_training_tensor: False,
            batch_size_tensor: len(cur_roi_images)
        })
        # Accumulate features for the PCA visualization below.
        features.extend(representor_value)
        batch_labels = labels[start_index: end_index]
        predicted_labels.extend(predicted_batch_labels)
        start_index = end_index
        calculate_acc_error(predicted_batch_labels, batch_labels)
    calculate_acc_error(predicted_labels, labels)
    # get the feature, visualize it
    # first dimension reduction
    from sklearn.decomposition import PCA
    dim = 2
    from plot import plot_scatter, plot_scatter3D
    pca_obj = PCA(n_components=dim)
    visualized_data = pca_obj.fit_transform(features)
    if dim == 3:
        plot_scatter3D(visualized_data[:, 0], visualized_data[:, 1], visualized_data[:, 2], labels=labels,
                       category_num=4)
    else:
        plot_scatter(visualized_data[:, 0], visualized_data[:, 1], labels=labels, category_num=4)
    # Repeat with a 3-component projection for the 3D plot.
    dim = 3
    pca_obj = PCA(n_components=dim)
    visualized_data = pca_obj.fit_transform(features)
    if dim == 3:
        plot_scatter3D(visualized_data[:, 0], visualized_data[:, 1], visualized_data[:, 2], labels=labels,
                       category_num=4)
    else:
        plot_scatter(visualized_data[:, 0], visualized_data[:, 1], labels=labels, category_num=4)
if __name__ == '__main__':
tf.app.run()
|
[
"[email protected]"
] | |
c6444febe09bdfdf0db44534477486ada7928f63
|
286c7b7dd9bd48c73fd94f8e89bde99a8d3f74c5
|
/modelscript/interfaces/modelc/execution.py
|
ac1e9752664b2e49d14a9f0f73e4a059fc6b1343
|
[
"MIT"
] |
permissive
|
ScribesZone/ModelScript
|
e7738471eff24a74ee59ec88d8b66a81aae16cdc
|
a36be1047283f2e470dc2dd4353f2a714377bb7d
|
refs/heads/master
| 2023-03-18T02:43:57.953318 | 2021-03-08T15:26:40 | 2021-03-08T15:26:40 | 31,960,218 | 1 | 2 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,599 |
py
|
# coding=utf-8
""""""
# Public API of this module. The original '(' 'ExecutionContext' ')' was a
# parenthesized string, not a tuple — the trailing comma makes it a tuple as
# the import machinery expects for __all__.
__all__ = (
    'ExecutionContext',
)
import os
from typing import List, Any, Dict, Optional, ClassVar
from collections import OrderedDict
import argparse
# initialize the megamodel with metamodels and scripts
from modelscript.base.files import filesInTree
from modelscript.interfaces.modelc.options import getOptions
from modelscript.megamodels import Megamodel
from modelscript.base.issues import WithIssueList, OrderedIssueBoxList
class ExecutionContext(WithIssueList):
    """Execution context of a modelscript session.

    Parses command-line options, loads every requested source file (or
    directory of source files) through the Megamodel, and aggregates the
    resulting issue boxes.
    """

    args: ClassVar[List[str]]
    """The list of command line arguments"""

    options: argparse.Namespace
    """The options derived from args."""

    sourceMap: ClassVar[Dict[str, Optional['SourceFile']]]
    """For each source file name, the corresponding SourceFile
    or None if there was an error
    """

    issueBoxList: OrderedIssueBoxList

    def __init__(self, args):
        super(ExecutionContext, self).__init__()
        assert args is not None
        # extract the command options from the command line arguments
        self.args = args
        self.options = getOptions(args)
        # self.hasManySourceFiles=len(self.options.sources)>=2
        self.sourceMap = OrderedDict()
        # All loading happens eagerly at construction time.
        self._execute()
        # Issue boxes of every loaded file (topological order) plus the
        # issues attached to this context itself.
        self.issueBoxList = OrderedIssueBoxList(
            self.allSourceFileList
            + [self])

    def _displayVersion(self):
        # Print the ModelScript version banner.
        print(('ModelScript - version %s' % Megamodel.model.version))

    def _processSource(self, path):
        """Process a given source file or a given directory.

        If a directory is given, then get all source files in
        this directory recursively.
        """
        if os.path.isdir(path):
            # A directory is given: process all nested source files.
            extensions = Megamodel.model.metamodelExtensions()
            filenames = filesInTree(path, suffix=extensions)
            if self.options.verbose:
                print(('%s/ %i model files found.'
                       % (path, len(filenames))))
                print((' '+'\n '.join(filenames)))
            for filename in filenames:
                self._processSource(filename)
        else:
            # Load a given source file
            source = Megamodel.loadFile(path, self)
            # None marks a file that failed to load.
            self.sourceMap[path] = source

    def _execute(self):
        """Run the session: version banner, mode announcement, file loading."""
        # --- deal with --version -----------------------------------------
        if self.options.version:
            self._displayVersion()
        # --- deal with --mode --------------------------------------------
        print((
            {'justAST': 'Checking syntax',
             'justASTDep': 'Checking syntax and dependencies',
             'full': 'Checking models'}
            [self.options.mode]))
        Megamodel.analysisLevel = self.options.mode
        # --- deal with source files or source dir
        for path in self.options.sources:
            self._processSource(path)

    @property
    def validSourceFiles(self):
        # Generator over the files that loaded successfully (non-None).
        return (
            s for s in list(self.sourceMap.values())
            if s is not None)

    @property
    def nbIssues(self):
        # Total number of issues across all collected issue boxes.
        return self.issueBoxList.nbIssues

    @property
    def allSourceFileList(self):
        """
        The list of all source files involved in this build,
        directly or not. The list is in a topological order.
        """
        return Megamodel.sourceFileList(
            origins=self.validSourceFiles)

    def label(self):
        return 'executionContext'

    def display(self, styled=True):
        # Print the aggregated issue boxes, optionally with styling.
        print((self.issueBoxList.str(styled=styled)))
        # displayIssueBoxContainers(
        #     self.allSourceFileList+[self]
        # )
        # for source in self.allSourceFileList:
        #     print(source.issues.str(
        #         summary=False,
        #         styled=True,
        #         pattern='{origin}:{level}:{line}:{message}'))
        # if self.hasIssues:
        #     print(self.issues.str(
        #         summary=False,
        #         styled=True,
        #         pattern='{origin}:{level}:{line}:{message}'
        #     ))
# # TODO:3 move this to issue.py
# def displayIssueBoxContainers(containerList):
# for container in containerList:
# if container.hasIssues:
# print(container.issues.str(
# summary=False,
# styled=True,
# pattern='{origin}:{level}:{line}:{message}'))
# else:
# if not isinstance(container, ExecutionContext):
# cprint(container.label+':'+'OK', 'green')
|
[
"[email protected]"
] | |
a0b3f3473e96a829362eb4a6e2795c317ea237ae
|
ae7ba9c83692cfcb39e95483d84610715930fe9e
|
/bmw9t/nltk/ch_three/34.py
|
887616cb690d8b5f25ba28819605a36c6f85e6a4
|
[] |
no_license
|
xenron/sandbox-github-clone
|
364721769ea0784fb82827b07196eaa32190126b
|
5eccdd8631f8bad78eb88bb89144972dbabc109c
|
refs/heads/master
| 2022-05-01T21:18:43.101664 | 2016-09-12T12:38:32 | 2016-09-12T12:38:32 | 65,951,766 | 5 | 7 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,289 |
py
|
# ◑ Write code to convert nationality adjectives like Canadian and Australian to their corresponding nouns Canada and Australia (see http://en.wikipedia.org/wiki/List_of_adjectival_forms_of_place_names).
import re
import nltk
import pycountry
countries = [country.name for country in pycountry.countries]
def convert(word):
    """Convert a nationality adjective (e.g. 'Canadian') to its country noun
    ('Canada'), using the pycountry country list.

    :param word: adjectival form of a place name.
    :return: the matching country name, or None when no match is found.
    """
    # Adjectival suffixes to strip, tried in order.
    patterns = ['ese', 'ian', 'an', 'ean', 'n', 'ic', 'ern']
    # Suffixes re-appended to stems that lost their final vowel
    # ('Canad' -> 'Canada', 'Mexic' -> 'Mexico').
    suffixes = ['a', 'o']
    for pattern in patterns:
        # Anchor the suffix at the END of the word. The original regex
        # r'^(.*)(pattern)' was not anchored with '$', so a suffix occurring
        # mid-word also matched (e.g. 'an' inside 'Germanic').
        if not word.endswith(pattern):
            continue
        country = word[:-len(pattern)]
        # If the stem itself is a country, we are done.
        if country in countries:
            return country
        # Otherwise try restoring a dropped final vowel.
        for suffix in suffixes:
            new_country = country + suffix
            if new_country in countries:
                return new_country
    return None
print(convert('Mexican'))
|
[
"[email protected]"
] | |
a87c4be49b0e91e03f99718dedf309a616007fbe
|
c1d5ca6194e4cf1bf3d3a3dfe758a95dfb87a8bc
|
/Python-Itertools-Itertools_Combinations_with_replacement.py
|
ee8e460ebbc0f7edab0aae542397cea4e79f5c83
|
[] |
no_license
|
maheshkrishnagopal/HackerRankSolutions
|
6924b9ea92f86cab0885016482e08e5b4f801dad
|
ca16aa8fe726e4a9dac149d407cfde3620cdf96a
|
refs/heads/master
| 2020-08-02T10:51:46.568749 | 2019-06-21T06:15:15 | 2019-06-21T06:15:15 | 211,324,575 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,135 |
py
|
# itertools.combinations_with_replacement(iterable, r)
# This tool returns length subsequences of elements from the input iterable allowing individual elements to be repeated more than once.
# Combinations are emitted in lexicographic sorted order. So, if the input iterable is sorted, the combination tuples will be produced in sorted order.
# Task
# You are given a string .
# Your task is to print all possible size replacement combinations of the string in lexicographic sorted order.
# Input Format
# A single line containing the string and integer value separated by a space.
# Constraints
# The string contains only UPPERCASE characters.
# Output Format
# Print the combinations with their replacements of string on separate lines.
# Sample Input
# HACK 2
# Sample Output
# AA
# AC
# AH
# AK
# CC
# CH
# CK
# HH
# HK
# KK
"""
-----------------------------------------------------------------------------------------------------------
"""
from itertools import combinations_with_replacement
# Read "<WORD> <r>" from stdin and print every size-r combination (with
# replacement) of the word's characters, in lexicographic order.
letters, size = input().split()
for combo in combinations_with_replacement(sorted(letters), int(size)):
    print(''.join(combo))
|
[
"[email protected]"
] | |
ae2420c44c43955f3134b17593b131ef1ca3bda9
|
9522d6962376dccf073f59e447ce20c993dff5f2
|
/src/api/views/articles.py
|
a66c0bcc5d9f03bb49410ea4f65f1accfc9292bf
|
[] |
no_license
|
bolekhan1508/django_intro
|
6f51085905306989f3fc86c72e01b752d1917d4e
|
147bbeef2dc3f3c93771f757b44689a4103bf618
|
refs/heads/master
| 2022-10-30T09:06:46.476738 | 2020-06-18T10:02:21 | 2020-06-18T10:02:21 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,043 |
py
|
from rest_framework import status
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework import generics, mixins
from api.serializers.v1.article import ArticleSerializer, ArticleModelSerializer
from apps.articles.models import Article
class ArticleApiView(APIView):
    """Plain APIView exposing the latest article (GET) and creation (POST)."""

    def get(self, request, **kwargs):
        """Return the most recently created article (serialized)."""
        serializer = ArticleModelSerializer(instance=Article.objects.last())
        return Response(serializer.data)

    def post(self, request, **kwargs):
        """Create an article from the request payload.

        Returns 201 with the serialized article on success, 400 with the
        validation errors otherwise.
        """
        serializer = ArticleModelSerializer(data=request.data)
        if serializer.is_valid():
            # save() delegates to the serializer's create() AND stores the
            # created instance on serializer.instance, so serializer.data
            # reflects the persisted object (the original called create()
            # directly and discarded the result).
            serializer.save()
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(data=serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class ArticleGenericView(mixins.ListModelMixin, mixins.CreateModelMixin, generics.GenericAPIView):
    """Generic list/create endpoint for articles.

    The mixins only supply .list()/.create(); without explicit HTTP handler
    methods GenericAPIView answers 405 to every request, so wire them up.
    """
    queryset = Article.objects.all()
    serializer_class = ArticleModelSerializer

    def get(self, request, *args, **kwargs):
        """List all articles."""
        return self.list(request, *args, **kwargs)

    def post(self, request, *args, **kwargs):
        """Create a new article."""
        return self.create(request, *args, **kwargs)
|
[
"[email protected]"
] | |
209938778c9851b71332d8689c820720afece9d7
|
0add7953d3e3ce2df9e8265102be39b758579753
|
/built-in/TensorFlow/Research/cv/image_classification/Cars_for_TensorFlow/automl/vega/algorithms/nas/sm_nas/smnas_trainer_callback.py
|
8cfff6490cdcbc92ef01f25108da398c4f8e260f
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
Huawei-Ascend/modelzoo
|
ae161c0b4e581f8b62c77251e9204d958c4cf6c4
|
df51ed9c1d6dbde1deef63f2a037a369f8554406
|
refs/heads/master
| 2023-04-08T08:17:40.058206 | 2020-12-07T08:04:57 | 2020-12-07T08:04:57 | 319,219,518 | 1 | 1 |
Apache-2.0
| 2023-03-24T22:22:00 | 2020-12-07T06:01:32 |
Python
|
UTF-8
|
Python
| false | false | 5,125 |
py
|
"""Trainer for SMNas."""
import logging
import os
import mmcv
import torch
from pycocotools.coco import COCO
from pycocotools.cocoeval import COCOeval
from vega.core.trainer.callbacks import Callback
from vega.core.common.class_factory import ClassFactory, ClassType
from vega.core.common.file_ops import FileOps
@ClassFactory.register(ClassType.CALLBACK)
class SMNasTrainerCallback(Callback):
    """Trainer callback for SM-NAS.

    Replaces the trainer's default loop: the sampled detector is trained and
    evaluated through external mmdetection shell scripts, and the resulting
    bbox mAP plus the model cost are written to a performance file.
    """

    # The standard callbacks are bypassed because the whole train/eval cycle
    # is delegated to external scripts.
    disable_callbacks = ["ModelStatistics", "MetricsEvaluator", "ModelCheckpoint", "PerformanceSaver",
                         "LearningRateScheduler", "ProgressLogger", "ReportCallback"]

    def __init__(self):
        super(SMNasTrainerCallback, self).__init__()
        self.alg_policy = None

    def set_trainer(self, trainer):
        """Set trainer object for current callback."""
        self.trainer = trainer
        # Hijack the trainer's loop: _train_process drives everything.
        self.trainer._train_loop = self._train_process
        self.cfg = self.trainer.config
        self._worker_id = self.trainer._worker_id
        self.gpus = self.cfg.gpus
        if hasattr(self.cfg, "kwargs") and "smnas_sample" in self.cfg.kwargs:
            self.sample_result = self.cfg.kwargs["smnas_sample"]
        self.local_worker_path = self.trainer.get_local_worker_path()
        self.output_path = self.trainer.local_output_path
        # Dump the sampled model description for the external scripts.
        config_path = os.path.join(self.local_worker_path, 'config.py')
        with open(config_path, 'w') as f:
            f.write(self.trainer.model.desc)
        self.config_path = config_path
        self.cost_value = self.trainer.model.cost if self.trainer.model is not None else 0.0
        dir_path = os.path.dirname(os.path.abspath(__file__))
        self._train_script = os.path.join(dir_path, 'tools/dist_train.sh')
        self._eval_script = os.path.join(dir_path, 'tools/dist_test.sh')
        self.epochs = self.cfg.epochs

    def _train_process(self):
        """Process of train and test."""
        logging.info("start training")
        self._train()
        # Release cached GPU memory before evaluation starts.
        torch.cuda.empty_cache()
        logging.info("start evaluation")
        performance = self._valid()
        # The model cost is appended as the last performance entry.
        performance.append(self.cost_value)
        self.save_performance(performance)

    def _run_script(self, cmd):
        """Log and run an external command given as an argument list.

        (Replaces the original manual string concatenation loops.)
        """
        cmd_str = ' '.join(cmd)
        logging.info(cmd_str)
        os.system(cmd_str)

    def _train(self):
        """Train the network through the dist_train.sh script."""
        self._run_script(['bash', self._train_script, self.config_path, str(self.gpus),
                          '--total_epochs', str(self.epochs),
                          '--work_dir', self.local_worker_path])

    def _valid(self):
        """Evaluate the latest checkpoint; return [bbox mAP] (0.0 on failure)."""
        checkpoint_path = os.path.join(self.local_worker_path, 'latest.pth')
        eval_prefix = os.path.join(self.local_worker_path, 'eval.pkl')
        self._run_script(['bash', self._eval_script, self.config_path, checkpoint_path,
                          str(self.gpus),
                          '--out', eval_prefix, '--eval', 'bbox'])
        eval_file = os.path.join(self.local_worker_path, 'eval.pkl.bbox.json')
        model_desc = mmcv.Config.fromfile(self.config_path)
        try:
            performance = self.coco_eval(
                eval_file, model_desc.data.test.anno_file)
        except BaseException:
            # The eval output may be missing/corrupt when the script failed;
            # report 0.0 instead of aborting the architecture search.
            performance = 0.0
        return [performance]

    def save_performance(self, performance):
        """Write performance value(s) to <worker>/performance/performance.txt.

        Accepts a single int/float or a list of them; anything else is
        silently ignored (original behavior preserved).
        """
        if not isinstance(performance, (int, float, list)):
            return
        performance_dir = os.path.join(self.local_worker_path, 'performance')
        if not os.path.exists(performance_dir):
            FileOps.make_dir(performance_dir)
        with open(os.path.join(performance_dir, 'performance.txt'), 'w') as f:
            if isinstance(performance, (int, float)):
                # Single scalar: written without a trailing newline (as before).
                f.write("{}".format(performance))
            else:
                for p in performance:
                    if not isinstance(p, int) and not isinstance(p, float):
                        logging.error("performance must be int or float!")
                        return
                    f.write("{}\n".format(p))

    def coco_eval(self, result_file, coco):
        """Eval result_file by coco; return the bbox mAP (stats[0]) in percent."""
        if mmcv.is_str(coco):
            coco = COCO(coco)
        assert isinstance(coco, COCO)
        assert result_file.endswith('.json')
        coco_dets = coco.loadRes(result_file)
        img_ids = coco.getImgIds()
        cocoEval = COCOeval(coco, coco_dets, 'bbox')
        cocoEval.params.imgIds = img_ids
        cocoEval.evaluate()
        cocoEval.accumulate()
        cocoEval.summarize()
        return cocoEval.stats[0] * 100
|
[
"[email protected]"
] | |
443d05667fa143590ce4377290bb9f55e10a6f94
|
70433b90af33ed71fc9ab6c7ba15fe1b1ec52a90
|
/tests/validation/response/test_request_parameter_validation.py
|
4a1f85474beb93ce6a2064d0611a4370fa5a897c
|
[
"MIT"
] |
permissive
|
dhilton/flex
|
f6b54ae2c4c28e760fdcc02e00b7a4df420fba99
|
e23e74e246f4ad89c8d8971b029dcdd2fa642526
|
refs/heads/master
| 2020-04-05T23:10:53.512712 | 2014-11-10T04:35:48 | 2014-11-10T04:35:48 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,656 |
py
|
import pytest
from flex.validation.response import (
validate_response,
)
from flex.error_messages import MESSAGES
from flex.constants import (
PATH,
QUERY,
STRING,
INTEGER,
)
from tests.factories import (
SchemaFactory,
ResponseFactory,
)
from tests.utils import assert_error_message_equal
def test_response_parameter_validation():
    """
    Test that request validation does parameter validation. This is largely a
    smoke test to ensure that parameter validation is wired into request
    validation correctly.
    """
    from django.core.exceptions import ValidationError
    # Schema with one GET endpoint: a required uuid path parameter 'id' and
    # an optional integer query parameter 'page'.
    schema = SchemaFactory(
        paths={
            '/get/{id}/': {
                'parameters': [
                    {
                        'name': 'id',
                        'in': PATH,
                        'description': 'id',
                        'required': True,
                        'type': STRING,
                        'format': 'uuid',
                    },
                    {
                        'name': 'page',
                        'in': QUERY,
                        'type': INTEGER,
                    },
                ],
                'get': {
                    'responses': {200: {'description': "Success"}},
                },
            },
        },
    )
    # '32' is not a uuid and 'abcd' is not an integer: both must be flagged.
    response = ResponseFactory(url='http://www.example.com/get/32/?page=abcd')
    with pytest.raises(ValidationError) as err:
        validate_response(
            response,
            paths=schema['paths'],
            base_path=schema.get('base_path', ''),
            context=schema,
            inner=True,
        )
    # Drill into the nested error structure:
    # request -> parameters -> path -> id -> format (bad uuid).
    assert 'request' in err.value.messages[0]
    assert 'parameters' in err.value.messages[0]['request'][0][0]
    assert 'path' in err.value.messages[0]['request'][0][0]['parameters'][0]
    assert 'id' in err.value.messages[0]['request'][0][0]['parameters'][0]['path'][0]
    assert 'format' in err.value.messages[0]['request'][0][0]['parameters'][0]['path'][0]['id'][0]
    assert_error_message_equal(
        err.value.messages[0]['request'][0][0]['parameters'][0]['path'][0]['id'][0]['format'][0],
        MESSAGES['format']['invalid_uuid'],
    )
    # request -> parameters -> query -> page -> type (bad integer).
    assert 'query' in err.value.messages[0]['request'][0][0]['parameters'][0]
    assert 'page' in err.value.messages[0]['request'][0][0]['parameters'][0]['query'][0]
    assert 'type' in err.value.messages[0]['request'][0][0]['parameters'][0]['query'][0]['page'][0]
    assert_error_message_equal(
        err.value.messages[0]['request'][0][0]['parameters'][0]['query'][0]['page'][0]['type'][0],
        MESSAGES['type']['invalid'],
    )
|
[
"[email protected]"
] | |
b3b02cd1d0710b21962d9239ab72e90a5965cb4a
|
fdc0b72a3782a06952df4d723783dfa1bae65753
|
/query_deluxe/models/query_deluxe.py
|
4719ada08e5ea3a5d8cde3dbb94ce8c223b2cde6
|
[] |
no_license
|
Denbho/vendor_portal
|
0878ad82bf3c40d38f6e123f6b25a358bfebce4f
|
341a7ca77cbd310f3835d4b43de5012354a307c5
|
refs/heads/main
| 2023-04-19T21:26:56.115346 | 2021-05-17T04:16:53 | 2021-05-17T04:16:53 | 364,744,567 | 2 | 0 | null | 2021-05-06T04:34:51 | 2021-05-06T00:52:09 |
Python
|
UTF-8
|
Python
| false | false | 3,605 |
py
|
from odoo import api, fields, models, _
from odoo.exceptions import UserError
class QueryDeluxe(models.Model):
    """Run raw PostgreSQL queries from the Odoo UI and render the result.

    SECURITY NOTE(review): this model executes arbitrary SQL typed by the
    user and injects raw cell values into HTML. That is the feature, but it
    must only ever be exposed to fully trusted administrators.
    """
    _name = "querydeluxe"
    _description = "Postgres queries from Odoo interface"
    _inherit = ['mail.thread', 'mail.activity.mixin']

    tips = fields.Many2one('tipsqueries', string="Examples")
    tips_description = fields.Text(related='tips.description')
    rowcount = fields.Text(string='Rowcount')
    html = fields.Html(string='HTML')
    name = fields.Char(string='Type a query : ')
    valid_query_name = fields.Char()
    show_raw_output = fields.Boolean(string='Show the raw output of the query')
    raw_output = fields.Text(string='Raw output')

    def print_result(self):
        """Open a wizard asking for the PDF orientation of the result."""
        return {
            'name': _("Select orientation of the PDF's result"),
            'view_mode': 'form',
            'res_model': 'pdforientation',
            'type': 'ir.actions.act_window',
            'target': 'new',
            'context': {
                'default_query_name': self.valid_query_name
            },
        }

    def copy_query(self):
        """Copy the selected example query into the input field."""
        if self.tips:
            self.name = self.tips.name

    def execute(self):
        """Execute the typed query and render its result as an HTML table."""
        self.show_raw_output = False
        self.raw_output = ''
        self.rowcount = ''
        self.html = '<br></br>'
        self.valid_query_name = ''
        if not self.name:
            return
        self.tips = False
        # Keep an audit trail of every executed query in the chatter.
        self.message_post(body=str(self.name))
        headers = []
        datas = []
        try:
            self.env.cr.execute(self.name)
        except Exception as e:
            raise UserError(e)
        try:
            # Statements that produce no result set: don't try to fetch.
            no_fetching = ['update', 'delete', 'create', 'insert', 'alter', 'drop']
            # Length of the LONGEST keyword. The original used
            # len(max(no_fetching)) — the length of the lexicographically
            # greatest keyword — which was only correct by accident.
            max_n = max(len(o) for o in no_fetching)
            if not any(o in self.name.lower().strip()[:max_n] for o in no_fetching):
                headers = [d[0] for d in self.env.cr.description]
                datas = self.env.cr.fetchall()
        except Exception as e:
            raise UserError(e)
        rowcount = self.env.cr.rowcount
        self.rowcount = "{0} row{1} processed".format(rowcount, 's' if 1 < rowcount else '')
        if headers and datas:
            self.valid_query_name = self.name
            self.raw_output = datas
            self.html = self._render_table(headers, datas)

    def _render_table(self, headers, datas):
        """Build the HTML table shown below the query box.

        NOTE(review): cell values are inserted without HTML escaping — kept
        as-is to preserve output, but worth escaping for untrusted data.
        """
        header_html = "".join(["<th style='border: 1px solid'>"+str(header)+"</th>" for header in headers])
        header_html = "<tr>"+"<th style='background-color:white !important'/>"+header_html+"</tr>"
        body_html = ""
        i = 0
        for data in datas:
            i += 1
            # First column: the 1-based row number, highlighted.
            body_line = "<tr>"+"<td style='border-right: 3px double; border-bottom: 1px solid; background-color: yellow'>{0}</td>".format(i)
            for value in data:
                # Alternate row background; SQL NULL rendered as empty string.
                body_line += "<td style='border: 1px solid; background-color: {0}'>{1}</td>".format('cyan' if i%2 == 0 else 'white', str(value) if (value is not None) else '')
            body_line += "</tr>"
            body_html += body_line
        return """
            <table style="text-align: center">
                <thead style="background-color: lightgrey">
                    {0}
                </thead>
                <tbody>
                    {1}
                </tbody>
            </table>
        """.format(header_html, body_html)
class TipsQueries(models.Model):
    """Tips for queries."""
    # Example queries shown in the "Examples" dropdown of the querydeluxe
    # form, newest first.
    _name = 'tipsqueries'
    _description = "Tips for queries"
    _order = 'create_date desc, id'

    name = fields.Char(string='Query', required=True)  # the SQL text itself
    description = fields.Text(string="Description")  # free-text explanation
|
[
"[email protected]"
] | |
0f9430ff53318e3f34a0143b0e232380dcbd2abf
|
eb722922339781fa6bd9937e69383fcd06256738
|
/day1/kapua-python-client/swagger_client/models/access_permission.py
|
10bb5782a236f4f03f16b8c20aed316153d4307e
|
[
"MIT"
] |
permissive
|
mrsrinivas/diec
|
6a0c5da26ff23170b71217bfbc810bb98a897a83
|
ae9a5203b506d5cc18cb381666351bf9ce6b9b6c
|
refs/heads/master
| 2021-01-05T05:41:19.394898 | 2020-01-15T06:24:33 | 2020-01-15T06:24:33 | 240,901,175 | 1 | 0 |
MIT
| 2020-02-16T13:59:53 | 2020-02-16T13:59:52 | null |
UTF-8
|
Python
| false | false | 7,327 |
py
|
# coding: utf-8
"""
Eclipse Kapua REST API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from swagger_client.models.permission import Permission # noqa: F401,E501
class AccessPermission(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'access_info_id': 'str',
'type': 'str',
'permission': 'Permission',
'scope_id': 'str',
'id': 'str',
'created_on': 'datetime',
'created_by': 'str'
}
attribute_map = {
'access_info_id': 'accessInfoId',
'type': 'type',
'permission': 'permission',
'scope_id': 'scopeId',
'id': 'id',
'created_on': 'createdOn',
'created_by': 'createdBy'
}
def __init__(self, access_info_id=None, type=None, permission=None, scope_id=None, id=None, created_on=None, created_by=None): # noqa: E501
"""AccessPermission - a model defined in Swagger""" # noqa: E501
self._access_info_id = None
self._type = None
self._permission = None
self._scope_id = None
self._id = None
self._created_on = None
self._created_by = None
self.discriminator = None
if access_info_id is not None:
self.access_info_id = access_info_id
if type is not None:
self.type = type
if permission is not None:
self.permission = permission
if scope_id is not None:
self.scope_id = scope_id
if id is not None:
self.id = id
if created_on is not None:
self.created_on = created_on
if created_by is not None:
self.created_by = created_by
@property
def access_info_id(self):
"""Gets the access_info_id of this AccessPermission. # noqa: E501
:return: The access_info_id of this AccessPermission. # noqa: E501
:rtype: str
"""
return self._access_info_id
@access_info_id.setter
def access_info_id(self, access_info_id):
"""Sets the access_info_id of this AccessPermission.
:param access_info_id: The access_info_id of this AccessPermission. # noqa: E501
:type: str
"""
self._access_info_id = access_info_id
@property
def type(self):
"""Gets the type of this AccessPermission. # noqa: E501
:return: The type of this AccessPermission. # noqa: E501
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""Sets the type of this AccessPermission.
:param type: The type of this AccessPermission. # noqa: E501
:type: str
"""
self._type = type
@property
def permission(self):
"""Gets the permission of this AccessPermission. # noqa: E501
:return: The permission of this AccessPermission. # noqa: E501
:rtype: Permission
"""
return self._permission
@permission.setter
def permission(self, permission):
"""Sets the permission of this AccessPermission.
:param permission: The permission of this AccessPermission. # noqa: E501
:type: Permission
"""
self._permission = permission
@property
def scope_id(self):
"""Gets the scope_id of this AccessPermission. # noqa: E501
:return: The scope_id of this AccessPermission. # noqa: E501
:rtype: str
"""
return self._scope_id
@scope_id.setter
def scope_id(self, scope_id):
"""Sets the scope_id of this AccessPermission.
:param scope_id: The scope_id of this AccessPermission. # noqa: E501
:type: str
"""
self._scope_id = scope_id
@property
def id(self):
"""Gets the id of this AccessPermission. # noqa: E501
:return: The id of this AccessPermission. # noqa: E501
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this AccessPermission.
:param id: The id of this AccessPermission. # noqa: E501
:type: str
"""
self._id = id
@property
def created_on(self):
"""Gets the created_on of this AccessPermission. # noqa: E501
:return: The created_on of this AccessPermission. # noqa: E501
:rtype: datetime
"""
return self._created_on
@created_on.setter
def created_on(self, created_on):
"""Sets the created_on of this AccessPermission.
:param created_on: The created_on of this AccessPermission. # noqa: E501
:type: datetime
"""
self._created_on = created_on
@property
def created_by(self):
"""Gets the created_by of this AccessPermission. # noqa: E501
:return: The created_by of this AccessPermission. # noqa: E501
:rtype: str
"""
return self._created_by
@created_by.setter
def created_by(self, created_by):
    """Set the ``created_by`` attribute of this AccessPermission.

    :param created_by: the new value (str)
    """
    self._created_by = created_by
def to_dict(self):
    """Serialize the model's declared swagger properties into a plain dict.

    List and dict members are walked one level deep: any element that
    itself exposes ``to_dict`` is serialized through it; everything else
    is passed through unchanged.

    :return: dict mapping attribute names to serialized values
    """
    result = {}
    # `swagger_types` maps attribute name -> declared type; iterating the
    # dict directly yields the same keys as six.iteritems did.
    for attr in self.swagger_types:
        value = getattr(self, attr)
        if isinstance(value, list):
            result[attr] = [
                item.to_dict() if hasattr(item, "to_dict") else item
                for item in value
            ]
        elif hasattr(value, "to_dict"):
            result[attr] = value.to_dict()
        elif isinstance(value, dict):
            result[attr] = {
                key: val.to_dict() if hasattr(val, "to_dict") else val
                for key, val in value.items()
            }
        else:
            result[attr] = value
    # When the model class itself subclasses dict, also copy its raw
    # mapping entries into the result.
    if issubclass(AccessPermission, dict):
        for key, val in self.items():
            result[key] = val
    return result
def to_str(self):
    """Return a human-readable, pretty-printed rendering of the model."""
    return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
    """Two AccessPermission objects are equal when all attributes match."""
    return isinstance(other, AccessPermission) and self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
[
"[email protected]"
] | |
0da4a4157e589feceb1276afdc4affd5eff7eb9c
|
58ca273d2a9ee7f75b4bde3990513f74441143f0
|
/Python/Cellular Automata and FSMs/Turing Machine 1.0.py
|
253213abdb96bfc82a197a43c610dfd0947ce9bc
|
[] |
no_license
|
Al153/Programming
|
6db4b3c9d34747e1248aed2b0ee7fb7a35cef8d2
|
26de0b5607c6329bed2d6454090f00b098bc837f
|
refs/heads/master
| 2021-04-12T04:13:00.945021 | 2019-07-12T17:47:08 | 2019-07-12T17:47:08 | 12,439,227 | 2 | 2 | null | 2014-08-13T19:21:46 | 2013-08-28T16:47:08 |
Game Maker Language
|
UTF-8
|
Python
| false | false | 1,414 |
py
|
#Turing machine I
def str_list(a):
    """Return a new list with every element of *a* converted via str().

    :param a: any iterable of values (the original only handled indexable
        sequences; accepting any iterable is a backward-compatible widening)
    :return: list of the elements' string forms, in the same order
    """
    # A comprehension replaces the original index-based copy-and-append loop.
    return [str(item) for item in a]
# Head-movement constants: R/r step right (+1), L/l step left (-1) on the tape.
R,r = 1,1
L,l = -1,-1
# Sentinel "move" value that tells the main loop to stop the machine.
HALT, Halt, halt = 'halt','halt','halt'
# Initial tape contents; the tape is extended on demand while running.
tape = [0,0,1,0,0,0,1,1,1,0,1,0,1,1,0,1,0,0,0]
# Transition table: program[state][symbol] -> [move, symbol_to_write, next_state]
program = (
([R,0,0],[R,1,1]),
([R,0,1],[L,0,2]),
([R,1,3],[R,1,3]),
([R,0,4],[L,0,2]),
([R,0,5],[L,0,2]),
([halt,0,0],[L,0,2]),
)
done = 0      # loop flag: set to 1 once a halting transition fires
state = 0     # current machine state (index into `program`)
pointer = 0   # current head position on `tape`
while done == 0:
    data = tape[pointer]
    stateTemp = program[state][data]   # transition row for (state, symbol)
    if stateTemp[0] == 'halt': #Halt sequence
        print 'HALT'
        done = 1
    else:
        printList = str_list(tape)     # string copy of the tape for display
        state = stateTemp[2]           # advance to the next state
        tape[pointer] = stateTemp[1]   # write the new symbol under the head
        stateTemp2 = str(stateTemp[1])
        stateTemp2 = '[' + stateTemp2 + ']'   # bracket the cell under the head
        printList[pointer] = stateTemp2
        pointer = pointer + stateTemp[0]      # move the head left or right
        if pointer == -1: #Extension of the tape to the left
            # Shift every cell one position to the right, then blank cell 0
            # and park the head back on it.
            tape.append(0)
            for i in range (len(tape)-1):
                t = len(tape)-(i+1)
                tape[t] = tape[t-1]
            pointer = 0
            tape[0] = 0
        if pointer > (len(tape) -1):
            tape.append(0)   # extend the tape to the right with a blank cell
        go = raw_input()     # pause: one machine step per keypress (Python 2)
        printList = ''.join(printList)
        print printList
|
[
"[email protected]"
] | |
4d76334adfcd5e688e9261bdf384c7c71abf7550
|
a46d135ba8fd7bd40f0b7d7a96c72be446025719
|
/packages/python/plotly/plotly/validators/pointcloud/hoverlabel/font/_size.py
|
0f201eeb18dd469693d4636f5f0b526e02c1f9b0
|
[
"MIT"
] |
permissive
|
hugovk/plotly.py
|
5e763fe96f225d964c4fcd1dea79dbefa50b4692
|
cfad7862594b35965c0e000813bd7805e8494a5b
|
refs/heads/master
| 2022-05-10T12:17:38.797994 | 2021-12-21T03:49:19 | 2021-12-21T03:49:19 | 234,146,634 | 0 | 0 |
MIT
| 2020-01-15T18:33:43 | 2020-01-15T18:33:41 | null |
UTF-8
|
Python
| false | false | 509 |
py
|
import _plotly_utils.basevalidators
class SizeValidator(_plotly_utils.basevalidators.NumberValidator):
    """Number validator for the ``pointcloud.hoverlabel.font.size`` property."""

    def __init__(
        self, plotly_name="size", parent_name="pointcloud.hoverlabel.font", **kwargs
    ):
        # Extract the overridable validator metadata (in the same pop order
        # as before) before forwarding the remaining keyword arguments to
        # the NumberValidator base class.
        array_ok = kwargs.pop("array_ok", True)
        edit_type = kwargs.pop("edit_type", "none")
        minimum = kwargs.pop("min", 1)
        super(SizeValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            array_ok=array_ok,
            edit_type=edit_type,
            min=minimum,
            **kwargs
        )
[
"[email protected]"
] | |
577b554e54aa6ae7fe700e352c980e9e1a182971
|
3c000380cbb7e8deb6abf9c6f3e29e8e89784830
|
/venv/Lib/site-packages/cobra/modelimpl/fv/oobepp.py
|
2b9f0053741cbbccfaf4af8a3e6d001662078b1b
|
[] |
no_license
|
bkhoward/aciDOM
|
91b0406f00da7aac413a81c8db2129b4bfc5497b
|
f2674456ecb19cf7299ef0c5a0887560b8b315d0
|
refs/heads/master
| 2023-03-27T23:37:02.836904 | 2021-03-26T22:07:54 | 2021-03-26T22:07:54 | 351,855,399 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 39,240 |
py
|
# coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2020 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class OoBEpP(Mo):
"""
An out-of-band management endpoint profile for a fabric node management endpoint group.
"""
meta = ClassMeta("cobra.model.fv.OoBEpP")
meta.moClassName = "fvOoBEpP"
meta.rnFormat = "oob-[%(epgPKey)s]"
meta.category = MoCategory.REGULAR
meta.label = "Out-of-band Management Endpoint Profile"
meta.writeAccessMask = 0x1
meta.readAccessMask = 0x2000610021020683
meta.isDomainable = False
meta.isReadOnly = True
meta.isConfigurable = False
meta.isDeletable = False
meta.isContextRoot = True
meta.childClasses.add("cobra.model.l2.EgrPktsAg15min")
meta.childClasses.add("cobra.model.l2.EgrBytesAgHist15min")
meta.childClasses.add("cobra.model.l2.IngrPktsPart1h")
meta.childClasses.add("cobra.model.l2.IngrBytesPartHist1mo")
meta.childClasses.add("cobra.model.fv.RsEppToMonPol")
meta.childClasses.add("cobra.model.l2.EgrBytesPart15min")
meta.childClasses.add("cobra.model.fv.RtIpEppAtt")
meta.childClasses.add("cobra.model.fv.RsEppToEpCP")
meta.childClasses.add("cobra.model.l2.IngrBytesPartHist1qtr")
meta.childClasses.add("cobra.model.fv.StorageIssues")
meta.childClasses.add("cobra.model.l2.IngrBytesAg1w")
meta.childClasses.add("cobra.model.fv.RsEPgDefToL2Dom")
meta.childClasses.add("cobra.model.l2.IngrPktsAg1qtr")
meta.childClasses.add("cobra.model.l2.IngrBytesAg1h")
meta.childClasses.add("cobra.model.l2.IngrBytesPartHist1w")
meta.childClasses.add("cobra.model.fv.REpPCont")
meta.childClasses.add("cobra.model.l2.IngrBytesAg1d")
meta.childClasses.add("cobra.model.fv.RtNtpProvToEpp")
meta.childClasses.add("cobra.model.fv.RtVlanEppAtt")
meta.childClasses.add("cobra.model.l2.IngrBytesAgHist1qtr")
meta.childClasses.add("cobra.model.mgmt.InstPDef")
meta.childClasses.add("cobra.model.l2.EgrBytesAg15min")
meta.childClasses.add("cobra.model.l2.EgrBytesPart1year")
meta.childClasses.add("cobra.model.dhcp.ProvAddrDef")
meta.childClasses.add("cobra.model.l2.IngrPktsPart1d")
meta.childClasses.add("cobra.model.l2.IngrPktsPartHist15min")
meta.childClasses.add("cobra.model.vns.SvcContDef")
meta.childClasses.add("cobra.model.l2.EgrBytesAgHist1mo")
meta.childClasses.add("cobra.model.l2.EgrPktsPartHist5min")
meta.childClasses.add("cobra.model.fv.DomCont")
meta.childClasses.add("cobra.model.l2.EgrBytesAg1h")
meta.childClasses.add("cobra.model.l2.EgrBytesPart1qtr")
meta.childClasses.add("cobra.model.l2.IngrBytesPartHist1year")
meta.childClasses.add("cobra.model.qos.CustomPolDef")
meta.childClasses.add("cobra.model.l2.IngrBytesPart1w")
meta.childClasses.add("cobra.model.l2.EgrBytesAg1d")
meta.childClasses.add("cobra.model.l2.IngrBytesPart1h")
meta.childClasses.add("cobra.model.l2.EgrPktsAg1year")
meta.childClasses.add("cobra.model.l2.EgrBytesAg1w")
meta.childClasses.add("cobra.model.l2.IngrBytesPart1d")
meta.childClasses.add("cobra.model.l2.EgrPktsPartHist1year")
meta.childClasses.add("cobra.model.fv.SvcDepl")
meta.childClasses.add("cobra.model.fv.ExtLocaleCont")
meta.childClasses.add("cobra.model.fv.RtFvEppOob")
meta.childClasses.add("cobra.model.l2.EgrPktsAgHist1h")
meta.childClasses.add("cobra.model.fault.Inst")
meta.childClasses.add("cobra.model.l2.EgrPktsAgHist1d")
meta.childClasses.add("cobra.model.l2.IngrBytesAg15min")
meta.childClasses.add("cobra.model.l2.EgrPktsPart1w")
meta.childClasses.add("cobra.model.l2.EgrPktsPart1h")
meta.childClasses.add("cobra.model.fv.RtQinqEppAtt")
meta.childClasses.add("cobra.model.fv.RtMonToFvEppOobEvent")
meta.childClasses.add("cobra.model.l2.EgrBytesPartHist1qtr")
meta.childClasses.add("cobra.model.l2.EgrPktsAgHist1w")
meta.childClasses.add("cobra.model.l2.IngrPktsAg1mo")
meta.childClasses.add("cobra.model.l2.EgrPktsPart1d")
meta.childClasses.add("cobra.model.fv.L3extConsLblDepl")
meta.childClasses.add("cobra.model.l2.IngrPktsPart1w")
meta.childClasses.add("cobra.model.l2.EgrPktsAgHist1year")
meta.childClasses.add("cobra.model.l2.IngrBytesPartHist15min")
meta.childClasses.add("cobra.model.l2.EgrBytesAgHist1w")
meta.childClasses.add("cobra.model.l2.IngrPktsPart15min")
meta.childClasses.add("cobra.model.l2.EgrBytesPart5min")
meta.childClasses.add("cobra.model.l2.IngrPktsAgHist1d")
meta.childClasses.add("cobra.model.l2.EgrPktsPartHist1d")
meta.childClasses.add("cobra.model.l2.IngrPktsAg1d")
meta.childClasses.add("cobra.model.l2.EgrBytesAgHist1h")
meta.childClasses.add("cobra.model.l2.IngrPktsAg1h")
meta.childClasses.add("cobra.model.l2.EgrBytesAgHist1d")
meta.childClasses.add("cobra.model.fvtopo.EpCont")
meta.childClasses.add("cobra.model.l2.EgrPktsPart1year")
meta.childClasses.add("cobra.model.l2.EgrBytesPart1w")
meta.childClasses.add("cobra.model.l2.EgrPktsPartHist15min")
meta.childClasses.add("cobra.model.l2.EgrBytesAgHist1year")
meta.childClasses.add("cobra.model.l2.EgrBytesPartHist1year")
meta.childClasses.add("cobra.model.l2.EgrBytesPartHist15min")
meta.childClasses.add("cobra.model.l2.EgrBytesPart1d")
meta.childClasses.add("cobra.model.l2.EgrPktsAgHist15min")
meta.childClasses.add("cobra.model.l2.EgrBytesPart1h")
meta.childClasses.add("cobra.model.l2.EgrPktsPartHist1mo")
meta.childClasses.add("cobra.model.fv.RtMonToFvEppOob")
meta.childClasses.add("cobra.model.l2.IngrBytesAg1mo")
meta.childClasses.add("cobra.model.l2.IngrPktsPartHist1h")
meta.childClasses.add("cobra.model.fv.PullREpPCont")
meta.childClasses.add("cobra.model.fv.NwIssues")
meta.childClasses.add("cobra.model.l2.EgrBytesPartHist1mo")
meta.childClasses.add("cobra.model.l2.EgrPktsAg1mo")
meta.childClasses.add("cobra.model.fv.RtMacBaseEppAtt")
meta.childClasses.add("cobra.model.l2.IngrPktsPartHist1w")
meta.childClasses.add("cobra.model.l2.IngrPktsPart5min")
meta.childClasses.add("cobra.model.fault.Counts")
meta.childClasses.add("cobra.model.l2.EgrBytesAg1mo")
meta.childClasses.add("cobra.model.l2.EgrBytesPartHist5min")
meta.childClasses.add("cobra.model.l2.IngrPktsPartHist5min")
meta.childClasses.add("cobra.model.fv.MgmtDepl")
meta.childClasses.add("cobra.model.l2.IngrBytesAgHist1mo")
meta.childClasses.add("cobra.model.l2.IngrPktsAgHist1year")
meta.childClasses.add("cobra.model.fv.PolResolver")
meta.childClasses.add("cobra.model.l2.IngrPktsPart1qtr")
meta.childClasses.add("cobra.model.fv.AttEntPDepl")
meta.childClasses.add("cobra.model.l2.IngrBytesPart1qtr")
meta.childClasses.add("cobra.model.l2.IngrBytesAg1qtr")
meta.childClasses.add("cobra.model.l2.EgrPktsPart1mo")
meta.childClasses.add("cobra.model.l2.IngrBytesPart5min")
meta.childClasses.add("cobra.model.l2.IngrBytesAgHist1year")
meta.childClasses.add("cobra.model.fv.RtClientGrpToEpp")
meta.childClasses.add("cobra.model.fv.RtVxlanEppAtt")
meta.childClasses.add("cobra.model.fv.RsEPgDefToL3Dom")
meta.childClasses.add("cobra.model.l2.EgrPktsPartHist1qtr")
meta.childClasses.add("cobra.model.l2.EgrPktsAg1qtr")
meta.childClasses.add("cobra.model.l2.IngrBytesPartHist1h")
meta.childClasses.add("cobra.model.health.Inst")
meta.childClasses.add("cobra.model.l2.EgrPktsPartHist1w")
meta.childClasses.add("cobra.model.l2.EgrBytesPartHist1d")
meta.childClasses.add("cobra.model.l2.EgrBytesPartHist1h")
meta.childClasses.add("cobra.model.l2.IngrBytesAg1year")
meta.childClasses.add("cobra.model.l2.IngrBytesPartHist1d")
meta.childClasses.add("cobra.model.l2.IngrPktsPartHist1qtr")
meta.childClasses.add("cobra.model.l2.IngrBytesPart1year")
meta.childClasses.add("cobra.model.l2.IngrBytesPart1mo")
meta.childClasses.add("cobra.model.l2.IngrPktsAgHist15min")
meta.childClasses.add("cobra.model.fv.LEpP")
meta.childClasses.add("cobra.model.l2.EgrBytesPartHist1w")
meta.childClasses.add("cobra.model.l2.EgrPktsPart15min")
meta.childClasses.add("cobra.model.l2.IngrPktsPartHist1year")
meta.childClasses.add("cobra.model.l2.EgrPktsPartHist1h")
meta.childClasses.add("cobra.model.fv.StDepl")
meta.childClasses.add("cobra.model.fv.RtARemoteHostToEpp")
meta.childClasses.add("cobra.model.l2.IngrPktsPartHist1d")
meta.childClasses.add("cobra.model.l2.EgrPktsPart1qtr")
meta.childClasses.add("cobra.model.dhcp.InfraProvPDef")
meta.childClasses.add("cobra.model.l2.EgrPktsPart5min")
meta.childClasses.add("cobra.model.mgmt.AddrCont")
meta.childClasses.add("cobra.model.l2.IngrPktsAg15min")
meta.childClasses.add("cobra.model.l2.IngrBytesPart15min")
meta.childClasses.add("cobra.model.fv.ConfigLocale")
meta.childClasses.add("cobra.model.l2.EgrPktsAg1h")
meta.childClasses.add("cobra.model.fv.NodeReqDepl")
meta.childClasses.add("cobra.model.l2.EgrPktsAg1d")
meta.childClasses.add("cobra.model.l2.IngrPktsAgHist1mo")
meta.childClasses.add("cobra.model.l2.EgrBytesPart1mo")
meta.childClasses.add("cobra.model.l2.IngrPktsAgHist1qtr")
meta.childClasses.add("cobra.model.l2.EgrPktsAg1w")
meta.childClasses.add("cobra.model.fv.RtProfileToEpp")
meta.childClasses.add("cobra.model.l2.EgrPktsAgHist1qtr")
meta.childClasses.add("cobra.model.l2.IngrPktsAgHist1w")
meta.childClasses.add("cobra.model.l2.IngrBytesAgHist1h")
meta.childClasses.add("cobra.model.l2.EgrBytesAgHist1qtr")
meta.childClasses.add("cobra.model.l2.IngrPktsPart1mo")
meta.childClasses.add("cobra.model.l2.IngrPktsPart1year")
meta.childClasses.add("cobra.model.fv.RtCtxToEpP")
meta.childClasses.add("cobra.model.fv.RtMacEppAtt")
meta.childClasses.add("cobra.model.l2.IngrBytesAgHist1w")
meta.childClasses.add("cobra.model.fault.Delegate")
meta.childClasses.add("cobra.model.l2.EgrPktsAgHist1mo")
meta.childClasses.add("cobra.model.l2.IngrPktsAg1year")
meta.childClasses.add("cobra.model.fv.ExtNwDepl")
meta.childClasses.add("cobra.model.l2.IngrBytesAgHist1d")
meta.childClasses.add("cobra.model.l2.IngrPktsAg1w")
meta.childClasses.add("cobra.model.l2.IngrPktsPartHist1mo")
meta.childClasses.add("cobra.model.l2.EgrBytesAg1qtr")
meta.childClasses.add("cobra.model.l2.EgrBytesAg1year")
meta.childClasses.add("cobra.model.fv.InfraDepl")
meta.childClasses.add("cobra.model.l2.IngrPktsAgHist1h")
meta.childClasses.add("cobra.model.l2.IngrBytesPartHist5min")
meta.childClasses.add("cobra.model.l2.IngrBytesAgHist15min")
meta.childClasses.add("cobra.model.fv.Locale")
meta.childClasses.add("cobra.model.fv.AttrDefCont")
meta.childClasses.add("cobra.model.fv.RtProvToEpp")
meta.childClasses.add("cobra.model.fv.CompIssues")
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtMonToFvEppOobEvent", "rtfaultMonToFvEppOobEvent-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtNtpProvToEpp", "rtdatetimeNtpProvToEpp-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesPartHist1year", "HDl2IngrBytesPart1year-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesPartHist15min", "HDl2IngrBytesPart15min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesPart1year", "CDl2IngrBytesPart1year-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtARemoteHostToEpp", "rtfileARemoteHostToEpp-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesPart15min", "CDl2IngrBytesPart15min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesPart15min", "CDl2EgrBytesPart15min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesPartHist1qtr", "HDl2IngrBytesPart1qtr-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesPart1year", "CDl2EgrBytesPart1year-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsPartHist15min", "HDl2IngrPktsPart15min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsPart15min", "CDl2IngrPktsPart15min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesPartHist1year", "HDl2EgrBytesPart1year-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesPartHist15min", "HDl2EgrBytesPart15min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesPart1qtr", "CDl2IngrBytesPart1qtr-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesPart5min", "CDl2IngrBytesPart5min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsPartHist1year", "HDl2IngrPktsPart1year-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsPart1year", "CDl2IngrPktsPart1year-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesPartHist5min", "HDl2IngrBytesPart5min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesPartHist1mo", "HDl2IngrBytesPart1mo-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesPart1qtr", "CDl2EgrBytesPart1qtr-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsPartHist1year", "HDl2EgrPktsPart1year-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesPartHist1qtr", "HDl2EgrBytesPart1qtr-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesPart5min", "CDl2EgrBytesPart5min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsPart1year", "CDl2EgrPktsPart1year-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsPartHist15min", "HDl2EgrPktsPart15min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsPart5min", "CDl2IngrPktsPart5min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesPartHist5min", "HDl2EgrBytesPart5min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsPartHist5min", "HDl2IngrPktsPart5min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsPart1qtr", "CDl2IngrPktsPart1qtr-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesAgHist1year", "HDl2IngrBytesAg1year-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtClientGrpToEpp", "rtsnmpClientGrpToEpp-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsPartHist1qtr", "HDl2IngrPktsPart1qtr-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesPart1mo", "CDl2IngrBytesPart1mo-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsPart15min", "CDl2EgrPktsPart15min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesAgHist15min", "HDl2IngrBytesAg15min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesAgHist15min", "HDl2EgrBytesAg15min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesPartHist1w", "HDl2IngrBytesPart1w-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesAgHist1qtr", "HDl2IngrBytesAg1qtr-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsPartHist5min", "HDl2EgrPktsPart5min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesPart1w", "CDl2IngrBytesPart1w-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesPart1h", "CDl2IngrBytesPart1h-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesPart1d", "CDl2IngrBytesPart1d-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesAg15min", "CDl2IngrBytesAg15min"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesAgHist1year", "HDl2EgrBytesAg1year-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesPartHist1mo", "HDl2EgrBytesPart1mo-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsAgHist1year", "HDl2IngrPktsAg1year-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsPartHist1qtr", "HDl2EgrPktsPart1qtr-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesPartHist1h", "HDl2IngrBytesPart1h-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesAg1year", "CDl2IngrBytesAg1year"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesPartHist1d", "HDl2IngrBytesPart1d-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsAgHist15min", "HDl2IngrPktsAg15min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsPart1qtr", "CDl2EgrPktsPart1qtr-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsPart5min", "CDl2EgrPktsPart5min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesPart1mo", "CDl2EgrBytesPart1mo-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsPart1mo", "CDl2IngrPktsPart1mo-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsPartHist1mo", "HDl2IngrPktsPart1mo-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsPart1h", "CDl2IngrPktsPart1h-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesAg15min", "CDl2EgrBytesAg15min"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsPart1d", "CDl2IngrPktsPart1d-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsPart1w", "CDl2IngrPktsPart1w-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsAgHist1year", "HDl2EgrPktsAg1year-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesPart1w", "CDl2EgrBytesPart1w-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesPart1d", "CDl2EgrBytesPart1d-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsAgHist15min", "HDl2EgrPktsAg15min-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesPart1h", "CDl2EgrBytesPart1h-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsPartHist1mo", "HDl2EgrPktsPart1mo-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtMonToFvEppOob", "rtpolMonToFvEppOob-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsPartHist1h", "HDl2IngrPktsPart1h-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsPartHist1w", "HDl2IngrPktsPart1w-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesAgHist1mo", "HDl2IngrBytesAg1mo-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesAg1qtr", "CDl2IngrBytesAg1qtr"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsPart1mo", "CDl2EgrPktsPart1mo-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtVxlanEppAtt", "rtvxlanVxlanEppAtt-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesPartHist1d", "HDl2EgrBytesPart1d-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesPartHist1h", "HDl2EgrBytesPart1h-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesPartHist1w", "HDl2EgrBytesPart1w-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsPartHist1d", "HDl2IngrPktsPart1d-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsAg15min", "CDl2IngrPktsAg15min"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsAgHist1qtr", "HDl2IngrPktsAg1qtr-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesAgHist1qtr", "HDl2EgrBytesAg1qtr-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsAg1year", "CDl2IngrPktsAg1year"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesAg1year", "CDl2EgrBytesAg1year"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsAg15min", "CDl2EgrPktsAg15min"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsAg1qtr", "CDl2IngrPktsAg1qtr"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesAgHist1mo", "HDl2EgrBytesAg1mo-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsAg1year", "CDl2EgrPktsAg1year"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsPart1w", "CDl2EgrPktsPart1w-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsPart1h", "CDl2EgrPktsPart1h-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsPart1d", "CDl2EgrPktsPart1d-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsPartHist1d", "HDl2EgrPktsPart1d-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesAg1mo", "CDl2IngrBytesAg1mo"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtMacBaseEppAtt", "rtl2MacBaseEppAtt-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsPartHist1w", "HDl2EgrPktsPart1w-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsPartHist1h", "HDl2EgrPktsPart1h-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsAgHist1mo", "HDl2IngrPktsAg1mo-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtProfileToEpp", "rtdnsProfileToEpp-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsAgHist1qtr", "HDl2EgrPktsAg1qtr-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesAgHist1h", "HDl2IngrBytesAg1h-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesAgHist1w", "HDl2IngrBytesAg1w-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesAgHist1d", "HDl2IngrBytesAg1d-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesAg1qtr", "CDl2EgrBytesAg1qtr"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesAg1w", "CDl2IngrBytesAg1w"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesAg1h", "CDl2IngrBytesAg1h"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrBytesAg1d", "CDl2IngrBytesAg1d"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtVlanEppAtt", "rtvlanVlanEppAtt-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtQinqEppAtt", "rtqinqQinqEppAtt-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsAg1mo", "CDl2IngrPktsAg1mo"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesAgHist1w", "HDl2EgrBytesAg1w-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsAgHist1d", "HDl2IngrPktsAg1d-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesAgHist1h", "HDl2EgrBytesAg1h-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesAgHist1d", "HDl2EgrBytesAg1d-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesAg1mo", "CDl2EgrBytesAg1mo"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsAg1qtr", "CDl2EgrPktsAg1qtr"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsAgHist1w", "HDl2IngrPktsAg1w-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsAgHist1mo", "HDl2EgrPktsAg1mo-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsAgHist1h", "HDl2IngrPktsAg1h-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesAg1h", "CDl2EgrBytesAg1h"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesAg1d", "CDl2EgrBytesAg1d"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrBytesAg1w", "CDl2EgrBytesAg1w"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsAgHist1h", "HDl2EgrPktsAg1h-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsAgHist1d", "HDl2EgrPktsAg1d-"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsAgHist1w", "HDl2EgrPktsAg1w-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.L3extConsLblDepl", "l3extConsLblDepl"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsAg1d", "CDl2IngrPktsAg1d"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsAg1h", "CDl2IngrPktsAg1h"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsAg1mo", "CDl2EgrPktsAg1mo"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.IngrPktsAg1w", "CDl2IngrPktsAg1w"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RsEPgDefToL2Dom", "rsEPgDefToL2Dom"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RsEPgDefToL3Dom", "rsEPgDefToL3Dom"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsAg1h", "CDl2EgrPktsAg1h"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsAg1d", "CDl2EgrPktsAg1d"))
meta.childNamesAndRnPrefix.append(("cobra.model.l2.EgrPktsAg1w", "CDl2EgrPktsAg1w"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtProvToEpp", "rtaaaProvToEpp-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtFvEppOob", "rtaaaFvEppOob-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtMacEppAtt", "rtl2MacEppAtt-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RsEppToMonPol", "rseppToMonPol"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtIpEppAtt", "rtl3IpEppAtt-"))
meta.childNamesAndRnPrefix.append(("cobra.model.dhcp.InfraProvPDef", "infraprovpdef"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.PullREpPCont", "pullReppCont"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.ConfigLocale", "configLocale"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RsEppToEpCP", "rseppToEpCP"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.PolResolver", "polresolver"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.NodeReqDepl", "nodeReqDepl"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.StorageIssues", "strgissues"))
meta.childNamesAndRnPrefix.append(("cobra.model.vns.SvcContDef", "svcContDef"))
meta.childNamesAndRnPrefix.append(("cobra.model.qos.CustomPolDef", "qoscustom-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.RtCtxToEpP", "rtctxToEpP"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.CompIssues", "compissues"))
meta.childNamesAndRnPrefix.append(("cobra.model.dhcp.ProvAddrDef", "provaddr-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.AttEntPDepl", "aentdepl-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.ExtNwDepl", "extnwdepl"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.InfraDepl", "infradepl"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.REpPCont", "reppcont"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.ExtLocaleCont", "extlcont"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.NwIssues", "nwissues"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.MgmtDepl", "mgmtdepl"))
meta.childNamesAndRnPrefix.append(("cobra.model.mgmt.AddrCont", "addrcont"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.AttrDefCont", "attrcont"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.DomCont", "domcont"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.SvcDepl", "svcdepl"))
meta.childNamesAndRnPrefix.append(("cobra.model.fault.Counts", "fltCnts"))
meta.childNamesAndRnPrefix.append(("cobra.model.mgmt.InstPDef", "instp-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fault.Inst", "fault-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fvtopo.EpCont", "epcont"))
meta.childNamesAndRnPrefix.append(("cobra.model.health.Inst", "health"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.StDepl", "stdepl"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.Locale", "node-"))
meta.childNamesAndRnPrefix.append(("cobra.model.fv.LEpP", "lEpP"))
meta.childNamesAndRnPrefix.append(("cobra.model.fault.Delegate", "fd-"))
meta.parentClasses.add("cobra.model.fv.EpPCont")
meta.superClasses.add("cobra.model.fv.EPgDef")
meta.superClasses.add("cobra.model.fv.AMgmtEpP")
meta.superClasses.add("cobra.model.fv.AEPgDef")
meta.superClasses.add("cobra.model.naming.NamedObject")
meta.superClasses.add("cobra.model.pol.Obj")
meta.superClasses.add("cobra.model.pol.Def")
meta.superClasses.add("cobra.model.fv.AEpP")
meta.superClasses.add("cobra.model.fv.AREpP")
meta.superClasses.add("cobra.model.fv.EPgCont")
meta.rnPrefixes = [
('oob-', True),
]
prop = PropMeta("str", "bdDefDn", "bdDefDn", 1811, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("bdDefDn", prop)
prop = PropMeta("str", "bdDefStQual", "bdDefStQual", 1812, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "none"
prop._addConstant("default-target", "default-target", 2)
prop._addConstant("mismatch-target", "mismatch-target", 1)
prop._addConstant("none", "none", 0)
meta.props.add("bdDefStQual", prop)
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "crtrnEnabled", "crtrnEnabled", 20969, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = False
prop.defaultValueStr = "no"
prop._addConstant("no", None, False)
prop._addConstant("yes", None, True)
meta.props.add("crtrnEnabled", prop)
prop = PropMeta("str", "ctxDefDn", "ctxDefDn", 1813, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("ctxDefDn", prop)
prop = PropMeta("str", "ctxDefStQual", "ctxDefStQual", 1814, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "none"
prop._addConstant("default-target", "default-target", 2)
prop._addConstant("mismatch-target", "mismatch-target", 1)
prop._addConstant("none", "none", 0)
meta.props.add("ctxDefStQual", prop)
prop = PropMeta("str", "ctxSeg", "ctxSeg", 1809, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("ctxSeg", prop)
prop = PropMeta("str", "deplSt", "deplSt", 1730, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "not-deployable"
prop._addConstant("deployable", "deployable", 1)
prop._addConstant("not-deployable", "not-deployable", 0)
meta.props.add("deplSt", prop)
prop = PropMeta("str", "descr", "descr", 5579, PropCategory.REGULAR)
prop.label = "Description"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 128)]
prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
meta.props.add("descr", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "enfPref", "enfPref", 1735, PropCategory.REGULAR)
prop.label = "Switching Preference"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 2
prop.defaultValueStr = "hw"
prop._addConstant("hw", "hardware", 2)
prop._addConstant("sw", "software", 1)
prop._addConstant("unknown", "unknwon", 3)
meta.props.add("enfPref", prop)
prop = PropMeta("str", "epgDn", "epgDn", 1807, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("epgDn", prop)
prop = PropMeta("str", "epgPKey", "epgPKey", 6543, PropCategory.REGULAR)
prop.label = "None"
prop.isConfig = True
prop.isAdmin = True
prop.isCreateOnly = True
prop.isNaming = True
meta.props.add("epgPKey", prop)
prop = PropMeta("str", "floodOnEncap", "floodOnEncap", 35419, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "disabled"
prop._addConstant("disabled", "disabled", 0)
prop._addConstant("enabled", "enabled", 1)
meta.props.add("floodOnEncap", prop)
prop = PropMeta("str", "intraSecConf", "intraSecConf", 33370, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "none"
prop._addConstant("none", "none", 0)
prop._addConstant("security", "security", 1)
meta.props.add("intraSecConf", prop)
prop = PropMeta("str", "l2FDSeg", "l2FDSeg", 1731, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("l2FDSeg", prop)
prop = PropMeta("str", "l3CtxEncap", "l3CtxEncap", 1815, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("l3CtxEncap", prop)
prop = PropMeta("str", "lcOwn", "lcOwn", 9, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "local"
prop._addConstant("implicit", "implicit", 4)
prop._addConstant("local", "local", 0)
prop._addConstant("policy", "policy", 1)
prop._addConstant("replica", "replica", 2)
prop._addConstant("resolveOnBehalf", "resolvedonbehalf", 3)
meta.props.add("lcOwn", prop)
prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("modTs", prop)
prop = PropMeta("str", "monPolDn", "monPolDn", 14269, PropCategory.REGULAR)
prop.label = "Monitoring policy attached to this observable object"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("monPolDn", prop)
prop = PropMeta("str", "name", "name", 4991, PropCategory.REGULAR)
prop.label = "Name"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 64)]
prop.regex = ['[a-zA-Z0-9_.:-]+']
meta.props.add("name", prop)
prop = PropMeta("str", "nameAlias", "nameAlias", 28417, PropCategory.REGULAR)
prop.label = "Name alias"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 63)]
prop.regex = ['[a-zA-Z0-9_.-]+']
meta.props.add("nameAlias", prop)
prop = PropMeta("str", "npNameAlias", "npNameAlias", 30054, PropCategory.REGULAR)
prop.label = "Network Profile Name Alias"
prop.isImplicit = True
prop.isAdmin = True
prop.range = [(0, 63)]
prop.regex = ['[a-zA-Z0-9_.-]+']
meta.props.add("npNameAlias", prop)
prop = PropMeta("str", "operSt", "operSt", 1734, PropCategory.REGULAR)
prop.label = "Operational State"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "allocated"
prop._addConstant("allocated", "allocated", 0)
prop._addConstant("deallocated", "deallocated", 1)
meta.props.add("operSt", prop)
prop = PropMeta("str", "ownerKey", "ownerKey", 15230, PropCategory.REGULAR)
prop.label = "None"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 128)]
prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
meta.props.add("ownerKey", prop)
prop = PropMeta("str", "ownerTag", "ownerTag", 15231, PropCategory.REGULAR)
prop.label = "None"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 64)]
prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
meta.props.add("ownerTag", prop)
prop = PropMeta("str", "pcEnfPref", "pcEnfPref", 23078, PropCategory.REGULAR)
prop.label = "Policy Control Enforcement"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 2
prop.defaultValueStr = "unenforced"
prop._addConstant("enforced", "enforced", 1)
prop._addConstant("unenforced", "unenforced", 2)
meta.props.add("pcEnfPref", prop)
prop = PropMeta("str", "pcTag", "pcTag", 1808, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("any", "any", 0)
meta.props.add("pcTag", prop)
prop = PropMeta("str", "prefGrMemb", "prefGrMemb", 27680, PropCategory.REGULAR)
prop.label = "Preferred Group Member"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 2
prop.defaultValueStr = "exclude"
prop._addConstant("exclude", "exclude", 2)
prop._addConstant("include", "include", 1)
meta.props.add("prefGrMemb", prop)
prop = PropMeta("str", "prio", "prio", 1733, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.range = [(0, 9)]
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("level1", "level1", 3)
prop._addConstant("level2", "level2", 2)
prop._addConstant("level3", "level3-(default)", 1)
prop._addConstant("level4", "level4", 9)
prop._addConstant("level5", "level5", 8)
prop._addConstant("level6", "level6", 7)
prop._addConstant("unspecified", "unspecified", 0)
meta.props.add("prio", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "scopeId", "scopeId", 1810, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.range = [(1, 16777215)]
prop.defaultValue = 1
prop.defaultValueStr = "1"
meta.props.add("scopeId", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
prop = PropMeta("str", "tnNameAlias", "tnNameAlias", 30053, PropCategory.REGULAR)
prop.label = "Tenant Name Alias"
prop.isImplicit = True
prop.isAdmin = True
prop.range = [(0, 63)]
prop.regex = ['[a-zA-Z0-9_.-]+']
meta.props.add("tnNameAlias", prop)
prop = PropMeta("str", "txId", "txId", 37052, PropCategory.REGULAR)
prop.label = "Transaction Id when EPg was created"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("txId", prop)
meta.namingProps.append(getattr(meta.props, "epgPKey"))
getattr(meta.props, "epgPKey").needDelimiter = True
def __init__(self, parentMoOrDn, epgPKey, markDirty=True, **creationProps):
    # epgPKey is the sole naming property of this MO (registered in
    # meta.namingProps); it is passed positionally to Mo.__init__ so it
    # becomes part of the relative name ("oob-{epgPKey}" per meta.rnPrefixes).
    namingVals = [epgPKey]
    Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
|
[
"[email protected]"
] | |
f7f6381a89dd15a2e03160b3ce89d1620f1846dd
|
38382e23bf57eab86a4114b1c1096d0fc554f255
|
/hazelcast/protocol/codec/map_remove_codec.py
|
61850d484dc922697b6864594cb5d06d9be6f29c
|
[
"Apache-2.0"
] |
permissive
|
carbonblack/hazelcast-python-client
|
e303c98dc724233376ab54270832bfd916426cea
|
b39bfaad138478e9a25c8a07f56626d542854d0c
|
refs/heads/gevent-3.12.3.1
| 2023-04-13T09:43:30.626269 | 2020-09-18T17:37:17 | 2020-09-18T17:37:17 | 110,181,474 | 3 | 1 |
Apache-2.0
| 2020-12-01T17:45:42 | 2017-11-10T00:21:55 |
Python
|
UTF-8
|
Python
| false | false | 1,196 |
py
|
from hazelcast.serialization.bits import *
from hazelcast.protocol.client_message import ClientMessage
from hazelcast.protocol.codec.map_message_type import *
# Wire-protocol identifiers for the map.remove operation.
REQUEST_TYPE = MAP_REMOVE  # request message type id (from map_message_type)
RESPONSE_TYPE = 105  # expected response message type id
RETRYABLE = False  # request is not marked safe for automatic retry
def calculate_size(name, key, thread_id):
    """Return the request payload size in bytes for a map.remove call."""
    # Map name (string) + serialized key (data) + thread id (long).
    return (calculate_size_str(name)
            + calculate_size_data(key)
            + LONG_SIZE_IN_BYTES)
def encode_request(name, key, thread_id):
    """Build the outgoing ClientMessage for a map.remove request."""
    msg = ClientMessage(payload_size=calculate_size(name, key, thread_id))
    msg.set_message_type(REQUEST_TYPE)
    msg.set_retryable(RETRYABLE)
    # Payload fields must be appended in wire order: name, key, thread id.
    msg.append_str(name)
    msg.append_data(key)
    msg.append_long(thread_id)
    msg.update_frame_length()
    return msg
def decode_response(client_message, to_object=None):
    """Decode a map.remove response into a parameters dict.

    The payload starts with an is-nil flag; when it is False the removed
    value follows and is deserialized via *to_object*.
    """
    result = {'response': None}
    is_nil = client_message.read_bool()
    if not is_nil:
        result['response'] = to_object(client_message.read_data())
    return result
|
[
"[email protected]"
] | |
e46b45ed01d05ceb5c23f7d30e315dacbc49ecb7
|
0c6990136391d72d3768d5a8a4a6919bd2f6ce6a
|
/0x16-api_advanced/2-recurse.py
|
3de8da02f221e1d898e945b3e379747e81ed5132
|
[] |
no_license
|
s0m35h1t/holberton-system_engineering-devops
|
eef99bcf0297938a20470a72b12e9d603c6153ab
|
3aea10e71c49b2a97c7ed02bfae2231fcede1a92
|
refs/heads/master
| 2020-07-24T02:51:02.279869 | 2020-05-28T22:33:31 | 2020-05-28T22:33:31 | 207,779,574 | 0 | 2 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,136 |
py
|
#!/usr/bin/python3
"""
Define: recurse function
"""
import requests
def recurse(subreddit, hot_list=None, after=None):
    """Recursively collect every hot post title for a subreddit.

    Arguments:
        subreddit: subreddit name to query (str)
        hot_list: accumulator of titles; created fresh when omitted
        after: Reddit pagination token from the previous page, or None
    Returns:
        list of titles, or None for an invalid/empty subreddit
    """
    if subreddit is None or not isinstance(subreddit, str):
        return None
    # BUGFIX: the old `hot_list=[]` mutable default was shared across
    # top-level calls, so a second call kept the first call's titles.
    if hot_list is None:
        hot_list = []
    url = "http://www.reddit.com/r/{}/hot.json".format(subreddit)
    headers = {
        'User-Agent': 'Python/requests:api.advanced:v1.0.0 (by /u/aleix)'}
    params = {"after": after, "limit": 100}
    req = requests.get(url, headers=headers, params=params).json()
    after = req.get('data', {}).get('after', None)
    posts = req.get('data', {}).get('children', None)
    # Kind 't3' marks a link/post listing; anything else means the
    # subreddit does not exist or returned no usable listing.
    if posts is None or (len(posts) > 0 and posts[0].get('kind') != 't3'):
        if len(hot_list) == 0:
            return None
        return hot_list
    else:
        for post in posts:
            hot_list.append(post.get('data', {}).get('title', None))
        # No `after` token means this was the last page.
        if after is None:
            if len(hot_list) == 0:
                return None
            return hot_list
        else:
            return recurse(subreddit, hot_list, after)
|
[
"[email protected]"
] | |
2ae4c9670b68771f35af5acc4c3578cfdcd7c4ef
|
478422b7042926f243a6dcfa90d9c8640e37ec83
|
/PyAlgoTradeCN/01_SamplesFromPyAlgoTradeCN/stratlib/thrSMA_live.py
|
952ac0bff90d021982742eb2af5b50f33f4102fc
|
[
"MIT"
] |
permissive
|
JohnnyDankseed/midProjects
|
c70e4c19680af50c1a3869726cca4e9ea2cd4de7
|
ed6086e74f68b1b89f725abe0b270e67cf8993a8
|
refs/heads/master
| 2021-06-03T05:24:32.691310 | 2016-07-25T17:13:04 | 2016-07-25T17:13:04 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,764 |
py
|
# -*- coding: utf-8 -*-
"""
Created on Tue Nov 03 13:06:56 2015
@author: Eunice
"""
if __name__ == '__main__':
import sys
sys.path.append("..")
from pyalgotrade import bar
from pyalgotrade import plotter
# The modules above are only used for testing
from pyalgotrade.broker.fillstrategy import DefaultStrategy
from pyalgotrade.broker.backtesting import TradePercentage
from pyalgotrade import strategy
from pyalgotrade.technical import ma
from pyalgotrade.technical import cross
class thrSMA(strategy.BacktestingStrategy):
    """Triple simple-moving-average crossover strategy.

    Enters long when the short SMA crosses above the mid SMA while both
    the short and mid SMAs have stayed above the long SMA for `up_cum`
    consecutive bars; exits when the short SMA crosses below the mid SMA.
    """
    def __init__(self, feed, instrument, short_l, mid_l, long_l, up_cum):
        # short_l / mid_l / long_l: SMA window lengths; up_cum: number of
        # consecutive bars the short and mid SMAs must stay above the
        # long SMA before an entry is allowed.
        strategy.BacktestingStrategy.__init__(self, feed)
        self.__instrument = instrument
        self.getBroker().setFillStrategy(DefaultStrategy(None))
        # 0.1% commission per trade.
        self.getBroker().setCommission(TradePercentage(0.001))
        self.__position = None
        self.__prices = feed[instrument].getPriceDataSeries()
        self.__malength1 = int(short_l)
        self.__malength2 = int(mid_l)
        self.__malength3 = int(long_l)
        self.__circ = int(up_cum)
        self.__ma1 = ma.SMA(self.__prices, self.__malength1)
        self.__ma2 = ma.SMA(self.__prices, self.__malength2)
        self.__ma3 = ma.SMA(self.__prices, self.__malength3)
    def getPrice(self):
        """Return the price data series used by the SMAs."""
        return self.__prices
    def getSMA(self):
        """Return the (short, mid, long) SMA series as a tuple."""
        return self.__ma1,self.__ma2, self.__ma3
    def onEnterCanceled(self, position):
        # Entry order was canceled; clear the position so onBars can retry.
        self.__position = None
    def onEnterOK(self):
        # NOTE(review): pyalgotrade's entry callback is onEnterOk(position);
        # this OK-capitalized, no-argument variant is presumably never
        # invoked by the framework — confirm against the library version.
        pass
    def onExitOk(self, position):
        # Exit filled; forget the position so a new entry can be opened.
        self.__position = None
        #self.info("long close")
    def onExitCanceled(self, position):
        # Exit order was canceled; resubmit as a market order.
        self.__position.exitMarket()
    def buyCon1(self):
        """Entry condition 1: short SMA crosses above the mid SMA."""
        if cross.cross_above(self.__ma1, self.__ma2) > 0:
            return True
    def buyCon2(self):
        """Entry condition 2: short and mid SMAs above the long SMA for
        the last `up_cum` bars."""
        m1 = 0
        m2 = 0
        for i in range(self.__circ):
            if self.__ma1[-i-1] > self.__ma3[-i-1]:
                m1 += 1
            if self.__ma2[-i-1] > self.__ma3[-i-1]:
                m2 += 1
        if m1 >= self.__circ and m2 >= self.__circ:
            return True
    def sellCon1(self):
        """Exit condition: short SMA crosses below the mid SMA."""
        if cross.cross_below(self.__ma1, self.__ma2) > 0:
            return True
    def onBars(self, bars):
        # If a position was not opened, check if we should enter a long position.
        # Wait until the mid SMA has enough data points.
        if self.__ma2[-1]is None:
            return
        if self.__position is not None:
            if not self.__position.exitActive() and cross.cross_below(self.__ma1, self.__ma2) > 0:
                self.__position.exitMarket()
                #self.info("sell %s" % (bars.getDateTime()))
        if self.__position is None:
            if self.buyCon1() and self.buyCon2():
                # Size the order at 20% of available cash.
                shares = int(self.getBroker().getCash() * 0.2 / bars[self.__instrument].getPrice())
                self.__position = self.enterLong(self.__instrument, shares)
                print bars[self.__instrument].getDateTime(), bars[self.__instrument].getPrice()
                #self.info("buy %s" % (bars.getDateTime()))
def runStratOnTushare(strat, paras, security_id, market, frequency):
    """Instantiate *strat* on a TuShare live feed and run it.

    The sibling ``tushare`` directory is appended to sys.path at call
    time so the ``barfeed`` module can be imported from there.
    """
    import sys,os
    feed_dir = os.path.abspath(os.path.join(os.path.dirname(__file__),os.pardir,'tushare'))
    sys.path.append(feed_dir)
    from barfeed import TuShareLiveFeed
    live_feed = TuShareLiveFeed([security_id], frequency, 1024, 20)
    strategy_instance = strat(live_feed, security_id, *paras)
    strategy_instance.run()
if __name__ == "__main__":
strat = thrSMA
security_id = '000001'
market = 'SZ'
frequency = bar.Frequency.MINUTE
paras = [2, 20, 60, 10]
runStratOnTushare(strat, paras, security_id, market, frequency)
|
[
"[email protected]"
] | |
4b6856479ab0e4d71379ae725d7527d7ddf1a2fc
|
57d177fbd7d1bac2d97acaf081ff4fa75ed8ba53
|
/chrome/chrome_tests_unit.gypi
|
f781cb2a902dfe5083da2252fc4354d5a8960c78
|
[
"BSD-3-Clause"
] |
permissive
|
venkatarajasekhar/chromium-42.0.2311.135
|
676241bb228810a892e3074968c1844d26d187df
|
8d6b0c3b1481b2d4bf6ec75a08b686c55e06c707
|
refs/heads/master
| 2021-01-13T11:57:38.761197 | 2015-05-04T12:43:44 | 2015-05-11T12:58:42 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 151,024 |
gypi
|
# Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'variables': {
'chrome_unit_tests_sources': [
'../components/autofill/content/renderer/test_password_autofill_agent.cc',
'../components/autofill/content/renderer/test_password_autofill_agent.h',
'../components/autofill/content/renderer/test_password_generation_agent.cc',
'../components/autofill/content/renderer/test_password_generation_agent.h',
# histograms.xml is analyzed by AboutFlagsHistogramTest, so this
# dependency is needed to make commit bots run unit_tests on
# histograms.xml changes.
'../tools/metrics/histograms/histograms.xml',
# All unittests in browser, common, renderer and service.
'browser/about_flags_unittest.cc',
'browser/android/banners/app_banner_manager_unittest.cc',
'browser/android/bookmarks/partner_bookmarks_shim_unittest.cc',
'browser/android/manifest_icon_selector_unittest.cc',
# TODO(newt): move this to test_support_unit?
'browser/android/mock_location_settings.cc',
'browser/android/mock_location_settings.h',
'browser/android/preferences/pref_service_bridge_unittest.cc',
'browser/android/thumbnail/scoped_ptr_expiring_cache_unittest.cc',
'browser/app_controller_mac_unittest.mm',
'browser/autocomplete/autocomplete_provider_unittest.cc',
'browser/autocomplete/bookmark_provider_unittest.cc',
'browser/autocomplete/builtin_provider_unittest.cc',
'browser/autocomplete/history_quick_provider_unittest.cc',
'browser/autocomplete/history_url_provider_unittest.cc',
'browser/autocomplete/search_provider_unittest.cc',
'browser/autocomplete/shortcuts_backend_unittest.cc',
'browser/autocomplete/shortcuts_database_unittest.cc',
'browser/autocomplete/shortcuts_provider_unittest.cc',
'browser/autocomplete/zero_suggest_provider_unittest.cc',
'browser/autofill/autofill_cc_infobar_delegate_unittest.cc',
'browser/banners/app_banner_settings_helper_unittest.cc',
'browser/bitmap_fetcher/bitmap_fetcher_service_unittest.cc',
'browser/bookmarks/chrome_bookmark_client_unittest.cc',
'browser/browser_about_handler_unittest.cc',
'browser/browsing_data/browsing_data_appcache_helper_unittest.cc',
'browser/browsing_data/browsing_data_channel_id_helper_unittest.cc',
'browser/browsing_data/browsing_data_cookie_helper_unittest.cc',
'browser/browsing_data/browsing_data_database_helper_unittest.cc',
'browser/browsing_data/browsing_data_file_system_helper_unittest.cc',
'browser/browsing_data/browsing_data_helper_unittest.cc',
'browser/browsing_data/browsing_data_indexed_db_helper_unittest.cc',
'browser/browsing_data/browsing_data_local_storage_helper_unittest.cc',
'browser/browsing_data/browsing_data_quota_helper_unittest.cc',
'browser/browsing_data/browsing_data_remover_test_util.cc',
'browser/browsing_data/browsing_data_remover_test_util.h',
'browser/browsing_data/browsing_data_remover_unittest.cc',
'browser/browsing_data/browsing_data_service_worker_helper_unittest.cc',
'browser/browsing_data/cookies_tree_model_unittest.cc',
'browser/chrome_browser_application_mac_unittest.mm',
'browser/chrome_content_browser_client_unittest.cc',
'browser/chrome_elf_init_unittest_win.cc',
'browser/chrome_process_singleton_win_unittest.cc',
'browser/command_updater_unittest.cc',
'browser/component_updater/test/cld_component_installer_unittest.cc',
'browser/component_updater/test/component_updater_service_unittest.cc',
'browser/component_updater/test/supervised_user_whitelist_installer_unittest.cc',
'browser/content_settings/content_settings_default_provider_unittest.cc',
'browser/content_settings/content_settings_mock_observer.cc',
'browser/content_settings/content_settings_mock_observer.h',
'browser/content_settings/content_settings_origin_identifier_value_map_unittest.cc',
'browser/content_settings/content_settings_policy_provider_unittest.cc',
'browser/content_settings/content_settings_pref_provider_unittest.cc',
'browser/content_settings/content_settings_usages_state_unittest.cc',
'browser/content_settings/cookie_settings_unittest.cc',
'browser/content_settings/host_content_settings_map_unittest.cc',
'browser/content_settings/mock_settings_observer.cc',
'browser/content_settings/mock_settings_observer.h',
'browser/content_settings/permission_context_base_unittest.cc',
'browser/content_settings/permission_queue_controller_unittest.cc',
'browser/content_settings/tab_specific_content_settings_unittest.cc',
'browser/custom_handlers/protocol_handler_registry_unittest.cc',
'browser/devtools/device/webrtc/devtools_bridge_instances_request_unittest.cc',
'browser/devtools/devtools_network_controller_unittest.cc',
'browser/download/all_download_item_notifier_unittest.cc',
'browser/download/chrome_download_manager_delegate_unittest.cc',
'browser/download/download_history_unittest.cc',
'browser/download/download_item_model_unittest.cc',
'browser/download/download_path_reservation_tracker_unittest.cc',
'browser/download/download_query_unittest.cc',
'browser/download/download_request_infobar_delegate_unittest.cc',
'browser/download/download_request_limiter_unittest.cc',
'browser/download/download_status_updater_unittest.cc',
'browser/download/download_target_determiner_unittest.cc',
'browser/download/download_ui_controller_unittest.cc',
'browser/enumerate_modules_model_unittest_win.cc',
'browser/external_protocol/external_protocol_handler_unittest.cc',
'browser/favicon/favicon_handler_unittest.cc',
'browser/file_select_helper_unittest.cc',
'browser/geolocation/geolocation_permission_context_unittest.cc',
'browser/global_keyboard_shortcuts_mac_unittest.mm',
'browser/google/google_search_counter_android_unittest.cc',
'browser/google/google_search_counter_unittest.cc',
'browser/google/google_update_settings_unittest.cc',
'browser/google/google_update_win_unittest.cc',
'browser/history/android/android_cache_database_unittest.cc',
'browser/history/android/android_history_provider_service_unittest.cc',
'browser/history/android/android_provider_backend_unittest.cc',
'browser/history/android/android_urls_database_unittest.cc',
'browser/history/android/bookmark_model_sql_handler_unittest.cc',
'browser/history/android/sqlite_cursor_unittest.cc',
'browser/history/android/urls_sql_handler_unittest.cc',
'browser/history/android/visit_sql_handler_unittest.cc',
'browser/history/expire_history_backend_unittest.cc',
'browser/history/history_backend_unittest.cc',
'browser/history/history_database_unittest.cc',
'browser/history/history_querying_unittest.cc',
'browser/history/history_unittest.cc',
'browser/history/in_memory_url_index_unittest.cc',
'browser/autocomplete/scored_history_match_builder_impl_unittest.cc',
'browser/history/thumbnail_database_unittest.cc',
'browser/history/top_sites_impl_unittest.cc',
'browser/history/typed_url_syncable_service_unittest.cc',
'browser/history/web_history_service_unittest.cc',
'browser/image_holder_unittest.cc',
'browser/install_verification/win/imported_module_verification_unittest.cc',
'browser/install_verification/win/loaded_module_verification_unittest.cc',
'browser/install_verification/win/loaded_modules_snapshot_unittest.cc',
'browser/install_verification/win/module_ids_unittest.cc',
'browser/install_verification/win/module_info_unittest.cc',
'browser/install_verification/win/module_list_unittest.cc',
'browser/install_verification/win/module_verification_test.cc',
'browser/install_verification/win/module_verification_test.h',
'browser/internal_auth_unittest.cc',
'browser/invalidation/invalidator_storage_unittest.cc',
'browser/io_thread_unittest.cc',
'browser/logging_chrome_unittest.cc',
'browser/mac/keystone_glue_unittest.mm',
'browser/media/native_desktop_media_list_unittest.cc',
'browser/metrics/chrome_metrics_service_accessor_unittest.cc',
'browser/metrics/cloned_install_detector_unittest.cc',
'browser/metrics/metrics_services_manager_unittest.cc',
'browser/metrics/rappor/sampling_unittest.cc',
'browser/metrics/signin_status_metrics_provider_chromeos_unittest.cc',
'browser/metrics/signin_status_metrics_provider_unittest.cc',
'browser/metrics/thread_watcher_android_unittest.cc',
'browser/metrics/thread_watcher_unittest.cc',
'browser/metrics/time_ticks_experiment_unittest.cc',
'browser/metrics/variations/generated_resources_map_lookup_unittest.cc',
'browser/metrics/variations/variations_request_scheduler_mobile_unittest.cc',
'browser/metrics/variations/variations_request_scheduler_unittest.cc',
'browser/metrics/variations/variations_seed_store_unittest.cc',
'browser/metrics/variations/variations_service_unittest.cc',
'browser/net/chrome_fraudulent_certificate_reporter_unittest.cc',
'browser/net/chrome_network_delegate_unittest.cc',
'browser/net/client_hints_unittest.cc',
'browser/net/connection_tester_unittest.cc',
'browser/net/dns_probe_runner_unittest.cc',
'browser/net/dns_probe_service_unittest.cc',
'browser/net/evicted_domain_cookie_counter_unittest.cc',
'browser/net/net_error_tab_helper_unittest.cc',
'browser/net/net_log_temp_file_unittest.cc',
'browser/net/network_stats_unittest.cc',
'browser/net/predictor_unittest.cc',
'browser/net/pref_proxy_config_tracker_impl_unittest.cc',
'browser/net/probe_message_unittest.cc',
'browser/net/quota_policy_channel_id_store_unittest.cc',
'browser/net/safe_search_util_unittest.cc',
'browser/net/spdyproxy/data_reduction_proxy_chrome_settings_unittest.cc',
'browser/net/spdyproxy/data_reduction_proxy_settings_unittest_android.cc',
'browser/net/ssl_config_service_manager_pref_unittest.cc',
'browser/net/url_info_unittest.cc',
'browser/password_manager/chrome_password_manager_client_unittest.cc',
'browser/password_manager/password_manager_internals_service_unittest.cc',
'browser/password_manager/password_manager_metrics_util_unittest.cc',
'browser/password_manager/password_store_mac_unittest.cc',
'browser/password_manager/password_store_win_unittest.cc',
'browser/password_manager/password_store_x_unittest.cc',
'browser/predictors/autocomplete_action_predictor_table_unittest.cc',
'browser/predictors/autocomplete_action_predictor_unittest.cc',
'browser/predictors/resource_prefetch_common_unittest.cc',
'browser/predictors/resource_prefetch_predictor_tables_unittest.cc',
'browser/predictors/resource_prefetch_predictor_unittest.cc',
'browser/predictors/resource_prefetcher_unittest.cc',
'browser/prefs/chrome_pref_service_unittest.cc',
'browser/prefs/command_line_pref_store_unittest.cc',
'browser/prefs/incognito_mode_prefs_unittest.cc',
'browser/prefs/leveldb_pref_store_unittest.cc',
'browser/prefs/pref_model_associator_unittest.cc',
'browser/prefs/prefs_syncable_service_unittest.cc',
'browser/prefs/profile_pref_store_manager_unittest.cc',
'browser/prefs/proxy_config_dictionary_unittest.cc',
'browser/prefs/proxy_prefs_unittest.cc',
'browser/prefs/session_startup_pref_unittest.cc',
'browser/prefs/tracked/device_id_unittest.cc',
'browser/prefs/tracked/mock_validation_delegate.cc',
'browser/prefs/tracked/mock_validation_delegate.h',
'browser/prefs/tracked/pref_hash_calculator_unittest.cc',
'browser/prefs/tracked/pref_hash_filter_unittest.cc',
'browser/prefs/tracked/pref_hash_store_impl_unittest.cc',
'browser/prefs/tracked/pref_service_hash_store_contents_unittest.cc',
'browser/prefs/tracked/segregated_pref_store_unittest.cc',
'browser/prefs/tracked/tracked_preferences_migration_unittest.cc',
'browser/prerender/prerender_history_unittest.cc',
'browser/prerender/prerender_tracker_unittest.cc',
'browser/prerender/prerender_unittest.cc',
'browser/prerender/prerender_util_unittest.cc',
'browser/process_info_snapshot_mac_unittest.cc',
'browser/profiles/file_path_verifier_win_unittest.cc',
'browser/profiles/gaia_info_update_service_unittest.cc',
'browser/profiles/profile_avatar_icon_util_unittest.cc',
'browser/profiles/profile_downloader_unittest.cc',
'browser/profiles/profile_info_cache_unittest.cc',
'browser/profiles/profile_info_cache_unittest.h',
'browser/profiles/profile_manager_unittest.cc',
'browser/profiles/profile_shortcut_manager_unittest_win.cc',
'browser/renderer_context_menu/render_view_context_menu_test_util.cc',
'browser/renderer_context_menu/render_view_context_menu_test_util.h',
'browser/renderer_host/chrome_render_widget_host_view_mac_history_swiper_unit_test.mm',
'browser/resources/google_now/background.js',
'browser/resources/google_now/background_test_util.js',
'browser/resources/google_now/background_unittest.gtestjs',
'browser/resources/google_now/cards.js',
'browser/resources/google_now/cards_unittest.gtestjs',
'browser/resources/google_now/common_test_util.js',
'browser/resources/google_now/utility.js',
'browser/resources/google_now/utility_test_util.js',
'browser/resources/google_now/utility_unittest.gtestjs',
'browser/resources/print_preview/data/measurement_system.js',
'browser/resources/print_preview/data/measurement_system_unittest.gtestjs',
'browser/resources/print_preview/print_preview_utils.js',
'browser/resources/print_preview/print_preview_utils_unittest.gtestjs',
'browser/resources_util_unittest.cc',
'browser/rlz/rlz_unittest.cc',
'browser/search/contextual_search_policy_handler_android_unittest.cc',
'browser/search/iframe_source_unittest.cc',
'browser/search/instant_unittest_base.cc',
'browser/search/instant_unittest_base.h',
'browser/search/most_visited_iframe_source_unittest.cc',
'browser/search/search_android_unittest.cc',
'browser/search_engines/default_search_pref_migration_unittest.cc',
'browser/search_engines/search_provider_install_data_unittest.cc',
'browser/service_process/service_process_control_mac_unittest.mm',
'browser/services/gcm/fake_gcm_profile_service.cc',
'browser/services/gcm/fake_gcm_profile_service.h',
'browser/services/gcm/fake_signin_manager.cc',
'browser/services/gcm/fake_signin_manager.h',
'browser/services/gcm/gcm_account_tracker_unittest.cc',
'browser/services/gcm/push_messaging_application_id_unittest.cc',
'browser/services/gcm/push_messaging_permission_context_unittest.cc',
'browser/shell_integration_win_unittest.cc',
'browser/signin/account_reconcilor_unittest.cc',
'browser/signin/account_service_flag_fetcher_unittest.cc',
'browser/signin/local_auth_unittest.cc',
'browser/signin/signin_global_error_unittest.cc',
'browser/signin/signin_manager_unittest.cc',
'browser/signin/signin_names_io_thread_unittest.cc',
'browser/signin/signin_tracker_unittest.cc',
'browser/signin/test_signin_client_builder.cc',
'browser/ssl/ssl_error_classification_unittest.cc',
'browser/ssl/ssl_error_handler_unittest.cc',
'browser/status_icons/status_icon_menu_model_unittest.cc',
'browser/status_icons/status_icon_unittest.cc',
'browser/status_icons/status_tray_unittest.cc',
'browser/sync/about_sync_util_unittest.cc',
'browser/sync/abstract_profile_sync_service_test.cc',
'browser/sync/abstract_profile_sync_service_test.h',
'browser/sync/backend_migrator_unittest.cc',
'browser/sync/backup_rollback_controller_unittest.cc',
'browser/sync/glue/autofill_data_type_controller_unittest.cc',
'browser/sync/glue/bookmark_data_type_controller_unittest.cc',
'browser/sync/glue/browser_thread_model_worker_unittest.cc',
'browser/sync/glue/favicon_cache_unittest.cc',
'browser/sync/glue/frontend_data_type_controller_mock.cc',
'browser/sync/glue/frontend_data_type_controller_mock.h',
'browser/sync/glue/frontend_data_type_controller_unittest.cc',
'browser/sync/glue/local_device_info_provider_unittest.cc',
'browser/sync/glue/non_frontend_data_type_controller_mock.cc',
'browser/sync/glue/non_frontend_data_type_controller_mock.h',
'browser/sync/glue/non_frontend_data_type_controller_unittest.cc',
'browser/sync/glue/search_engine_data_type_controller_unittest.cc',
'browser/sync/glue/sync_backend_host_impl_unittest.cc',
'browser/sync/glue/sync_backend_host_mock.cc',
'browser/sync/glue/sync_backend_host_mock.h',
'browser/sync/glue/sync_backend_registrar_unittest.cc',
'browser/sync/glue/synced_session_tracker_unittest.cc',
'browser/sync/glue/typed_url_model_associator_unittest.cc',
'browser/sync/glue/ui_model_worker_unittest.cc',
'browser/sync/profile_sync_auth_provider_unittest.cc',
'browser/sync/profile_sync_components_factory_impl_unittest.cc',
'browser/sync/profile_sync_service_android_unittest.cc',
'browser/sync/profile_sync_service_autofill_unittest.cc',
'browser/sync/profile_sync_service_bookmark_unittest.cc',
'browser/sync/profile_sync_service_factory_unittest.cc',
'browser/sync/profile_sync_service_startup_unittest.cc',
'browser/sync/profile_sync_service_typed_url_unittest.cc',
'browser/sync/profile_sync_service_unittest.cc',
'browser/sync/profile_sync_test_util.cc',
'browser/sync/profile_sync_test_util.h',
'browser/sync/sessions/session_data_type_controller_unittest.cc',
'browser/sync/sessions/tab_node_pool_unittest.cc',
'browser/sync/startup_controller_unittest.cc',
'browser/sync/sync_startup_tracker_unittest.cc',
'browser/sync/sync_stopped_reporter_unittest.cc',
'browser/sync/test/test_http_bridge_factory.cc',
'browser/sync/test/test_http_bridge_factory.h',
'browser/sync/test_profile_sync_service.cc',
'browser/sync/test_profile_sync_service.h',
'browser/task_profiler/task_profiler_data_serializer_unittest.cc',
'browser/thumbnails/content_analysis_unittest.cc',
'browser/thumbnails/content_based_thumbnailing_algorithm_unittest.cc',
'browser/thumbnails/simple_thumbnail_crop_unittest.cc',
'browser/thumbnails/thumbnail_service_unittest.cc',
'browser/translate/translate_service_unittest.cc',
'browser/ui/android/tab_model/tab_model_list_unittest.cc',
'browser/ui/android/tab_model/tab_model_unittest.cc',
'browser/ui/autofill/autofill_dialog_models_unittest.cc',
'browser/ui/autofill/autofill_dialog_types_unittest.cc',
'browser/ui/autofill/popup_controller_common_unittest.cc',
'browser/ui/autofill/test_popup_controller_common.cc',
'browser/ui/autofill/test_popup_controller_common.h',
'browser/ui/bookmarks/bookmark_editor_unittest.cc',
'browser/ui/bookmarks/bookmark_ui_utils_unittest.cc',
'browser/ui/bookmarks/recently_used_folders_combo_model_unittest.cc',
'browser/ui/browser_unittest.cc',
'browser/ui/chrome_select_file_policy_unittest.cc',
# It is safe to list */cocoa/* files in the "common" file list
# without an explicit exclusion since gyp is smart enough to
# exclude them from non-Mac builds.
'browser/ui/cocoa/accelerators_cocoa_unittest.mm',
'browser/ui/cocoa/animatable_image_unittest.mm',
'browser/ui/cocoa/animatable_view_unittest.mm',
'browser/ui/cocoa/applescript/apple_event_util_unittest.mm',
'browser/ui/cocoa/applescript/bookmark_applescript_utils_unittest.h',
'browser/ui/cocoa/applescript/bookmark_applescript_utils_unittest.mm',
'browser/ui/cocoa/applescript/bookmark_folder_applescript_unittest.mm',
'browser/ui/cocoa/applescript/bookmark_item_applescript_unittest.mm',
'browser/ui/cocoa/autofill/autofill_account_chooser_unittest.mm',
'browser/ui/cocoa/autofill/autofill_bubble_controller_unittest.mm',
'browser/ui/cocoa/autofill/autofill_details_container_unittest.mm',
'browser/ui/cocoa/autofill/autofill_main_container_unittest.mm',
'browser/ui/cocoa/autofill/autofill_notification_container_unittest.mm',
'browser/ui/cocoa/autofill/autofill_notification_controller_unittest.mm',
'browser/ui/cocoa/autofill/autofill_overlay_controller_unittest.mm',
'browser/ui/cocoa/autofill/autofill_pop_up_button_unittest.mm',
'browser/ui/cocoa/autofill/autofill_section_container_unittest.mm',
'browser/ui/cocoa/autofill/autofill_section_view_unittest.mm',
'browser/ui/cocoa/autofill/autofill_sign_in_container_unittest.mm',
'browser/ui/cocoa/autofill/autofill_suggestion_container_unittest.mm',
'browser/ui/cocoa/autofill/autofill_textfield_unittest.mm',
'browser/ui/cocoa/autofill/autofill_tooltip_controller_unittest.mm',
'browser/ui/cocoa/autofill/down_arrow_popup_menu_cell_unittest.mm',
'browser/ui/cocoa/autofill/layout_view_unittest.mm',
'browser/ui/cocoa/autofill/password_generation_popup_view_cocoa_unittest.mm',
'browser/ui/cocoa/autofill/simple_grid_layout_unittest.mm',
'browser/ui/cocoa/background_gradient_view_unittest.mm',
'browser/ui/cocoa/base_bubble_controller_unittest.mm',
'browser/ui/cocoa/bookmarks/bookmark_all_tabs_controller_unittest.mm',
'browser/ui/cocoa/bookmarks/bookmark_bar_bridge_unittest.mm',
'browser/ui/cocoa/bookmarks/bookmark_bar_controller_unittest.mm',
'browser/ui/cocoa/bookmarks/bookmark_bar_folder_button_cell_unittest.mm',
'browser/ui/cocoa/bookmarks/bookmark_bar_folder_controller_unittest.mm',
'browser/ui/cocoa/bookmarks/bookmark_bar_folder_hover_state_unittest.mm',
'browser/ui/cocoa/bookmarks/bookmark_bar_folder_view_unittest.mm',
'browser/ui/cocoa/bookmarks/bookmark_bar_folder_window_unittest.mm',
'browser/ui/cocoa/bookmarks/bookmark_bar_toolbar_view_unittest.mm',
'browser/ui/cocoa/bookmarks/bookmark_bar_unittest_helper.h',
'browser/ui/cocoa/bookmarks/bookmark_bar_unittest_helper.mm',
'browser/ui/cocoa/bookmarks/bookmark_bar_view_cocoa_unittest.mm',
'browser/ui/cocoa/bookmarks/bookmark_bubble_controller_unittest.mm',
'browser/ui/cocoa/bookmarks/bookmark_button_cell_unittest.mm',
'browser/ui/cocoa/bookmarks/bookmark_button_unittest.mm',
'browser/ui/cocoa/bookmarks/bookmark_editor_base_controller_unittest.mm',
'browser/ui/cocoa/bookmarks/bookmark_editor_controller_unittest.mm',
'browser/ui/cocoa/bookmarks/bookmark_folder_target_unittest.mm',
'browser/ui/cocoa/bookmarks/bookmark_menu_bridge_unittest.mm',
'browser/ui/cocoa/bookmarks/bookmark_menu_cocoa_controller_unittest.mm',
'browser/ui/cocoa/bookmarks/bookmark_model_observer_for_cocoa_unittest.mm',
'browser/ui/cocoa/bookmarks/bookmark_name_folder_controller_unittest.mm',
'browser/ui/cocoa/bookmarks/bookmark_sync_promo_controller_unittest.mm',
'browser/ui/cocoa/bookmarks/bookmark_tree_browser_cell_unittest.mm',
'browser/ui/cocoa/browser/edit_search_engine_cocoa_controller_unittest.mm',
'browser/ui/cocoa/browser/zoom_bubble_controller_unittest.mm',
'browser/ui/cocoa/browser_window_cocoa_unittest.mm',
'browser/ui/cocoa/browser_window_controller_unittest.mm',
'browser/ui/cocoa/browser_window_layout_unittest.mm',
'browser/ui/cocoa/bubble_view_unittest.mm',
'browser/ui/cocoa/chrome_browser_window_unittest.mm',
'browser/ui/cocoa/chrome_event_processing_window_unittest.mm',
'browser/ui/cocoa/clickhold_button_cell_unittest.mm',
'browser/ui/cocoa/cocoa_profile_test.h',
'browser/ui/cocoa/cocoa_profile_test.mm',
'browser/ui/cocoa/cocoa_test_helper.h',
'browser/ui/cocoa/cocoa_test_helper.mm',
'browser/ui/cocoa/command_observer_bridge_unittest.mm',
'browser/ui/cocoa/confirm_bubble_controller_unittest.mm',
'browser/ui/cocoa/confirm_quit_panel_controller_unittest.mm',
'browser/ui/cocoa/constrained_window/constrained_window_alert_unittest.mm',
'browser/ui/cocoa/constrained_window/constrained_window_animation_unittest.mm',
'browser/ui/cocoa/constrained_window/constrained_window_button_unittest.mm',
'browser/ui/cocoa/constrained_window/constrained_window_custom_window_unittest.mm',
'browser/ui/cocoa/constrained_window/constrained_window_sheet_controller_unittest.mm',
'browser/ui/cocoa/content_settings/collected_cookies_mac_unittest.mm',
'browser/ui/cocoa/content_settings/cookie_details_unittest.mm',
'browser/ui/cocoa/content_settings/cookie_details_view_controller_unittest.mm',
'browser/ui/cocoa/custom_frame_view_unittest.mm',
'browser/ui/cocoa/download/download_item_button_unittest.mm',
'browser/ui/cocoa/download/download_item_cell_unittest.mm',
'browser/ui/cocoa/download/download_item_controller_unittest.mm',
'browser/ui/cocoa/download/download_shelf_controller_unittest.mm',
'browser/ui/cocoa/download/download_shelf_mac_unittest.mm',
'browser/ui/cocoa/download/download_shelf_view_cocoa_unittest.mm',
'browser/ui/cocoa/download/download_util_mac_unittest.mm',
'browser/ui/cocoa/draggable_button_unittest.mm',
'browser/ui/cocoa/exclusive_access_bubble_window_controller_unittest.mm',
'browser/ui/cocoa/extensions/browser_actions_container_view_unittest.mm',
'browser/ui/cocoa/extensions/extension_install_prompt_test_utils.h',
'browser/ui/cocoa/extensions/extension_install_prompt_test_utils.mm',
'browser/ui/cocoa/extensions/extension_install_view_controller_unittest.mm',
'browser/ui/cocoa/extensions/extension_installed_bubble_controller_unittest.mm',
'browser/ui/cocoa/extensions/media_galleries_dialog_cocoa_unittest.mm',
'browser/ui/cocoa/extensions/test_toolbar_actions_bar_helper_cocoa.mm',
'browser/ui/cocoa/find_bar/find_bar_bridge_unittest.mm',
'browser/ui/cocoa/find_bar/find_bar_cocoa_controller_unittest.mm',
'browser/ui/cocoa/find_bar/find_bar_text_field_cell_unittest.mm',
'browser/ui/cocoa/find_bar/find_bar_text_field_unittest.mm',
'browser/ui/cocoa/find_bar/find_bar_view_unittest.mm',
'browser/ui/cocoa/find_pasteboard_unittest.mm',
'browser/ui/cocoa/first_run_bubble_controller_unittest.mm',
'browser/ui/cocoa/floating_bar_backing_view_unittest.mm',
'browser/ui/cocoa/framed_browser_window_unittest.mm',
'browser/ui/cocoa/fullscreen_window_unittest.mm',
'browser/ui/cocoa/gradient_button_cell_unittest.mm',
'browser/ui/cocoa/history_menu_bridge_unittest.mm',
'browser/ui/cocoa/history_menu_cocoa_controller_unittest.mm',
'browser/ui/cocoa/history_overlay_controller_unittest.mm',
'browser/ui/cocoa/hover_close_button_unittest.mm',
'browser/ui/cocoa/hung_renderer_controller_unittest.mm',
'browser/ui/cocoa/image_button_cell_unittest.mm',
'browser/ui/cocoa/info_bubble_view_unittest.mm',
'browser/ui/cocoa/info_bubble_window_unittest.mm',
'browser/ui/cocoa/infobars/confirm_infobar_controller_unittest.mm',
'browser/ui/cocoa/infobars/infobar_container_controller_unittest.mm',
'browser/ui/cocoa/infobars/infobar_gradient_view_unittest.mm',
'browser/ui/cocoa/infobars/mock_confirm_infobar_delegate.cc',
'browser/ui/cocoa/infobars/mock_confirm_infobar_delegate.h',
'browser/ui/cocoa/infobars/translate_infobar_unittest.mm',
'browser/ui/cocoa/location_bar/autocomplete_text_field_cell_unittest.mm',
'browser/ui/cocoa/location_bar/autocomplete_text_field_editor_unittest.mm',
'browser/ui/cocoa/location_bar/autocomplete_text_field_unittest.mm',
'browser/ui/cocoa/location_bar/autocomplete_text_field_unittest_helper.mm',
'browser/ui/cocoa/location_bar/ev_bubble_decoration_unittest.mm',
'browser/ui/cocoa/location_bar/image_decoration_unittest.mm',
'browser/ui/cocoa/location_bar/keyword_hint_decoration_unittest.mm',
'browser/ui/cocoa/location_bar/manage_passwords_decoration_unittest.mm',
'browser/ui/cocoa/location_bar/selected_keyword_decoration_unittest.mm',
'browser/ui/cocoa/location_bar/zoom_decoration_unittest.mm',
'browser/ui/cocoa/media_picker/desktop_media_picker_controller_unittest.mm',
'browser/ui/cocoa/menu_button_unittest.mm',
'browser/ui/cocoa/notifications/message_center_tray_bridge_unittest.mm',
'browser/ui/cocoa/nsmenuitem_additions_unittest.mm',
'browser/ui/cocoa/omnibox/omnibox_popup_cell_unittest.mm',
'browser/ui/cocoa/omnibox/omnibox_popup_matrix_unittest.mm',
'browser/ui/cocoa/omnibox/omnibox_popup_separator_view_unittest.mm',
'browser/ui/cocoa/omnibox/omnibox_popup_view_mac_unittest.mm',
'browser/ui/cocoa/omnibox/omnibox_view_mac_unittest.mm',
'browser/ui/cocoa/panels/panel_cocoa_unittest.mm',
'browser/ui/cocoa/passwords/manage_password_item_view_controller_unittest.mm',
'browser/ui/cocoa/passwords/manage_passwords_bubble_blacklist_view_controller_unittest.mm',
'browser/ui/cocoa/passwords/manage_passwords_bubble_cocoa_unittest.mm',
'browser/ui/cocoa/passwords/manage_passwords_bubble_confirmation_view_controller_unittest.mm',
'browser/ui/cocoa/passwords/manage_passwords_bubble_controller_unittest.mm',
'browser/ui/cocoa/passwords/manage_passwords_bubble_manage_view_controller_unittest.mm',
'browser/ui/cocoa/passwords/manage_passwords_bubble_never_save_view_controller_unittest.mm',
'browser/ui/cocoa/passwords/manage_passwords_bubble_pending_view_controller_unittest.mm',
'browser/ui/cocoa/passwords/manage_passwords_controller_test.h',
'browser/ui/cocoa/passwords/manage_passwords_controller_test.mm',
'browser/ui/cocoa/profiles/avatar_button_unittest.mm',
'browser/ui/cocoa/profiles/avatar_button_controller_unittest.mm',
'browser/ui/cocoa/profiles/avatar_icon_controller_unittest.mm',
'browser/ui/cocoa/profiles/avatar_label_button_unittest.mm',
'browser/ui/cocoa/profiles/avatar_menu_bubble_controller_unittest.mm',
'browser/ui/cocoa/profiles/profile_chooser_controller_unittest.mm',
'browser/ui/cocoa/profiles/profile_menu_controller_unittest.mm',
'browser/ui/cocoa/profiles/user_manager_mac_unittest.mm',
'browser/ui/cocoa/run_loop_testing_unittest.mm',
'browser/ui/cocoa/screen_capture_notification_ui_cocoa_unittest.mm',
'browser/ui/cocoa/sprite_view_unittest.mm',
'browser/ui/cocoa/status_bubble_mac_unittest.mm',
'browser/ui/cocoa/status_icons/status_icon_mac_unittest.mm',
'browser/ui/cocoa/styled_text_field_cell_unittest.mm',
'browser/ui/cocoa/styled_text_field_test_helper.h',
'browser/ui/cocoa/styled_text_field_test_helper.mm',
'browser/ui/cocoa/styled_text_field_unittest.mm',
'browser/ui/cocoa/tab_contents/sad_tab_controller_unittest.mm',
'browser/ui/cocoa/tab_contents/sad_tab_view_cocoa_unittest.mm',
'browser/ui/cocoa/table_row_nsimage_cache_unittest.mm',
'browser/ui/cocoa/tabs/media_indicator_button_cocoa_unittest.mm',
'browser/ui/cocoa/tabs/tab_controller_unittest.mm',
'browser/ui/cocoa/tabs/tab_strip_controller_unittest.mm',
'browser/ui/cocoa/tabs/tab_strip_view_unittest.mm',
'browser/ui/cocoa/tabs/tab_view_unittest.mm',
'browser/ui/cocoa/task_manager_mac_unittest.mm',
'browser/ui/cocoa/toolbar/reload_button_unittest.mm',
'browser/ui/cocoa/toolbar/toolbar_button_unittest.mm',
'browser/ui/cocoa/toolbar/toolbar_controller_unittest.mm',
'browser/ui/cocoa/toolbar/toolbar_view_unittest.mm',
'browser/ui/cocoa/toolbar/wrench_toolbar_button_cell_unittest.mm',
'browser/ui/cocoa/translate/translate_bubble_controller_unittest.mm',
'browser/ui/cocoa/validation_message_bubble_controller_unittest.mm',
'browser/ui/cocoa/vertical_gradient_view_unittest.mm',
'browser/ui/cocoa/view_resizer_pong.h',
'browser/ui/cocoa/view_resizer_pong.mm',
'browser/ui/cocoa/web_dialog_window_controller_unittest.mm',
'browser/ui/cocoa/website_settings/permission_bubble_controller_unittest.mm',
'browser/ui/cocoa/website_settings/permission_selector_button_unittest.mm',
'browser/ui/cocoa/website_settings/website_settings_bubble_controller_unittest.mm',
'browser/ui/cocoa/window_size_autosaver_unittest.mm',
'browser/ui/cocoa/wrench_menu/menu_tracked_root_view_unittest.mm',
'browser/ui/cocoa/wrench_menu/wrench_menu_button_cell_unittest.mm',
'browser/ui/cocoa/wrench_menu/wrench_menu_controller_unittest.mm',
'browser/ui/find_bar/find_backend_unittest.cc',
'browser/ui/global_error/global_error_service_unittest.cc',
'browser/ui/login/login_prompt_unittest.cc',
'browser/ui/passwords/manage_passwords_bubble_model_unittest.cc',
'browser/ui/passwords/manage_passwords_icon_mock.cc',
'browser/ui/passwords/manage_passwords_ui_controller_unittest.cc',
'browser/ui/passwords/manage_passwords_view_utils_unittest.cc',
'browser/ui/passwords/password_bubble_experiment_unittest.cc',
'browser/ui/passwords/password_manager_presenter_unittest.cc',
'browser/ui/search_engines/keyword_editor_controller_unittest.cc',
'browser/ui/sync/profile_signin_confirmation_helper_unittest.cc',
'browser/ui/sync/sync_promo_ui_unittest.cc',
'browser/ui/tests/ui_gfx_image_unittest.cc',
'browser/ui/tests/ui_gfx_image_unittest.mm',
'browser/ui/website_settings/mock_permission_bubble_view.cc',
'browser/ui/website_settings/mock_permission_bubble_view.h',
'browser/ui/website_settings/permission_bubble_manager_unittest.cc',
'browser/ui/website_settings/website_settings_unittest.cc',
'browser/ui/webui/fileicon_source_unittest.cc',
'browser/ui/webui/history_ui_unittest.cc',
'browser/update_client/chrome_update_query_params_delegate_unittest.cc',
'browser/upload_list_unittest.cc',
'browser/web_resource/promo_resource_service_mobile_ntp_unittest.cc',
'browser/web_resource/promo_resource_service_unittest.cc',
'common/chrome_content_client_unittest.cc',
'common/chrome_paths_unittest.cc',
'common/cloud_print/cloud_print_helpers_unittest.cc',
'common/crash_keys_unittest.cc',
'common/favicon/favicon_url_parser_unittest.cc',
'common/ini_parser_unittest.cc',
'common/instant_types_unittest.cc',
'common/mac/cfbundle_blocker_unittest.mm',
'common/mac/mock_launchd.cc',
'common/mac/mock_launchd.h',
'common/mac/objc_zombie_unittest.mm',
'common/partial_circular_buffer_unittest.cc',
'common/pref_names_util_unittest.cc',
'common/search_urls_unittest.cc',
'common/switch_utils_unittest.cc',
'common/variations/experiment_labels_unittest.cc',
'common/worker_thread_ticker_unittest.cc',
'renderer/chrome_content_renderer_client_unittest.cc',
'renderer/content_settings_observer_unittest.cc',
'renderer/instant_restricted_id_cache_unittest.cc',
'renderer/plugins/plugin_uma_unittest.cc',
'renderer/prerender/prerender_dispatcher_unittest.cc',
'renderer/resources/extensions/notifications_custom_bindings.js',
'renderer/resources/extensions/notifications_custom_bindings.gtestjs',
'renderer/resources/extensions/notifications_test_util.js',
'renderer/searchbox/search_bouncer_unittest.cc',
'renderer/searchbox/searchbox_extension_unittest.cc',
'renderer/searchbox/searchbox_unittest.cc',
'renderer/web_apps_unittest.cc',
'test/base/chrome_render_view_test.cc',
'test/base/chrome_render_view_test.h',
'test/base/menu_model_test.cc',
'test/base/menu_model_test.h',
'test/base/v8_unit_test.cc',
'test/base/v8_unit_test.h',
'test/data/unit/framework_unittest.gtestjs',
'test/logging/win/mof_data_parser_unittest.cc',
'utility/chrome_content_utility_client_unittest.cc',
'utility/cloud_print/pwg_encoder_unittest.cc',
# Duplicate these tests here because PathService has more items in
# unit_tests than in base_unittests.
'../base/path_service_unittest.cc',
'../components/search_engines/template_url_fetcher_unittest.cc',
'../components/search_engines/template_url_parser_unittest.cc',
'../components/search_engines/template_url_service_sync_unittest.cc',
'../components/search_engines/template_url_service_unittest.cc',
'../testing/gtest_mac_unittest.mm',
'../third_party/zlib/google/zip_reader_unittest.cc',
'../third_party/zlib/google/zip_unittest.cc',
'../tools/json_schema_compiler/test/additional_properties_unittest.cc',
'../tools/json_schema_compiler/test/any_unittest.cc',
'../tools/json_schema_compiler/test/arrays_unittest.cc',
'../tools/json_schema_compiler/test/callbacks_unittest.cc',
'../tools/json_schema_compiler/test/choices_unittest.cc',
'../tools/json_schema_compiler/test/crossref_unittest.cc',
'../tools/json_schema_compiler/test/enums_unittest.cc',
'../tools/json_schema_compiler/test/error_generation_unittest.cc',
'../tools/json_schema_compiler/test/functions_as_parameters_unittest.cc',
'../tools/json_schema_compiler/test/functions_on_types_unittest.cc',
'../tools/json_schema_compiler/test/idl_schemas_unittest.cc',
'../tools/json_schema_compiler/test/objects_unittest.cc',
'../tools/json_schema_compiler/test/simple_api_unittest.cc',
'../ui/webui/resources/js/cr.js',
],
'chrome_unit_tests_one_click_signin_sources': [
'browser/ui/cocoa/one_click_signin_bubble_controller_unittest.mm',
'browser/ui/sync/one_click_signin_helper_unittest.cc',
'browser/ui/sync/one_click_signin_sync_observer_unittest.cc',
'browser/ui/sync/one_click_signin_sync_starter_unittest.cc',
'browser/ui/views/sync/one_click_signin_bubble_view_unittest.cc',
],
'chrome_unit_tests_spellchecker_sources': [
'browser/spellchecker/spellcheck_action_unittest.cc',
'browser/spellchecker/spellcheck_custom_dictionary_unittest.cc',
'browser/spellchecker/spellcheck_host_metrics_unittest.cc',
'browser/spellchecker/spellcheck_message_filter_mac_unittest.cc',
'browser/spellchecker/spellcheck_message_filter_unittest.cc',
'browser/spellchecker/spellcheck_platform_mac_unittest.cc',
'browser/spellchecker/spellcheck_service_unittest.cc',
'browser/spellchecker/spelling_service_client_unittest.cc',
'renderer/spellchecker/custom_dictionary_engine_unittest.cc',
'renderer/spellchecker/spellcheck_provider_hunspell_unittest.cc',
'renderer/spellchecker/spellcheck_provider_mac_unittest.cc',
'renderer/spellchecker/spellcheck_provider_test.cc',
'renderer/spellchecker/spellcheck_provider_test.h',
'renderer/spellchecker/spellcheck_unittest.cc',
'renderer/spellchecker/spellcheck_worditerator_unittest.cc',
'tools/convert_dict/convert_dict_unittest.cc',
],
'chrome_unit_tests_background_sources': [
'browser/background/background_application_list_model_unittest.cc',
'browser/background/background_contents_service_unittest.cc',
'browser/background/background_mode_manager_unittest.cc',
],
'chrome_unit_tests_extensions_sources': [
'../apps/saved_files_service_unittest.cc',
'browser/apps/app_shim/app_shim_host_mac_unittest.cc',
'browser/apps/app_shim/extension_app_shim_handler_mac_unittest.cc',
'browser/apps/drive/drive_app_mapping_unittest.cc',
'browser/apps/ephemeral_app_service_unittest.cc',
'browser/autocomplete/keyword_extensions_delegate_impl_unittest.cc',
'browser/drive/drive_api_util_unittest.cc',
'browser/drive/drive_app_registry_unittest.cc',
'browser/drive/drive_uploader_unittest.cc',
'browser/drive/event_logger_unittest.cc',
'browser/drive/fake_drive_service_unittest.cc',
'browser/extensions/active_script_controller_unittest.cc',
'browser/extensions/active_tab_unittest.cc',
'browser/extensions/activity_log/activity_database_unittest.cc',
'browser/extensions/activity_log/activity_log_enabled_unittest.cc',
'browser/extensions/activity_log/activity_log_policy_unittest.cc',
'browser/extensions/activity_log/activity_log_unittest.cc',
'browser/extensions/activity_log/ad_injection_unittest.cc',
'browser/extensions/activity_log/counting_policy_unittest.cc',
'browser/extensions/activity_log/database_string_table_unittest.cc',
'browser/extensions/activity_log/fullstream_ui_policy_unittest.cc',
'browser/extensions/activity_log/hashed_ad_network_database_unittest.cc',
'browser/extensions/activity_log/uma_policy_unittest.cc',
'browser/extensions/api/activity_log_private/activity_log_private_api_unittest.cc',
'browser/extensions/api/audio_modem/audio_modem_api_unittest.cc',
'browser/extensions/api/bookmarks/bookmark_api_helpers_unittest.cc',
'browser/extensions/api/content_settings/content_settings_store_unittest.cc',
'browser/extensions/api/content_settings/content_settings_unittest.cc',
'browser/extensions/api/cookies/cookies_unittest.cc',
'browser/extensions/api/copresence/copresence_api_unittest.cc',
'browser/extensions/api/cryptotoken_private/cryptotoken_private_api_unittest.cc',
'browser/extensions/api/declarative/rules_registry_service_unittest.cc',
'browser/extensions/api/declarative/rules_registry_with_cache_unittest.cc',
'browser/extensions/api/declarative_content/chrome_content_rules_registry_unittest.cc',
'browser/extensions/api/declarative_content/content_action_unittest.cc',
'browser/extensions/api/declarative_content/content_condition_unittest.cc',
'browser/extensions/api/declarative_webrequest/webrequest_action_unittest.cc',
'browser/extensions/api/declarative_webrequest/webrequest_rules_registry_unittest.cc',
'browser/extensions/api/device_permissions_manager_unittest.cc',
'browser/extensions/api/dial/dial_device_data_unittest.cc',
'browser/extensions/api/dial/dial_registry_unittest.cc',
'browser/extensions/api/dial/dial_service_unittest.cc',
'browser/extensions/api/easy_unlock_private/easy_unlock_private_api_chromeos_unittest.cc',
'browser/extensions/api/experience_sampling_private/experience_sampling_private_api_unittest.cc',
'browser/extensions/api/extension_action/extension_action_prefs_unittest.cc',
'browser/extensions/api/file_handlers/api_file_handler_util_unittest.cc',
'browser/extensions/api/file_handlers/mime_util_unittest.cc',
'browser/extensions/api/file_system/file_system_api_unittest.cc',
'browser/extensions/api/identity/extension_token_key_unittest.cc',
'browser/extensions/api/identity/gaia_web_auth_flow_unittest.cc',
'browser/extensions/api/identity/identity_mint_queue_unittest.cc',
'browser/extensions/api/image_writer_private/destroy_partitions_operation_unittest.cc',
'browser/extensions/api/image_writer_private/operation_manager_unittest.cc',
'browser/extensions/api/image_writer_private/operation_unittest.cc',
'browser/extensions/api/image_writer_private/removable_storage_provider_chromeos_unittest.cc',
'browser/extensions/api/image_writer_private/test_utils.cc',
'browser/extensions/api/image_writer_private/write_from_file_operation_unittest.cc',
'browser/extensions/api/image_writer_private/write_from_url_operation_unittest.cc',
'browser/extensions/api/mdns/dns_sd_registry_unittest.cc',
'browser/extensions/api/omnibox/omnibox_unittest.cc',
'browser/extensions/api/permissions/permissions_api_helpers_unittest.cc',
'browser/extensions/api/preference/preference_api_prefs_unittest.cc',
'browser/extensions/api/proxy/proxy_api_helpers_unittest.cc',
'browser/extensions/api/push_messaging/obfuscated_gaia_id_fetcher_unittest.cc',
'browser/extensions/api/push_messaging/push_messaging_invalidation_handler_unittest.cc',
'browser/extensions/api/signed_in_devices/id_mapping_helper_unittest.cc',
'browser/extensions/api/signed_in_devices/signed_in_devices_api_unittest.cc',
'browser/extensions/api/signed_in_devices/signed_in_devices_manager_unittest.cc',
'browser/extensions/api/socket/socket_api_unittest.cc',
'browser/extensions/api/socket/tcp_socket_unittest.cc',
'browser/extensions/api/socket/tls_socket_unittest.cc',
'browser/extensions/api/socket/udp_socket_unittest.cc',
'browser/extensions/api/sockets_tcp_server/sockets_tcp_server_api_unittest.cc',
'browser/extensions/api/storage/settings_sync_unittest.cc',
'browser/extensions/api/streams_private/streams_private_manifest_unittest.cc',
'browser/extensions/api/web_navigation/frame_navigation_state_unittest.cc',
'browser/extensions/api/web_request/web_request_api_unittest.cc',
'browser/extensions/api/web_request/web_request_permissions_unittest.cc',
'browser/extensions/app_data_migrator_unittest.cc',
'browser/extensions/app_sync_data_unittest.cc',
'browser/extensions/blacklist_state_fetcher_unittest.cc',
'browser/extensions/bookmark_app_helper_unittest.cc',
'browser/extensions/browser_permissions_policy_delegate_unittest.cc',
'browser/extensions/chrome_app_sorting_unittest.cc',
'browser/extensions/chrome_component_extension_resource_manager_unittest.cc',
'browser/extensions/chrome_info_map_unittest.cc',
'browser/extensions/component_loader_unittest.cc',
'browser/extensions/convert_user_script_unittest.cc',
'browser/extensions/convert_web_app_unittest.cc',
'browser/extensions/default_apps_unittest.cc',
'browser/extensions/display_info_provider_chromeos_unittest.cc',
'browser/extensions/error_console/error_console_unittest.cc',
'browser/extensions/event_router_forwarder_unittest.cc',
'browser/extensions/extension_action_icon_factory_unittest.cc',
'browser/extensions/extension_action_manager_unittest.cc',
'browser/extensions/extension_action_unittest.cc',
'browser/extensions/extension_api_unittest.cc',
'browser/extensions/extension_api_unittest.h',
'browser/extensions/extension_context_menu_model_unittest.cc',
'browser/extensions/extension_creator_filter_unittest.cc',
'browser/extensions/extension_error_controller_unittest.cc',
'browser/extensions/extension_function_test_utils.cc',
'browser/extensions/extension_function_test_utils.h',
'browser/extensions/extension_garbage_collector_chromeos_unittest.cc',
'browser/extensions/extension_garbage_collector_unittest.cc',
'browser/extensions/extension_gcm_app_handler_unittest.cc',
'browser/extensions/extension_icon_manager_unittest.cc',
'browser/extensions/extension_install_checker_unittest.cc',
'browser/extensions/extension_install_prompt_unittest.cc',
'browser/extensions/extension_management_test_util.cc',
'browser/extensions/extension_management_test_util.h',
'browser/extensions/extension_management_unittest.cc',
'browser/extensions/extension_message_bubble_controller_unittest.cc',
'browser/extensions/extension_path_util_unittest.cc',
'browser/extensions/extension_prefs_unittest.cc',
'browser/extensions/extension_prefs_unittest.h',
'browser/extensions/extension_protocols_unittest.cc',
'browser/extensions/extension_reenabler_unittest.cc',
'browser/extensions/extension_service_test_base.cc',
'browser/extensions/extension_service_test_base.h',
'browser/extensions/extension_service_unittest.cc',
'browser/extensions/extension_special_storage_policy_unittest.cc',
'browser/extensions/extension_sync_data_unittest.cc',
'browser/extensions/extension_test_message_listener_unittest.cc',
'browser/extensions/extension_toolbar_model_unittest.cc',
'browser/extensions/extension_ui_unittest.cc',
'browser/extensions/extension_user_script_loader_unittest.cc',
'browser/extensions/extension_warning_badge_service_unittest.cc',
'browser/extensions/extension_web_ui_unittest.cc',
'browser/extensions/external_policy_loader_unittest.cc',
'browser/extensions/external_provider_impl_chromeos_unittest.cc',
'browser/extensions/external_provider_impl_unittest.cc',
'browser/extensions/favicon_downloader_unittest.cc',
'browser/extensions/install_tracker_unittest.cc',
'browser/extensions/location_bar_controller_unittest.cc',
'browser/extensions/menu_manager_unittest.cc',
'browser/extensions/pack_extension_unittest.cc',
'browser/extensions/permission_message_combinations_unittest.cc',
'browser/extensions/permission_messages_unittest.cc',
'browser/extensions/permissions_based_management_policy_provider_unittest.cc',
'browser/extensions/permissions_updater_unittest.cc',
'browser/extensions/shared_module_service_unittest.cc',
'browser/extensions/standard_management_policy_provider_unittest.cc',
'browser/extensions/token_cache/token_cache_service_unittest.cc',
'browser/extensions/updater/extension_cache_fake.cc',
'browser/extensions/updater/extension_cache_fake.h',
'browser/extensions/updater/extension_updater_unittest.cc',
'browser/extensions/user_script_listener_unittest.cc',
'browser/extensions/webstore_inline_installer_unittest.cc',
'browser/extensions/webstore_installer_unittest.cc',
'browser/extensions/zipfile_installer_unittest.cc',
'browser/media/cast_transport_host_filter_unittest.cc',
'browser/metrics/extensions_metrics_provider_unittest.cc',
'browser/notifications/extension_welcome_notification_unittest.cc',
'browser/notifications/notification_conversion_helper_unittest.cc',
'browser/renderer_context_menu/context_menu_content_type_unittest.cc',
'browser/search/hotword_service_unittest.cc',
'browser/signin/easy_unlock_app_manager_unittest.cc',
'browser/signin/easy_unlock_auth_attempt_unittest.cc',
'browser/signin/easy_unlock_screenlock_state_handler_unittest.cc',
'browser/signin/easy_unlock_service_unittest_chromeos.cc',
'browser/sync/glue/extensions_activity_monitor_unittest.cc',
'browser/sync_file_system/drive_backend/callback_helper_unittest.cc',
'browser/sync_file_system/drive_backend/callback_tracker_unittest.cc',
'browser/sync_file_system/drive_backend/conflict_resolver_unittest.cc',
'browser/sync_file_system/drive_backend/drive_backend_sync_unittest.cc',
'browser/sync_file_system/drive_backend/drive_backend_test_util.cc',
'browser/sync_file_system/drive_backend/drive_backend_test_util.h',
'browser/sync_file_system/drive_backend/fake_drive_service_helper.cc',
'browser/sync_file_system/drive_backend/fake_drive_service_helper.h',
'browser/sync_file_system/drive_backend/fake_drive_uploader.cc',
'browser/sync_file_system/drive_backend/fake_drive_uploader.h',
'browser/sync_file_system/drive_backend/fake_sync_worker.cc',
'browser/sync_file_system/drive_backend/fake_sync_worker.h',
'browser/sync_file_system/drive_backend/leveldb_wrapper_unittest.cc',
'browser/sync_file_system/drive_backend/list_changes_task_unittest.cc',
'browser/sync_file_system/drive_backend/local_to_remote_syncer_unittest.cc',
'browser/sync_file_system/drive_backend/metadata_database_index_on_disk_unittest.cc',
'browser/sync_file_system/drive_backend/metadata_database_index_unittest.cc',
'browser/sync_file_system/drive_backend/metadata_database_unittest.cc',
'browser/sync_file_system/drive_backend/metadata_db_migration_util_unittest.cc',
'browser/sync_file_system/drive_backend/register_app_task_unittest.cc',
'browser/sync_file_system/drive_backend/remote_to_local_syncer_unittest.cc',
'browser/sync_file_system/drive_backend/sync_engine_initializer_unittest.cc',
'browser/sync_file_system/drive_backend/sync_engine_unittest.cc',
'browser/sync_file_system/drive_backend/sync_task_manager_unittest.cc',
'browser/sync_file_system/drive_backend/sync_worker_unittest.cc',
'browser/sync_file_system/drive_backend/task_dependency_manager_unittest.cc',
'browser/sync_file_system/fake_remote_change_processor.cc',
'browser/sync_file_system/fake_remote_change_processor.h',
'browser/sync_file_system/file_change_unittest.cc',
'browser/sync_file_system/local/canned_syncable_file_system.cc',
'browser/sync_file_system/local/canned_syncable_file_system.h',
'browser/sync_file_system/local/local_file_change_tracker_unittest.cc',
'browser/sync_file_system/local/local_file_sync_context_unittest.cc',
'browser/sync_file_system/local/local_file_sync_service_unittest.cc',
'browser/sync_file_system/local/local_file_sync_status_unittest.cc',
'browser/sync_file_system/local/mock_sync_status_observer.cc',
'browser/sync_file_system/local/mock_sync_status_observer.h',
'browser/sync_file_system/local/syncable_file_operation_runner_unittest.cc',
'browser/sync_file_system/local/syncable_file_system_unittest.cc',
'browser/sync_file_system/logger_unittest.cc',
'browser/sync_file_system/mock_local_change_processor.cc',
'browser/sync_file_system/mock_local_change_processor.h',
'browser/sync_file_system/mock_remote_change_processor.cc',
'browser/sync_file_system/mock_remote_change_processor.h',
'browser/sync_file_system/mock_remote_file_sync_service.cc',
'browser/sync_file_system/mock_remote_file_sync_service.h',
'browser/sync_file_system/subtree_set_unittest.cc',
'browser/sync_file_system/sync_file_system_service_unittest.cc',
'browser/sync_file_system/sync_file_system_test_util.cc',
'browser/sync_file_system/sync_file_system_test_util.h',
'browser/sync_file_system/sync_process_runner_unittest.cc',
'browser/sync_file_system/syncable_file_system_util_unittest.cc',
'browser/web_applications/web_app_mac_unittest.mm',
'browser/web_applications/web_app_unittest.cc',
'common/extensions/api/commands/commands_manifest_unittest.cc',
'common/extensions/api/extension_action/browser_action_manifest_unittest.cc',
'common/extensions/api/extension_action/page_action_manifest_unittest.cc',
'common/extensions/api/extension_api_unittest.cc',
'common/extensions/api/file_browser_handlers/file_browser_handler_manifest_unittest.cc',
'common/extensions/api/storage/storage_schema_manifest_handler_unittest.cc',
'common/extensions/chrome_extensions_client_unittest.cc',
'common/extensions/command_unittest.cc',
'common/extensions/extension_icon_set_unittest.cc',
'common/extensions/extension_unittest.cc',
'common/extensions/feature_switch_unittest.cc',
'common/extensions/features/chrome_channel_feature_filter_unittest.cc',
'common/extensions/manifest_handlers/automation_unittest.cc',
'common/extensions/manifest_handlers/content_scripts_manifest_unittest.cc',
'common/extensions/manifest_handlers/exclude_matches_manifest_unittest.cc',
'common/extensions/manifest_handlers/settings_overrides_handler_unittest.cc',
'common/extensions/manifest_handlers/ui_overrides_handler_unittest.cc',
'common/extensions/manifest_tests/chrome_manifest_test.cc',
'common/extensions/manifest_tests/chrome_manifest_test.h',
'common/extensions/manifest_tests/extension_manifests_about_unittest.cc',
'common/extensions/manifest_tests/extension_manifests_background_unittest.cc',
'common/extensions/manifest_tests/extension_manifests_chromepermission_unittest.cc',
'common/extensions/manifest_tests/extension_manifests_contentsecuritypolicy_unittest.cc',
'common/extensions/manifest_tests/extension_manifests_default_extent_path_unittest.cc',
'common/extensions/manifest_tests/extension_manifests_devtools_unittest.cc',
'common/extensions/manifest_tests/extension_manifests_dummy_unittest.cc',
'common/extensions/manifest_tests/extension_manifests_experimental_unittest.cc',
'common/extensions/manifest_tests/extension_manifests_homepage_unittest.cc',
'common/extensions/manifest_tests/extension_manifests_icons_unittest.cc',
'common/extensions/manifest_tests/extension_manifests_initvalue_unittest.cc',
'common/extensions/manifest_tests/extension_manifests_isolatedapp_unittest.cc',
'common/extensions/manifest_tests/extension_manifests_kiosk_unittest.cc',
'common/extensions/manifest_tests/extension_manifests_launch_unittest.cc',
'common/extensions/manifest_tests/extension_manifests_launcher_page_unittest.cc',
'common/extensions/manifest_tests/extension_manifests_manifest_version_unittest.cc',
'common/extensions/manifest_tests/extension_manifests_offline_unittest.cc',
'common/extensions/manifest_tests/extension_manifests_old_unittest.cc',
'common/extensions/manifest_tests/extension_manifests_options_unittest.cc',
'common/extensions/manifest_tests/extension_manifests_override_unittest.cc',
'common/extensions/manifest_tests/extension_manifests_platformapp_unittest.cc',
'common/extensions/manifest_tests/extension_manifests_portsinpermissions_unittest.cc',
'common/extensions/manifest_tests/extension_manifests_requirements_unittest.cc',
'common/extensions/manifest_tests/extension_manifests_sandboxed_unittest.cc',
'common/extensions/manifest_tests/extension_manifests_storage_unittest.cc',
'common/extensions/manifest_tests/extension_manifests_ui_unittest.cc',
'common/extensions/manifest_tests/extension_manifests_update_unittest.cc',
'common/extensions/manifest_tests/extension_manifests_validapp_unittest.cc',
'common/extensions/manifest_tests/extension_manifests_web_accessible_resources_unittest.cc',
'common/extensions/manifest_tests/extension_manifests_web_unittest.cc',
'common/extensions/manifest_tests/extension_manifests_webview_accessible_resources_unittest.cc',
'common/extensions/manifest_unittest.cc',
'common/extensions/permissions/chrome_permission_message_provider_unittest.cc',
'common/extensions/permissions/media_galleries_permission_unittest.cc',
'common/extensions/permissions/permission_set_unittest.cc',
'common/extensions/permissions/permissions_data_unittest.cc',
'common/extensions/permissions/settings_override_permission_unittest.cc',
'common/extensions/sync_type_unittest.cc',
'common/extensions/update_manifest_unittest.cc',
'common/extensions/value_counter_unittest.cc',
'renderer/extensions/extension_localization_peer_unittest.cc',
'renderer/extensions/renderer_permissions_policy_delegate_unittest.cc',
'renderer/media/cast_ipc_dispatcher_unittest.cc',
'utility/image_writer/image_writer_unittest.cc',
'utility/media_galleries/image_metadata_extractor_unittest.cc',
],
'chrome_unit_tests_extensions_non_chromeos_sources': [
'browser/extensions/api/messaging/native_message_process_host_unittest.cc',
'browser/extensions/api/messaging/native_messaging_host_manifest_unittest.cc',
'browser/extensions/api/messaging/native_messaging_policy_handler_unittest.cc',
],
'chrome_unit_tests_ash_sources': [
'browser/media/desktop_media_list_ash_unittest.cc',
'browser/signin/signin_error_notifier_ash_unittest.cc',
'browser/sync/sync_error_notifier_ash_unittest.cc',
# Ash implies the app list is enabled (only disabled on mobile).
'browser/ui/app_list/search/app_search_provider_unittest.cc',
'browser/ui/app_list/search/history_unittest.cc',
'browser/ui/ash/accessibility/ax_tree_source_ash_unittest.cc',
'browser/ui/ash/chrome_screenshot_grabber_unittest.cc',
'browser/ui/ash/ime_controller_chromeos_unittest.cc',
'browser/ui/ash/launcher/chrome_launcher_controller_unittest.cc',
'browser/ui/ash/launcher/launcher_context_menu_unittest.cc',
'browser/ui/ash/multi_user/multi_user_context_menu_chromeos_unittest.cc',
'browser/ui/ash/multi_user/multi_user_notification_blocker_chromeos_unittest.cc',
'browser/ui/ash/multi_user/multi_user_window_manager_chromeos_unittest.cc',
'browser/ui/ash/session_state_delegate_chromeos_unittest.cc',
'browser/ui/ash/window_positioner_unittest.cc',
'browser/ui/window_sizer/window_sizer_ash_unittest.cc',
],
'chrome_unit_tests_task_manager_sources': [
'browser/task_manager/task_manager_unittest.cc',
'browser/task_manager/task_manager_util_unittest.cc',
],
'chrome_unit_tests_mdns_sources': [
'browser/local_discovery/privet_device_lister_unittest.cc',
'browser/local_discovery/privet_local_printer_lister_unittest.cc',
'browser/local_discovery/privet_notifications_unittest.cc',
'common/local_discovery/local_domain_resolver_unittest.cc',
'common/local_discovery/service_discovery_client_unittest.cc',
],
'chrome_unit_tests_service_discovery_sources': [
'browser/local_discovery/cloud_device_list_unittest.cc',
'browser/local_discovery/cloud_print_printer_list_unittest.cc',
'browser/local_discovery/gcd_api_flow_unittest.cc',
'browser/local_discovery/gcd_registration_ticket_request_unittest.cc',
'browser/local_discovery/privet_confirm_api_flow_unittest.cc',
'browser/local_discovery/privet_http_unittest.cc',
'browser/local_discovery/privet_url_fetcher_unittest.cc',
'browser/local_discovery/privetv3_session_unittest.cc',
'browser/local_discovery/service_discovery_client_mac_unittest.mm',
],
'chrome_unit_tests_configuration_policy_sources': [
'browser/net/proxy_policy_handler_unittest.cc',
'browser/policy/cloud/cloud_policy_invalidator_unittest.cc',
'browser/policy/cloud/user_policy_signin_service_unittest.cc',
'browser/policy/file_selection_dialogs_policy_handler_unittest.cc',
'browser/policy/javascript_policy_handler_unittest.cc',
'browser/policy/managed_bookmarks_policy_handler_unittest.cc',
'browser/policy/policy_path_parser_unittest.cc',
'browser/policy/profile_policy_connector_unittest.cc',
'browser/policy/url_blacklist_manager_unittest.cc',
'browser/prefs/proxy_policy_unittest.cc',
'browser/profiles/incognito_mode_policy_handler_unittest.cc',
'browser/sessions/restore_on_startup_policy_handler_unittest.cc',
'browser/sync/sync_policy_handler_unittest.cc',
],
  # Sources compiled only when both extensions and configuration policy are enabled.
'chrome_unit_tests_extensions_policy_sources': [
'browser/extensions/api/storage/policy_value_store_unittest.cc',
'browser/extensions/policy_handlers_unittest.cc',
],
'chrome_unit_tests_speech_sources': [
'browser/speech/extension_api/extension_manifests_tts_unittest.cc',
'browser/speech/tts_controller_unittest.cc',
],
'chrome_unit_tests_notifications_sources': [
'browser/notifications/desktop_notification_profile_util_unittest.cc',
'browser/notifications/message_center_notifications_unittest.cc',
'browser/notifications/message_center_settings_controller_unittest.cc',
'browser/notifications/platform_notification_service_unittest.cc',
],
'chrome_unit_tests_full_safe_browsing_sources': [
'browser/safe_browsing/binary_feature_extractor_unittest.cc',
'browser/safe_browsing/binary_feature_extractor_win_unittest.cc',
'browser/safe_browsing/browser_feature_extractor_unittest.cc',
'browser/safe_browsing/chunk_range_unittest.cc',
'browser/safe_browsing/client_side_detection_host_unittest.cc',
'browser/safe_browsing/client_side_detection_service_unittest.cc',
'browser/safe_browsing/database_manager_unittest.cc',
'browser/safe_browsing/download_feedback_service_unittest.cc',
'browser/safe_browsing/download_feedback_unittest.cc',
'browser/safe_browsing/download_protection_service_unittest.cc',
'browser/safe_browsing/incident_reporting/binary_integrity_analyzer_win_unittest.cc',
'browser/safe_browsing/incident_reporting/binary_integrity_incident_unittest.cc',
'browser/safe_browsing/incident_reporting/blacklist_load_analyzer_win_unittest.cc',
'browser/safe_browsing/incident_reporting/blacklist_load_incident_unittest.cc',
'browser/safe_browsing/incident_reporting/delayed_callback_runner_unittest.cc',
'browser/safe_browsing/incident_reporting/download_metadata_manager_unittest.cc',
'browser/safe_browsing/incident_reporting/environment_data_collection_win_unittest.cc',
'browser/safe_browsing/incident_reporting/incident_report_uploader_impl_unittest.cc',
'browser/safe_browsing/incident_reporting/incident_reporting_service_unittest.cc',
'browser/safe_browsing/incident_reporting/last_download_finder_unittest.cc',
'browser/safe_browsing/incident_reporting/mock_incident_receiver.cc',
'browser/safe_browsing/incident_reporting/mock_incident_receiver.h',
'browser/safe_browsing/incident_reporting/module_integrity_unittest_util_win.cc',
'browser/safe_browsing/incident_reporting/module_integrity_unittest_util_win.h',
'browser/safe_browsing/incident_reporting/module_integrity_verifier_win_unittest.cc',
'browser/safe_browsing/incident_reporting/off_domain_inclusion_detector_unittest.cc',
'browser/safe_browsing/incident_reporting/preference_validation_delegate_unittest.cc',
'browser/safe_browsing/incident_reporting/script_request_detector_unittest.cc',
'browser/safe_browsing/incident_reporting/tracked_preference_incident_unittest.cc',
'browser/safe_browsing/incident_reporting/variations_seed_signature_incident_unittest.cc',
'browser/safe_browsing/local_two_phase_testserver.cc',
'browser/safe_browsing/malware_details_unittest.cc',
'browser/safe_browsing/path_sanitizer_unittest.cc',
'browser/safe_browsing/pe_image_reader_win_unittest.cc',
'browser/safe_browsing/ping_manager_unittest.cc',
'browser/safe_browsing/prefix_set_unittest.cc',
'browser/safe_browsing/protocol_manager_unittest.cc',
'browser/safe_browsing/protocol_parser_unittest.cc',
'browser/safe_browsing/safe_browsing_blocking_page_unittest.cc',
'browser/safe_browsing/safe_browsing_database_unittest.cc',
'browser/safe_browsing/safe_browsing_store_file_unittest.cc',
'browser/safe_browsing/safe_browsing_store_unittest.cc',
'browser/safe_browsing/safe_browsing_util_unittest.cc',
'browser/safe_browsing/two_phase_uploader_unittest.cc',
'renderer/safe_browsing/features_unittest.cc',
'renderer/safe_browsing/murmurhash3_util_unittest.cc',
'renderer/safe_browsing/phishing_term_feature_extractor_unittest.cc',
'renderer/safe_browsing/phishing_url_feature_extractor_unittest.cc',
'renderer/safe_browsing/scorer_unittest.cc',
],
# These are the enable_autofill_dialog = 1 sources. Some autofill tests
# are always compiled.
'chrome_unit_tests_autofill_dialog_sources': [
'browser/ui/autofill/account_chooser_model_unittest.cc',
'browser/ui/autofill/autofill_dialog_controller_unittest.cc',
'browser/ui/autofill/autofill_dialog_i18n_input_unittest.cc',
'browser/ui/autofill/country_combobox_model_unittest.cc',
'browser/ui/autofill/data_model_wrapper_unittest.cc',
'browser/ui/autofill/mock_address_validator.cc',
'browser/ui/autofill/mock_address_validator.h',
'browser/ui/autofill/mock_autofill_dialog_view_delegate.cc',
'browser/ui/autofill/mock_autofill_dialog_view_delegate.h',
],
'chrome_unit_tests_plugins_sources': [
'browser/component_updater/test/component_installers_unittest.cc',
'browser/metrics/plugin_metrics_provider_unittest.cc',
'browser/plugins/plugin_finder_unittest.cc',
'browser/plugins/plugin_info_message_filter_unittest.cc',
'browser/plugins/plugin_installer_unittest.cc',
'browser/plugins/plugin_metadata_unittest.cc',
'browser/plugins/plugin_prefs_unittest.cc',
'common/pepper_permission_util_unittest.cc',
],
'chrome_unit_tests_print_preview_sources': [
'browser/printing/cloud_print/cloud_print_proxy_service_unittest.cc',
'browser/printing/print_dialog_cloud_unittest.cc',
'browser/printing/print_job_unittest.cc',
'browser/printing/print_preview_dialog_controller_unittest.cc',
'browser/printing/print_preview_test.cc',
'browser/printing/print_preview_test.h',
'browser/ui/webui/print_preview/print_preview_ui_unittest.cc',
'common/service_process_util_unittest.cc',
'service/cloud_print/cloud_print_service_helpers_unittest.cc',
'service/cloud_print/cloud_print_token_store_unittest.cc',
'service/cloud_print/cloud_print_url_fetcher_unittest.cc',
'service/cloud_print/connector_settings_unittest.cc',
'service/cloud_print/printer_job_handler_unittest.cc',
'service/cloud_print/printer_job_queue_handler_unittest.cc',
'service/service_process_prefs_unittest.cc',
'service/service_process_unittest.cc',
],
'chrome_unit_tests_captive_portal_sources': [
'browser/captive_portal/captive_portal_service_unittest.cc',
'browser/captive_portal/captive_portal_tab_helper_unittest.cc',
'browser/captive_portal/captive_portal_tab_reloader_unittest.cc',
],
'chrome_unit_tests_session_service_sources': [
'browser/sessions/session_service_unittest.cc',
'browser/ui/startup/session_crashed_infobar_delegate_unittest.cc',
],
'chrome_unit_tests_webrtc_sources': [
'browser/media/webrtc_log_uploader_unittest.cc',
'browser/media/webrtc_log_util_unittest.cc',
'browser/media/webrtc_rtp_dump_handler_unittest.cc',
'browser/media/webrtc_rtp_dump_writer_unittest.cc',
'renderer/media/chrome_webrtc_log_message_delegate_unittest.cc',
],
'chrome_unit_tests_chromeos_sources': [
'browser/chromeos/accessibility/magnification_manager_unittest.cc',
'browser/chromeos/attestation/attestation_ca_client_unittest.cc',
'browser/chromeos/attestation/attestation_policy_observer_unittest.cc',
'browser/chromeos/attestation/fake_certificate.cc',
'browser/chromeos/attestation/fake_certificate.h',
'browser/chromeos/attestation/platform_verification_flow_unittest.cc',
'browser/chromeos/customization/customization_document_unittest.cc',
'browser/chromeos/dbus/printer_service_provider_unittest.cc',
'browser/chromeos/display/display_preferences_unittest.cc',
'browser/chromeos/drive/change_list_loader_unittest.cc',
'browser/chromeos/drive/change_list_processor_unittest.cc',
'browser/chromeos/drive/directory_loader_unittest.cc',
'browser/chromeos/drive/download_handler_unittest.cc',
'browser/chromeos/drive/drive_file_stream_reader_unittest.cc',
'browser/chromeos/drive/drive_integration_service_unittest.cc',
'browser/chromeos/drive/dummy_file_system.cc',
'browser/chromeos/drive/dummy_file_system.h',
'browser/chromeos/drive/fake_file_system.cc',
'browser/chromeos/drive/fake_file_system.h',
'browser/chromeos/drive/fake_file_system_unittest.cc',
'browser/chromeos/drive/fake_free_disk_space_getter.cc',
'browser/chromeos/drive/fake_free_disk_space_getter.h',
'browser/chromeos/drive/file_cache_unittest.cc',
'browser/chromeos/drive/file_change_unittest.cc',
'browser/chromeos/drive/file_system/copy_operation_unittest.cc',
'browser/chromeos/drive/file_system/create_directory_operation_unittest.cc',
'browser/chromeos/drive/file_system/create_file_operation_unittest.cc',
'browser/chromeos/drive/file_system/download_operation_unittest.cc',
'browser/chromeos/drive/file_system/get_file_for_saving_operation_unittest.cc',
'browser/chromeos/drive/file_system/move_operation_unittest.cc',
'browser/chromeos/drive/file_system/open_file_operation_unittest.cc',
'browser/chromeos/drive/file_system/operation_test_base.cc',
'browser/chromeos/drive/file_system/operation_test_base.h',
'browser/chromeos/drive/file_system/remove_operation_unittest.cc',
'browser/chromeos/drive/file_system/search_operation_unittest.cc',
'browser/chromeos/drive/file_system/touch_operation_unittest.cc',
'browser/chromeos/drive/file_system/truncate_operation_unittest.cc',
'browser/chromeos/drive/file_system_unittest.cc',
'browser/chromeos/drive/file_system_util_unittest.cc',
'browser/chromeos/drive/file_task_executor_unittest.cc',
'browser/chromeos/drive/file_write_watcher_unittest.cc',
'browser/chromeos/drive/fileapi/fileapi_worker_unittest.cc',
'browser/chromeos/drive/fileapi/webkit_file_stream_reader_impl_unittest.cc',
'browser/chromeos/drive/job_queue_unittest.cc',
'browser/chromeos/drive/job_scheduler_unittest.cc',
'browser/chromeos/drive/local_file_reader_unittest.cc',
'browser/chromeos/drive/remove_stale_cache_files_unittest.cc',
'browser/chromeos/drive/resource_entry_conversion_unittest.cc',
'browser/chromeos/drive/resource_metadata_storage_unittest.cc',
'browser/chromeos/drive/resource_metadata_unittest.cc',
'browser/chromeos/drive/search_metadata_unittest.cc',
'browser/chromeos/drive/sync/entry_revert_performer_unittest.cc',
'browser/chromeos/drive/sync/entry_update_performer_unittest.cc',
'browser/chromeos/drive/sync/remove_performer_unittest.cc',
'browser/chromeos/drive/sync_client_unittest.cc',
'browser/chromeos/drive/test_util.cc',
'browser/chromeos/drive/test_util.h',
'browser/chromeos/drive/write_on_cache_file_unittest.cc',
'browser/chromeos/events/event_rewriter_unittest.cc',
'browser/chromeos/events/keyboard_driven_event_rewriter_unittest.cc',
'browser/chromeos/extensions/default_app_order_unittest.cc',
'browser/chromeos/extensions/device_local_account_external_policy_loader_unittest.cc',
'browser/chromeos/extensions/device_local_account_management_policy_provider_unittest.cc',
'browser/chromeos/extensions/external_cache_unittest.cc',
'browser/chromeos/extensions/file_manager/device_event_router_unittest.cc',
'browser/chromeos/extensions/wallpaper_private_api_unittest.cc',
'browser/chromeos/external_metrics_unittest.cc',
'browser/chromeos/file_manager/file_tasks_unittest.cc',
'browser/chromeos/file_manager/file_watcher_unittest.cc',
'browser/chromeos/file_manager/fileapi_util_unittest.cc',
'browser/chromeos/file_manager/path_util_unittest.cc',
'browser/chromeos/file_manager/url_util_unittest.cc',
'browser/chromeos/file_manager/volume_manager_unittest.cc',
'browser/chromeos/file_system_provider/fake_provided_file_system.cc',
'browser/chromeos/file_system_provider/fake_provided_file_system.h',
'browser/chromeos/file_system_provider/fileapi/buffering_file_stream_reader_unittest.cc',
'browser/chromeos/file_system_provider/fileapi/buffering_file_stream_writer_unittest.cc',
'browser/chromeos/file_system_provider/fileapi/file_stream_reader_unittest.cc',
'browser/chromeos/file_system_provider/fileapi/file_stream_writer_unittest.cc',
'browser/chromeos/file_system_provider/fileapi/provider_async_file_util_unittest.cc',
'browser/chromeos/file_system_provider/mount_path_util_unittest.cc',
'browser/chromeos/file_system_provider/operations/abort_unittest.cc',
'browser/chromeos/file_system_provider/operations/add_watcher_unittest.cc',
'browser/chromeos/file_system_provider/operations/close_file_unittest.cc',
'browser/chromeos/file_system_provider/operations/copy_entry_unittest.cc',
'browser/chromeos/file_system_provider/operations/create_directory_unittest.cc',
'browser/chromeos/file_system_provider/operations/create_file_unittest.cc',
'browser/chromeos/file_system_provider/operations/delete_entry_unittest.cc',
'browser/chromeos/file_system_provider/operations/get_metadata_unittest.cc',
'browser/chromeos/file_system_provider/operations/move_entry_unittest.cc',
'browser/chromeos/file_system_provider/operations/open_file_unittest.cc',
'browser/chromeos/file_system_provider/operations/read_directory_unittest.cc',
'browser/chromeos/file_system_provider/operations/read_file_unittest.cc',
'browser/chromeos/file_system_provider/operations/remove_watcher_unittest.cc',
'browser/chromeos/file_system_provider/operations/test_util.cc',
'browser/chromeos/file_system_provider/operations/test_util.h',
'browser/chromeos/file_system_provider/operations/truncate_unittest.cc',
'browser/chromeos/file_system_provider/operations/unmount_unittest.cc',
'browser/chromeos/file_system_provider/operations/write_file_unittest.cc',
'browser/chromeos/file_system_provider/provided_file_system_unittest.cc',
'browser/chromeos/file_system_provider/queue_unittest.cc',
'browser/chromeos/file_system_provider/registry_unittest.cc',
'browser/chromeos/file_system_provider/request_manager_unittest.cc',
'browser/chromeos/file_system_provider/service_unittest.cc',
'browser/chromeos/file_system_provider/throttled_file_system_unittest.cc',
'browser/chromeos/fileapi/external_file_url_request_job_unittest.cc',
'browser/chromeos/fileapi/external_file_url_util_unittest.cc',
'browser/chromeos/fileapi/file_access_permissions_unittest.cc',
'browser/chromeos/fileapi/file_system_backend_unittest.cc',
'browser/chromeos/input_method/browser_state_monitor_unittest.cc',
'browser/chromeos/input_method/input_method_configuration_unittest.cc',
'browser/chromeos/input_method/input_method_engine_unittest.cc',
'browser/chromeos/input_method/input_method_manager_impl_unittest.cc',
'browser/chromeos/input_method/input_method_persistence_unittest.cc',
'browser/chromeos/input_method/input_method_util_unittest.cc',
'browser/chromeos/locale_change_guard_unittest.cc',
'browser/chromeos/login/auth/cryptohome_authenticator_unittest.cc',
'browser/chromeos/login/auth/online_attempt_unittest.cc',
'browser/chromeos/login/easy_unlock/easy_unlock_tpm_key_manager_unittest.cc',
'browser/chromeos/login/error_screens_histogram_helper_unittest.cc',
'browser/chromeos/login/existing_user_controller_auto_login_unittest.cc',
'browser/chromeos/login/hwid_checker_unittest.cc',
'browser/chromeos/login/profile_auth_data_unittest.cc',
'browser/chromeos/login/saml/saml_offline_signin_limiter_unittest.cc',
'browser/chromeos/login/signin/merge_session_load_page_unittest.cc',
'browser/chromeos/login/supervised/supervised_user_authentication_unittest.cc',
'browser/chromeos/login/users/multi_profile_user_controller_unittest.cc',
'browser/chromeos/login/users/user_manager_unittest.cc',
'browser/chromeos/login/users/wallpaper/wallpaper_manager_unittest.cc',
'browser/chromeos/memory/oom_priority_manager_unittest.cc',
'browser/chromeos/mobile/mobile_activator_unittest.cc',
'browser/chromeos/mobile_config_unittest.cc',
'browser/chromeos/net/cert_verify_proc_chromeos_unittest.cc',
'browser/chromeos/net/network_portal_detector_impl_unittest.cc',
'browser/chromeos/net/network_portal_notification_controller_unittest.cc',
'browser/chromeos/net/onc_utils_unittest.cc',
'browser/chromeos/offline/offline_load_page_unittest.cc',
'browser/chromeos/options/network_property_ui_data_unittest.cc',
'browser/chromeos/ownership/fake_owner_settings_service.cc',
'browser/chromeos/ownership/fake_owner_settings_service.h',
'browser/chromeos/ownership/owner_settings_service_chromeos_unittest.cc',
'browser/chromeos/policy/affiliated_cloud_policy_invalidator_unittest.cc',
'browser/chromeos/policy/affiliated_invalidation_service_provider_impl_unittest.cc',
'browser/chromeos/policy/auto_enrollment_client_unittest.cc',
'browser/chromeos/policy/cloud_external_data_manager_base_unittest.cc',
'browser/chromeos/policy/cloud_external_data_policy_observer_unittest.cc',
'browser/chromeos/policy/cloud_external_data_store_unittest.cc',
'browser/chromeos/policy/configuration_policy_handler_chromeos_unittest.cc',
'browser/chromeos/policy/consumer_enrollment_handler_factory_unittest.cc',
'browser/chromeos/policy/consumer_enrollment_handler_unittest.cc',
'browser/chromeos/policy/consumer_management_notifier_factory_unittest.cc',
'browser/chromeos/policy/consumer_management_notifier_unittest.cc',
'browser/chromeos/policy/consumer_management_service_unittest.cc',
'browser/chromeos/policy/consumer_unenrollment_handler_unittest.cc',
'browser/chromeos/policy/device_cloud_policy_initializer_unittest.cc',
'browser/chromeos/policy/device_cloud_policy_manager_chromeos_unittest.cc',
'browser/chromeos/policy/device_cloud_policy_store_chromeos_unittest.cc',
'browser/chromeos/policy/device_local_account_policy_service_unittest.cc',
'browser/chromeos/policy/enterprise_install_attributes_unittest.cc',
'browser/chromeos/policy/fake_affiliated_invalidation_service_provider.cc',
'browser/chromeos/policy/fake_affiliated_invalidation_service_provider.h',
'browser/chromeos/policy/network_configuration_updater_unittest.cc',
'browser/chromeos/policy/recommendation_restorer_unittest.cc',
'browser/chromeos/policy/server_backed_state_keys_broker_unittest.cc',
'browser/chromeos/policy/status_uploader_unittest.cc',
'browser/chromeos/policy/user_cloud_policy_manager_chromeos_unittest.cc',
'browser/chromeos/policy/user_cloud_policy_store_chromeos_unittest.cc',
'browser/chromeos/power/extension_event_observer_unittest.cc',
'browser/chromeos/power/power_data_collector_unittest.cc',
'browser/chromeos/power/power_prefs_unittest.cc',
'browser/chromeos/power/renderer_freezer_unittest.cc',
'browser/chromeos/preferences_unittest.cc',
'browser/chromeos/profiles/profile_list_chromeos_unittest.cc',
'browser/chromeos/proxy_config_service_impl_unittest.cc',
'browser/chromeos/session_length_limiter_unittest.cc',
'browser/chromeos/settings/cros_settings_unittest.cc',
'browser/chromeos/settings/device_oauth2_token_service_unittest.cc',
'browser/chromeos/settings/device_settings_provider_unittest.cc',
'browser/chromeos/settings/device_settings_service_unittest.cc',
'browser/chromeos/settings/session_manager_operation_unittest.cc',
'browser/chromeos/settings/shutdown_policy_handler_unittest.cc',
'browser/chromeos/settings/stub_cros_settings_provider_unittest.cc',
'browser/chromeos/system/automatic_reboot_manager_unittest.cc',
'browser/chromeos/system/device_disabling_manager_unittest.cc',
'browser/chromeos/ui/accessibility_focus_ring_controller_unittest.cc',
'browser/chromeos/ui/idle_app_name_notification_view_unittest.cc',
'browser/extensions/api/enterprise_platform_keys_private/enterprise_platform_keys_private_api_unittest.cc',
'browser/extensions/api/log_private/syslog_parser_unittest.cc',
'browser/extensions/updater/local_extension_cache_unittest.cc',
'browser/metrics/chromeos_metrics_provider_unittest.cc',
'browser/notifications/login_state_notification_blocker_chromeos_unittest.cc',
'browser/resources/chromeos/braille_ime/braille_ime.js',
'browser/resources/chromeos/braille_ime/braille_ime_unittest.gtestjs',
# TODO(zturner): Enable this on Windows. See
# BrowserWithTestWindowTest::SetUp() for a comment explaining why this is
# broken.
'browser/ui/views/frame/immersive_mode_controller_ash_unittest.cc',
'browser/ui/views/frame/web_app_left_header_view_ash_unittest.cc',
'browser/ui/views/select_file_dialog_extension_unittest.cc',
'browser/ui/webui/chromeos/login/l10n_util_test_util.cc',
'browser/ui/webui/chromeos/login/l10n_util_test_util.h',
'browser/ui/webui/chromeos/login/l10n_util_unittest.cc',
'browser/ui/webui/chromeos/login/signin_userlist_unittest.cc',
'browser/ui/webui/options/chromeos/cros_language_options_handler_unittest.cc',
],
'chrome_unit_tests_desktop_linux_sources': [
'browser/password_manager/native_backend_kwallet_x_unittest.cc',
'browser/profiles/profile_list_desktop_unittest.cc',
'browser/shell_integration_linux_unittest.cc',
],
'chrome_unit_tests_views_sources': [
'browser/ui/views/accelerator_table_unittest.cc',
# Views implies app list is enabled (only disabled on mobile).
'browser/ui/views/app_list/linux/app_list_linux_unittest.cc',
'browser/ui/views/app_list/win/app_list_win_unittest.cc',
'browser/ui/views/apps/app_info_dialog/app_info_dialog_views_unittest.cc',
'browser/ui/views/apps/app_info_dialog/app_info_permissions_panel_unittest.cc',
'browser/ui/views/apps/shaped_app_window_targeter_unittest.cc',
'browser/ui/views/autofill/autofill_dialog_views_unittest.cc',
'browser/ui/views/bookmarks/bookmark_bar_view_unittest.cc',
'browser/ui/views/bookmarks/bookmark_bubble_view_unittest.cc',
'browser/ui/views/bookmarks/bookmark_context_menu_unittest.cc',
'browser/ui/views/bookmarks/bookmark_editor_view_unittest.cc',
'browser/ui/views/bookmarks/bookmark_menu_delegate_unittest.cc',
'browser/ui/views/bookmarks/bookmark_sync_promo_view_unittest.cc',
'browser/ui/views/confirm_bubble_views_unittest.cc',
'browser/ui/views/crypto_module_password_dialog_view_unittest.cc',
'browser/ui/views/desktop_media_picker_views_unittest.cc',
'browser/ui/views/extensions/media_galleries_dialog_views_unittest.cc',
'browser/ui/views/first_run_bubble_unittest.cc',
'browser/ui/views/frame/browser_view_layout_unittest.cc',
'browser/ui/views/frame/browser_view_unittest.cc',
'browser/ui/views/frame/opaque_browser_frame_view_layout_unittest.cc',
'browser/ui/views/frame/test_with_browser_view.cc',
'browser/ui/views/frame/test_with_browser_view.h',
'browser/ui/views/frame/web_contents_close_handler_unittest.cc',
'browser/ui/views/status_icons/status_tray_win_unittest.cc',
'browser/ui/views/tab_contents/chrome_web_contents_view_delegate_views_unittest.cc',
'browser/ui/views/tabs/fake_base_tab_strip_controller.cc',
'browser/ui/views/tabs/fake_base_tab_strip_controller.h',
'browser/ui/views/tabs/stacked_tab_strip_layout_unittest.cc',
'browser/ui/views/tabs/tab_strip_unittest.cc',
'browser/ui/views/tabs/tab_unittest.cc',
'browser/ui/views/toolbar/reload_button_unittest.cc',
'browser/ui/views/toolbar/test_toolbar_actions_bar_helper_views.cc',
'browser/ui/views/toolbar/toolbar_action_view_unittest.cc',
'browser/ui/views/translate/translate_bubble_view_unittest.cc',
'browser/ui/views/validation_message_bubble_delegate_unittest.cc',
],
'chrome_unit_tests_win_sources': [
'app/chrome_dll.rc',
'browser/search_engines/template_url_scraper_unittest.cc',
'test/data/resource.rc',
],
'chrome_unit_tests_mac_sources': [
'browser/media_galleries/fileapi/iphoto_file_util_unittest.cc',
'utility/media_galleries/iphoto_library_parser_unittest.cc',
],
'chrome_unit_tests_win_mac_sources': [
'browser/media_galleries/fileapi/itunes_file_util_unittest.cc',
'browser/media_galleries/fileapi/picasa_file_util_unittest.cc',
'utility/media_galleries/itunes_library_parser_unittest.cc',
'utility/media_galleries/picasa_album_table_reader_unittest.cc',
'utility/media_galleries/picasa_albums_indexer_unittest.cc',
'utility/media_galleries/pmp_column_reader_unittest.cc',
],
'chrome_unit_tests_themes_sources': [
'browser/themes/browser_theme_pack_unittest.cc',
'browser/themes/theme_properties_unittest.cc',
'browser/themes/theme_service_unittest.cc',
'browser/themes/theme_syncable_service_unittest.cc',
'browser/ui/webui/theme_source_unittest.cc',
],
'chrome_unit_tests_supervised_user_sources': [
'browser/supervised_user/child_accounts/family_info_fetcher_unittest.cc',
'browser/supervised_user/child_accounts/permission_request_creator_apiary_unittest.cc',
'browser/supervised_user/experimental/supervised_user_async_url_checker_unittest.cc',
'browser/supervised_user/legacy/supervised_user_pref_mapping_service_unittest.cc',
'browser/supervised_user/legacy/supervised_user_refresh_token_fetcher_unittest.cc',
'browser/supervised_user/legacy/supervised_user_registration_utility_unittest.cc',
'browser/supervised_user/legacy/supervised_user_shared_settings_service_unittest.cc',
'browser/supervised_user/legacy/supervised_user_shared_settings_update_unittest.cc',
'browser/supervised_user/legacy/supervised_user_sync_service_unittest.cc',
'browser/supervised_user/supervised_user_bookmarks_handler_unittest.cc',
'browser/supervised_user/supervised_user_pref_store_unittest.cc',
'browser/supervised_user/supervised_user_service_unittest.cc',
'browser/supervised_user/supervised_user_settings_service_unittest.cc',
'browser/supervised_user/supervised_user_url_filter_unittest.cc',
'browser/supervised_user/supervised_user_whitelist_service_unittest.cc',
],
# Everything but Android and iOS (iOS is handled separately).
'chrome_unit_tests_non_android_sources': [
# Bookmark export/import are handled via the BookmarkColumns
# ContentProvider.
'browser/bookmarks/bookmark_html_writer_unittest.cc',
'browser/browser_commands_unittest.cc',
'browser/diagnostics/diagnostics_controller_unittest.cc',
'browser/diagnostics/diagnostics_model_unittest.cc',
'browser/download/download_shelf_unittest.cc',
'browser/first_run/first_run_unittest.cc',
'browser/font_family_cache_unittest.cc',
'browser/importer/firefox_profile_lock_unittest.cc',
'browser/importer/profile_writer_unittest.cc',
# Android uses a different invaliator.
'browser/invalidation/gcm_invalidation_bridge_unittest.cc',
'browser/invalidation/ticl_profile_settings_provider_unittest.cc',
'browser/media_galleries/fileapi/native_media_file_util_unittest.cc',
'browser/media_galleries/gallery_watch_manager_unittest.cc',
'browser/media_galleries/linux/mtp_device_object_enumerator_unittest.cc',
'browser/media_galleries/mac/mtp_device_delegate_impl_mac_unittest.mm',
'browser/media_galleries/media_file_system_registry_unittest.cc',
'browser/media_galleries/media_folder_finder_unittest.cc',
'browser/media_galleries/media_galleries_dialog_controller_mock.cc',
'browser/media_galleries/media_galleries_dialog_controller_mock.h',
'browser/media_galleries/media_galleries_dialog_controller_test_util.cc',
'browser/media_galleries/media_galleries_dialog_controller_test_util.h',
'browser/media_galleries/media_galleries_permission_controller_unittest.cc',
'browser/media_galleries/media_galleries_permissions_unittest.cc',
'browser/media_galleries/media_galleries_preferences_unittest.cc',
'browser/media_galleries/media_galleries_scan_result_controller_unittest.cc',
'browser/media_galleries/media_scan_manager_unittest.cc',
'browser/media_galleries/win/mtp_device_delegate_impl_win_unittest.cc',
'browser/media_galleries/win/mtp_device_object_enumerator_unittest.cc',
'browser/net/firefox_proxy_settings_unittest.cc',
'browser/platform_util_unittest.cc',
'browser/power/process_power_collector_unittest.cc',
'browser/process_singleton_posix_unittest.cc',
'browser/profile_resetter/profile_resetter_unittest.cc',
'browser/renderer_context_menu/render_view_context_menu_unittest.cc',
'browser/search/instant_service_unittest.cc',
'browser/search/search_unittest.cc',
# GCMDriverAndroid is not yet implemented.
'browser/services/gcm/gcm_profile_service_unittest.cc',
'browser/sessions/persistent_tab_restore_service_unittest.cc',
'browser/sync/sessions/sessions_sync_manager_unittest.cc',
'browser/sync/sync_ui_util_unittest.cc', # Sync setup uses native ui.
'browser/translate/translate_manager_render_view_host_unittest.cc',
# The autofill popup is implemented in mostly native code on Android.
'browser/ui/autofill/autofill_popup_controller_unittest.cc',
'browser/ui/autofill/generated_credit_card_bubble_controller_unittest.cc',
'browser/ui/autofill/mock_new_credit_card_bubble_controller.cc',
'browser/ui/autofill/mock_new_credit_card_bubble_controller.h',
'browser/ui/autofill/test_generated_credit_card_bubble_controller.cc',
'browser/ui/autofill/test_generated_credit_card_bubble_controller.h',
'browser/ui/autofill/test_generated_credit_card_bubble_view.cc',
'browser/ui/autofill/test_generated_credit_card_bubble_view.h',
'browser/ui/bookmarks/bookmark_context_menu_controller_unittest.cc',
'browser/ui/bookmarks/bookmark_unittest.cc',
'browser/ui/browser_command_controller_unittest.cc',
'browser/ui/browser_instant_controller_unittest.cc',
'browser/ui/browser_iterator_unittest.cc',
'browser/ui/cocoa/autofill/new_credit_card_bubble_cocoa_unittest.mm',
'browser/ui/content_settings/content_setting_bubble_model_unittest.cc',
'browser/ui/content_settings/content_setting_image_model_unittest.cc',
'browser/ui/elide_url_unittest.cc', # URL elider not used on Android.
'browser/ui/exclusive_access/fullscreen_controller_state_unittest.cc',
'browser/ui/omnibox/omnibox_controller_unittest.cc',
'browser/ui/omnibox/omnibox_edit_unittest.cc',
'browser/ui/omnibox/omnibox_popup_model_unittest.cc',
'browser/ui/omnibox/omnibox_view_unittest.cc',
'browser/ui/panels/panel_mouse_watcher_unittest.cc',
'browser/ui/search/instant_page_unittest.cc',
'browser/ui/search/instant_search_prerenderer_unittest.cc',
'browser/ui/search/search_delegate_unittest.cc',
'browser/ui/search/search_ipc_router_policy_unittest.cc',
'browser/ui/search/search_ipc_router_unittest.cc',
'browser/ui/search/search_model_unittest.cc',
'browser/ui/search/search_tab_helper_unittest.cc',
'browser/ui/tab_contents/tab_contents_iterator_unittest.cc',
'browser/ui/tabs/pinned_tab_codec_unittest.cc',
'browser/ui/tabs/pinned_tab_service_unittest.cc',
'browser/ui/tabs/pinned_tab_test_utils.cc',
'browser/ui/tabs/tab_menu_model_unittest.cc',
'browser/ui/tabs/tab_strip_model_unittest.cc',
'browser/ui/tabs/test_tab_strip_model_delegate.cc',
'browser/ui/tabs/test_tab_strip_model_delegate.h',
'browser/ui/toolbar/back_forward_menu_model_unittest.cc',
'browser/ui/toolbar/encoding_menu_controller_unittest.cc',
'browser/ui/toolbar/recent_tabs_builder_test_helper.cc',
'browser/ui/toolbar/recent_tabs_builder_test_helper.h',
'browser/ui/toolbar/recent_tabs_sub_menu_model_unittest.cc',
'browser/ui/toolbar/test_toolbar_actions_bar_helper.h',
'browser/ui/toolbar/test_toolbar_model.cc',
'browser/ui/toolbar/test_toolbar_model.h',
'browser/ui/toolbar/toolbar_actions_bar_unittest.cc',
'browser/ui/toolbar/toolbar_model_unittest.cc',
'browser/ui/toolbar/wrench_icon_painter_unittest.cc',
'browser/ui/toolbar/wrench_menu_model_unittest.cc',
'browser/ui/website_settings/permission_menu_model_unittest.cc',
'browser/ui/webui/help/version_updater_chromeos_unittest.cc',
'browser/ui/webui/ntp/ntp_user_data_logger_unittest.cc',
'browser/ui/webui/ntp/suggestions_combiner_unittest.cc',
'browser/ui/webui/options/autofill_options_handler_unittest.cc',
'browser/ui/webui/options/language_options_handler_unittest.cc',
'browser/ui/webui/options/pepper_flash_content_settings_utils_unittest.cc',
'browser/ui/webui/options/sync_setup_handler_unittest.cc',
'browser/ui/webui/signin/login_ui_service_unittest.cc',
'browser/ui/webui/web_dialog_web_contents_delegate_unittest.cc',
'browser/ui/window_sizer/window_sizer_common_unittest.cc',
'browser/ui/window_sizer/window_sizer_unittest.cc',
'browser/ui/zoom/zoom_controller_unittest.cc',
# Bookmark undo is not used on Android.
'browser/undo/bookmark_undo_service_test.cc',
'browser/undo/undo_manager_test.cc',
# The importer code is not used on Android.
'common/importer/firefox_importer_utils_unittest.cc',
# No service process (which also requires multiprocess lock).
'common/multi_process_lock_unittest.cc',
'test/base/browser_with_test_window_test.cc',
'test/base/browser_with_test_window_test.h',
'utility/importer/bookmark_html_reader_unittest.cc',
'utility/importer/bookmarks_file_importer_unittest.cc',
'utility/importer/firefox_importer_unittest.cc',
'utility/importer/firefox_importer_unittest_messages_internal.h',
'utility/importer/firefox_importer_unittest_utils.h',
'utility/importer/firefox_importer_unittest_utils_mac.cc',
'utility/importer/safari_importer_unittest.mm',
],
# Everything but Android, ChromeOS and iOS (iOS is handled separately).
'chrome_unit_tests_non_android_or_chromeos_sources': [
'browser/sync/sync_global_error_unittest.cc',
'browser/upgrade_detector_impl_unittest.cc',
],
'chrome_unit_tests_app_list_sources': [
'browser/ui/app_list/app_list_positioner_unittest.cc',
'browser/ui/app_list/app_list_service_mac_unittest.mm',
'browser/ui/app_list/app_list_service_unittest.cc',
'browser/ui/app_list/app_list_shower_views_unittest.cc',
'browser/ui/app_list/app_list_test_util.cc',
'browser/ui/app_list/app_list_test_util.h',
'browser/ui/app_list/extension_app_model_builder_unittest.cc',
'browser/ui/app_list/model_pref_updater_unittest.cc',
'browser/ui/app_list/profile_loader_unittest.cc',
'browser/ui/app_list/search/omnibox_result_unittest.cc',
'browser/ui/app_list/search/suggestions/suggestions_search_provider_unittest.cc',
'browser/ui/app_list/speech_auth_helper_unittest.cc',
'browser/ui/app_list/test/fake_profile.cc',
'browser/ui/app_list/test/fake_profile.h',
'browser/ui/app_list/test/fake_profile_store.cc',
'browser/ui/app_list/test/fake_profile_store.h',
'browser/ui/app_list/test/fast_show_pickler_unittest.cc',
],
},
'targets': [
{
# This target contains mocks and test utilities that don't belong in
# production libraries but are used by more than one test executable.
#
# GN version: //chrome/test:test_support
'target_name': 'test_support_common',
'type': 'static_library',
'dependencies': [
# NOTE: New dependencies should generally be added in the OS!="ios"
# dependencies block below, rather than here.
'browser',
'chrome_resources.gyp:chrome_resources',
'chrome_resources.gyp:chrome_strings',
'chrome_resources.gyp:theme_resources',
'common',
'../base/base.gyp:base_prefs_test_support',
'../base/base.gyp:test_support_base',
'../components/components.gyp:bookmarks_test_support',
'../components/components.gyp:gcm_driver_test_support',
'../components/components.gyp:history_core_test_support',
'../components/components.gyp:invalidation',
'../components/components.gyp:invalidation_test_support',
'../components/components.gyp:metrics_test_support',
'../components/components.gyp:omnibox_test_support',
'../components/components.gyp:ownership',
'../components/components.gyp:password_manager_core_browser_test_support',
'../components/components.gyp:pref_registry_test_support',
'../components/components.gyp:rappor_test_support',
'../components/components.gyp:search_engines_test_support',
'../components/components.gyp:signin_core_browser_test_support',
'../components/components.gyp:sync_driver_test_support',
'../components/components.gyp:update_client_test_support',
'../components/components.gyp:wallpaper',
'../content/content.gyp:content_app_both',
'../content/content_shell_and_tests.gyp:test_support_content',
'../net/net.gyp:net',
'../net/net.gyp:net_test_support',
'../skia/skia.gyp:skia',
'../sql/sql.gyp:sql',
'../sql/sql.gyp:test_support_sql',
'../sync/sync.gyp:sync',
'../testing/gmock.gyp:gmock',
'../testing/gtest.gyp:gtest',
'../third_party/zlib/zlib.gyp:zlib',
'../ui/gfx/gfx.gyp:gfx_test_support',
'../ui/message_center/message_center.gyp:message_center_test_support',
],
'export_dependent_settings': [
'../base/base.gyp:test_support_base',
'../content/content_shell_and_tests.gyp:test_support_content',
],
'include_dirs': [
'..',
],
'sources': [
'app/chrome_main_delegate.cc',
'app/chrome_main_delegate.h',
'app/close_handle_hook_win.cc',
'app/close_handle_hook_win.h',
'browser/browsing_data/mock_browsing_data_appcache_helper.cc',
'browser/browsing_data/mock_browsing_data_appcache_helper.h',
'browser/browsing_data/mock_browsing_data_channel_id_helper.cc',
'browser/browsing_data/mock_browsing_data_channel_id_helper.h',
'browser/browsing_data/mock_browsing_data_cookie_helper.cc',
'browser/browsing_data/mock_browsing_data_cookie_helper.h',
'browser/browsing_data/mock_browsing_data_database_helper.cc',
'browser/browsing_data/mock_browsing_data_database_helper.h',
'browser/browsing_data/mock_browsing_data_file_system_helper.cc',
'browser/browsing_data/mock_browsing_data_file_system_helper.h',
'browser/browsing_data/mock_browsing_data_flash_lso_helper.cc',
'browser/browsing_data/mock_browsing_data_flash_lso_helper.h',
'browser/browsing_data/mock_browsing_data_indexed_db_helper.cc',
'browser/browsing_data/mock_browsing_data_indexed_db_helper.h',
'browser/browsing_data/mock_browsing_data_local_storage_helper.cc',
'browser/browsing_data/mock_browsing_data_local_storage_helper.h',
'browser/browsing_data/mock_browsing_data_quota_helper.cc',
'browser/browsing_data/mock_browsing_data_quota_helper.h',
'browser/browsing_data/mock_browsing_data_service_worker_helper.cc',
'browser/browsing_data/mock_browsing_data_service_worker_helper.h',
'browser/download/download_test_file_activity_observer.cc',
'browser/download/download_test_file_activity_observer.h',
'browser/download/test_download_shelf.cc',
'browser/download/test_download_shelf.h',
'browser/invalidation/fake_invalidation_service.cc',
'browser/invalidation/fake_invalidation_service.h',
'browser/media/fake_desktop_media_list.cc',
'browser/media/fake_desktop_media_list.h',
'browser/net/dns_probe_test_util.cc',
'browser/net/dns_probe_test_util.h',
'browser/net/url_request_mock_util.cc',
'browser/net/url_request_mock_util.h',
'browser/notifications/notification_test_util.cc',
'browser/notifications/notification_test_util.h',
'browser/password_manager/mock_password_store_service.cc',
'browser/password_manager/mock_password_store_service.h',
'browser/password_manager/null_password_store_service.cc',
'browser/password_manager/null_password_store_service.h',
'browser/password_manager/test_password_store_service.cc',
'browser/password_manager/test_password_store_service.h',
'browser/prefs/pref_service_mock_factory.cc',
'browser/prefs/pref_service_mock_factory.h',
'browser/profile_resetter/profile_resetter_test_base.cc',
'browser/profile_resetter/profile_resetter_test_base.h',
'browser/search_engines/template_url_service_factory_test_util.cc',
'browser/search_engines/template_url_service_factory_test_util.h',
'browser/search_engines/template_url_service_test_util.cc',
'browser/search_engines/template_url_service_test_util.h',
'browser/sessions/session_restore_test_helper.cc',
'browser/sessions/session_restore_test_helper.h',
'browser/sessions/session_service_test_helper.cc',
'browser/sessions/session_service_test_helper.h',
'browser/signin/fake_account_reconcilor.cc',
'browser/signin/fake_account_reconcilor.h',
'browser/signin/fake_account_tracker_service.cc',
'browser/signin/fake_account_tracker_service.h',
'browser/signin/fake_profile_oauth2_token_service.cc',
'browser/signin/fake_profile_oauth2_token_service.h',
'browser/signin/fake_profile_oauth2_token_service_builder.cc',
'browser/signin/fake_profile_oauth2_token_service_builder.h',
'browser/signin/fake_signin_manager.cc',
'browser/signin/fake_signin_manager.h',
'browser/ssl/ssl_client_auth_requestor_mock.cc',
'browser/ssl/ssl_client_auth_requestor_mock.h',
'browser/sync/profile_sync_components_factory_mock.cc',
'browser/sync/profile_sync_components_factory_mock.h',
'browser/sync/profile_sync_service_mock.cc',
'browser/sync/profile_sync_service_mock.h',
'browser/ui/browser.h',
'browser/ui/cocoa/find_bar/find_bar_host_unittest_util_cocoa.mm',
'browser/ui/cocoa/run_loop_testing.h',
'browser/ui/cocoa/run_loop_testing.mm',
'browser/ui/exclusive_access/fullscreen_controller_state_test.cc',
'browser/ui/exclusive_access/fullscreen_controller_state_test.h',
'browser/ui/exclusive_access/fullscreen_controller_state_tests.h',
'browser/ui/exclusive_access/fullscreen_controller_test.cc',
'browser/ui/exclusive_access/fullscreen_controller_test.h',
'browser/ui/find_bar/find_bar_host_unittest_util.h',
'browser/ui/login/login_prompt_test_utils.cc',
'browser/ui/login/login_prompt_test_utils.h',
'browser/ui/passwords/manage_passwords_ui_controller_mock.cc',
'browser/ui/passwords/manage_passwords_ui_controller_mock.h',
'browser/ui/pdf/pdf_browsertest_base.cc',
'browser/ui/pdf/pdf_browsertest_base.h',
'browser/ui/test/test_confirm_bubble_model.cc',
'browser/ui/test/test_confirm_bubble_model.h',
'browser/ui/toolbar/test_toolbar_action_view_controller.cc',
'browser/ui/toolbar/test_toolbar_action_view_controller.h',
'browser/ui/views/find_bar_host_unittest_util_views.cc',
'browser/ui/website_settings/mock_permission_bubble_request.cc',
'browser/ui/website_settings/mock_permission_bubble_request.h',
'browser/ui/webui/signin/login_ui_test_utils.cc',
'browser/ui/webui/signin/login_ui_test_utils.h',
'renderer/chrome_mock_render_thread.cc',
'renderer/chrome_mock_render_thread.h',
'renderer/safe_browsing/mock_feature_extractor_clock.cc',
'renderer/safe_browsing/mock_feature_extractor_clock.h',
'renderer/safe_browsing/test_utils.cc',
'renderer/safe_browsing/test_utils.h',
'test/base/chrome_process_util.cc',
'test/base/chrome_process_util.h',
'test/base/chrome_process_util_mac.cc',
'test/base/chrome_render_view_host_test_harness.cc',
'test/base/chrome_render_view_host_test_harness.h',
'test/base/chrome_test_launcher.cc',
'test/base/chrome_test_launcher.h',
'test/base/chrome_test_suite.cc',
'test/base/chrome_test_suite.h',
'test/base/chrome_unit_test_suite.cc',
'test/base/chrome_unit_test_suite.h',
'test/base/find_in_page_observer.cc',
'test/base/find_in_page_observer.h',
'test/base/history_index_restore_observer.cc',
'test/base/history_index_restore_observer.h',
'test/base/in_process_browser_test.cc',
'test/base/in_process_browser_test.h',
'test/base/profile_mock.cc',
'test/base/profile_mock.h',
'test/base/scoped_browser_locale.cc',
'test/base/scoped_browser_locale.h',
'test/base/scoped_testing_local_state.cc',
'test/base/scoped_testing_local_state.h',
'test/base/test_browser_window.cc',
'test/base/test_browser_window.h',
'test/base/test_launcher_utils.cc',
'test/base/test_launcher_utils.h',
'test/base/test_switches.cc',
'test/base/test_switches.h',
'test/base/testing_browser_process.cc',
'test/base/testing_browser_process.h',
'test/base/testing_browser_process_platform_part.cc',
'test/base/testing_browser_process_platform_part.h',
'test/base/testing_io_thread_state.cc',
'test/base/testing_io_thread_state.h',
'test/base/testing_pref_service_syncable.cc',
'test/base/testing_pref_service_syncable.h',
'test/base/testing_profile.cc',
'test/base/testing_profile.h',
'test/base/testing_profile_manager.cc',
'test/base/testing_profile_manager.h',
'test/base/tracing.cc',
'test/base/tracing.h',
'test/base/ui_test_utils.cc',
'test/base/ui_test_utils.h',
'test/logging/win/file_logger.cc',
'test/logging/win/file_logger.h',
'test/logging/win/log_file_printer.cc',
'test/logging/win/log_file_printer.h',
'test/logging/win/log_file_reader.cc',
'test/logging/win/log_file_reader.h',
'test/logging/win/mof_data_parser.cc',
'test/logging/win/mof_data_parser.h',
'test/logging/win/test_log_collector.cc',
'test/logging/win/test_log_collector.h',
],
'conditions': [
['OS!="ios"', {
'dependencies': [
'child',
'plugin',
'renderer',
'utility',
'../content/content.gyp:content_gpu',
'../content/content.gyp:content_plugin',
'../content/content.gyp:content_ppapi_plugin',
'../content/content.gyp:content_renderer',
'../content/content.gyp:content_utility',
'../components/components.gyp:autofill_core_test_support',
'../components/components.gyp:captive_portal_test_support',
'../components/components.gyp:rappor_test_support',
'../components/components.gyp:sessions_test_support',
'../components/components.gyp:web_resource_test_support',
'../google_apis/google_apis.gyp:google_apis_test_support',
'../ipc/ipc.gyp:test_support_ipc',
'../media/media.gyp:media_test_support',
'../ppapi/ppapi_internal.gyp:ppapi_shared',
'../sql/sql.gyp:test_support_sql',
'../third_party/leveldatabase/leveldatabase.gyp:leveldatabase',
],
'export_dependent_settings': [
'renderer',
],
}, { # OS=="ios"
'sources/': [
# Exclude everything but iOS-specific files.
['exclude', '\\.(cc|mm)$'],
['include', '_ios\\.(cc|mm)$'],
['include', '(^|/)ios/'],
# TODO(ios): Add files here as they are updated to compile on iOS.
['include', '^test/base/chrome_test_suite\\.cc$'],
['include', '^test/base/chrome_unit_test_suite\\.cc$'],
['include', '^test/base/testing_browser_process'],
],
}],
['chromeos==1', {
'dependencies': [
'../build/linux/system.gyp:dbus',
'../chromeos/chromeos.gyp:chromeos_test_support',
'../components/components.gyp:user_manager_test_support',
],
'sources': [
# Note: sources list duplicated in GN build.
'browser/chromeos/app_mode/fake_cws.cc',
'browser/chromeos/app_mode/fake_cws.h',
'browser/chromeos/file_manager/fake_disk_mount_manager.cc',
'browser/chromeos/file_manager/fake_disk_mount_manager.h',
'browser/chromeos/input_method/mock_candidate_window_controller.cc',
'browser/chromeos/input_method/mock_candidate_window_controller.h',
'browser/chromeos/input_method/mock_input_method_engine.cc',
'browser/chromeos/input_method/mock_input_method_engine.h',
'browser/chromeos/input_method/mock_input_method_manager.cc',
'browser/chromeos/input_method/mock_input_method_manager.h',
'browser/chromeos/login/screens/mock_device_disabled_screen_actor.cc',
'browser/chromeos/login/screens/mock_device_disabled_screen_actor.h',
'browser/chromeos/login/session/user_session_manager_test_api.cc',
'browser/chromeos/login/session/user_session_manager_test_api.h',
'browser/chromeos/login/test/js_checker.cc',
'browser/chromeos/login/test/js_checker.h',
'browser/chromeos/login/test/oobe_screen_waiter.cc',
'browser/chromeos/login/test/oobe_screen_waiter.h',
'browser/chromeos/login/ui/mock_login_display.cc',
'browser/chromeos/login/ui/mock_login_display.h',
'browser/chromeos/login/ui/mock_login_display_host.cc',
'browser/chromeos/login/ui/mock_login_display_host.h',
'browser/chromeos/login/users/avatar/mock_user_image_manager.cc',
'browser/chromeos/login/users/avatar/mock_user_image_manager.h',
'browser/chromeos/login/users/fake_chrome_user_manager.cc',
'browser/chromeos/login/users/fake_chrome_user_manager.h',
'browser/chromeos/login/users/fake_supervised_user_manager.cc',
'browser/chromeos/login/users/fake_supervised_user_manager.h',
'browser/chromeos/login/users/mock_user_manager.cc',
'browser/chromeos/login/users/mock_user_manager.h',
'browser/chromeos/net/network_portal_detector_test_utils.cc',
'browser/chromeos/net/network_portal_detector_test_utils.h',
'browser/chromeos/policy/cloud_external_data_manager_base_test_util.cc',
'browser/chromeos/policy/cloud_external_data_manager_base_test_util.h',
'browser/chromeos/policy/device_policy_builder.cc',
'browser/chromeos/policy/device_policy_builder.h',
'browser/chromeos/policy/fake_consumer_management_service.cc',
'browser/chromeos/policy/fake_consumer_management_service.h',
'browser/chromeos/policy/fake_device_cloud_policy_initializer.cc',
'browser/chromeos/policy/fake_device_cloud_policy_initializer.h',
'browser/chromeos/policy/fake_device_cloud_policy_manager.cc',
'browser/chromeos/policy/fake_device_cloud_policy_manager.h',
'browser/chromeos/policy/stub_enterprise_install_attributes.cc',
'browser/chromeos/policy/stub_enterprise_install_attributes.h',
'browser/chromeos/settings/device_settings_test_helper.cc',
'browser/chromeos/settings/device_settings_test_helper.h',
'browser/chromeos/system/fake_input_device_settings.cc',
'browser/chromeos/system/fake_input_device_settings.h',
],
}],
['configuration_policy==1', {
'dependencies': [
'../components/components.gyp:policy_component_test_support',
'../components/components.gyp:policy_test_support',
],
'export_dependent_settings': [
'../components/components.gyp:policy_component_test_support',
'../components/components.gyp:policy_test_support',
],
'sources': [
'browser/policy/test/local_policy_test_server.cc',
'browser/policy/test/local_policy_test_server.h',
],
}],
['safe_browsing==1', {
'sources': [
'browser/extensions/fake_safe_browsing_database_manager.cc',
'browser/extensions/fake_safe_browsing_database_manager.h',
],
}],
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:ssl',
],
}],
['enable_print_preview==1', {
'dependencies': [
'service',
],
}],
['enable_extensions==1', {
'dependencies': [
'../components/components.gyp:storage_monitor_test_support',
'../extensions/extensions.gyp:extensions_test_support',
'common/extensions/api/api.gyp:chrome_api',
],
'sources': [
'browser/drive/dummy_drive_service.cc',
'browser/drive/dummy_drive_service.h',
'browser/drive/fake_drive_service.cc',
'browser/drive/fake_drive_service.h',
'browser/drive/test_util.cc',
'browser/drive/test_util.h',
'browser/extensions/api/messaging/native_messaging_test_util.cc',
'browser/extensions/api/messaging/native_messaging_test_util.h',
'browser/extensions/extension_action_test_util.cc',
'browser/extensions/extension_action_test_util.h',
'browser/extensions/extension_notification_observer.cc',
'browser/extensions/extension_notification_observer.h',
'browser/extensions/mock_extension_special_storage_policy.cc',
'browser/extensions/mock_extension_special_storage_policy.h',
'browser/extensions/test_blacklist.cc',
'browser/extensions/test_blacklist.h',
'browser/extensions/test_blacklist_state_fetcher.cc',
'browser/extensions/test_blacklist_state_fetcher.h',
'browser/extensions/test_extension_dir.cc',
'browser/extensions/test_extension_dir.h',
'browser/extensions/test_extension_environment.cc',
'browser/extensions/test_extension_environment.h',
'browser/extensions/test_extension_prefs.cc',
'browser/extensions/test_extension_prefs.h',
'browser/extensions/test_extension_service.cc',
'browser/extensions/test_extension_service.h',
'browser/extensions/test_extension_system.cc',
'browser/extensions/test_extension_system.h',
'browser/media_galleries/media_galleries_test_util.cc',
'browser/media_galleries/media_galleries_test_util.h',
'common/extensions/extension_test_util.cc',
'common/extensions/extension_test_util.h',
],
}],
['OS=="win"', {
'include_dirs': [
'<(DEPTH)/third_party/wtl/include',
],
'conditions': [
['use_aura==1', {
'dependencies': [
'../win8/win8.gyp:test_registrar_constants',
'../win8/win8.gyp:test_support_win8',
],
}],
],
}],
['OS=="win" or OS=="mac"', {
'sources': [
'common/media_galleries/picasa_test_util.cc',
            'common/media_galleries/picasa_test_util.h',
'common/media_galleries/pmp_test_util.cc',
'common/media_galleries/pmp_test_util.h',
],
}],
['OS=="mac"', {
'dependencies': [
'../breakpad/breakpad.gyp:breakpad',
'../components/components.gyp:crash_component',
],
'sources': [
'app/chrome_crash_reporter_client.cc',
'app/chrome_crash_reporter_client_mac.mm',
'app/chrome_main_mac.mm',
],
}],
['enable_mdns==1', {
'sources': [
'browser/local_discovery/test_service_discovery_client.cc',
'browser/local_discovery/test_service_discovery_client.h',
],
}],
['enable_app_list==1', {
'sources': [
'browser/ui/app_list/test/chrome_app_list_test_support.cc',
'browser/ui/app_list/test/chrome_app_list_test_support.h',
'browser/ui/app_list/test/test_app_list_controller_delegate.cc',
'browser/ui/app_list/test/test_app_list_controller_delegate.h',
],
}],
['enable_webrtc==1', {
'sources': [
'renderer/media/mock_webrtc_logging_message_filter.cc',
'renderer/media/mock_webrtc_logging_message_filter.h',
],
}],
          ['enable_wifi_bootstrapping==1', {
            'sources': [
'browser/local_discovery/wifi/mock_wifi_manager.cc',
'browser/local_discovery/wifi/mock_wifi_manager.h',
],
}],
['enable_plugins==1', {
            'sources': [
'test/ppapi/ppapi_test.cc',
'test/ppapi/ppapi_test.h',
],
'dependencies': [
'../pdf/pdf.gyp:pdf',
],
}],
['enable_plugins==1 and disable_nacl==0', {
'dependencies': [
'<(DEPTH)/components/nacl/renderer/plugin/plugin.gyp:nacl_trusted_plugin',
],
}],
['remoting==1', {
'dependencies': [
'../remoting/remoting.gyp:remoting_client_plugin',
],
}],
],
},
{
# GN version: //chrome/test:test_support_unit
'target_name': 'test_support_unit',
'type': 'static_library',
'dependencies': [
'chrome_resources.gyp:chrome_resources',
'chrome_resources.gyp:chrome_strings',
'browser',
'common',
'test_support_common',
'../base/base.gyp:base',
'../mojo/mojo_base.gyp:mojo_environment_chromium',
'../skia/skia.gyp:skia',
'../sync/sync.gyp:sync',
'../testing/gmock.gyp:gmock',
'../testing/gtest.gyp:gtest',
'../third_party/mojo/mojo_edk.gyp:mojo_system_impl',
],
'include_dirs': [
'..',
],
'sources': [
'browser/sync/glue/session_sync_test_helper.cc',
'browser/sync/glue/session_sync_test_helper.h',
'test/base/run_all_unittests.cc',
],
},
{
# GN version: //chrome/test:unit_tests
'target_name': 'unit_tests',
'type': '<(gtest_target_type)',
'dependencies': [
# NOTE: New dependencies should generally be added in the OS!="ios"
# dependencies block below, rather than here.
# Unit tests should only depend on:
# 1) everything that the chrome binaries depend on:
'<@(chromium_browser_dependencies)',
'<@(chromium_child_dependencies)',
# 2) test-specific support libraries:
'../base/base.gyp:test_support_base',
'../components/components_resources.gyp:components_resources',
'../components/components.gyp:content_settings_core_test_support',
'../content/content_shell_and_tests.gyp:test_support_content',
'../content/content.gyp:content_app_both',
'../crypto/crypto.gyp:crypto_test_support',
'../net/net.gyp:net',
'../net/net.gyp:net_test_support',
'../sync/sync.gyp:test_support_sync_api',
'../sync/sync.gyp:test_support_sync_core',
'../sync/sync.gyp:test_support_sync_internal_api',
'../testing/gmock.gyp:gmock',
'../testing/gtest.gyp:gtest',
'test_support_common',
'test_support_unit',
# 3) anything tests directly depend on
'../courgette/courgette.gyp:courgette_lib',
'../google_apis/google_apis.gyp:google_apis',
'../skia/skia.gyp:skia',
'../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation',
'../third_party/icu/icu.gyp:icui18n',
'../third_party/icu/icu.gyp:icuuc',
'../third_party/libxml/libxml.gyp:libxml',
'../ui/base/ui_base.gyp:ui_base_test_support',
'../ui/gfx/gfx.gyp:gfx_test_support',
'../ui/resources/ui_resources.gyp:ui_resources',
'chrome_resources.gyp:chrome_resources',
'chrome_resources.gyp:chrome_strings',
],
'include_dirs': [
'..',
],
'msvs_settings': {
'VCLinkerTool': {
'conditions': [
['incremental_chrome_dll==1', {
'UseLibraryDependencyInputs': "true",
}],
],
},
},
'sources': [ '<@(chrome_unit_tests_sources)' ],
'conditions': [
['OS!="ios"', {
'dependencies': [
'../components/components.gyp:autofill_content_test_support',
'../components/components.gyp:component_metrics_proto',
'../components/components.gyp:data_reduction_proxy_test_support',
'../components/components.gyp:webdata_services_test_support',
'../components/components_strings.gyp:components_strings',
'../content/app/resources/content_resources.gyp:content_resources',
'../device/bluetooth/bluetooth.gyp:device_bluetooth_mocks',
'../gpu/gpu.gyp:gpu_unittest_utils',
'../media/media.gyp:media_test_support',
'../ppapi/ppapi_internal.gyp:ppapi_unittest_shared',
'../third_party/leveldatabase/leveldatabase.gyp:leveldatabase',
'../third_party/libaddressinput/libaddressinput.gyp:libaddressinput',
'../third_party/libjingle/libjingle.gyp:libjingle',
'../third_party/libphonenumber/libphonenumber.gyp:libphonenumber',
'../tools/json_schema_compiler/test/json_schema_compiler_tests.gyp:json_schema_compiler_tests',
'../ui/gl/gl.gyp:gl',
'../v8/tools/gyp/v8.gyp:v8',
],
# TODO(scr): Use this in browser_tests too.
'includes': [
'js_unittest_rules.gypi',
],
'include_dirs': [
'<(SHARED_INTERMEDIATE_DIR)/chrome',
],
}, { # OS=="ios"
'dependencies': [
'../third_party/ocmock/ocmock.gyp:ocmock',
],
'sources/': [
# Exclude everything but iOS-specific files.
['exclude', '\\.(cc|mm)$'],
['include', '_ios\\.(cc|mm)$'],
['include', '(^|/)ios/'],
# TODO(ios): Add files here as they are updated to compile on iOS.
['include', '^common/translate/language_detection_util_unittest\\.cc$'],
['include', '^common/translate/translate_util_unittest\\.cc$'],
['include', '^common/zip_'],
],
# Bundle all the necessary resources into the test bundle.
'includes': ['chrome_ios_bundle_resources.gypi'],
'xcode_settings': {'OTHER_LDFLAGS': ['-ObjC']},
}],
['OS=="ios" or chromeos==1', {
'sources!': [
'browser/metrics/signin_status_metrics_provider_unittest.cc',
],
}],
['enable_background==1', {
'sources': [ '<@(chrome_unit_tests_background_sources)' ],
}],
['enable_spellcheck==1', {
'sources': [ '<@(chrome_unit_tests_spellchecker_sources)' ],
'conditions': [
['OS!="mac" and OS!="ios"', {
'dependencies': [ '../third_party/hunspell/hunspell.gyp:hunspell' ],
}],
],
}],
['enable_one_click_signin==1', {
'sources': [ '<@(chrome_unit_tests_one_click_signin_sources)' ],
'conditions': [
['chromeos == 1', {
'sources!': [
'browser/ui/sync/one_click_signin_helper_unittest.cc',
'browser/ui/sync/one_click_signin_sync_starter_unittest.cc',
],
}],
['toolkit_views == 0', {
'sources!': [
'browser/ui/views/sync/one_click_signin_bubble_view_unittest.cc',
],
}],
],
}],
['enable_extensions==1', {
'sources': [ '<@(chrome_unit_tests_extensions_sources)' ],
'dependencies': [
'common/extensions/api/api.gyp:chrome_api',
'../components/components.gyp:audio_modem_test_support',
'../extensions/extensions_resources.gyp:extensions_resources',
'../extensions/extensions_strings.gyp:extensions_strings',
],
'conditions': [
['configuration_policy==1', {
'sources': [ '<@(chrome_unit_tests_extensions_policy_sources)' ],
}],
['chromeos == 0', {
'sources': [
'<@(chrome_unit_tests_extensions_non_chromeos_sources)',
],
}],
],
}],
['use_ash==1', {
# TODO(calamity): Consider moving the tests to regular
# "chrome_unit_tests_sources" and not require Ash. crbug.com/439542.
'sources': [ '<@(chrome_unit_tests_ash_sources)' ],
'dependencies': [
'../ash/ash.gyp:ash_test_support',
'../ash/ash_resources.gyp:ash_resources',
'../ash/ash_strings.gyp:ash_strings',
],
# We eventually want to compile both in Win Aura builds, see
# http://crbug.com/155545.
'conditions': [
['OS !="win"', {
'sources!': [
'browser/ui/window_sizer/window_sizer_unittest.cc',
],
}],
],
}],
['use_aura==1', {
'dependencies': [
'../ui/wm/wm.gyp:wm',
'../ui/aura/aura.gyp:aura_test_support',
'../ui/views/views.gyp:views_test_support',
],
'sources': [
'../ui/views/controls/webview/webview_unittest.cc',
],
}],
['chromeos==0 and OS=="linux"', {
'sources': [ '<@(chrome_unit_tests_desktop_linux_sources)' ],
}],
['chromeos==0 and use_ozone==0 and OS=="linux"', {
'dependencies': [
'browser/ui/libgtk2ui/libgtk2ui.gyp:gtk2ui',
'../build/linux/system.gyp:gio',
],
'conditions': [
['component != "shared_library"', {
# TODO(erg): This file does not compile in shared library mode
# because it is reaching into the internals of libgtk2ui, which
# shouldn't be linked with the rest of chrome. This should either
# be fixed by creating a separate unit test target, or by deleting
# the test.
'sources': [
'browser/ui/libgtk2ui/x11_input_method_context_impl_gtk2_unittest.cc',
],
}],
],
}],
['use_aura==1 or toolkit_views==1', {
'dependencies': [
'../ui/events/events.gyp:events_test_support',
],
}],
['enable_task_manager==1', {
'sources': [ '<@(chrome_unit_tests_task_manager_sources)' ],
}],
['enable_mdns==1', {
'sources': [ '<@(chrome_unit_tests_mdns_sources)' ],
}],
['enable_service_discovery==1', {
'sources': [ '<@(chrome_unit_tests_service_discovery_sources)' ],
}],
['configuration_policy==1', {
'sources': [ '<@(chrome_unit_tests_configuration_policy_sources)' ],
'conditions': [
['chromeos==1', {
'sources!': [
'browser/policy/cloud/user_policy_signin_service_unittest.cc',
],
}],
['OS=="android"', {
'sources!': [
'browser/policy/policy_path_parser_unittest.cc',
],
}],
['OS!="android" and OS!="ios" and chromeos==0', {
'sources': [
'browser/net/disk_cache_dir_policy_handler_unittest.cc',
],
}],
['OS!="android" and OS!="ios"', {
'sources': [
'browser/download/download_dir_policy_handler_unittest.cc'
],
}],
],
}],
['enable_web_speech==1', {
'sources': [ '<@(chrome_unit_tests_speech_sources)' ],
}],
['notifications==1', {
'sources': [ '<@(chrome_unit_tests_notifications_sources)' ],
'conditions': [
['OS == "android"', {
'sources!': [
# Android does not use the Message Center notification system.
'browser/notifications/message_center_notifications_unittest.cc',
'browser/notifications/message_center_settings_controller_unittest.cc',
],
}],
],
}],
['safe_browsing==1', {
# TODO(sgurun): enable tests for safe_browsing==2.
'sources': [ '<@(chrome_unit_tests_full_safe_browsing_sources)' ],
'defines': [ 'FULL_SAFE_BROWSING' ],
'conditions': [
['chromeos == 1', {
'sources!': [
'browser/safe_browsing/download_protection_service_unittest.cc',
],
}],
['OS == "android"', {
'sources!': [
# Android doesn't support download feedbacks.
'browser/safe_browsing/download_feedback_service_unittest.cc',
'browser/safe_browsing/download_feedback_unittest.cc',
'browser/safe_browsing/download_protection_service_unittest.cc',
'browser/safe_browsing/two_phase_uploader_unittest.cc',
],
}],
],
}],
['enable_autofill_dialog==1 and OS!="android"', {
'sources': [ '<@(chrome_unit_tests_autofill_dialog_sources)' ],
}],
['enable_plugins==1', {
'sources': [ '<@(chrome_unit_tests_plugins_sources)' ],
}],
['enable_print_preview==1', {
'sources': [ '<@(chrome_unit_tests_print_preview_sources)' ],
}],
['enable_captive_portal_detection==1', {
'sources': [ '<@(chrome_unit_tests_captive_portal_sources)' ],
}],
['enable_session_service==1', {
'sources': [ '<@(chrome_unit_tests_session_service_sources)' ],
}],
['enable_webrtc==1', {
'sources': [ '<@(chrome_unit_tests_webrtc_sources)' ],
}],
['chromeos==1', {
'sources': [ '<@(chrome_unit_tests_chromeos_sources)' ],
'dependencies': [
'../ash/ash_resources.gyp:ash_resources',
'../ui/chromeos/ui_chromeos.gyp:ui_chromeos_resources',
],
'sources!': [
'browser/signin/signin_global_error_unittest.cc',
'browser/signin/signin_manager_unittest.cc',
'browser/signin/signin_names_io_thread_unittest.cc',
'browser/ui/views/app_list/linux/app_list_linux_unittest.cc',
'browser/ui/views/frame/opaque_browser_frame_view_layout_unittest.cc',
],
}],
['use_x11==1', {
'dependencies': [
'../tools/xdisplaycheck/xdisplaycheck.gyp:xdisplaycheck',
'../ui/events/devices/events_devices.gyp:events_devices',
],
}],
[ 'cld_version==0 or cld_version==2', {
'dependencies': [
# Unit tests should be independent of the CLD2 access mechanism,
# just use static for simplicity.
'<(DEPTH)/third_party/cld_2/cld_2.gyp:cld2_static', ],
}],
['OS=="linux"', {
'dependencies': [
'../build/linux/system.gyp:ssl',
],
}],
# Only add this test for 64 bit builds because otherwise we need the 32
# bit library on 64 bit systems when running this test.
['use_gnome_keyring == 1 and target_arch=="x64"', {
'sources': [
'browser/password_manager/native_backend_gnome_x_unittest.cc',
],
# We use a few library functions directly, so link directly.
'dependencies': [
'../build/linux/system.gyp:gnome_keyring_direct',
],
}],
['OS=="linux" and chromeos!=1', {
'sources': [
'browser/password_manager/native_backend_libsecret_unittest.cc',
],
}],
['OS=="linux" and use_aura==1', {
'dependencies': [
'../build/linux/system.gyp:dbus',
'../dbus/dbus.gyp:dbus_test_support',
'../ui/aura/aura.gyp:aura_test_support',
],
}],
['OS=="linux" and branding=="Chrome" and target_arch=="ia32"', {
'configurations': {
'Release': {
'ldflags': [
'-Wl,--strip-debug',
],
},
},
}],
['os_posix == 1 and OS != "mac" and OS != "ios" and OS != "android"', {
'conditions': [
['use_allocator!="none"', {
'dependencies': [
'../base/allocator/allocator.gyp:allocator',
],
}],
],
}],
['OS=="mac"', {
# The test fetches resources which means Mac need the app bundle to
# exist on disk so it can pull from it.
'dependencies': [
'../third_party/google_toolbox_for_mac/google_toolbox_for_mac.gyp:google_toolbox_for_mac',
'../third_party/ocmock/ocmock.gyp:ocmock',
'chrome',
],
'sources': [ '<@(chrome_unit_tests_mac_sources)' ],
'sources!': [
'browser/ui/tests/ui_gfx_image_unittest.cc',
'renderer/spellchecker/spellcheck_provider_hunspell_unittest.cc',
'tools/convert_dict/convert_dict_unittest.cc',
],
# TODO(mark): We really want this for all non-static library targets,
# but when we tried to pull it up to the common.gypi level, it broke
# other things like the ui and startup tests. *shrug*
'xcode_settings': {'OTHER_LDFLAGS': ['-Wl,-ObjC']},
}],
['OS!="mac" and OS!="ios"', {
'dependencies': [
'../third_party/hunspell/hunspell.gyp:hunspell',
'chrome_resources.gyp:packed_extra_resources',
'chrome_resources.gyp:packed_resources',
'tools/convert_dict/convert_dict.gyp:convert_dict_lib',
],
}],
['OS=="win" or OS=="mac"', {
'sources': [ '<@(chrome_unit_tests_win_mac_sources)' ],
}],
['OS=="win" or OS=="mac" or chromeos==1', {
'sources': [
'common/extensions/api/networking_private/networking_private_crypto_unittest.cc',
],
}],
['enable_rlz!=0', {
'dependencies': [
'../rlz/rlz.gyp:test_support_rlz',
],
}, { # enable_rlz==0
'sources!': [
'browser/rlz/rlz_unittest.cc',
],
}],
['OS=="win" and component!="shared_library"', {
# Unit_tests pdb files can get too big when incremental linking is
# on, disabling for this target.
'configurations': {
'Debug': {
'msvs_settings': {
'VCLinkerTool': {
'LinkIncremental': '<(msvs_debug_link_nonincremental)',
},
},
},
},
}],
['OS=="win"', {
'dependencies': [
'browser/safe_browsing/verifier_test/verifier_unittest.gyp:verifier_test_dll_1',
'browser/safe_browsing/verifier_test/verifier_unittest.gyp:verifier_test_dll_2',
'chrome_version_resources',
'installer_util_strings',
'../chrome_elf/chrome_elf.gyp:blacklist_test_dll_1',
'../third_party/iaccessible2/iaccessible2.gyp:iaccessible2',
'../third_party/isimpledom/isimpledom.gyp:isimpledom',
],
'conditions': [
['win_use_allocator_shim==1', {
'dependencies': [
'<(allocator_target)',
],
}],
],
'include_dirs': [
'<(DEPTH)/third_party/wtl/include',
],
'sources': [
'<@(chrome_unit_tests_win_sources)',
# TODO: It would be nice to have these pulled in
# automatically from direct_dependent_settings in
# their various targets (net.gyp:net_resources, etc.),
# but that causes errors in other targets when
# resulting .res files get referenced multiple times.
'<(SHARED_INTERMEDIATE_DIR)/chrome_version/other_version.rc',
'<(SHARED_INTERMEDIATE_DIR)/installer_util_strings/installer_util_strings.rc',
'<(SHARED_INTERMEDIATE_DIR)/ui/resources/ui_unscaled_resources.rc',
],
'link_settings': {
'libraries': [
'-lcomsupp.lib',
'-loleacc.lib',
'-lrpcrt4.lib',
'-lurlmon.lib',
'-lwinmm.lib',
],
},
}],
['OS=="android" or OS=="ios"', {
'sources!': [
'browser/devtools/device/webrtc/devtools_bridge_instances_request_unittest.cc',
'browser/ui/sync/sync_promo_ui_unittest.cc',
],
}, { # 'OS!="android" and OS!="ios"'
'dependencies': [
'tools/profile_reset/jtl_compiler.gyp:jtl_compiler_lib',
],
'sources!': [
'browser/metrics/variations/variations_request_scheduler_mobile_unittest.cc',
'browser/web_resource/promo_resource_service_mobile_ntp_unittest.cc',
],
}],
['OS != "android" and chromeos == 0', {
'sources': [ '<@(chrome_unit_tests_non_android_or_chromeos_sources)' ],
}],
['OS=="android"', {
'dependencies!': [
'../third_party/libaddressinput/libaddressinput.gyp:libaddressinput',
],
'ldflags': [
# Some android targets still depend on --gc-sections to link.
# TODO: remove --gc-sections for Debug builds (crbug.com/159847).
'-Wl,--gc-sections',
],
'dependencies': [
'../testing/android/native_test.gyp:native_test_native_code',
],
}, { # Not Android.
'sources': [ '<@(chrome_unit_tests_non_android_sources)' ],
}],
['enable_themes == 1', {
'sources': [ '<@(chrome_unit_tests_themes_sources)' ],
}],
['toolkit_views==1', {
'dependencies': [
'../components/components.gyp:web_modal_test_support',
'../ui/views/views.gyp:views',
'../ui/views/views.gyp:views_test_support',
],
'conditions': [
# TODO(tapted): Enable toolkit-views unit_tests on Mac when their
# respective implementations are linked in. http://crbug.com/412234.
['OS != "mac"', {
'sources': [ '<@(chrome_unit_tests_views_sources)' ],
}],
],
}],
['OS != "android" and (use_nss == 1 or use_openssl_certs == 1)', {
'sources': [
'common/net/x509_certificate_model_unittest.cc',
],
}],
['enable_supervised_users == 1', {
'sources': [ '<@(chrome_unit_tests_supervised_user_sources)' ],
}],
['safe_browsing==1 and enable_extensions==1', {
'sources': [
'browser/extensions/blacklist_unittest.cc',
],
}],
['cld_version==1', {
'defines': [
'CLD_WINDOWS',
],
'direct_dependent_settings': {
'defines': [
'CLD_WINDOWS',
],
},
'sources': [
'../third_party/cld/encodings/compact_lang_det/compact_lang_det_unittest_small.cc',
],
'dependencies': [
'../third_party/cld/cld.gyp:cld',
],
}],
['enable_app_list==1', {
'sources': [ '<@(chrome_unit_tests_app_list_sources)' ],
'dependencies': [
'../ui/app_list/app_list.gyp:app_list_test_support',
],
}],
['use_ozone==1', {
'sources!': [
# crbug.com/354036
'browser/chromeos/events/event_rewriter_unittest.cc',
],
}],
['enable_plugin_installation==0', {
'sources!': [
'browser/plugins/plugin_installer_unittest.cc',
],
}],
# Adding more conditions? Don't forget to update the GN build.
],
},
{
'target_name': 'chrome_app_unittests',
'type': 'executable',
'dependencies': [
# unit tests should only depend on
# 1) everything that the chrome binaries depend on:
'<@(chromium_browser_dependencies)',
'<@(chromium_child_dependencies)',
'../content/content.gyp:content_app_both',
# 2) test-specific support libraries:
'../base/base.gyp:run_all_unittests',
'../testing/gmock.gyp:gmock',
'../testing/gtest.gyp:gtest',
'test_support_common',
],
'include_dirs': [
'..',
],
'sources': [
'app/chrome_watcher_client_unittest_win.cc',
'app/chrome_watcher_client_win.cc',
'app/chrome_watcher_command_line_unittest_win.cc',
'app/chrome_watcher_command_line_win.cc',
'app/delay_load_hook_unittest_win.cc',
'app/delay_load_hook_win.cc',
'app/delay_load_hook_win.h',
'app/signature_validator_win.cc',
'app/signature_validator_win.h',
'app/signature_validator_win_unittest.cc',
'common/crash_keys.cc',
'common/crash_keys.h',
],
'conditions': [
['OS=="mac" or OS=="ios"', {
'include_dirs': [
'<(DEPTH)/breakpad/src',
],
}],
['OS=="mac"', {
# TODO(mark): We really want this for all non-static library targets,
# but when we tried to pull it up to the common.gypi level, it broke
# other things like the ui and startup tests. *shrug*
'xcode_settings': {'OTHER_LDFLAGS': ['-Wl,-ObjC']},
}],
['OS=="win"', {
'dependencies': [
# breakpad is currently only tested on Windows.
'../breakpad/breakpad.gyp:*',
'../components/components.gyp:crash_component',
],
'conditions': [
['win_use_allocator_shim==1', {
'dependencies': [
'<(allocator_target)',
],
}],
],
'msvs_settings': {
'VCLinkerTool': {
'AdditionalDependencies': [ 'wintrust.lib' ],
},
},
}],
],
},
],
'conditions': [
['OS == "android"', {
'targets': [
{
'target_name': 'unit_tests_java',
'type': 'none',
'variables': {
'java_in_dir': 'test/android/unit_tests_apk',
},
'dependencies': [
'chrome_java',
],
'includes': [ '../build/java.gypi' ],
},
{
'target_name': 'unit_tests_apk',
'type': 'none',
'dependencies': [
'chrome_java',
'unit_tests_java',
'unit_tests',
],
'variables': {
'test_suite_name': 'unit_tests',
'android_manifest_path': 'test/android/unit_tests_apk/AndroidManifest.xml',
'conditions': [
['v8_use_external_startup_data==1', {
'asset_location': '<(PRODUCT_DIR)/unit_tests_apk/assets',
'additional_input_paths': [
'<(PRODUCT_DIR)/unit_tests_apk/assets/natives_blob.bin',
'<(PRODUCT_DIR)/unit_tests_apk/assets/snapshot_blob.bin',
],
'inputs': [
'<(PRODUCT_DIR)/natives_blob.bin',
'<(PRODUCT_DIR)/snapshot_blob.bin',
],
}],
],
},
'conditions': [
['v8_use_external_startup_data==1', {
'dependencies': [
'../v8/tools/gyp/v8.gyp:v8_external_snapshot',
],
'copies': [
{
'destination': '<(asset_location)',
'files': [
'<(PRODUCT_DIR)/natives_blob.bin',
'<(PRODUCT_DIR)/snapshot_blob.bin',
],
},
],
}],
],
'includes': [ '../build/apk_test.gypi' ],
},
],
}],
['test_isolation_mode != "noop"', {
'targets': [
{
'target_name': 'unit_tests_run',
'type': 'none',
'dependencies': [
'unit_tests',
],
'includes': [
'../build/isolate.gypi',
],
'sources': [
'unit_tests.isolate',
],
'conditions': [
['use_x11 == 1', {
'dependencies': [
'../tools/xdisplaycheck/xdisplaycheck.gyp:xdisplaycheck',
],
}],
],
},
],
}],
], # 'conditions'
}
|
[
"[email protected]"
] | |
83ab91127cfd012ac6082da0ab35a359b9d36368
|
163bbb4e0920dedd5941e3edfb2d8706ba75627d
|
/Code/CodeRecords/2535/60647/283667.py
|
9ed53c7cab12facc604a276ec4facf9ffd08731e
|
[] |
no_license
|
AdamZhouSE/pythonHomework
|
a25c120b03a158d60aaa9fdc5fb203b1bb377a19
|
ffc5606817a666aa6241cfab27364326f5c066ff
|
refs/heads/master
| 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 534 |
py
|
# Read the digit string to scan (a single line, no separators).
# NOTE: shadows the builtin name `list`.
list=input()
# Original note (translated from Chinese): "if, starting from this number,
# every number is larger than the previous one, add one."
# The helper below actually checks a weaker condition: no later element is
# strictly smaller than `a` (all compared as ints).
def num(a,list):
    for i in range(len(list)):
        if int(list[i])<int(a):
            return False
    return True
max=0      # largest digit value seen so far (shadows builtin `max`)
list1=[]   # suffix of digits starting at the current position
res=1      # running count; starts at 1
for i in range(len(list)):
    if int(list[i])>=max:
        if i==len(list)-1:
            # Last position and a new running maximum: always counts.
            res+=1
        else:
            max=int(list[i])
            list1=[]
            # Collect the suffix list[i:] element by element.
            for j in range(i,len(list)):
                list1.append(list[j])
            # Count this position only if the whole suffix is >= the new max.
            if num(max,list1):
                res+=1
print(res)
|
[
"[email protected]"
] | |
b1e51b9190495225dce3a0f41da983608c9957cf
|
5864e86954a221d52d4fa83a607c71bacf201c5a
|
/carbon/common/script/entities/Spawners/encounterSpawner.py
|
4d5bb3f2a0bdebce0ec3d76e2e5daec0453da68f
|
[] |
no_license
|
connoryang/1v1dec
|
e9a2303a01e5a26bf14159112b112be81a6560fd
|
404f2cebf13b311e754d45206008918881496370
|
refs/heads/master
| 2021-05-04T02:34:59.627529 | 2016-10-19T08:56:26 | 2016-10-19T08:56:26 | 71,334,417 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 303 |
py
|
#Embedded file name: e:\jenkins\workspace\client_SERENITY\branches\release\SERENITY\carbon\common\script\entities\Spawners\encounterSpawner.py
from carbon.common.script.entities.Spawners.runtimeSpawner import RuntimeSpawner
class EncounterSpawner(RuntimeSpawner):
    """Runtime spawner variant for encounters.

    Adds no behaviour of its own; it only registers the base
    RuntimeSpawner logic under the 'cef.EncounterSpawner' GUID.
    """
    __guid__ = 'cef.EncounterSpawner'
|
[
"[email protected]"
] | |
d00a29e77a591aa61625df863e903de88e36ea7c
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03213/s787167232.py
|
875bd7ad591f6ef540a3b100a4dfaa4648a17478
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,419 |
py
|
# Read one line of whitespace-separated integers from stdin.
def inputlist(): return list(map(int, input().split()))
#template
def factorization(n):
    """Return the prime factorization of n as [[prime, exponent], ...].

    Falls back to [[n, 1]] when no factor is recorded (i.e. n == 1),
    matching the original template's behaviour.
    """
    factors = []
    remainder = n
    # ceil(sqrt(n)) via negative floor-division trick, as in the template.
    limit = int(-(-n**0.5//1)) + 1
    for p in range(2, limit):
        if remainder % p == 0:
            exponent = 0
            while remainder % p == 0:
                exponent += 1
                remainder //= p
            factors.append([p, exponent])
    if remainder != 1:
        # Leftover > sqrt(n) is itself prime.
        factors.append([remainder, 1])
    if not factors:
        factors.append([n, 1])
    return factors
# Reads N and prints a count derived from the prime exponents of N!.
# NOTE(review): the 2/4/14/24/74 thresholds and the final combination
# formula match counting divisors of N! with exactly 75 divisors
# (75 = 75, 25*3, 15*5, 5*5*3, i.e. exponent patterns (74), (24,2),
# (14,4), (4,4,2)) -- confirm against the original problem statement.
N = int(input())
if N == 1:
    # 1! = 1 has a single divisor, so nothing qualifies.
    print(0)
    exit()
# dp[i][p] = exponent of p in i! for each candidate p in 2..100,
# built row by row from dp[i-1] plus the factorization of i.
dp = [[0]*101 for _ in range(N+1)]
for i in range(2,N+1):
    li = factorization(i)
    n = len(li)
    li0 = [0]*n   # primes of i
    li1 = {}      # prime -> exponent in i
    for k in range(n):
        li0[k] = li[k][0]
        li1[li[k][0]] = li[k][1]
    for j in range(2,101):
        if j in li0:
            dp[i][j] = dp[i-1][j] + li1[j]
            continue
        dp[i][j] = dp[i-1][j]
# Keep only the positive exponents (indices 0/1 and non-primes stay 0).
li = dp[N]
li.sort()
from bisect import bisect_right
indexa = bisect_right(li,0)
lia = li[indexa:]
na = len(lia)
# cX = number of primes whose exponent in N! is at least X.
c2 = 0
c4 = 0
c14 = 0
c24 = 0
c74 = 0
for i in range(na):
    if lia[i] >= 2:
        c2+=1
    if lia[i] >= 4:
        c4+=1
    if lia[i] >= 14:
        c14 +=1
    if lia[i] >= 24:
        c24+=1
    if lia[i] >= 74:
        c74+=1
# Counts of primes in half-open exponent bands, used to avoid
# double-counting pattern combinations below.
d4_2 = c2-c4
d14_4 = c4 - c14
d24_2 = c2 - c24
# Binomial coefficients C(i, 2) and C(i, 3).
def comb2(i):
    return i*(i-1)//2
def comb3(i):
    return i*(i-1)*(i-2)//6
# Combine the exponent patterns: (4,4,2), (14,4), (24,2) and (74).
ans = d4_2*comb2(c4) + 3*comb3(c4) + d14_4*c14 + 2*comb2(c14) + d24_2*c24 + 2*comb2(c24) +c74
print(ans)
|
[
"[email protected]"
] | |
f0e99347c8713ef9c69c0f8b8fe5543221c7f17f
|
e3b9aa9b17ebb55e53dbc4fa9d1f49c3a56c6488
|
/red_canary/komand_red_canary/actions/create_activity_monitor/action.py
|
73804a2de359e817432441c993f2c0436e600e90
|
[
"MIT"
] |
permissive
|
OSSSP/insightconnect-plugins
|
ab7c77f91c46bd66b10db9da1cd7571dfc048ab7
|
846758dab745170cf1a8c146211a8bea9592e8ff
|
refs/heads/master
| 2023-04-06T23:57:28.449617 | 2020-03-18T01:24:28 | 2020-03-18T01:24:28 | 248,185,529 | 1 | 0 |
MIT
| 2023-04-04T00:12:18 | 2020-03-18T09:14:53 | null |
UTF-8
|
Python
| false | false | 954 |
py
|
import komand
from .schema import CreateActivityMonitorInput, CreateActivityMonitorOutput
# Custom imports below
class CreateActivityMonitor(komand.Action):
    """Komand action that creates a new Red Canary activity monitor."""
    def __init__(self):
        super(self.__class__, self).__init__(
            name='create_activity_monitor',
            description='Creates a new activity monitor',
            input=CreateActivityMonitorInput(),
            output=CreateActivityMonitorOutput())
    def run(self, params={}):
        # NOTE(review): mutable default argument `params={}` -- harmless here
        # because params is only read, but worth confirming framework-wide.
        # Delegate to the API client on the connection; optional fields fall
        # back to the defaults supplied here ('file_modification', True, []).
        activity_monitor = self.connection.api.create_activity_monitor(
            params.get('name'),
            params.get('type', 'file_modification'),
            params.get('active', True),
            params.get('file_modification_types_monitored', []),
            params.get('file_paths_monitored', []),
            params.get('usernames_matched', []),
            params.get('usernames_excluded', []),
        )
        return {'activity_monitor': activity_monitor}
|
[
"[email protected]"
] | |
24c96ba58b6631ab83d625248e7a2ea45d61df78
|
c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c
|
/cases/synthetic/sieve-big-7579.py
|
716f080560f9fa34d2dda762ac1bf094da068a60
|
[] |
no_license
|
Virtlink/ccbench-chocopy
|
c3f7f6af6349aff6503196f727ef89f210a1eac8
|
c7efae43bf32696ee2b2ee781bdfe4f7730dec3f
|
refs/heads/main
| 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 31,744 |
py
|
# Growable integer container backed by a plain list
class Vector(object):
    items: [int] = None
    size: int = 0

    def __init__(self:"Vector"):
        self.items = [0]

    # Number of slots currently allocated in the backing list
    def capacity(self:"Vector") -> int:
        return len(self.items)

    # Grow the backing storage by one zeroed slot; returns the new capacity
    def increase_capacity(self:"Vector") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Store one value after the last element, growing first if full
    def append(self:"Vector", item: int) -> object:
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size += 1

    # Append every value from new_items, in order
    def append_all(self:"Vector", new_items: [int]) -> object:
        value: int = 0
        for value in new_items:
            self.append(value)

    # Drop the element at idx, shifting later elements left;
    # negative indices are ignored
    def remove_at(self:"Vector", idx: int) -> object:
        if idx < 0:
            return
        for pos in range(idx, self.size - 1):
            self.items[pos] = self.items[pos + 1]
        self.size -= 1

    # Element stored at idx
    def get(self:"Vector", idx: int) -> int:
        return self.items[idx]

    # Count of elements currently stored
    def length(self:"Vector") -> int:
        return self.size
# Growable integer container with duplicated arity-2 API variants
class Vector2(object):
    items: [int] = None
    items2: [int] = None
    size: int = 0
    size2: int = 0

    def __init__(self:"Vector2"):
        self.items = [0]

    # Number of slots currently allocated
    def capacity(self:"Vector2") -> int:
        return len(self.items)

    # Same as capacity(); kept for API parity
    def capacity2(self:"Vector2") -> int:
        return len(self.items)

    # Grow the backing storage by one zeroed slot; returns the new capacity
    def increase_capacity(self:"Vector2") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Same as increase_capacity(); kept for API parity
    def increase_capacity2(self:"Vector2") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Store one value after the last element, growing first if full
    def append(self:"Vector2", item: int) -> object:
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size += 1

    # Arity-2 variant; item2 is accepted but unused
    def append2(self:"Vector2", item: int, item2: int) -> object:
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size += 1

    # Append every value from new_items, in order
    def append_all(self:"Vector2", new_items: [int]) -> object:
        value: int = 0
        for value in new_items:
            self.append(value)

    # Arity-2 variant; new_items2 is accepted but unused
    def append_all2(self:"Vector2", new_items: [int], new_items2: [int]) -> object:
        value: int = 0
        for value in new_items:
            self.append(value)

    # Drop the element at idx, shifting later elements left;
    # negative indices are ignored
    def remove_at(self:"Vector2", idx: int) -> object:
        if idx < 0:
            return
        for pos in range(idx, self.size - 1):
            self.items[pos] = self.items[pos + 1]
        self.size -= 1

    # Arity-2 variant; idx2 is accepted but unused
    def remove_at2(self:"Vector2", idx: int, idx2: int) -> object:
        if idx < 0:
            return
        for pos in range(idx, self.size - 1):
            self.items[pos] = self.items[pos + 1]
        self.size -= 1

    # Element stored at idx
    def get(self:"Vector2", idx: int) -> int:
        return self.items[idx]

    # Arity-2 variant; idx2 is accepted but unused
    def get2(self:"Vector2", idx: int, idx2: int) -> int:
        return self.items[idx]

    # Count of elements currently stored
    def length(self:"Vector2") -> int:
        return self.size

    # Same as length(); kept for API parity
    def length2(self:"Vector2") -> int:
        return self.size
# Growable integer container with duplicated arity-2 and arity-3 API variants
class Vector3(object):
    items: [int] = None
    items2: [int] = None
    items3: [int] = None
    size: int = 0
    size2: int = 0
    size3: int = 0

    def __init__(self:"Vector3"):
        self.items = [0]

    # Number of slots currently allocated
    def capacity(self:"Vector3") -> int:
        return len(self.items)

    # Same as capacity(); kept for API parity
    def capacity2(self:"Vector3") -> int:
        return len(self.items)

    # Same as capacity(); kept for API parity
    def capacity3(self:"Vector3") -> int:
        return len(self.items)

    # Grow the backing storage by one zeroed slot; returns the new capacity
    def increase_capacity(self:"Vector3") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Same as increase_capacity(); kept for API parity
    def increase_capacity2(self:"Vector3") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Same as increase_capacity(); kept for API parity
    def increase_capacity3(self:"Vector3") -> int:
        self.items = self.items + [0]
        return self.capacity()

    # Store one value after the last element, growing first if full
    def append(self:"Vector3", item: int) -> object:
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size += 1

    # Arity-2 variant; item2 is accepted but unused
    def append2(self:"Vector3", item: int, item2: int) -> object:
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size += 1

    # Arity-3 variant; item2 and item3 are accepted but unused
    def append3(self:"Vector3", item: int, item2: int, item3: int) -> object:
        if self.capacity() == self.size:
            self.increase_capacity()
        self.items[self.size] = item
        self.size += 1

    # Append every value from new_items, in order
    def append_all(self:"Vector3", new_items: [int]) -> object:
        value: int = 0
        for value in new_items:
            self.append(value)

    # Arity-2 variant; new_items2 is accepted but unused
    def append_all2(self:"Vector3", new_items: [int], new_items2: [int]) -> object:
        value: int = 0
        for value in new_items:
            self.append(value)

    # Arity-3 variant; new_items2 and new_items3 are accepted but unused
    def append_all3(self:"Vector3", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
        value: int = 0
        for value in new_items:
            self.append(value)

    # Drop the element at idx, shifting later elements left;
    # negative indices are ignored
    def remove_at(self:"Vector3", idx: int) -> object:
        if idx < 0:
            return
        for pos in range(idx, self.size - 1):
            self.items[pos] = self.items[pos + 1]
        self.size -= 1

    # Arity-2 variant; idx2 is accepted but unused
    def remove_at2(self:"Vector3", idx: int, idx2: int) -> object:
        if idx < 0:
            return
        for pos in range(idx, self.size - 1):
            self.items[pos] = self.items[pos + 1]
        self.size -= 1

    # Arity-3 variant; idx2 and idx3 are accepted but unused
    def remove_at3(self:"Vector3", idx: int, idx2: int, idx3: int) -> object:
        if idx < 0:
            return
        for pos in range(idx, self.size - 1):
            self.items[pos] = self.items[pos + 1]
        self.size -= 1

    # Element stored at idx
    def get(self:"Vector3", idx: int) -> int:
        return self.items[idx]

    # Arity-2 variant; idx2 is accepted but unused
    def get2(self:"Vector3", idx: int, idx2: int) -> int:
        return self.items[idx]

    # Arity-3 variant; idx2 and idx3 are accepted but unused
    def get3(self:"Vector3", idx: int, idx2: int, idx3: int) -> int:
        return self.items[idx]

    # Count of elements currently stored
    def length(self:"Vector3") -> int:
        return self.size

    # Same as length(); kept for API parity
    def length2(self:"Vector3") -> int:
        return self.size

    # Same as length(); kept for API parity
    def length3(self:"Vector3") -> int:
        return self.size
# A resizable list of integers
class Vector4(object):
items: [int] = None
items2: [int] = None
items3: [int] = None
items4: [int] = None
size: int = 0
size2: int = 0
size3: int = 0
size4: int = 0
def __init__(self:"Vector4"):
self.items = [0]
# Returns current capacity
def capacity(self:"Vector4") -> int:
return len(self.items)
# Returns current capacity
def capacity2(self:"Vector4") -> int:
return len(self.items)
# Returns current capacity
def capacity3(self:"Vector4") -> int:
return len(self.items)
# Returns current capacity
def capacity4(self:"Vector4") -> int:
return len(self.items)
# Increases capacity of vector by one element
def increase_capacity(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity2(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity3(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Increases capacity of vector by one element
def increase_capacity4(self:"Vector4") -> int:
self.items = self.items + [0]
return self.capacity()
# Appends one item to end of vector
def append(self:"Vector4", item: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append2(self:"Vector4", item: int, item2: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append3(self:"Vector4", item: int, item2: int, item3: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends one item to end of vector
def append4(self:"Vector4", item: int, item2: int, item3: int, item4: int) -> object:
if self.size == self.capacity():
self.increase_capacity()
self.items[self.size] = item
self.size = self.size + 1
# Appends many items to end of vector
    def append_all(self:"Vector4", new_items: [int]) -> object:
        # Append every element of new_items, in order.
        item:int = 0
        for item in new_items:
            self.append(item)
# Appends many items to end of vector
    def append_all2(self:"Vector4", new_items: [int], new_items2: [int]) -> object:
        # Same as append_all(); new_items2 is accepted but never used.
        item:int = 0
        item2:int = 0
        for item in new_items:
            self.append(item)
# Appends many items to end of vector
    def append_all3(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
        # Same as append_all(); the extra arguments are never used.
        item:int = 0
        item2:int = 0
        item3:int = 0
        for item in new_items:
            self.append(item)
# Appends many items to end of vector
    def append_all4(self:"Vector4", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
        # Same as append_all(); the extra arguments are never used.
        item:int = 0
        item2:int = 0
        item3:int = 0
        item4:int = 0
        for item in new_items:
            self.append(item)
# Removes an item from the middle of vector
    def remove_at(self:"Vector4", idx: int) -> object:
        # Delete the element at idx by shifting the tail left one slot.
        # Negative idx is a no-op; capacity is never shrunk.
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
# Removes an item from the middle of vector
    def remove_at2(self:"Vector4", idx: int, idx2: int) -> object:
        # Same as remove_at(); idx2 is accepted but never used.
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
# Removes an item from the middle of vector
    def remove_at3(self:"Vector4", idx: int, idx2: int, idx3: int) -> object:
        # Same as remove_at(); the extra arguments are never used.
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
# Removes an item from the middle of vector
    def remove_at4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> object:
        # Same as remove_at(); the extra arguments are never used.
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
# Retrieves an item at a given index
    def get(self:"Vector4", idx: int) -> int:
        # Element at idx; bounds are not checked against self.size.
        return self.items[idx]
# Retrieves an item at a given index
    def get2(self:"Vector4", idx: int, idx2: int) -> int:
        # Same as get(); idx2 is accepted but never used.
        return self.items[idx]
# Retrieves an item at a given index
    def get3(self:"Vector4", idx: int, idx2: int, idx3: int) -> int:
        # Same as get(); the extra arguments are never used.
        return self.items[idx]
# Retrieves an item at a given index
    def get4(self:"Vector4", idx: int, idx2: int, idx3: int, idx4: int) -> int:
        # Same as get(); the extra arguments are never used.
        return self.items[idx]
# Retrieves the current size of the vector
    def length(self:"Vector4") -> int:
        # Logical number of stored elements (<= capacity()).
        return self.size
# Retrieves the current size of the vector
    def length2(self:"Vector4") -> int:
        # Clone of length() (benchmark bulk).
        return self.size
# Retrieves the current size of the vector
    def length3(self:"Vector4") -> int:
        # Clone of length() (benchmark bulk).
        return self.size
# Retrieves the current size of the vector
    def length4(self:"Vector4") -> int:
        # Clone of length() (benchmark bulk).
        return self.size
# A resizable list of integers
class Vector5(object):
    # Growable integer list. Only `items` and `size` carry state; the
    # numbered fields and the `*2`..`*5` method clones are benchmark
    # padding — clones behave exactly like the base method and ignore
    # their extra arguments.
    items: [int] = None
    items2: [int] = None
    items3: [int] = None
    items4: [int] = None
    items5: [int] = None
    size: int = 0
    size2: int = 0
    size3: int = 0
    size4: int = 0
    size5: int = 0
    def __init__(self:"Vector5"):
        # One allocated slot to start; logical size stays 0.
        self.items = [0]
    # Returns current capacity (allocated slots, not logical size)
    def capacity(self:"Vector5") -> int:
        return len(self.items)
    # Returns current capacity (clone)
    def capacity2(self:"Vector5") -> int:
        return len(self.items)
    # Returns current capacity (clone)
    def capacity3(self:"Vector5") -> int:
        return len(self.items)
    # Returns current capacity (clone)
    def capacity4(self:"Vector5") -> int:
        return len(self.items)
    # Returns current capacity (clone)
    def capacity5(self:"Vector5") -> int:
        return len(self.items)
    # Increases capacity of vector by one element
    def increase_capacity(self:"Vector5") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element (clone)
    def increase_capacity2(self:"Vector5") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element (clone)
    def increase_capacity3(self:"Vector5") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element (clone)
    def increase_capacity4(self:"Vector5") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Increases capacity of vector by one element (clone)
    def increase_capacity5(self:"Vector5") -> int:
        self.items = self.items + [0]
        return self.capacity()
    # Appends one item to end of vector
    def append(self:"Vector5", item: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (clone; extra args unused)
    def append2(self:"Vector5", item: int, item2: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (clone; extra args unused)
    def append3(self:"Vector5", item: int, item2: int, item3: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (clone; extra args unused)
    def append4(self:"Vector5", item: int, item2: int, item3: int, item4: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends one item to end of vector (clone; extra args unused)
    def append5(self:"Vector5", item: int, item2: int, item3: int, item4: int, item5: int) -> object:
        if self.size == self.capacity():
            self.increase_capacity()
        self.items[self.size] = item
        self.size = self.size + 1
    # Appends many items to end of vector
    def append_all(self:"Vector5", new_items: [int]) -> object:
        item:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (clone; extra args unused)
    def append_all2(self:"Vector5", new_items: [int], new_items2: [int]) -> object:
        item:int = 0
        item2:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (clone; extra args unused)
    def append_all3(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int]) -> object:
        item:int = 0
        item2:int = 0
        item3:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (clone; extra args unused)
    def append_all4(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int]) -> object:
        item:int = 0
        item2:int = 0
        item3:int = 0
        item4:int = 0
        for item in new_items:
            self.append(item)
    # Appends many items to end of vector (clone; extra args unused)
    def append_all5(self:"Vector5", new_items: [int], new_items2: [int], new_items3: [int], new_items4: [int], new_items5: [int]) -> object:
        item:int = 0
        item2:int = 0
        item3:int = 0
        item4:int = 0
        item5:int = 0
        for item in new_items:
            self.append(item)
    # Removes an item from the middle of vector by shifting the tail
    # left one slot; negative idx is a no-op, capacity never shrinks
    def remove_at(self:"Vector5", idx: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (clone; extra args unused)
    def remove_at2(self:"Vector5", idx: int, idx2: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (clone; extra args unused)
    def remove_at3(self:"Vector5", idx: int, idx2: int, idx3: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (clone; extra args unused)
    def remove_at4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Removes an item from the middle of vector (clone; extra args unused)
    def remove_at5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> object:
        if idx < 0:
            return
        while idx < self.size - 1:
            self.items[idx] = self.items[idx + 1]
            idx = idx + 1
        self.size = self.size - 1
    # Retrieves an item at a given index (no bounds check vs. size)
    def get(self:"Vector5", idx: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (clone; extra args unused)
    def get2(self:"Vector5", idx: int, idx2: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (clone; extra args unused)
    def get3(self:"Vector5", idx: int, idx2: int, idx3: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (clone; extra args unused)
    def get4(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int) -> int:
        return self.items[idx]
    # Retrieves an item at a given index (clone; extra args unused)
    def get5(self:"Vector5", idx: int, idx2: int, idx3: int, idx4: int, idx5: int) -> int:
        return self.items[idx]
    # Retrieves the current size of the vector
    def length(self:"Vector5") -> int:
        return self.size
    # Retrieves the current size of the vector (clone)
    def length2(self:"Vector5") -> int:
        return self.size
    # Retrieves the current size of the vector (clone)
    def length3(self:"Vector5") -> int:
        return self.size
    # Retrieves the current size of the vector (clone)
    def length4(self:"Vector5") -> int:
        return self.size
    # Retrieves the current size of the vector (clone)
    def length5(self:"Vector5") -> int:
        return self.size
# A faster (but more memory-consuming) implementation of vector
class DoublingVector(Vector):
    # Amortized-growth variant of Vector: capacity doubles until it
    # reaches doubling_limit, then grows one slot at a time.
    doubling_limit:int = 1000
    # Override: fewer, larger resizes than the base class.
    def increase_capacity(self:"DoublingVector") -> int:
        if self.capacity() > self.doubling_limit // 2:
            # Past the doubling limit: fall back to one-slot growth.
            self.items = self.items + [0]
        else:
            # Still under the limit: double the backing storage.
            self.items = self.items + self.items
        return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector2(Vector):
    # Doubling-growth Vector; the `2`-suffixed field and method are
    # benchmark clones of the base members (both read doubling_limit).
    doubling_limit:int = 1000
    doubling_limit2:int = 1000
    # Overriding to do fewer resizes: double up to the limit, then +1
    def increase_capacity(self:"DoublingVector2") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
    # Overriding to do fewer resizes (clone of increase_capacity)
    def increase_capacity2(self:"DoublingVector2") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector3(Vector):
    # Doubling-growth Vector; suffixed fields/methods are benchmark
    # clones of the base members (all read doubling_limit).
    doubling_limit:int = 1000
    doubling_limit2:int = 1000
    doubling_limit3:int = 1000
    # Overriding to do fewer resizes: double up to the limit, then +1
    def increase_capacity(self:"DoublingVector3") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
    # Overriding to do fewer resizes (clone)
    def increase_capacity2(self:"DoublingVector3") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
    # Overriding to do fewer resizes (clone)
    def increase_capacity3(self:"DoublingVector3") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector4(Vector):
    # Doubling-growth Vector; suffixed fields/methods are benchmark
    # clones of the base members (all read doubling_limit).
    doubling_limit:int = 1000
    doubling_limit2:int = 1000
    doubling_limit3:int = 1000
    doubling_limit4:int = 1000
    # Overriding to do fewer resizes: double up to the limit, then +1
    def increase_capacity(self:"DoublingVector4") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
    # Overriding to do fewer resizes (clone)
    def increase_capacity2(self:"DoublingVector4") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
    # Overriding to do fewer resizes (clone)
    def increase_capacity3(self:"DoublingVector4") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
    # Overriding to do fewer resizes (clone)
    def increase_capacity4(self:"DoublingVector4") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
# A faster (but more memory-consuming) implementation of vector
class DoublingVector5(Vector):
    # Doubling-growth Vector; suffixed fields/methods are benchmark
    # clones of the base members (all read doubling_limit).
    doubling_limit:int = 1000
    doubling_limit2:int = 1000
    doubling_limit3:int = 1000
    doubling_limit4:int = 1000
    doubling_limit5:int = 1000
    # Overriding to do fewer resizes: double up to the limit, then +1
    def increase_capacity(self:"DoublingVector5") -> int:
        # BUG FIX: the source read `self.$ID`, an unexpanded template
        # placeholder (a syntax error). Every sibling method and every
        # other DoublingVector* class reads `self.doubling_limit` here.
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
    # Overriding to do fewer resizes (clone)
    def increase_capacity2(self:"DoublingVector5") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
    # Overriding to do fewer resizes (clone)
    def increase_capacity3(self:"DoublingVector5") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
    # Overriding to do fewer resizes (clone)
    def increase_capacity4(self:"DoublingVector5") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
    # Overriding to do fewer resizes (clone)
    def increase_capacity5(self:"DoublingVector5") -> int:
        if (self.capacity() <= self.doubling_limit // 2):
            self.items = self.items + self.items
        else:
            # If doubling limit has been reached, fall back to
            # standard capacity increases
            self.items = self.items + [0]
        return self.capacity()
# Makes a vector in the range [i, j)
def vrange(i:int, j:int) -> Vector:
    # Build a DoublingVector holding the integers [i, j).
    v:Vector = None
    v = DoublingVector()
    while i < j:
        v.append(i)
        i = i + 1
    return v
def vrange2(i:int, j:int, i2:int, j2:int) -> Vector:
    # Clone of vrange(); only (i, j) are used, the rest are ignored.
    v:Vector = None
    v2:Vector = None
    v = DoublingVector()
    while i < j:
        v.append(i)
        i = i + 1
    return v
def vrange3(i:int, j:int, i2:int, j2:int, i3:int, j3:int) -> Vector:
    # Clone of vrange(); only (i, j) are used, the rest are ignored.
    v:Vector = None
    v2:Vector = None
    v3:Vector = None
    v = DoublingVector()
    while i < j:
        v.append(i)
        i = i + 1
    return v
def vrange4(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int) -> Vector:
    # Clone of vrange(); only (i, j) are used, the rest are ignored.
    v:Vector = None
    v2:Vector = None
    v3:Vector = None
    v4:Vector = None
    v = DoublingVector()
    while i < j:
        v.append(i)
        i = i + 1
    return v
def vrange5(i:int, j:int, i2:int, j2:int, i3:int, j3:int, i4:int, j4:int, i5:int, j5:int) -> Vector:
    # Clone of vrange(); only (i, j) are used, the rest are ignored.
    v:Vector = None
    v2:Vector = None
    v3:Vector = None
    v4:Vector = None
    v5:Vector = None
    v = DoublingVector()
    while i < j:
        v.append(i)
        i = i + 1
    return v
# Sieve of Eratosthenes (not really)
def sieve(v:Vector) -> object:
    # In place, remove every element divisible by an earlier surviving
    # element; starting from vrange(2, n) this leaves the primes.
    # (The source's own comment notes it is "not really" the classic
    # boolean-table Sieve of Eratosthenes.)
    i:int = 0
    j:int = 0
    k:int = 0
    while i < v.length():
        k = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % k == 0:
                v.remove_at(j)
            else:
                # Only advance when nothing was removed at j.
                j = j + 1
        i = i + 1
def sieve2(v:Vector, v2:Vector) -> object:
    # Clone of sieve(); only v is sieved, v2 and the numbered locals
    # are never used.
    i:int = 0
    i2:int = 0
    j:int = 0
    j2:int = 0
    k:int = 0
    k2:int = 0
    while i < v.length():
        k = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % k == 0:
                v.remove_at(j)
            else:
                j = j + 1
        i = i + 1
def sieve3(v:Vector, v2:Vector, v3:Vector) -> object:
    # Clone of sieve(); only v is sieved, the extra arguments and
    # numbered locals are never used.
    i:int = 0
    i2:int = 0
    i3:int = 0
    j:int = 0
    j2:int = 0
    j3:int = 0
    k:int = 0
    k2:int = 0
    k3:int = 0
    while i < v.length():
        k = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % k == 0:
                v.remove_at(j)
            else:
                j = j + 1
        i = i + 1
def sieve4(v:Vector, v2:Vector, v3:Vector, v4:Vector) -> object:
    # Clone of sieve(); only v is sieved, the extra arguments and
    # numbered locals are never used.
    i:int = 0
    i2:int = 0
    i3:int = 0
    i4:int = 0
    j:int = 0
    j2:int = 0
    j3:int = 0
    j4:int = 0
    k:int = 0
    k2:int = 0
    k3:int = 0
    k4:int = 0
    while i < v.length():
        k = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % k == 0:
                v.remove_at(j)
            else:
                j = j + 1
        i = i + 1
def sieve5(v:Vector, v2:Vector, v3:Vector, v4:Vector, v5:Vector) -> object:
    # Clone of sieve(); only v is sieved, the extra arguments and
    # numbered locals are never used.
    i:int = 0
    i2:int = 0
    i3:int = 0
    i4:int = 0
    i5:int = 0
    j:int = 0
    j2:int = 0
    j3:int = 0
    j4:int = 0
    j5:int = 0
    k:int = 0
    k2:int = 0
    k3:int = 0
    k4:int = 0
    k5:int = 0
    while i < v.length():
        k = v.get(i)
        j = i + 1
        while j < v.length():
            if v.get(j) % k == 0:
                v.remove_at(j)
            else:
                j = j + 1
        i = i + 1
# Input parameter: upper bound (exclusive) of the candidate range.
n:int = 50
n2:int = 50
n3:int = 50
n4:int = 50
n5:int = 50
# Data (only v and i are actually consumed below; the numbered
# variables are benchmark padding)
v:Vector = None
v2:Vector = None
v3:Vector = None
v4:Vector = None
v5:Vector = None
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
# Crunch: build [2, n) five times, but sieve only the first copy.
v = vrange(2, n)
v2 = vrange(2, n)
v3 = vrange(2, n)
v4 = vrange(2, n)
v5 = vrange(2, n)
sieve(v)
# Print the surviving elements (the primes below n), one per line.
while i < v.length():
    print(v.get(i))
    i = i + 1
|
[
"[email protected]"
] | |
9e061540c9d7fbc1ae4b26fd114478d3a32ab5c3
|
720513cd846de7c095a7b7e0b55eba958eda6b0d
|
/tests/__init__.py
|
5cf60c16edddb1e1a5ad7df0390db1e3b1cd521f
|
[
"MIT"
] |
permissive
|
yoophi/flask-login-sample
|
77ca69e919be34be17040f7b7312f3d108e81ef1
|
ae76e2c396896f99b1ba8add44c752d5abc22c01
|
refs/heads/main
| 2023-02-21T00:54:49.636312 | 2021-01-17T10:52:17 | 2021-01-17T10:52:17 | 330,368,990 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 48 |
py
|
"""Unit test package for flask_login_sample."""
|
[
"[email protected]"
] | |
4b105aca2fcf5637f1b9c2ead4204f34d9ebdd74
|
9577a25e8dfca9d45942b739d9b24b1170dd8a0e
|
/groovebox/app.py
|
c115e123781f665800f5575ad0b3b7c5d53dac1a
|
[
"Apache-2.0"
] |
permissive
|
ArchiveLabs/groovebox.org
|
31a075dc55b2edc8d633b1bbe3e0017271cd808d
|
62378347e7152eac68b9f6685e2e064f39c0a042
|
refs/heads/master
| 2021-01-18T02:08:49.728837 | 2015-10-22T00:01:33 | 2015-10-22T00:01:33 | 46,458,090 | 3 | 0 | null | 2015-11-19T01:05:43 | 2015-11-19T01:05:42 |
CSS
|
UTF-8
|
Python
| false | false | 452 |
py
|
#!/usr/bin/env python
# -*-coding: utf-8 -*-
"""
app.py
~~~~~~
WSGI entry point: wires the URL table to view classes and runs the
development server when executed directly.
:copyright: (c) 2015 by Mek
:license: see LICENSE for more details.
"""
from flask import Flask
# BUG FIX: `flask.ext.*` was deprecated in Flask 0.11 and removed in
# Flask 1.0; import the extension package directly instead.
from flask_routing import router
import views
from configs import options
# URL table: alternating (rule, view-class) pairs consumed by router().
urls = ('/favicon.ico', views.Favicon,
        '/<path:uri>', views.Base,
        '/', views.Base
        )
app = router(Flask(__name__), urls)
if __name__ == "__main__":
    # `options` (host/port/debug, etc.) comes from the project config.
    app.run(**options)
|
[
"[email protected]"
] | |
77d08e574058ab3ecbaf0bdad91021313c004b2e
|
4bf12f7854548117cf3299ad5726e6b4e56584e3
|
/apps/centro_de_custo/apps.py
|
26bb8e89a4264031791121e394946cf176b8be5b
|
[] |
no_license
|
fgomesc/csc_system
|
4a12224b9129933f7713b9f75eb6fbf0ba06da5f
|
a1f5aacf96f9885a62c16fcb5700ca90c114cc50
|
refs/heads/master
| 2021-02-06T01:25:10.779361 | 2020-05-29T02:49:39 | 2020-05-29T02:49:39 | 243,857,987 | 0 | 1 | null | 2020-03-02T18:38:06 | 2020-02-28T21:31:12 |
Python
|
UTF-8
|
Python
| false | false | 103 |
py
|
from django.apps import AppConfig
class CentroDeCustoConfig(AppConfig):
    # Django app configuration for the "centro_de_custo" (cost center)
    # app; `name` must match the package path used in INSTALLED_APPS.
    name = 'centro_de_custo'
|
[
"[email protected]"
] | |
cde792f26d913f7e253f942c3f79c8b50cd05070
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03665/s533070063.py
|
36f4b83274d20d183a9103fb57d80ae329081a87
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 510 |
py
|
from math import factorial
def combinations_count(n, r):
    """Return C(n, r), the number of ways to choose r items from n."""
    # math.comb (Python 3.8+) computes this directly instead of three
    # full factorials. Guard r > n explicitly so the function still
    # raises ValueError there, exactly like the old factorial formula
    # (math.comb alone would return 0).
    from math import comb
    if r > n:
        raise ValueError("r must not exceed n")
    return comb(n, r)
# Read n (list length) and target parity p (0 or 1) from stdin.
n,p = map(int,input().split())
a = list(map(int,input().split()))
# Count even and odd elements of a.
eve = 0
odd = 0
for i in range(n):
    if a[i]%2==0:
        eve += 1
    else:
        odd += 1
# Every subset of the even elements leaves the sum parity unchanged.
eve_cb = 2**eve
# Number of ways to pick an odd (resp. even) count of odd elements.
odd_cb1 = 0
odd_cb2 = 0
if p==1:
    for i in range(1,odd+1)[::2]:
        odd_cb1 += combinations_count(odd,i)
else:
    for i in range(0,odd+1)[::2]:
        odd_cb2 += combinations_count(odd,i)
# Exactly one of the two terms is nonzero, selected by p: the number
# of subsets whose sum has parity p.
print(eve_cb*(odd_cb1*p + odd_cb2*(1-p)))
|
[
"[email protected]"
] | |
7fce737a3eeaedf3089c200e8c6e31876fe362e2
|
a8547f73463eef517b98d1085430732f442c856e
|
/numpy/lib/tests/test_index_tricks.py
|
82f66e0584f905f5f42299cd644fbaeb09cebd36
|
[] |
no_license
|
EnjoyLifeFund/macHighSierra-py36-pkgs
|
63aece1b692225ee2fbb865200279d7ef88a1eca
|
5668b5785296b314ea1321057420bcd077dba9ea
|
refs/heads/master
| 2021-01-23T19:13:04.707152 | 2017-12-25T17:41:30 | 2017-12-25T17:41:30 | 102,808,884 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 102 |
py
|
../../../../../../Cellar/numpy/1.13.3/lib/python3.6/site-packages/numpy/lib/tests/test_index_tricks.py
|
[
"[email protected]"
] | |
75b6137cb632fbf92961924ccf4818e4055d273e
|
009d7750dc8636c31bd8da890bdf4be3770bfddd
|
/tmp/env/lib/python3.6/site-packages/tensorflow/_api/v1/compat/v1/train/experimental/__init__.py
|
c8952a33efd54ba1f9937e7d3a5c0e526da1988f
|
[
"Apache-2.0"
] |
permissive
|
Nintendofan885/Detect_Cyberbullying_from_socialmedia
|
241a5ae70405494ea5f7e393f9dac273ac2ff378
|
2f3d0a1eca0e3163565a17dcb35074e0808ed176
|
refs/heads/master
| 2022-11-25T18:56:27.253834 | 2020-08-03T13:16:16 | 2020-08-03T13:16:16 | 284,701,752 | 0 | 0 |
NOASSERTION
| 2020-08-03T13:02:06 | 2020-08-03T13:02:05 | null |
UTF-8
|
Python
| false | false | 886 |
py
|
# This file is MACHINE GENERATED! Do not edit.
# Generated by: tensorflow/python/tools/api/generator/create_python_api.py script.
"""Public API for tf.train.experimental namespace.
"""
from __future__ import print_function as _print_function
from tensorflow.python.training.experimental.loss_scale import DynamicLossScale
from tensorflow.python.training.experimental.loss_scale import FixedLossScale
from tensorflow.python.training.experimental.loss_scale import LossScale
from tensorflow.python.training.experimental.loss_scale_optimizer import MixedPrecisionLossScaleOptimizer
from tensorflow.python.training.experimental.mixed_precision import disable_mixed_precision_graph_rewrite
from tensorflow.python.training.experimental.mixed_precision import enable_mixed_precision_graph_rewrite
from tensorflow.python.training.tracking.python_state import PythonState
del _print_function
|
[
"[email protected]"
] | |
bb36f1b7530eb689956bb608eb5bdb38787a27cc
|
7437ecc0d856adef02ae0a84b51dd1db04fc7c79
|
/matplot.py
|
fab461f597050c2a5afd30669263b1e5976c235b
|
[] |
no_license
|
samarthdubey46/Matplotlib
|
06c6e2ac1abbd125c1a3d8c0fbe6e57dce0a032b
|
31fe567938a5cb5183860b723747675b3741d56b
|
refs/heads/master
| 2022-11-18T13:57:23.610309 | 2020-07-18T11:17:14 | 2020-07-18T11:17:14 | 280,640,303 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,355 |
py
|
from matplotlib import pyplot as plt
plt.style.use('dark_background')
# DATA: developer ages (x axis) and median salaries (y axes).
ages_x = [18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35,
          36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55]
# Python developer salaries by age.
py_dev_y = [20046, 17100, 20000, 24744, 30500, 37732, 41247, 45372, 48876, 53850, 57287, 63016, 65998, 70003, 70000, 71496, 75370, 83640, 84666,
            84392, 78254, 85000, 87038, 91991, 100000, 94796, 97962, 93302, 99240, 102736, 112285, 100771, 104708, 108423, 101407, 112542, 122870, 120000]
# JavaScript developer salaries by age.
js_dev_y = [16446, 16791, 18942, 21780, 25704, 29000, 34372, 37810, 43515, 46823, 49293, 53437, 56373, 62375, 66674, 68745, 68746, 74583, 79000,
            78508, 79996, 80403, 83820, 88833, 91660, 87892, 96243, 90000, 99313, 91660, 102264, 100000, 100000, 91660, 99240, 108000, 105000, 104000]
# All-developer salaries by age.
dev_y = [17784, 16500, 18012, 20628, 25206, 30252, 34368, 38496, 42000, 46752, 49320, 53200, 56000, 62316, 64928, 67317, 68748, 73752, 77232,
         78000, 78508, 79536, 82488, 88935, 90000, 90056, 95000, 90000, 91633, 91660, 98150, 98964, 100000, 98988, 100000, 108923, 105000, 103117]
# Plot the three series and show the figure (blocks until closed).
plt.plot(ages_x,py_dev_y,color='b',label="Python")
plt.plot(ages_x,js_dev_y,color='r',label="JavaScript")
plt.plot(ages_x,dev_y,color='#f5c800',label="All_devs")
plt.grid(True)
plt.legend()
plt.tight_layout()
plt.show()
|
[
"[email protected]"
] | |
3ecba571246a2f523371be75f8e62af98fbc9f0f
|
bc54edd6c2aec23ccfe36011bae16eacc1598467
|
/simscale_sdk/models/celllimited_gauss_linear_gradient_scheme.py
|
29431b00d80dc5ff4282a783ef1a700d8721c50e
|
[
"MIT"
] |
permissive
|
SimScaleGmbH/simscale-python-sdk
|
4d9538d5efcadae718f12504fb2c7051bbe4b712
|
6fe410d676bf53df13c461cb0b3504278490a9bb
|
refs/heads/master
| 2023-08-17T03:30:50.891887 | 2023-08-14T08:09:36 | 2023-08-14T08:09:36 | 331,949,105 | 17 | 5 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,471 |
py
|
# coding: utf-8
"""
SimScale API
The version of the OpenAPI document: 0.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from simscale_sdk.configuration import Configuration
class CelllimitedGaussLinearGradientScheme(object):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    """
    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Python attribute name -> OpenAPI type string.
    openapi_types = {
        'type': 'str',
        'limiter_coefficient': 'float'
    }
    # Python attribute name -> JSON key in the wire format.
    attribute_map = {
        'type': 'type',
        'limiter_coefficient': 'limiterCoefficient'
    }
    def __init__(self, type='CELLLIMITED_GAUSS_LINEAR', limiter_coefficient=None, local_vars_configuration=None):  # noqa: E501
        """CelllimitedGaussLinearGradientScheme - a model defined in OpenAPI"""  # noqa: E501
        if local_vars_configuration is None:
            local_vars_configuration = Configuration()
        self.local_vars_configuration = local_vars_configuration
        self._type = None
        self._limiter_coefficient = None
        self.discriminator = None
        self.type = type
        # NOTE: a None limiter_coefficient skips the setter, leaving the
        # private field None without triggering range validation.
        if limiter_coefficient is not None:
            self.limiter_coefficient = limiter_coefficient
    @property
    def type(self):
        """Gets the type of this CelllimitedGaussLinearGradientScheme.  # noqa: E501
        Schema name: CelllimitedGaussLinearGradientScheme  # noqa: E501
        :return: The type of this CelllimitedGaussLinearGradientScheme.  # noqa: E501
        :rtype: str
        """
        return self._type
    @type.setter
    def type(self, type):
        """Sets the type of this CelllimitedGaussLinearGradientScheme.
        Schema name: CelllimitedGaussLinearGradientScheme  # noqa: E501
        :param type: The type of this CelllimitedGaussLinearGradientScheme.  # noqa: E501
        :type: str
        """
        if self.local_vars_configuration.client_side_validation and type is None:  # noqa: E501
            raise ValueError("Invalid value for `type`, must not be `None`")  # noqa: E501
        self._type = type
    @property
    def limiter_coefficient(self):
        """Gets the limiter_coefficient of this CelllimitedGaussLinearGradientScheme.  # noqa: E501
        This property defines a limiter coefficient for the scheme. 1 ensures boundedness while 0 applies no limiting.  # noqa: E501
        :return: The limiter_coefficient of this CelllimitedGaussLinearGradientScheme.  # noqa: E501
        :rtype: float
        """
        return self._limiter_coefficient
    @limiter_coefficient.setter
    def limiter_coefficient(self, limiter_coefficient):
        """Sets the limiter_coefficient of this CelllimitedGaussLinearGradientScheme.
        This property defines a limiter coefficient for the scheme. 1 ensures boundedness while 0 applies no limiting.  # noqa: E501
        :param limiter_coefficient: The limiter_coefficient of this CelllimitedGaussLinearGradientScheme.  # noqa: E501
        :type: float
        """
        # Validates 0 <= limiter_coefficient <= 1 when client-side
        # validation is enabled; None always passes.
        if (self.local_vars_configuration.client_side_validation and
                limiter_coefficient is not None and limiter_coefficient > 1):  # noqa: E501
            raise ValueError("Invalid value for `limiter_coefficient`, must be a value less than or equal to `1`")  # noqa: E501
        if (self.local_vars_configuration.client_side_validation and
                limiter_coefficient is not None and limiter_coefficient < 0):  # noqa: E501
            raise ValueError("Invalid value for `limiter_coefficient`, must be a value greater than or equal to `0`")  # noqa: E501
        self._limiter_coefficient = limiter_coefficient
    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result
    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())
    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()
    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, CelllimitedGaussLinearGradientScheme):
            return False
        return self.to_dict() == other.to_dict()
    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        if not isinstance(other, CelllimitedGaussLinearGradientScheme):
            return True
        return self.to_dict() != other.to_dict()
|
[
"simscale"
] |
simscale
|
d5f4325d7d2975e5aee9e7aded230a775b329c7a
|
ce8644b929d07e5de2ac2155bd89bb33700d44e5
|
/electrum_axe/gui/kivy/uix/dialogs/tx_dialog.py
|
6dd37a10719b0756f9a2007516c084946fa6443b
|
[
"MIT"
] |
permissive
|
Doyoks/electrum-axe
|
b40d88603eeec87014618b4ffc3b950488b00369
|
15f6c97f34fdf5518aaec472c3346532515929d4
|
refs/heads/master
| 2020-06-20T18:21:26.031098 | 2019-06-12T16:59:49 | 2019-06-12T16:59:49 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 8,252 |
py
|
from datetime import datetime
from typing import NamedTuple, Callable
from kivy.app import App
from kivy.factory import Factory
from kivy.properties import ObjectProperty
from kivy.lang import Builder
from kivy.clock import Clock
from kivy.uix.label import Label
from kivy.uix.dropdown import DropDown
from kivy.uix.button import Button
from .question import Question
from electrum_axe.gui.kivy.i18n import _
from electrum_axe.util import InvalidPassword
from electrum_axe.address_synchronizer import TX_HEIGHT_LOCAL
Builder.load_string('''
<TxDialog>
id: popup
title: _('Transaction')
is_mine: True
can_sign: False
can_broadcast: False
fee_str: ''
date_str: ''
date_label:''
amount_str: ''
tx_hash: ''
status_str: ''
description: ''
outputs_str: ''
BoxLayout:
orientation: 'vertical'
ScrollView:
scroll_type: ['bars', 'content']
bar_width: '25dp'
GridLayout:
height: self.minimum_height
size_hint_y: None
cols: 1
spacing: '10dp'
padding: '10dp'
GridLayout:
height: self.minimum_height
size_hint_y: None
cols: 1
spacing: '10dp'
BoxLabel:
text: _('Status')
value: root.status_str
BoxLabel:
text: _('Description') if root.description else ''
value: root.description
BoxLabel:
text: root.date_label
value: root.date_str
BoxLabel:
text: _('Amount sent') if root.is_mine else _('Amount received')
value: root.amount_str
BoxLabel:
text: _('Transaction fee') if root.fee_str else ''
value: root.fee_str
TopLabel:
text: _('Transaction ID') + ':' if root.tx_hash else ''
TxHashLabel:
data: root.tx_hash
name: _('Transaction ID')
TopLabel:
text: _('Outputs') + ':'
OutputList:
id: output_list
Widget:
size_hint: 1, 0.1
BoxLayout:
size_hint: 1, None
height: '48dp'
Button:
id: action_button
size_hint: 0.5, None
height: '48dp'
text: ''
disabled: True
opacity: 0
on_release: root.on_action_button_clicked()
IconButton:
size_hint: 0.5, None
height: '48dp'
icon: 'atlas://electrum_axe/gui/kivy/theming/light/qrcode'
on_release: root.show_qr()
Button:
size_hint: 0.5, None
height: '48dp'
text: _('Close')
on_release: root.dismiss()
''')
class ActionButtonOption(NamedTuple):
    # One candidate action for the dialog's single action button:
    # label text, click callback (receives the button), and whether
    # the action is currently available.
    text: str
    func: Callable
    enabled: bool
class TxDialog(Factory.Popup):
    def __init__(self, app, tx):
        # app: the Kivy Electrum app; tx: the transaction to display.
        Factory.Popup.__init__(self)
        self.app = app
        self.wallet = self.app.wallet
        self.tx = tx
        # Action-button callback; replaced by update_action_button().
        self._action_button_fn = lambda btn: None
    def on_open(self):
        # Kivy popup hook: refresh all fields when the dialog opens.
        self.update()
def update(self):
format_amount = self.app.format_amount_and_units
tx_details = self.wallet.get_tx_info(self.tx)
tx_mined_status = tx_details.tx_mined_status
exp_n = tx_details.mempool_depth_bytes
amount, fee = tx_details.amount, tx_details.fee
self.status_str = tx_details.status
self.description = tx_details.label
self.can_broadcast = tx_details.can_broadcast
self.tx_hash = tx_details.txid or ''
if tx_mined_status.timestamp:
self.date_label = _('Date')
self.date_str = datetime.fromtimestamp(tx_mined_status.timestamp).isoformat(' ')[:-3]
elif exp_n:
self.date_label = _('Mempool depth')
self.date_str = _('{} from tip').format('%.2f MB'%(exp_n/1000000))
else:
self.date_label = ''
self.date_str = ''
if amount is None:
self.amount_str = _("Transaction unrelated to your wallet")
elif amount > 0:
self.is_mine = False
self.amount_str = format_amount(amount)
else:
self.is_mine = True
self.amount_str = format_amount(-amount)
self.fee_str = format_amount(fee) if fee is not None else _('unknown')
self.can_sign = self.wallet.can_sign(self.tx)
self.ids.output_list.update(self.tx.get_outputs_for_UI())
self.is_local_tx = tx_mined_status.height == TX_HEIGHT_LOCAL
self.update_action_button()
def update_action_button(self):
action_button = self.ids.action_button
options = (
ActionButtonOption(text=_('Sign'), func=lambda btn: self.do_sign(), enabled=self.can_sign),
ActionButtonOption(text=_('Broadcast'), func=lambda btn: self.do_broadcast(), enabled=self.can_broadcast),
ActionButtonOption(text=_('Remove'), func=lambda btn: self.remove_local_tx(), enabled=self.is_local_tx),
)
num_options = sum(map(lambda o: bool(o.enabled), options))
# if no options available, hide button
if num_options == 0:
action_button.disabled = True
action_button.opacity = 0
return
action_button.disabled = False
action_button.opacity = 1
if num_options == 1:
# only one option, button will correspond to that
for option in options:
if option.enabled:
action_button.text = option.text
self._action_button_fn = option.func
else:
# multiple options. button opens dropdown which has one sub-button for each
dropdown = DropDown()
action_button.text = _('Options')
self._action_button_fn = dropdown.open
for option in options:
if option.enabled:
btn = Button(text=option.text, size_hint_y=None, height=48)
btn.bind(on_release=option.func)
dropdown.add_widget(btn)
def on_action_button_clicked(self):
action_button = self.ids.action_button
self._action_button_fn(action_button)
def do_sign(self):
self.app.protected(_("Enter your PIN code in order to sign this transaction"), self._do_sign, ())
def _do_sign(self, password):
self.status_str = _('Signing') + '...'
Clock.schedule_once(lambda dt: self.__do_sign(password), 0.1)
def __do_sign(self, password):
try:
self.app.wallet.sign_transaction(self.tx, password)
except InvalidPassword:
self.app.show_error(_("Invalid PIN"))
self.update()
def do_broadcast(self):
self.app.broadcast(self.tx)
def show_qr(self):
from electrum_axe.bitcoin import base_encode, bfh
raw_tx = str(self.tx)
text = bfh(raw_tx)
text = base_encode(text, base=43)
self.app.qr_dialog(_("Raw Transaction"), text, text_for_clipboard=raw_tx)
def remove_local_tx(self):
txid = self.tx.txid()
to_delete = {txid}
to_delete |= self.wallet.get_depending_transactions(txid)
question = _("Are you sure you want to remove this transaction?")
if len(to_delete) > 1:
question = (_("Are you sure you want to remove this transaction and {} child transactions?")
.format(len(to_delete) - 1))
def on_prompt(b):
if b:
for tx in to_delete:
self.wallet.remove_transaction(tx)
self.wallet.storage.write()
self.app._trigger_update_wallet() # FIXME private...
self.dismiss()
d = Question(question, on_prompt)
d.open()
|
[
"[email protected]"
] | |
e6c280ba64865152ee2282f92423a4eece3ec74a
|
7f313668d00abcbf529858303635c7bf58964487
|
/dashboard/dashboard/pinpoint/handlers/new.py
|
48d096fbc3f8a6b7b307a66089dd92e04a893569
|
[
"BSD-3-Clause"
] |
permissive
|
flychen50/catapult
|
70d4f16c313829f312ee76ed4c0d133885406282
|
52e67e75f6c2d0315d510a54a52ed29abe3d57f7
|
refs/heads/master
| 2022-03-07T00:54:11.401996 | 2021-05-12T17:31:28 | 2021-05-12T18:35:11 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 15,959 |
py
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import json
import logging
import shlex
from dashboard.api import api_request_handler
from dashboard.common import bot_configurations
from dashboard.common import utils
from dashboard.pinpoint.models import change
from dashboard.pinpoint.models import job as job_module
from dashboard.pinpoint.models import job_state
from dashboard.pinpoint.models import quest as quest_module
from dashboard.pinpoint.models import scheduler
from dashboard.pinpoint.models import task as task_module
from dashboard.pinpoint.models.tasks import performance_bisection
from dashboard.pinpoint.models.tasks import read_value
_ERROR_BUG_ID = 'Bug ID must be an integer.'
_ERROR_TAGS_DICT = 'Tags must be a dict of key/value string pairs.'
_ERROR_UNSUPPORTED = 'This benchmark (%s) is unsupported.'
_ERROR_PRIORITY = 'Priority must be an integer.'
class New(api_request_handler.ApiRequestHandler):
    """Handler that cooks up a fresh Pinpoint job."""

    def _CheckUser(self):
        # Only logged-in tryjob users may create jobs.
        self._CheckIsLoggedIn()
        if not utils.IsTryjobUser():
            raise api_request_handler.ForbiddenError()

    def Post(self):
        """Create, schedule and announce a job; returns its id and URL."""
        # TODO(dberris): Validate the inputs based on the type of job requested.
        job = _CreateJob(self.request)

        # We apply the cost-based scheduling at job creation time, so that we can
        # roll out the feature as jobs come along.
        scheduler.Schedule(job, scheduler.Cost(job))

        job.PostCreationUpdate()

        return {
            'jobId': job.job_id,
            'jobUrl': job.url,
        }
def _CreateJob(request):
    """Creates a new Pinpoint job from WebOb request arguments.

    Validates and canonicalises the request parameters, constructs the Job
    entity (and, when the experimental execution engine is enabled, its task
    graph), and returns the job. Raises ValueError on invalid input.
    """
    original_arguments = request.params.mixed()
    logging.debug('Received Params: %s', original_arguments)

    # This call will fail if some of the required arguments are not in the
    # original request.
    _ValidateRequiredParams(original_arguments)

    arguments = _ArgumentsWithConfiguration(original_arguments)
    logging.debug('Updated Params: %s', arguments)

    # Validate arguments and convert them to canonical internal representation.
    quests = _GenerateQuests(arguments)

    # Validate the priority, if it's present.
    priority = _ValidatePriority(arguments.get('priority'))

    # Validate and find the associated issue.
    bug_id, project = _ValidateBugId(
        arguments.get('bug_id'), arguments.get('project', 'chromium'))
    comparison_mode = _ValidateComparisonMode(arguments.get('comparison_mode'))
    comparison_magnitude = _ValidateComparisonMagnitude(
        arguments.get('comparison_magnitude'))
    gerrit_server, gerrit_change_id = _ValidatePatch(
        arguments.get('patch', arguments.get('experiment_patch')))
    name = arguments.get('name')
    pin = _ValidatePin(arguments.get('pin'))
    tags = _ValidateTags(arguments.get('tags'))
    user = _ValidateUser(arguments.get('user'))
    changes = _ValidateChanges(comparison_mode, arguments)

    # If this is a try job, we assume it's higher priority than bisections, so
    # we'll set it at a negative priority.
    # BUGFIX: this previously tested `priority not in arguments`, comparing the
    # *parsed integer* against the argument names (always true), which silently
    # overrode any user-supplied priority on try jobs. Test the key instead.
    if 'priority' not in arguments and comparison_mode == job_state.TRY:
        priority = -1

    # TODO(dberris): Make this the default when we've graduated the beta.
    use_execution_engine = (
        arguments.get('experimental_execution_engine')
        and arguments.get('comparison_mode') == job_state.PERFORMANCE)

    # Ensure that we have the required fields in tryjob requests.
    if comparison_mode == 'try':
        if 'benchmark' not in arguments:
            raise ValueError('Missing required "benchmark" argument.')

        # First we check whether there's a quest that's of type 'RunTelemetryTest'.
        is_telemetry_test = any(
            [isinstance(q, quest_module.RunTelemetryTest) for q in quests])
        if is_telemetry_test and ('story' not in arguments
                                  and 'story_tags' not in arguments):
            raise ValueError(
                'Missing either "story" or "story_tags" as arguments for try jobs.')

    # Create job.
    job = job_module.Job.New(
        quests if not use_execution_engine else (),
        changes,
        arguments=original_arguments,
        bug_id=bug_id,
        comparison_mode=comparison_mode,
        comparison_magnitude=comparison_magnitude,
        gerrit_server=gerrit_server,
        gerrit_change_id=gerrit_change_id,
        name=name,
        pin=pin,
        tags=tags,
        user=user,
        priority=priority,
        use_execution_engine=use_execution_engine,
        project=project)

    if use_execution_engine:
        # TODO(dberris): We need to figure out a way to get the arguments to be more
        # structured when it comes in from the UI, so that we don't need to do the
        # manual translation of options here.
        # TODO(dberris): Decide whether we can make some of these hard-coded options
        # be part of a template that's available in the UI (or by configuration
        # somewhere else, maybe luci-config?)
        start_change, end_change = changes
        target = arguments.get('target')
        task_options = performance_bisection.TaskOptions(
            build_option_template=performance_bisection.BuildOptionTemplate(
                builder=arguments.get('builder'),
                target=target,
                bucket=arguments.get('bucket', 'master.tryserver.chromium.perf'),
            ),
            test_option_template=performance_bisection.TestOptionTemplate(
                swarming_server=arguments.get('swarming_server'),
                dimensions=arguments.get('dimensions'),
                extra_args=arguments.get('extra_test_args'),
            ),
            read_option_template=performance_bisection.ReadOptionTemplate(
                benchmark=arguments.get('benchmark'),
                histogram_options=read_value.HistogramOptions(
                    grouping_label=arguments.get('grouping_label'),
                    story=arguments.get('story'),
                    statistic=arguments.get('statistic'),
                    histogram_name=arguments.get('chart'),
                ),
                graph_json_options=read_value.GraphJsonOptions(
                    chart=arguments.get('chart'), trace=arguments.get('trace')),
                mode=('histogram_sets'
                      if target in performance_bisection.EXPERIMENTAL_TARGET_SUPPORT
                      else 'graph_json')),
            analysis_options=performance_bisection.AnalysisOptions(
                comparison_magnitude=arguments.get('comparison_magnitude'),
                min_attempts=10,
                max_attempts=60,
            ),
            start_change=start_change,
            end_change=end_change,
            pinned_change=arguments.get('patch'),
        )
        task_module.PopulateTaskGraph(
            job, performance_bisection.CreateGraph(task_options, arguments))
    return job
def _ArgumentsWithConfiguration(original_arguments):
# "configuration" is a special argument that maps to a list of preset
# arguments. Pull any arguments from the specified "configuration", if any.
new_arguments = original_arguments.copy()
configuration = original_arguments.get('configuration')
if configuration:
try:
default_arguments = bot_configurations.Get(configuration)
except ValueError:
# Reraise with a clearer message.
raise ValueError("Bot Config: %s doesn't exist." % configuration)
logging.info('Bot Config: %s', default_arguments)
if default_arguments:
for k, v in list(default_arguments.items()):
# We special-case the extra_test_args argument to be additive, so that
# we can respect the value set in bot_configurations in addition to
# those provided from the UI.
if k == 'extra_test_args':
# First, parse whatever is already there. We'll canonicalise the
# inputs as a JSON list of strings.
provided_args = new_arguments.get('extra_test_args', '')
extra_test_args = []
if provided_args:
try:
extra_test_args = json.loads(provided_args)
except ValueError:
extra_test_args = shlex.split(provided_args)
try:
configured_args = json.loads(v)
except ValueError:
configured_args = shlex.split(v)
new_arguments['extra_test_args'] = json.dumps(extra_test_args +
configured_args)
else:
new_arguments.setdefault(k, v)
return new_arguments
def _ValidateBugId(bug_id, project):
if not bug_id:
return None, None
try:
# TODO(dberris): Figure out a way to check the issue tracker if the project
# is valid at creation time. That might involve a user credential check, so
# we might need to update the scopes we're asking for. For now trust that
# the inputs are valid.
return int(bug_id), project
except ValueError:
raise ValueError(_ERROR_BUG_ID)
def _ValidatePriority(priority):
if not priority:
return None
try:
return int(priority)
except ValueError:
raise ValueError(_ERROR_PRIORITY)
def _ValidateChangesForTry(arguments):
    """Build the (base, experiment) Change pair for a try job.

    Raises:
      ValueError: if base_git_hash is missing.
    """
    if 'base_git_hash' not in arguments:
        raise ValueError('base_git_hash is required for try jobs')

    commit_1 = change.Commit.FromDict({
        'repository': arguments.get('repository'),
        'git_hash': arguments.get('base_git_hash'),
    })
    # The experiment commit falls back from end_git_hash to
    # experiment_git_hash and finally to the base hash itself.
    commit_2 = change.Commit.FromDict({
        'repository':
            arguments.get('repository'),
        'git_hash':
            arguments.get(
                'end_git_hash',
                arguments.get(
                    'experiment_git_hash',
                    arguments.get('base_git_hash'),
                ),
            ),
    })

    # Now, if we have a patch argument, we need to handle the case where a patch
    # needs to be applied to both the 'end_git_hash' and the 'base_git_hash'.
    patch = arguments.get('patch')
    if patch:
        patch = change.GerritPatch.FromUrl(patch)
    exp_patch = arguments.get('experiment_patch')
    if exp_patch:
        exp_patch = change.GerritPatch.FromUrl(exp_patch)
    base_patch = arguments.get('base_patch')
    if base_patch:
        base_patch = change.GerritPatch.FromUrl(base_patch)

    # A plain 'patch' applies to both sides when the commits differ;
    # otherwise it serves only as the experiment patch.
    if commit_1.git_hash != commit_2.git_hash and patch:
        base_patch = patch
        exp_patch = patch
    if not exp_patch:
        exp_patch = patch
    change_1 = change.Change(commits=(commit_1,), patch=base_patch)
    change_2 = change.Change(commits=(commit_2,), patch=exp_patch)
    return change_1, change_2
def _ValidateChanges(comparison_mode, arguments):
    """Resolve the request into the two Changes the job will compare.

    Accepts either an explicit JSON 'changes' list, try-job style
    base/end hashes, or bisection-style start/end hashes.
    """
    changes = arguments.get('changes')
    if changes:
        # FromData() performs input validation.
        return [change.Change.FromData(c) for c in json.loads(changes)]

    # There are valid cases where a tryjob requests a base_git_hash and an
    # end_git_hash without a patch. Let's check first whether we're finding the
    # right combination of inputs here.
    if comparison_mode == job_state.TRY:
        return _ValidateChangesForTry(arguments)

    # Everything else that follows only applies to bisections.
    assert (comparison_mode == job_state.FUNCTIONAL
            or comparison_mode == job_state.PERFORMANCE)

    if 'start_git_hash' not in arguments or 'end_git_hash' not in arguments:
        raise ValueError(
            'bisections require both a start_git_hash and an end_git_hash')

    commit_1 = change.Commit.FromDict({
        'repository': arguments.get('repository'),
        'git_hash': arguments.get('start_git_hash'),
    })
    commit_2 = change.Commit.FromDict({
        'repository': arguments.get('repository'),
        'git_hash': arguments.get('end_git_hash'),
    })

    if 'patch' in arguments:
        patch = change.GerritPatch.FromUrl(arguments['patch'])
    else:
        patch = None

    # If we find a patch in the request, this means we want to apply it even to
    # the start commit.
    change_1 = change.Change(commits=(commit_1,), patch=patch)
    change_2 = change.Change(commits=(commit_2,), patch=patch)
    return change_1, change_2
def _ValidatePatch(patch_data):
if patch_data:
patch_details = change.GerritPatch.FromData(patch_data)
return patch_details.server, patch_details.change
return None, None
def _ValidateComparisonMode(comparison_mode):
    """Validate the comparison mode, defaulting an absent one to a try job."""
    if not comparison_mode:
        comparison_mode = job_state.TRY
    if comparison_mode and comparison_mode not in job_module.COMPARISON_MODES:
        raise ValueError('`comparison_mode` should be one of %s. Got "%s".' %
                         (job_module.COMPARISON_MODES + (None,), comparison_mode))
    return comparison_mode
def _ValidateComparisonMagnitude(comparison_magnitude):
if not comparison_magnitude:
return 1.0
return float(comparison_magnitude)
def _GenerateQuests(arguments):
    """Generate a list of Quests from a dict of arguments.

    GenerateQuests uses the arguments to infer what types of Quests the user wants
    to run, and creates a list of Quests with the given configuration.

    Arguments:
      arguments: A dict or MultiDict containing arguments.

    Returns:
      A list of instantiated Quests (FromDict() validates the arguments).
    """
    quests = arguments.get('quests')
    if quests:
        # Explicit quest list: either a comma-separated string or a list of
        # class names defined in quest_module.
        # basestring: this module still supports Python 2.
        if isinstance(quests, basestring):
            quests = quests.split(',')
        quest_classes = []
        for quest in quests:
            if not hasattr(quest_module, quest):
                raise ValueError('Unknown quest: "%s"' % quest)
            quest_classes.append(getattr(quest_module, quest))
    else:
        # No explicit quests: infer the pipeline from the build target name.
        target = arguments.get('target')
        logging.debug('Target: %s', target)

        if target in ('performance_test_suite', 'performance_webview_test_suite',
                      'telemetry_perf_tests', 'telemetry_perf_webview_tests'):
            quest_classes = (quest_module.FindIsolate, quest_module.RunTelemetryTest,
                             quest_module.ReadValue)
        elif 'performance_test_suite_eve' in target:
            # Lacros-on-eve targets run through a dedicated telemetry quest.
            quest_classes = (quest_module.FindIsolate,
                             quest_module.RunLacrosTelemetryTest,
                             quest_module.ReadValue)
        elif target == 'vr_perf_tests':
            quest_classes = (quest_module.FindIsolate,
                             quest_module.RunVrTelemetryTest, quest_module.ReadValue)
        elif 'browser_test' in target:
            quest_classes = (quest_module.FindIsolate, quest_module.RunBrowserTest,
                             quest_module.ReadValue)
        elif 'instrumentation_test' in target:
            quest_classes = (quest_module.FindIsolate,
                             quest_module.RunInstrumentationTest,
                             quest_module.ReadValue)
        elif 'webrtc_perf_tests' in target:
            quest_classes = (quest_module.FindIsolate, quest_module.RunWebRtcTest,
                             quest_module.ReadValue)
        else:
            # Anything else is assumed to be a plain gtest binary.
            quest_classes = (quest_module.FindIsolate, quest_module.RunGTest,
                             quest_module.ReadValue)

    quest_instances = []
    for quest_class in quest_classes:
        # FromDict() performs input validation.
        quest_instances.append(quest_class.FromDict(arguments))
    return quest_instances
def _ValidatePin(pin):
if not pin:
return None
return change.Change.FromData(pin)
def _ValidateTags(tags):
    """Parse the JSON-encoded tags, enforcing a flat str->str mapping."""
    if not tags:
        return {}

    tags_dict = json.loads(tags)

    if not isinstance(tags_dict, dict):
        raise ValueError(_ERROR_TAGS_DICT)

    for k, v in tags_dict.items():
        # basestring: this module still supports Python 2.
        if not isinstance(k, basestring) or not isinstance(v, basestring):
            raise ValueError(_ERROR_TAGS_DICT)

    return tags_dict
def _ValidateUser(user):
return user or utils.GetEmail()
_REQUIRED_NON_EMPTY_PARAMS = {'target', 'benchmark'}
def _ValidateRequiredParams(params):
missing = _REQUIRED_NON_EMPTY_PARAMS - set(params.keys())
if missing:
raise ValueError('Missing required parameters: %s' % (list(missing)))
# Check that they're not empty.
empty_keys = [key for key in _REQUIRED_NON_EMPTY_PARAMS if not params[key]]
if empty_keys:
raise ValueError('Parameters must not be empty: %s' % (empty_keys))
|
[
"[email protected]"
] | |
7b91548e9141af53338a31e7040127830dbfe2eb
|
b5ac4c1f7906bf6722ffab8b04e1aacde632b9d5
|
/server/server/view.py
|
174ca7df8d1faefbf616028cb5dc2c3f9c21d7df
|
[] |
no_license
|
mportela/chrome_plugin
|
e661d4fbb26685683c067ffd0a6441d88f04766d
|
2e84ba8c86e786b86d6ba8572ee28f9f3bb54f15
|
refs/heads/master
| 2021-01-10T06:10:57.730223 | 2016-03-15T16:40:53 | 2016-03-15T16:40:53 | 53,960,830 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 526 |
py
|
import json
from django.http import HttpResponse
from server import models
def display(request):
    """Return all recorded URLs and their timestamps as a JSON array."""
    rows = models.UrlReached.objects.all().values('url', 'date_time')
    # BUGFIX: the queryset used to be handed to HttpResponse directly, which
    # iterates it and concatenates the str() of each dict -- not valid JSON,
    # despite the declared content type. Serialise explicitly; default=str is
    # a deliberate choice to render the datetime values as ISO-ish strings.
    payload = json.dumps(list(rows), default=str)
    return HttpResponse(payload, content_type='application/json')
def process(request):
    """Record the URL reported via the 'url' query parameter and ack with JSON."""
    res_dict = {"true": 1}
    name = str(request.GET.get('query'))  # not used in this demo
    current_url = str(request.GET.get('url'))
    # NOTE(review): this mutates state on a GET request -- acceptable for a
    # demo, but a POST would be more appropriate.
    new_url = models.UrlReached(url=current_url)
    new_url.save()
    return HttpResponse(json.dumps(res_dict), content_type='application/json')
|
[
"[email protected]"
] | |
b5b3bdb6ee9ad2097b89fd0245c51af814711a6d
|
3c4198d76240852d4abcf9b7c940927e217635b3
|
/conanizer/template/test_package/conanfile.py
|
df0cf5cc551fc76ea950b35bd5579c4189adda39
|
[
"MIT"
] |
permissive
|
lasote/vcpkg
|
a3ba4b07936df4395709e4566859bea57cb4a754
|
a92528a9ddf626ac640e80762e3b6dc00001812e
|
refs/heads/master
| 2021-01-19T04:49:13.330532 | 2017-03-13T09:14:06 | 2017-03-13T09:14:06 | 69,260,019 | 3 | 2 | null | 2017-03-13T09:08:15 | 2016-09-26T14:43:08 |
C++
|
UTF-8
|
Python
| false | false | 1,459 |
py
|
from conans import ConanFile, CMake
import os
channel = os.getenv("CONAN_CHANNEL", "vcpkg")
username = os.getenv("CONAN_USERNAME", "lasote")
class VcpkgwrapperTestConan(ConanFile):
    """test_package recipe for a conanized vcpkg port.

    Builds and runs a small bundled example (if one exists under
    port_examples/) against the generated **NAME** package to verify the
    port links and runs. The **NAME**/**VERSION** placeholders are filled
    in by the conanizer template engine.
    """
    settings = "os", "compiler", "build_type", "arch"
    requires = "**NAME**/**VERSION**@%s/%s" % (username, channel)
    generators = "cmake"

    @property
    def port(self):
        # Name of the vcpkg port under test (templated at generation time).
        return "**NAME**"

    @property
    def port_example(self):
        """Path of a bundled C/C++ example for this port, or None if absent."""
        possibles = [os.path.join("port_examples", "%s.cpp" % self.port),
                     os.path.join("port_examples", "%s.c" % self.port),]
        for filename in possibles:
            if os.path.exists(os.path.join(self.conanfile_directory, filename)):
                # Escape backslashes so the path survives being passed to CMake.
                return filename.replace("\\", "\\\\")
        return None

    def build(self):
        cmake = CMake(self.settings)
        if self.port_example:
            self.run('cmake "%s" %s -DTEST_FILENAME=%s' % (self.conanfile_directory, cmake.command_line, self.port_example))
            self.run("cmake --build . %s" % cmake.build_config)
        else:
            self.output.warn("NOT TEST PROGRAM PREPARED FOR PORT %s, please collaborate with some example in https://github.com/lasote/vcpkg" % self.port)

    def imports(self):
        # Copy shared libraries next to the test executable so it can run.
        self.copy("*.dll", "bin", "bin")
        self.copy("*.dylib", "bin", "bin")

    def test(self):
        if self.port_example:
            os.chdir("bin")
            self.run(".%stest_exe.exe" % os.sep)
|
[
"[email protected]"
] | |
d9303ea004b2f1d96a416f13066197be0e531418
|
4b0c57dddf8bd98c021e0967b5d94563d15372e1
|
/run_MatrixElement/test/emptyPSets/emptyPSet_STopT_T_JESDown_cfg.py
|
042a3a7c8fdb4c02eb45821cf68f515dc5dea706
|
[] |
no_license
|
aperloff/TAMUWW
|
fea6ed0066f3f2cef4d44c525ee843c6234460ba
|
c18e4b7822076bf74ee919509a6bd1f3cf780e11
|
refs/heads/master
| 2021-01-21T14:12:34.813887 | 2018-07-23T04:59:40 | 2018-07-23T04:59:40 | 10,922,954 | 0 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 914 |
py
|
import FWCore.ParameterSet.Config as cms
import os

#!
#! PROCESS
#!
# cmsRun configuration: runs the MatrixElement process over a skimmed
# single-top (t-channel, JES shifted down) input file.
process = cms.Process("MatrixElementProcess")

#!
#! SERVICES
#!
#process.load('Configuration.StandardSequences.Services_cff')
process.load('FWCore.MessageLogger.MessageLogger_cfi')
process.MessageLogger.cerr.FwkReport.reportEvery = 5000
process.load('CommonTools.UtilAlgos.TFileService_cfi')
process.TFileService.fileName=cms.string('STopT_T_JESDown.root')

#!
#! INPUT
#!
inputFiles = cms.untracked.vstring(
    'root://cmsxrootd.fnal.gov//store/user/aperloff/MatrixElement/Summer12ME8TeV/MEInput/STopT_T_JESDown.root'
    )

# Only the first 10 events are processed here (an "empty" PSet test config).
process.maxEvents = cms.untracked.PSet(input = cms.untracked.int32(10))
process.source = cms.Source("PoolSource",
                            skipEvents = cms.untracked.uint32(0),
                            fileNames = inputFiles )
process.options = cms.untracked.PSet( wantSummary = cms.untracked.bool(True) )
|
[
"[email protected]"
] | |
3b9d08febc3ef5b0a1a002a405d4fddb1b6182df
|
5a37472eae214d70dbe90b9dc61d03d01b8ccead
|
/accounts/models.py
|
9e4d1366d3e2d05f7d54cc80f88b60c1a2695f51
|
[] |
no_license
|
Anuragjain20/ChatApp
|
5e65d3264f27fe7bef5fdf8a9c5517220653d767
|
4db975c522a3f410a4e918e96087144e8abe7c06
|
refs/heads/main
| 2023-08-24T19:23:47.617916 | 2021-10-28T18:55:54 | 2021-10-28T18:55:54 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 272 |
py
|
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Profile(User):
    """Chat user profile.

    NOTE(review): subclassing django.contrib.auth.models.User creates
    multi-table inheritance; a OneToOneField to User is the more common
    profile pattern -- confirm this is intentional before reusing.
    """
    # Display name shown in the chat UI (also the __str__ value).
    name = models.CharField(max_length=255)
    # Presumably flipped after account verification -- confirm against views.
    is_verified = models.BooleanField(default=False)

    def __str__(self):
        return self.name
|
[
"[email protected]"
] | |
dffdc0c909630a0d178787445594c46956598d25
|
dcadfaaf6d5aca5a52b422df68a7ddef67b37ec1
|
/pay-api/migrations/versions/7e51d3ce4005_statement_settings.py
|
5f5f0dc12d331e69986fef93636af1629b78e6f6
|
[
"Apache-2.0"
] |
permissive
|
pwei1018/sbc-pay
|
ec161e2d2574272b52a7cad38a43cf68e105f855
|
137b64ab57316f0452c760488301e33be6e9bbe0
|
refs/heads/development
| 2022-06-08T17:36:20.226648 | 2021-04-07T17:00:42 | 2021-04-07T17:01:07 | 168,407,310 | 0 | 4 |
Apache-2.0
| 2019-06-12T19:06:58 | 2019-01-30T20:05:36 |
Python
|
UTF-8
|
Python
| false | false | 4,488 |
py
|
"""statement_settings
Revision ID: 7e51d3ce4005
Revises: 567df104d26c
Create Date: 2020-08-18 10:04:08.532357
"""
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '7e51d3ce4005'
down_revision = '567df104d26c'
branch_labels = None
depends_on = None
def upgrade():
    """Introduce statement_settings and repoint statement/statement_invoices.

    statement moves from referencing payment_account directly to referencing
    a statement_settings row; statement_invoices gets its misspelled
    'inovice_id' column replaced by 'invoice_id'.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('statement_settings',
                    sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
                    sa.Column('frequency', sa.String(length=50), nullable=True),
                    sa.Column('payment_account_id', sa.Integer(), nullable=True),
                    sa.Column('from_date', sa.Date(), nullable=False),
                    sa.Column('to_date', sa.Date(), nullable=False),
                    sa.ForeignKeyConstraint(['payment_account_id'], ['payment_account.id'], ),
                    sa.PrimaryKeyConstraint('id')
                    )
    op.create_index(op.f('ix_statement_settings_frequency'), 'statement_settings', ['frequency'], unique=False)
    op.create_index(op.f('ix_statement_settings_payment_account_id'), 'statement_settings', ['payment_account_id'],
                    unique=False)
    op.add_column('statement', sa.Column('created_on', sa.Date(), nullable=False))
    op.add_column('statement', sa.Column('statement_settings_id', sa.Integer(), nullable=True))
    op.alter_column('statement', 'to_date',
                    existing_type=sa.DATE(),
                    nullable=False)
    op.create_index(op.f('ix_statement_statement_settings_id'), 'statement', ['statement_settings_id'], unique=False)
    op.drop_index('ix_statement_payment_account_id', table_name='statement')
    op.drop_index('ix_statement_status', table_name='statement')
    op.drop_constraint('statement_payment_account_id_fkey', 'statement', type_='foreignkey')
    # Unnamed FK: the backend's naming convention decides the constraint name.
    op.create_foreign_key(None, 'statement', 'statement_settings', ['statement_settings_id'], ['id'])
    op.drop_column('statement', 'status')
    op.drop_column('statement', 'payment_account_id')
    op.add_column('statement_invoices', sa.Column('invoice_id', sa.Integer(), nullable=False))
    op.drop_index('ix_statement_invoices_status', table_name='statement_invoices')
    op.drop_constraint('statement_invoices_inovice_id_fkey', 'statement_invoices', type_='foreignkey')
    op.create_foreign_key(None, 'statement_invoices', 'invoice', ['invoice_id'], ['id'])
    op.drop_column('statement_invoices', 'status')
    op.drop_column('statement_invoices', 'inovice_id')
    # ### end Alembic commands ###
def downgrade():
    """Revert the statement/statement_settings schema changes.

    BUGFIX: the autogenerated script dropped the foreign keys created in
    upgrade() with a None constraint name, which Alembic rejects at runtime
    ("Constraint must have a name"). The names used below are the PostgreSQL
    default-convention names ({table}_{column}_fkey) produced by the unnamed
    create_foreign_key() calls in upgrade() -- verify against the live schema
    before running this downgrade.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('statement_invoices', sa.Column('inovice_id', sa.INTEGER(), autoincrement=False, nullable=False))
    op.add_column('statement_invoices', sa.Column('status', sa.VARCHAR(length=50), autoincrement=False, nullable=True))
    op.drop_constraint('statement_invoices_invoice_id_fkey', 'statement_invoices', type_='foreignkey')
    op.create_foreign_key('statement_invoices_inovice_id_fkey', 'statement_invoices', 'invoice', ['inovice_id'], ['id'])
    op.create_index('ix_statement_invoices_status', 'statement_invoices', ['status'], unique=False)
    op.drop_column('statement_invoices', 'invoice_id')
    op.add_column('statement', sa.Column('payment_account_id', sa.INTEGER(), autoincrement=False, nullable=True))
    op.add_column('statement', sa.Column('status', sa.VARCHAR(length=50), autoincrement=False, nullable=True))
    op.drop_constraint('statement_statement_settings_id_fkey', 'statement', type_='foreignkey')
    op.create_foreign_key('statement_payment_account_id_fkey', 'statement', 'payment_account', ['payment_account_id'],
                          ['id'])
    op.create_index('ix_statement_status', 'statement', ['status'], unique=False)
    op.create_index('ix_statement_payment_account_id', 'statement', ['payment_account_id'], unique=False)
    op.drop_index(op.f('ix_statement_statement_settings_id'), table_name='statement')
    op.alter_column('statement', 'to_date',
                    existing_type=sa.DATE(),
                    nullable=True)
    op.drop_column('statement', 'statement_settings_id')
    op.drop_column('statement', 'created_on')
    op.drop_index(op.f('ix_statement_settings_payment_account_id'), table_name='statement_settings')
    op.drop_index(op.f('ix_statement_settings_frequency'), table_name='statement_settings')
    op.drop_table('statement_settings')
    # ### end Alembic commands ###
|
[
"[email protected]"
] | |
d0fd835e3d61a7db44b259b0c1d9a05bc4796396
|
a5a99f646e371b45974a6fb6ccc06b0a674818f2
|
/DPGAnalysis/SiStripTools/python/apvcyclephasemonitor_cfi.py
|
9814e0f8f7209992f5a68a743eef2f4a9d42ad1f
|
[
"Apache-2.0"
] |
permissive
|
cms-sw/cmssw
|
4ecd2c1105d59c66d385551230542c6615b9ab58
|
19c178740257eb48367778593da55dcad08b7a4f
|
refs/heads/master
| 2023-08-23T21:57:42.491143 | 2023-08-22T20:22:40 | 2023-08-22T20:22:40 | 10,969,551 | 1,006 | 3,696 |
Apache-2.0
| 2023-09-14T19:14:28 | 2013-06-26T14:09:07 |
C++
|
UTF-8
|
Python
| false | false | 198 |
py
|
import FWCore.ParameterSet.Config as cms

# Default configuration for the APVCyclePhaseMonitor analyzer: it reads the
# APV cycle phase collection produced by the module labelled "APVPhases".
apvcyclephasemonitor = cms.EDAnalyzer('APVCyclePhaseMonitor',
                                      apvCyclePhaseCollection = cms.InputTag("APVPhases"),
                                      )
|
[
"[email protected]"
] | |
dc11481dcd1127650ac017a993ccbc85dc68dfff
|
1d23c51bd24fc168df14fa10b30180bd928d1ea4
|
/Lib/site-packages/twisted/conch/test/test_manhole.py
|
b4247f0f7dceafecdd11fad67215eabd6c14773d
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
BeaverInc/covid19CityMontreal
|
62dac14840dadcdf20985663bc2527c90bab926c
|
1b283589f6885977a179effce20212a9311a2ac0
|
refs/heads/master
| 2021-05-22T20:01:22.443897 | 2020-06-21T08:00:57 | 2020-06-21T08:00:57 | 253,067,914 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 13,436 |
py
|
# -*- test-case-name: twisted.conch.test.test_manhole -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
# pylint: disable=I0011,W9401,W9402
"""
Tests for L{twisted.conch.manhole}.
"""
import traceback
from twisted.trial import unittest
from twisted.internet import error, defer
from twisted.test.proto_helpers import StringTransport
from twisted.conch.test.test_recvline import (
_TelnetMixin, _SSHMixin, _StdioMixin, stdio, ssh)
from twisted.conch import manhole
from twisted.conch.insults import insults
def determineDefaultFunctionName():
    """
    Return the string used by Python as the name for code objects which are
    compiled from interactive input or at the top-level of modules.
    """
    try:
        raise ZeroDivisionError()
    except ZeroDivisionError:
        # extract_stack() ends with this function's own frame; the entry just
        # before it belongs to our caller, whose code-object name is what we
        # want (e.g. '<module>' when called at module scope).
        stack = traceback.extract_stack()
    return stack[-2][2]
defaultFunctionName = determineDefaultFunctionName()
class ManholeInterpreterTests(unittest.TestCase):
    """
    Tests for L{manhole.ManholeInterpreter}.
    """
    def test_resetBuffer(self):
        """
        L{ManholeInterpreter.resetBuffer} should empty the input buffer.
        """
        # None stands in for the handler; resetBuffer does not touch it.
        interpreter = manhole.ManholeInterpreter(None)
        interpreter.buffer.extend(["1", "2"])
        interpreter.resetBuffer()
        self.assertFalse(interpreter.buffer)
class ManholeProtocolTests(unittest.TestCase):
    """
    Tests for L{manhole.Manhole}.
    """
    def test_interruptResetsInterpreterBuffer(self):
        """
        L{manhole.Manhole.handle_INT} should cause the interpreter input buffer
        to be reset.
        """
        # Wire a Manhole terminal protocol to an in-memory transport.
        transport = StringTransport()
        terminal = insults.ServerProtocol(manhole.Manhole)
        terminal.makeConnection(transport)

        protocol = terminal.terminalProtocol
        interpreter = protocol.interpreter
        # Simulate partially-entered input, then a ^C interrupt.
        interpreter.buffer.extend(["1", "2"])
        protocol.handle_INT()
        self.assertFalse(interpreter.buffer)
class WriterTests(unittest.TestCase):
    """
    Tests for L{manhole.lastColorizedLine}.

    Most cases only exercise the colorizer for crash-freedom on various
    Python snippets; the unicode/bytes cases additionally check the
    return type.
    """
    def test_Integer(self):
        """
        Colorize an integer.
        """
        manhole.lastColorizedLine("1")

    def test_DoubleQuoteString(self):
        """
        Colorize an integer in double quotes.
        """
        manhole.lastColorizedLine('"1"')

    def test_SingleQuoteString(self):
        """
        Colorize an integer in single quotes.
        """
        manhole.lastColorizedLine("'1'")

    def test_TripleSingleQuotedString(self):
        """
        Colorize an integer in triple quotes.
        """
        manhole.lastColorizedLine("'''1'''")

    def test_TripleDoubleQuotedString(self):
        """
        Colorize an integer in triple and double quotes.
        """
        manhole.lastColorizedLine('"""1"""')

    def test_FunctionDefinition(self):
        """
        Colorize a function definition.
        """
        manhole.lastColorizedLine("def foo():")

    def test_ClassDefinition(self):
        """
        Colorize a class definition.
        """
        manhole.lastColorizedLine("class foo:")

    def test_unicode(self):
        """
        Colorize a Unicode string.
        """
        res = manhole.lastColorizedLine(u"\u0438")
        self.assertTrue(isinstance(res, bytes))

    def test_bytes(self):
        """
        Colorize a UTF-8 byte string.
        """
        res = manhole.lastColorizedLine(b"\xd0\xb8")
        self.assertTrue(isinstance(res, bytes))

    def test_identicalOutput(self):
        """
        The output of UTF-8 bytestrings and Unicode strings are identical.
        """
        self.assertEqual(manhole.lastColorizedLine(b"\xd0\xb8"),
                         manhole.lastColorizedLine(u"\u0438"))
class ManholeLoopbackMixin:
    """
    Shared behavioural tests for a manhole REPL driven over a loopback
    transport.
    NOTE(review): assumes C{recvlineClient}, C{_testwrite} and
    C{_assertBuffer} are provided by the transport mixin this class is
    combined with (see the Telnet/SSH/Stdio test classes below) --
    confirm there.
    """
    serverProtocol = manhole.ColoredManhole
    def wfd(self, d):
        # Legacy deferred-generator helper: wait for d's result.
        return defer.waitForDeferred(d)
    def test_SimpleExpression(self):
        """
        Evaluate simple expression.
        """
        done = self.recvlineClient.expect(b"done")
        self._testwrite(
            b"1 + 1\n"
            b"done")
        def finished(ign):
            self._assertBuffer(
                [b">>> 1 + 1",
                 b"2",
                 b">>> done"])
        return done.addCallback(finished)
    def test_TripleQuoteLineContinuation(self):
        """
        Evaluate line continuation in triple quotes.
        """
        done = self.recvlineClient.expect(b"done")
        self._testwrite(
            b"'''\n'''\n"
            b"done")
        def finished(ign):
            self._assertBuffer(
                [b">>> '''",
                 b"... '''",
                 b"'\\n'",
                 b">>> done"])
        return done.addCallback(finished)
    def test_FunctionDefinition(self):
        """
        Evaluate function definition.
        """
        done = self.recvlineClient.expect(b"done")
        self._testwrite(
            b"def foo(bar):\n"
            b"\tprint(bar)\n\n"
            b"foo(42)\n"
            b"done")
        def finished(ign):
            self._assertBuffer(
                [b">>> def foo(bar):",
                 b"... print(bar)",
                 b"... ",
                 b">>> foo(42)",
                 b"42",
                 b">>> done"])
        return done.addCallback(finished)
    def test_ClassDefinition(self):
        """
        Evaluate class definition.
        """
        done = self.recvlineClient.expect(b"done")
        self._testwrite(
            b"class Foo:\n"
            b"\tdef bar(self):\n"
            b"\t\tprint('Hello, world!')\n\n"
            b"Foo().bar()\n"
            b"done")
        def finished(ign):
            self._assertBuffer(
                [b">>> class Foo:",
                 b"... def bar(self):",
                 b"... print('Hello, world!')",
                 b"... ",
                 b">>> Foo().bar()",
                 b"Hello, world!",
                 b">>> done"])
        return done.addCallback(finished)
    def test_Exception(self):
        """
        Evaluate raising an exception.
        """
        done = self.recvlineClient.expect(b"done")
        self._testwrite(
            b"raise Exception('foo bar baz')\n"
            b"done")
        def finished(ign):
            self._assertBuffer(
                [b">>> raise Exception('foo bar baz')",
                 b"Traceback (most recent call last):",
                 b' File "<console>", line 1, in ' +
                 defaultFunctionName.encode("utf-8"),
                 b"Exception: foo bar baz",
                 b">>> done"])
        return done.addCallback(finished)
    def test_ControlC(self):
        """
        Evaluate interrupting with CTRL-C.
        """
        done = self.recvlineClient.expect(b"done")
        self._testwrite(
            b"cancelled line" + manhole.CTRL_C +
            b"done")
        def finished(ign):
            self._assertBuffer(
                [b">>> cancelled line",
                 b"KeyboardInterrupt",
                 b">>> done"])
        return done.addCallback(finished)
    def test_interruptDuringContinuation(self):
        """
        Sending ^C to Manhole while in a state where more input is required to
        complete a statement should discard the entire ongoing statement and
        reset the input prompt to the non-continuation prompt.
        """
        continuing = self.recvlineClient.expect(b"things")
        self._testwrite(b"(\nthings")
        def gotContinuation(ignored):
            self._assertBuffer(
                [b">>> (",
                 b"... things"])
            interrupted = self.recvlineClient.expect(b">>> ")
            self._testwrite(manhole.CTRL_C)
            return interrupted
        continuing.addCallback(gotContinuation)
        def gotInterruption(ignored):
            self._assertBuffer(
                [b">>> (",
                 b"... things",
                 b"KeyboardInterrupt",
                 b">>> "])
        continuing.addCallback(gotInterruption)
        return continuing
    def test_ControlBackslash(self):
        """
        Evaluate cancelling with CTRL-\.
        """
        self._testwrite(b"cancelled line")
        partialLine = self.recvlineClient.expect(b"cancelled line")
        def gotPartialLine(ign):
            self._assertBuffer(
                [b">>> cancelled line"])
            self._testwrite(manhole.CTRL_BACKSLASH)
            d = self.recvlineClient.onDisconnection
            return self.assertFailure(d, error.ConnectionDone)
        def gotClearedLine(ign):
            self._assertBuffer(
                [b""])
        return partialLine.addCallback(gotPartialLine).addCallback(
            gotClearedLine)
    @defer.inlineCallbacks
    def test_controlD(self):
        """
        A CTRL+D in the middle of a line doesn't close a connection,
        but at the beginning of a line it does.
        """
        self._testwrite(b"1 + 1")
        yield self.recvlineClient.expect(br"\+ 1")
        self._assertBuffer([b">>> 1 + 1"])
        self._testwrite(manhole.CTRL_D + b" + 1")
        yield self.recvlineClient.expect(br"\+ 1")
        self._assertBuffer([b">>> 1 + 1 + 1"])
        self._testwrite(b"\n")
        yield self.recvlineClient.expect(b"3\n>>> ")
        self._testwrite(manhole.CTRL_D)
        d = self.recvlineClient.onDisconnection
        yield self.assertFailure(d, error.ConnectionDone)
    @defer.inlineCallbacks
    def test_ControlL(self):
        """
        CTRL+L is generally used as a redraw-screen command in terminal
        applications. Manhole doesn't currently respect this usage of it,
        but it should at least do something reasonable in response to this
        event (rather than, say, eating your face).
        """
        # Start off with a newline so that when we clear the display we can
        # tell by looking for the missing first empty prompt line.
        self._testwrite(b"\n1 + 1")
        yield self.recvlineClient.expect(br"\+ 1")
        self._assertBuffer([b">>> ", b">>> 1 + 1"])
        self._testwrite(manhole.CTRL_L + b" + 1")
        yield self.recvlineClient.expect(br"1 \+ 1 \+ 1")
        self._assertBuffer([b">>> 1 + 1 + 1"])
    def test_controlA(self):
        """
        CTRL-A can be used as HOME - returning cursor to beginning of
        current line buffer.
        """
        self._testwrite(b'rint "hello"' + b'\x01' + b'p')
        d = self.recvlineClient.expect(b'print "hello"')
        def cb(ignore):
            self._assertBuffer([b'>>> print "hello"'])
        return d.addCallback(cb)
    def test_controlE(self):
        """
        CTRL-E can be used as END - setting cursor to end of current
        line buffer.
        """
        self._testwrite(b'rint "hello' + b'\x01' + b'p' + b'\x05' + b'"')
        d = self.recvlineClient.expect(b'print "hello"')
        def cb(ignore):
            self._assertBuffer([b'>>> print "hello"'])
        return d.addCallback(cb)
    @defer.inlineCallbacks
    def test_deferred(self):
        """
        When a deferred is returned to the manhole REPL, it is displayed with
        a sequence number, and when the deferred fires, the result is printed.
        """
        self._testwrite(
            b"from twisted.internet import defer, reactor\n"
            b"d = defer.Deferred()\n"
            b"d\n")
        yield self.recvlineClient.expect(b"<Deferred #0>")
        self._testwrite(
            b"c = reactor.callLater(0.1, d.callback, 'Hi!')\n")
        yield self.recvlineClient.expect(b">>> ")
        yield self.recvlineClient.expect(
            b"Deferred #0 called back: 'Hi!'\n>>> ")
        self._assertBuffer(
            [b">>> from twisted.internet import defer, reactor",
             b">>> d = defer.Deferred()",
             b">>> d",
             b"<Deferred #0>",
             b">>> c = reactor.callLater(0.1, d.callback, 'Hi!')",
             b"Deferred #0 called back: 'Hi!'",
             b">>> "])
class ManholeLoopbackTelnetTests(_TelnetMixin, unittest.TestCase,
                                 ManholeLoopbackMixin):
    """
    Test manhole loopback over Telnet.
    """
    # All test methods come from ManholeLoopbackMixin; _TelnetMixin
    # supplies the loopback transport fixtures.
    pass
class ManholeLoopbackSSHTests(_SSHMixin, unittest.TestCase,
                              ManholeLoopbackMixin):
    """
    Test manhole loopback over SSH.
    """
    # Skip the whole class when the SSH test dependencies are unavailable
    # (ssh is None when the cryptography requirements are missing).
    if ssh is None:
        skip = "cryptography requirements missing"
class ManholeLoopbackStdioTests(_StdioMixin, unittest.TestCase,
                                ManholeLoopbackMixin):
    """
    Test manhole loopback over standard IO.
    """
    # Skip when terminal (stdio) support is unavailable; otherwise exercise
    # the console flavour of the manhole instead of the default.
    if stdio is None:
        skip = "Terminal requirements missing"
    else:
        serverProtocol = stdio.ConsoleManhole
class ManholeMainTests(unittest.TestCase):
    """
    Test the I{main} method from the I{manhole} module.
    """
    if stdio is None:
        skip = "Terminal requirements missing"
    def test_mainClassNotFound(self):
        """
        Will raise an exception when called with an argument which is a
        dotted path which cannot be imported.
        """
        exception = self.assertRaises(
            ValueError,
            stdio.main, argv=['no-such-class'],
        )
        self.assertEqual('Empty module name', exception.args[0])
|
[
"[email protected]"
] | |
deae5050d581b1d17e3c50c20f4ab36e74df1bd1
|
d2c229f74a3ca61d6a22f64de51215d9e30c5c11
|
/qiskit/circuit/library/templates/nct/template_nct_9c_7.py
|
b9f87f69effbf1b0646eb719daee998d4d86ad23
|
[
"Apache-2.0"
] |
permissive
|
1ucian0/qiskit-terra
|
90e8be8a7b392fbb4b3aa9784c641a818a180e4c
|
0b51250e219ca303654fc28a318c21366584ccd3
|
refs/heads/main
| 2023-08-31T07:50:33.568824 | 2023-08-22T01:52:53 | 2023-08-22T01:52:53 | 140,555,676 | 6 | 1 |
Apache-2.0
| 2023-09-14T13:21:54 | 2018-07-11T09:52:28 |
Python
|
UTF-8
|
Python
| false | false | 1,620 |
py
|
# This code is part of Qiskit.
#
# (C) Copyright IBM 2020.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""
Template 9c_7:
.. parsed-literal::
q_0: ──■────■────■────■────■─────────■────■───────
┌─┴─┐ │ ┌─┴─┐┌─┴─┐ │ ┌───┐ │ │ ┌───┐
q_1: ┤ X ├──■──┤ X ├┤ X ├──┼──┤ X ├──■────┼──┤ X ├
└─┬─┘┌─┴─┐└───┘└─┬─┘┌─┴─┐└─┬─┘┌─┴─┐┌─┴─┐└─┬─┘
q_2: ──■──┤ X ├───────■──┤ X ├──■──┤ X ├┤ X ├──■──
└───┘ └───┘ └───┘└───┘
"""
from qiskit.circuit.quantumcircuit import QuantumCircuit
def template_nct_9c_7():
    """
    Returns:
        QuantumCircuit: template as a quantum circuit.
    """
    # Gate sequence transcribed from the circuit diagram in the module
    # docstring (qubits top-to-bottom are q_0, q_1, q_2).
    qc = QuantumCircuit(3)
    qc.ccx(0, 2, 1)  # Toffoli: controls q_0, q_2 -> target q_1
    qc.ccx(0, 1, 2)  # Toffoli: controls q_0, q_1 -> target q_2
    qc.cx(0, 1)
    qc.ccx(0, 2, 1)
    qc.cx(0, 2)
    qc.cx(2, 1)
    qc.ccx(0, 1, 2)
    qc.cx(0, 2)
    qc.cx(2, 1)
    # NOTE(review): NCT templates are used for template matching and are
    # presumably equivalent to the identity -- confirm against the
    # template-matching documentation before relying on this here.
    return qc
|
[
"[email protected]"
] | |
7e7607f6a093f30de9aba7f216f56bb3e00dda94
|
ddacbd31a215de3560d4c79489915f8b3bdf9a8d
|
/vertmodes.py
|
e4a45500783bee6e110328750660bec3d6ed6c71
|
[] |
no_license
|
jklymak/pythonlib
|
40cfce6ee34f36a90c03350d3bf50e5e99655e26
|
e71b1713394b5ac38ba0ea2f32d3fdff6f5118ff
|
refs/heads/master
| 2021-06-18T12:53:57.051465 | 2017-05-29T19:14:27 | 2017-05-29T19:14:27 | 21,134,313 | 1 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 3,236 |
py
|
from matplotlib import pylab
def vertModes(N2, dz, nmodes=0):
    """psi,phi,ce,z = vertModes(N2,dz,nmodes=0)

    Compute the vertical eigenmodes of the internal-wave solution on a
    flat bottom.

    Parameters
    ----------
    N2 : array_like, shape (M,)
        Buoyancy frequency squared (rad^2/s^2).  The first value is
        assumed to be dz/2 deep and the last H - dz/2 deep, where
        H = M*dz is the water depth.  No gaps are allowed and N2 > 0
        everywhere.
    dz : float
        Distance (m) between successive N2 estimates.
    nmodes : int, optional
        Number of modes to return; nmodes=0 (the default) returns all
        M-3 resolvable modes.

    Returns
    -------
    psi : ndarray, shape (M, nmodes)
        Vertical structure at z = dz/2, 3dz/2, ..., H - dz/2, normalized
        so that sum(psi**2 * dz) = 1.  Appropriate for horizontal
        velocity and pressure.
    phi : ndarray, shape (M, nmodes)
        Vertical integral of psi (vertical-velocity structure) on the
        same grid.
    ce : ndarray, shape (nmodes,)
        Non-rotating phase speeds (m/s), fastest (gravest) mode first.
    z : ndarray, shape (M,)
        Depths of the psi and phi elements.

    Notes
    -----
    Solves  (1/N^2) psi_zz + (1/ce^2) psi = 0  subject to zero vertical
    velocity at the surface (rigid lid) and the seafloor, discretized as
    a dense eigenvalue problem.

    J. Klymak (based on code by Sam Kelly and Gabe Vecchi)
    """
    import numpy as np

    # Solve for the vertical-velocity structure W at the M interior
    # levels z = dz, 2dz, ..., H - dz.  (The original version computed an
    # unused "sparse" flag for large M; that dead code has been removed.)
    M = np.shape(N2)[0] - 1
    # N2 interpolated onto the interior (W) grid.
    N2mid = N2[:-1] + np.diff(N2) / 2.
    # Second-difference operator with implicit zero boundary values,
    # scaled to D = -diag(1/N2mid) * d2/dz2.
    D = np.diag(-2. * np.ones(M), 0)
    D += np.diag(1. * np.ones(M - 1), -1)
    D += np.diag(1. * np.ones(M - 1), 1)
    D = -D / dz / dz
    D = np.diag(1. / N2mid).dot(D)
    ce, W = np.linalg.eig(D)
    # psi is such that sum(psi^2)=1 but we want sum(psi^2 dz)=1.
    W = W / np.sqrt(dz)
    # Eigenvalues are 1/ce^2; convert to phase speed, sort fastest first,
    # and drop the two highest-wavenumber (least resolved) modes.
    ce = 1. / np.sqrt(ce)
    ind = np.argsort(-ce)
    ce = ce[ind[:-2]]
    W = W[:, ind[:-2]]
    # Output grid: dz/2, 3dz/2, ..., H - dz/2 (M+1 points in code units).
    zphi = np.linspace(dz / 2., (M + 1) * dz - dz / 2., M + 1)
    # phi (w structure): average W onto the output grid; W is implicitly
    # zero at both boundaries.
    phi = np.zeros((M + 1, M + 1 - 3))
    phi[0, :] = 0.5 * (W[0, :])
    phi[1:-1, :] = 0.5 * (W[:-1, :] + W[1:, :])
    phi[-1, :] = 0.5 * (W[-1, :])
    # psi (u/p structure): vertical difference of W on the output grid.
    psi = np.zeros((M + 1, M + 1 - 3))
    psi[0, :] = W[0, :]
    psi[1:-1, :] = np.diff(W, axis=0)
    psi[-1, :] = -W[-1, :]
    # Normalize so that sum(psi^2 dz) = 1.
    A = np.sqrt(np.sum(psi * psi, axis=0) * dz)
    psi = psi / A
    phi = phi / A
    # Flip signs so psi is always non-negative at the surface.
    phi[:, psi[0, :] < 0] *= -1
    psi[:, psi[0, :] < 0] *= -1
    # BUG FIX: nmodes was previously accepted but silently ignored.
    # Truncate to the requested number of modes; nmodes=0 preserves the
    # old behavior of returning everything.
    if nmodes > 0:
        psi = psi[:, :nmodes]
        phi = phi[:, :nmodes]
        ce = ce[:nmodes]
    return psi, phi, ce, zphi
|
[
"[email protected]"
] | |
7d454b4ee957e03dc6015d34bb5973486b64293d
|
1e0a8a929f8ea69e476d8a8c5f3455aaf5317de6
|
/scripts/utils/_rabbitmq.py
|
71b8a0f02aff9298131339204af550da774c7e2f
|
[
"MIT"
] |
permissive
|
jearistiz/guane-intern-fastapi
|
aa41400fa22076111e96be695fde0a1ff6f118d0
|
269adc3ee6a78a262b4e19e7df291fd920fae2e1
|
refs/heads/master
| 2023-06-25T08:58:03.729614 | 2023-06-11T15:28:59 | 2023-06-11T15:28:59 | 370,229,796 | 63 | 9 |
MIT
| 2021-06-11T01:28:52 | 2021-05-24T04:45:23 |
Python
|
UTF-8
|
Python
| false | false | 4,175 |
py
|
import time
import warnings
from pathlib import Path
from typing import Tuple
from subprocess import Popen, run, CompletedProcess
def local_rabbitmq_uri(
user: str, pwd: str, port: str, vhost: str
) -> str:
return f'amqp://{user}:{pwd}@0.0.0.0:{port}/{vhost}'
def init_rabbitmq_app(
    rabbitmq_user: str,
    rabbitmq_pass: str,
    rabbitmq_vhost: str,
    max_retries: int = 10,
    sleep_time: int = 1  # In seconds
) -> Tuple[Popen, int]:
    """Starts the RabbitMQ server, creates a new user with its credentials,
    creates a new virtual host and adds administration privileges to the
    user in the virtual host.

    Args:
        rabbitmq_user: name of the user to create.
        rabbitmq_pass: password of the new user (masked in log output).
        rabbitmq_vhost: virtual host to create and grant the user on.
        max_retries: ping attempts while waiting for the final restart.
        sleep_time: seconds between ping attempts.

    Returns:
        The server ``Popen`` handle and the return code of the last
        ``rabbitmqctl ping`` after the restart (0 if it answered).
    """
    module_name_tag = f'[{Path(__file__).stem}]'
    # Mask the password in messages, keeping only its last two characters.
    hidden_pass = "x" * (len(rabbitmq_pass) - 2) + rabbitmq_pass[-2:]
    user_with_pass = f'user {rabbitmq_user} with password {hidden_pass}'
    # Bring the server and application up; the handles are discarded here
    # because the server is restarted again at the end of this function.
    _, _ = rabbitmq_full_start_app()
    # Create user
    rabbitmq_user_process = rabbitmq_create_user(rabbitmq_user, rabbitmq_pass)
    if rabbitmq_user_process.returncode == 0:
        print(f'{module_name_tag} rabbitmqctl created {user_with_pass} ')
    else:
        warnings.warn(
            f'{module_name_tag} rabbitmqctl couldn\'t create '
            f'{user_with_pass}, probably because the server couldn\'t be '
            'started appropriately or the user already existed.'
        )
    # Add virtual host
    rabbitmq_add_vhost(rabbitmq_vhost)
    # Set user as administrator
    rabbitmq_set_user_admin(rabbitmq_user)
    # Set read, write and execute permissions on user
    rabbitmq_user_permissions(rabbitmq_vhost, rabbitmq_user)
    # We need to restart the server, this way the newly created user and
    # permissions take effect
    rabbitmq_server_process, server_ping_statuscode = rabbitmq_restart_server(
        max_retries, sleep_time
    )
    return rabbitmq_server_process, server_ping_statuscode
def rabbitmq_start_wait_server(
    retries: int = 15, sleep_time: int = 1
) -> Tuple[Popen, int]:
    """Launch ``rabbitmq-server`` and poll ``rabbitmqctl ping`` until it
    answers or *retries* attempts are exhausted.

    Returns the server ``Popen`` handle and the last ping return code
    (0 on success, non-zero if the server never answered).
    """
    server_process = Popen(['rabbitmq-server'])
    ping_returncode = 1
    for _ in range(retries):
        if ping_returncode == 0:
            break
        time.sleep(sleep_time)
        ping_returncode = run(['rabbitmqctl', 'ping']).returncode
    return server_process, ping_returncode
def rabbitmq_full_start_app(
    retries: int = 15, sleep_time: int = 1
) -> Tuple[Popen, int]:
    """Bring up the RabbitMQ server process and then its application.

    Returns the server ``Popen`` handle and the last ``rabbitmqctl ping``
    return code from the startup wait.
    """
    # Launch the server and wait until it responds to pings.
    server_process, ping_code = rabbitmq_start_wait_server(
        retries, sleep_time
    )
    # Start the application layer and block until startup completes.
    for subcommand in ('start_app', 'await_startup'):
        run(['rabbitmqctl', subcommand])
    return server_process, ping_code
def rabbitmq_create_user(
    rabbitmq_user: str, rabbitmq_pass: str
) -> CompletedProcess:
    """Register *rabbitmq_user* with *rabbitmq_pass* via ``rabbitmqctl``."""
    add_user_cmd = ['rabbitmqctl', 'add_user', rabbitmq_user, rabbitmq_pass]
    return run(add_user_cmd)
def rabbitmq_add_vhost(rabbitmq_vhost: str) -> CompletedProcess:
    """Create the virtual host *rabbitmq_vhost* on the local broker."""
    add_vhost_cmd = ['rabbitmqctl', 'add_vhost', rabbitmq_vhost]
    return run(add_vhost_cmd)
def rabbitmq_set_user_admin(
    rabbitmq_user: str
) -> CompletedProcess:
    """Tag *rabbitmq_user* as an administrator.

    Returns:
        The ``CompletedProcess`` of the ``rabbitmqctl`` call.  BUG FIX:
        the annotation always promised a ``CompletedProcess`` but the
        result of ``run`` was previously dropped and None was returned.
    """
    return run(
        ['rabbitmqctl', 'set_user_tags', rabbitmq_user, 'administrator']
    )
def rabbitmq_user_permissions(
    rabbitmq_vhost: str,
    rabbitmq_user: str,
    permissions: Tuple[str, str, str] = ('.*', '.*', '.*')
) -> CompletedProcess:
    """Set (configure, write, read) permissions for *rabbitmq_user* on
    *rabbitmq_vhost*.

    Args:
        rabbitmq_vhost: virtual host the permissions apply to.
        rabbitmq_user: user receiving the permissions.
        permissions: regex patterns for the three permission slots; the
            default grants full access.

    Returns:
        The ``CompletedProcess`` of the ``rabbitmqctl`` call (previously
        discarded), for consistency with the sibling helpers that return
        their subprocess result.
    """
    cmd_base = [
        'rabbitmqctl', 'set_permissions', '-p', rabbitmq_vhost, rabbitmq_user
    ]
    return run(cmd_base + list(permissions))
def rabbitmq_restart_server(
    retries: int = 15, sleep_time: int = 1
) -> Tuple[Popen, int]:
    """Shut the broker down, then start it again and wait for it to answer."""
    run(['rabbitmqctl', 'shutdown'])
    restarted = rabbitmq_start_wait_server(retries, sleep_time)
    return restarted
def rabbitmq_reset_and_shut_down_server():
    """Start the broker (so it can be commanded), wipe its state with
    ``reset``, and shut it down again."""
    rabbitmq_start_wait_server()
    for subcommand in ('stop_app', 'reset', 'shutdown'):
        run(['rabbitmqctl', subcommand])
def rabbitmq_server_teardown(rabbitmq_server_process: Popen):
    """Terminate the managed server process, then reset and shut down the
    broker so no state leaks between runs."""
    rabbitmq_server_process.terminate()
    rabbitmq_reset_and_shut_down_server()
|
[
"[email protected]"
] | |
6ee1a6f11eb61dfb69b6d003f30d8b3a85fad419
|
fc772efe3eccb65e4e4a8da7f2b2897586b6a0e8
|
/Compute/nova/virt/vmwareapi/driver.py
|
1e0c295d08ca1c7f39ed2cabd4174d8a9e4cb397
|
[] |
no_license
|
iphonestack/Openstack_Kilo
|
9ae12505cf201839631a68c9ab4c041f737c1c19
|
b0ac29ddcf24ea258ee893daf22879cff4d03c1f
|
refs/heads/master
| 2021-06-10T23:16:48.372132 | 2016-04-18T07:25:40 | 2016-04-18T07:25:40 | 56,471,076 | 0 | 2 | null | 2020-07-24T02:17:46 | 2016-04-18T02:32:43 |
Python
|
UTF-8
|
Python
| false | false | 28,470 |
py
|
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
# Copyright (c) 2012 VMware, Inc.
# Copyright (c) 2011 Citrix Systems, Inc.
# Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
A connection to the VMware vCenter platform.
"""
import re
from oslo.config import cfg
from oslo.serialization import jsonutils
from oslo.vmware import api
from oslo.vmware import exceptions as vexc
from oslo.vmware import pbm
from oslo.vmware import vim
from oslo.vmware import vim_util
import suds
from nova import exception
from nova.i18n import _, _LI, _LW
from nova.openstack.common import log as logging
from nova.openstack.common import uuidutils
from nova.virt import driver
from nova.virt.vmwareapi import constants
from nova.virt.vmwareapi import error_util
from nova.virt.vmwareapi import host
from nova.virt.vmwareapi import vm_util
from nova.virt.vmwareapi import vmops
from nova.virt.vmwareapi import volumeops
# Module-level logger for this driver.
LOG = logging.getLogger(__name__)
# nova.conf options controlling the vCenter connection and driver
# behaviour; registered under the [vmware] group below.
vmwareapi_opts = [
    cfg.StrOpt('host_ip',
               help='Hostname or IP address for connection to VMware VC '
                    'host.'),
    cfg.IntOpt('host_port',
               default=443,
               help='Port for connection to VMware VC host.'),
    cfg.StrOpt('host_username',
               help='Username for connection to VMware VC host.'),
    cfg.StrOpt('host_password',
               help='Password for connection to VMware VC host.',
               secret=True),
    cfg.MultiStrOpt('cluster_name',
                    help='Name of a VMware Cluster ComputeResource.'),
    cfg.StrOpt('datastore_regex',
               help='Regex to match the name of a datastore.'),
    cfg.FloatOpt('task_poll_interval',
                 default=0.5,
                 help='The interval used for polling of remote tasks.'),
    cfg.IntOpt('api_retry_count',
               default=10,
               help='The number of times we retry on failures, e.g., '
                    'socket error, etc.'),
    cfg.IntOpt('vnc_port',
               default=5900,
               help='VNC starting port'),
    cfg.IntOpt('vnc_port_total',
               default=10000,
               help='Total number of VNC ports'),
    cfg.BoolOpt('use_linked_clone',
                default=True,
                help='Whether to use linked clone'),
    cfg.StrOpt('wsdl_location',
               help='Optional VIM Service WSDL Location '
                    'e.g http://<server>/vimService.wsdl. '
                    'Optional over-ride to default location for bug '
                    'work-arounds')
]
# Options for storage-policy-based management (SPBM/PBM), also in the
# [vmware] group.
spbm_opts = [
    cfg.BoolOpt('pbm_enabled',
                default=False,
                help='The PBM status.'),
    cfg.StrOpt('pbm_wsdl_location',
               help='PBM service WSDL file location URL. '
                    'e.g. file:///opt/SDK/spbm/wsdl/pbmService.wsdl '
                    'Not setting this will disable storage policy based '
                    'placement of instances.'),
    cfg.StrOpt('pbm_default_policy',
               help='The PBM default policy. If pbm_wsdl_location is set and '
                    'there is no defined storage policy for the specific '
                    'request then this policy will be used.'),
]
CONF = cfg.CONF
# Both option lists live in the [vmware] configuration section.
CONF.register_opts(vmwareapi_opts, 'vmware')
CONF.register_opts(spbm_opts, 'vmware')
# Delay (seconds) between retried API calls.
# NOTE(review): not referenced in the visible portion of this module --
# confirm it is still used before removing.
TIME_BETWEEN_API_CALL_RETRIES = 1.0
class VMwareVCDriver(driver.ComputeDriver):
"""The VC host connection object."""
capabilities = {
"has_imagecache": True,
"supports_recreate": False,
}
# The vCenter driver includes API that acts on ESX hosts or groups
# of ESX hosts in clusters or non-cluster logical-groupings.
#
# vCenter is not a hypervisor itself, it works with multiple
# hypervisor host machines and their guests. This fact can
# subtly alter how vSphere and OpenStack interoperate.
def __init__(self, virtapi, scheme="https"):
super(VMwareVCDriver, self).__init__(virtapi)
if (CONF.vmware.host_ip is None or
CONF.vmware.host_username is None or
CONF.vmware.host_password is None):
raise Exception(_("Must specify host_ip, host_username and "
"host_password to use vmwareapi.VMwareVCDriver"))
self._datastore_regex = None
if CONF.vmware.datastore_regex:
try:
self._datastore_regex = re.compile(CONF.vmware.datastore_regex)
except re.error:
raise exception.InvalidInput(reason=
_("Invalid Regular Expression %s")
% CONF.vmware.datastore_regex)
self._session = VMwareAPISession(scheme=scheme)
# Update the PBM location if necessary
if CONF.vmware.pbm_enabled:
self._update_pbm_location()
self._validate_configuration()
# Get the list of clusters to be used
self._cluster_names = CONF.vmware.cluster_name
self.dict_mors = vm_util.get_all_cluster_refs_by_name(self._session,
self._cluster_names)
if not self.dict_mors:
raise exception.NotFound(_("All clusters specified %s were not"
" found in the vCenter")
% self._cluster_names)
# Check if there are any clusters that were specified in the nova.conf
# but are not in the vCenter, for missing clusters log a warning.
clusters_found = [v.get('name') for k, v in self.dict_mors.iteritems()]
missing_clusters = set(self._cluster_names) - set(clusters_found)
if missing_clusters:
LOG.warning(_LW("The following clusters could not be found in the "
"vCenter %s"), list(missing_clusters))
# The _resources is used to maintain the vmops, volumeops and vcstate
# objects per cluster
self._resources = {}
self._resource_keys = set()
self._virtapi = virtapi
self._update_resources()
# The following initialization is necessary since the base class does
# not use VC state.
first_cluster = self._resources.keys()[0]
self._vmops = self._resources.get(first_cluster).get('vmops')
self._volumeops = self._resources.get(first_cluster).get('volumeops')
self._vc_state = self._resources.get(first_cluster).get('vcstate')
# Register the OpenStack extension
self._register_openstack_extension()
@property
def need_legacy_block_device_info(self):
return False
def _update_pbm_location(self):
if CONF.vmware.pbm_wsdl_location:
pbm_wsdl_loc = CONF.vmware.pbm_wsdl_location
else:
version = vim_util.get_vc_version(self._session)
pbm_wsdl_loc = pbm.get_pbm_wsdl_location(version)
self._session.pbm_wsdl_loc_set(pbm_wsdl_loc)
def _validate_configuration(self):
if CONF.vmware.use_linked_clone is None:
raise vexc.UseLinkedCloneConfigurationFault()
if CONF.vmware.pbm_enabled:
if not CONF.vmware.pbm_default_policy:
raise error_util.PbmDefaultPolicyUnspecified()
if not pbm.get_profile_id_by_name(
self._session,
CONF.vmware.pbm_default_policy):
raise error_util.PbmDefaultPolicyDoesNotExist()
if CONF.vmware.datastore_regex:
LOG.warning(_LW(
"datastore_regex is ignored when PBM is enabled"))
self._datastore_regex = None
def init_host(self, host):
vim = self._session.vim
if vim is None:
self._session._create_session()
def cleanup_host(self, host):
# NOTE(hartsocks): we lean on the init_host to force the vim object
# to not be None.
vim = self._session.vim
service_content = vim.service_content
session_manager = service_content.sessionManager
try:
vim.client.service.Logout(session_manager)
except suds.WebFault:
LOG.debug("No vSphere session was open during cleanup_host.")
def _register_openstack_extension(self):
# Register an 'OpenStack' extension in vCenter
LOG.debug('Registering extension %s with vCenter',
constants.EXTENSION_KEY)
os_extension = self._session._call_method(vim_util, 'find_extension',
constants.EXTENSION_KEY)
if os_extension is None:
LOG.debug('Extension does not exist. Registering type %s.',
constants.EXTENSION_TYPE_INSTANCE)
self._session._call_method(vim_util, 'register_extension',
constants.EXTENSION_KEY,
constants.EXTENSION_TYPE_INSTANCE)
def cleanup(self, context, instance, network_info, block_device_info=None,
destroy_disks=True, migrate_data=None, destroy_vifs=True):
"""Cleanup after instance being destroyed by Hypervisor."""
pass
def resume_state_on_host_boot(self, context, instance, network_info,
block_device_info=None):
"""resume guest state when a host is booted."""
# Check if the instance is running already and avoid doing
# anything if it is.
instances = self.list_instances()
if instance['uuid'] not in instances:
LOG.warning(_LW('Instance cannot be found in host, or in an '
'unknown state.'), instance=instance)
else:
state = vm_util.get_vm_state_from_name(self._session,
instance['uuid'])
ignored_states = ['poweredon', 'suspended']
if state.lower() in ignored_states:
return
# Instance is not up and could be in an unknown state.
# Be as absolute as possible about getting it back into
# a known and running state.
self.reboot(context, instance, network_info, 'hard',
block_device_info)
def list_instance_uuids(self):
"""List VM instance UUIDs."""
uuids = self._vmops.list_instances()
return [uuid for uuid in uuids if uuidutils.is_uuid_like(uuid)]
def list_instances(self):
"""List VM instances from all nodes."""
instances = []
nodes = self.get_available_nodes()
for node in nodes:
vmops = self._get_vmops_for_compute_node(node)
instances.extend(vmops.list_instances())
return instances
def migrate_disk_and_power_off(self, context, instance, dest,
flavor, network_info,
block_device_info=None,
timeout=0, retry_interval=0):
"""Transfers the disk of a running instance in multiple phases, turning
off the instance before the end.
"""
# TODO(PhilDay): Add support for timeout (clean shutdown)
return self._vmops.migrate_disk_and_power_off(context, instance,
dest, flavor)
def confirm_migration(self, migration, instance, network_info):
"""Confirms a resize, destroying the source VM."""
self._vmops.confirm_migration(migration, instance, network_info)
def finish_revert_migration(self, context, instance, network_info,
block_device_info=None, power_on=True):
"""Finish reverting a resize, powering back on the instance."""
self._vmops.finish_revert_migration(context, instance, network_info,
block_device_info, power_on)
def finish_migration(self, context, migration, instance, disk_info,
network_info, image_meta, resize_instance,
block_device_info=None, power_on=True):
"""Completes a resize, turning on the migrated instance."""
self._vmops.finish_migration(context, migration, instance, disk_info,
network_info, image_meta, resize_instance,
block_device_info, power_on)
def live_migration(self, context, instance, dest,
post_method, recover_method, block_migration=False,
migrate_data=None):
"""Live migration of an instance to another host."""
self._vmops.live_migration(context, instance, dest,
post_method, recover_method,
block_migration)
def rollback_live_migration_at_destination(self, context, instance,
network_info,
block_device_info,
destroy_disks=True,
migrate_data=None):
"""Clean up destination node after a failed live migration."""
self.destroy(context, instance, network_info, block_device_info)
def get_instance_disk_info(self, instance, block_device_info=None):
pass
def get_vnc_console(self, context, instance):
"""Return link to instance's VNC console using vCenter logic."""
# vCenter does not actually run the VNC service
# itself. You must talk to the VNC host underneath vCenter.
return self._vmops.get_vnc_console(instance)
def _update_resources(self):
"""This method creates a dictionary of VMOps, VolumeOps and VCState.
The VMwareVMOps, VMwareVolumeOps and VCState object is for each
cluster/rp. The dictionary is of the form
{
domain-1000 : {'vmops': vmops_obj,
'volumeops': volumeops_obj,
'vcstate': vcstate_obj,
'name': MyCluster},
resgroup-1000 : {'vmops': vmops_obj,
'volumeops': volumeops_obj,
'vcstate': vcstate_obj,
'name': MyRP},
}
"""
added_nodes = set(self.dict_mors.keys()) - set(self._resource_keys)
for node in added_nodes:
_volumeops = volumeops.VMwareVolumeOps(self._session,
self.dict_mors[node]['cluster_mor'])
_vmops = vmops.VMwareVMOps(self._session, self._virtapi,
_volumeops,
self.dict_mors[node]['cluster_mor'],
datastore_regex=self._datastore_regex)
name = self.dict_mors.get(node)['name']
nodename = self._create_nodename(node, name)
_vc_state = host.VCState(self._session, nodename,
self.dict_mors.get(node)['cluster_mor'])
self._resources[nodename] = {'vmops': _vmops,
'volumeops': _volumeops,
'vcstate': _vc_state,
'name': name,
}
self._resource_keys.add(node)
deleted_nodes = (set(self._resource_keys) -
set(self.dict_mors.keys()))
for node in deleted_nodes:
name = self.dict_mors.get(node)['name']
nodename = self._create_nodename(node, name)
del self._resources[nodename]
self._resource_keys.discard(node)
def _create_nodename(self, mo_id, display_name):
"""Creates the name that is stored in hypervisor_hostname column.
The name will be of the form similar to
domain-1000(MyCluster)
resgroup-1000(MyResourcePool)
"""
return mo_id + '(' + display_name + ')'
def _get_resource_for_node(self, nodename):
"""Gets the resource information for the specific node."""
resource = self._resources.get(nodename)
if not resource:
msg = _("The resource %s does not exist") % nodename
raise exception.NotFound(msg)
return resource
def _get_vmops_for_compute_node(self, nodename):
"""Retrieve vmops object from mo_id stored in the node name.
Node name is of the form domain-1000(MyCluster)
"""
resource = self._get_resource_for_node(nodename)
return resource['vmops']
def _get_volumeops_for_compute_node(self, nodename):
"""Retrieve vmops object from mo_id stored in the node name.
Node name is of the form domain-1000(MyCluster)
"""
resource = self._get_resource_for_node(nodename)
return resource['volumeops']
def _get_vc_state_for_compute_node(self, nodename):
"""Retrieve VCState object from mo_id stored in the node name.
Node name is of the form domain-1000(MyCluster)
"""
resource = self._get_resource_for_node(nodename)
return resource['vcstate']
def _get_available_resources(self, host_stats):
return {'vcpus': host_stats['vcpus'],
'memory_mb': host_stats['host_memory_total'],
'local_gb': host_stats['disk_total'],
'vcpus_used': 0,
'memory_mb_used': host_stats['host_memory_total'] -
host_stats['host_memory_free'],
'local_gb_used': host_stats['disk_used'],
'hypervisor_type': host_stats['hypervisor_type'],
'hypervisor_version': host_stats['hypervisor_version'],
'hypervisor_hostname': host_stats['hypervisor_hostname'],
'cpu_info': jsonutils.dumps(host_stats['cpu_info']),
'supported_instances': jsonutils.dumps(
host_stats['supported_instances']),
'numa_topology': None,
}
def get_available_resource(self, nodename):
"""Retrieve resource info.
This method is called when nova-compute launches, and
as part of a periodic task.
:returns: dictionary describing resources
"""
stats_dict = {}
vc_state = self._get_vc_state_for_compute_node(nodename)
if vc_state:
host_stats = vc_state.get_host_stats(refresh=True)
# Updating host information
stats_dict = self._get_available_resources(host_stats)
else:
LOG.info(_LI("Invalid cluster or resource pool"
" name : %s"), nodename)
return stats_dict
def get_available_nodes(self, refresh=False):
"""Returns nodenames of all nodes managed by the compute service.
This method is for multi compute-nodes support. If a driver supports
multi compute-nodes, this method returns a list of nodenames managed
by the service. Otherwise, this method should return
[hypervisor_hostname].
"""
self.dict_mors = vm_util.get_all_cluster_refs_by_name(
self._session,
CONF.vmware.cluster_name)
node_list = []
self._update_resources()
for node in self.dict_mors.keys():
nodename = self._create_nodename(node,
self.dict_mors.get(node)['name'])
node_list.append(nodename)
LOG.debug("The available nodes are: %s", node_list)
return node_list
    def spawn(self, context, instance, image_meta, injected_files,
              admin_password, network_info=None, block_device_info=None,
              flavor=None):
        """Create VM instance.

        Delegates to the vmops layer resolved from ``instance['node']``,
        i.e. the cluster the scheduler placed the instance on.
        """
        _vmops = self._get_vmops_for_compute_node(instance['node'])
        _vmops.spawn(context, instance, image_meta, injected_files,
                     admin_password, network_info, block_device_info,
                     flavor=flavor)
    def attach_volume(self, context, connection_info, instance, mountpoint,
                      disk_bus=None, device_type=None, encryption=None):
        """Attach volume storage to VM instance.

        Delegates to the volumeops layer for the instance's node;
        mountpoint/disk_bus/device_type/encryption are accepted for API
        compatibility but not forwarded.
        """
        _volumeops = self._get_volumeops_for_compute_node(instance['node'])
        return _volumeops.attach_volume(connection_info,
                                        instance)
    def detach_volume(self, connection_info, instance, mountpoint,
                      encryption=None):
        """Detach volume storage from VM instance.

        Delegates to the volumeops layer for the instance's node;
        mountpoint/encryption are accepted for API compatibility only.
        """
        _volumeops = self._get_volumeops_for_compute_node(instance['node'])
        return _volumeops.detach_volume(connection_info,
                                        instance)
    def get_volume_connector(self, instance):
        """Return volume connector information for the instance."""
        return self._volumeops.get_volume_connector(instance)
    def get_host_ip_addr(self):
        """Return the vCenter IP address taken from configuration."""
        return CONF.vmware.host_ip
    def snapshot(self, context, instance, image_id, update_task_state):
        """Create snapshot from a running VM instance."""
        # NOTE(review): uses the driver-level _vmops rather than the
        # per-node one resolved from instance['node'] -- confirm intended.
        self._vmops.snapshot(context, instance, image_id, update_task_state)
    def reboot(self, context, instance, network_info, reboot_type,
               block_device_info=None, bad_volumes_callback=None):
        """Reboot VM instance.

        reboot_type, block_device_info and bad_volumes_callback are
        accepted for API compatibility but not forwarded.
        """
        self._vmops.reboot(instance, network_info)
    def destroy(self, context, instance, network_info, block_device_info=None,
                destroy_disks=True, migrate_data=None):
        """Destroy VM instance.

        No-op when the instance was never assigned to a node (failed
        resource claim), since there is nothing to clean up yet.
        """
        # Destroy gets triggered when Resource Claim in resource_tracker
        # is not successful. When resource claim is not successful,
        # node is not set in instance. Perform destroy only if node is set
        if not instance['node']:
            return
        self._vmops.destroy(instance, destroy_disks)
    def pause(self, instance):
        """Pause the VM instance via the vmops layer."""
        self._vmops.pause(instance)
    def unpause(self, instance):
        """Unpause a paused VM instance via the vmops layer."""
        self._vmops.unpause(instance)
    def suspend(self, instance):
        """Suspend the specified instance via the vmops layer."""
        self._vmops.suspend(instance)
    def resume(self, context, instance, network_info, block_device_info=None):
        """Resume a suspended VM instance.

        context/network_info/block_device_info are accepted for API
        compatibility but not forwarded.
        """
        self._vmops.resume(instance)
    def rescue(self, context, instance, network_info, image_meta,
               rescue_password):
        """Rescue the specified instance (rescue_password is unused here)."""
        self._vmops.rescue(context, instance, network_info, image_meta)
    def unrescue(self, instance, network_info):
        """Unrescue the specified instance (network_info is unused here)."""
        self._vmops.unrescue(instance)
    def power_off(self, instance, timeout=0, retry_interval=0):
        """Power off the specified instance.

        timeout/retry_interval are accepted but not honoured yet.
        """
        # TODO(PhilDay): Add support for timeout (clean shutdown)
        self._vmops.power_off(instance)
    def power_on(self, context, instance, network_info,
                 block_device_info=None):
        """Power on the specified instance (extra args unused here)."""
        self._vmops.power_on(instance)
    def poll_rebooting_instances(self, timeout, instances):
        """Poll for rebooting instances via the vmops layer."""
        self._vmops.poll_rebooting_instances(timeout, instances)
    def get_info(self, instance):
        """Return info about the VM instance."""
        return self._vmops.get_info(instance)
    def get_diagnostics(self, instance):
        """Return data about VM diagnostics (legacy format)."""
        return self._vmops.get_diagnostics(instance)
    def get_instance_diagnostics(self, instance):
        """Return data about VM diagnostics (standardized format)."""
        return self._vmops.get_instance_diagnostics(instance)
    def host_power_action(self, action):
        """Host operations not supported by VC driver.

        This needs to override the ESX driver implementation.
        """
        raise NotImplementedError()
    def host_maintenance_mode(self, host, mode):
        """Host operations not supported by VC driver.

        This needs to override the ESX driver implementation.
        """
        raise NotImplementedError()
    def set_host_enabled(self, enabled):
        """Host operations not supported by VC driver.

        This needs to override the ESX driver implementation.
        """
        raise NotImplementedError()
    def get_host_uptime(self, host):
        """Host uptime operation not supported by VC driver."""
        msg = _("Multiple hosts may be managed by the VMWare "
                "vCenter driver; therefore we do not return "
                "uptime for just one host.")
        raise NotImplementedError(msg)
    def inject_network_info(self, instance, nw_info):
        """Inject network info for the specified instance."""
        self._vmops.inject_network_info(instance, nw_info)
def manage_image_cache(self, context, all_instances):
"""Manage the local cache of images."""
# Running instances per cluster
cluster_instances = {}
for instance in all_instances:
instances = cluster_instances.get(instance['node'])
if instances:
instances.append(instance)
else:
instances = [instance]
cluster_instances[instance['node']] = instances
# Invoke the image aging per cluster
for resource in self._resources.keys():
instances = cluster_instances.get(resource, [])
_vmops = self._get_vmops_for_compute_node(resource)
_vmops.manage_image_cache(context, instances)
    def instance_exists(self, instance):
        """Efficient override of base instance_exists method."""
        return self._vmops.instance_exists(instance)
    def attach_interface(self, instance, image_meta, vif):
        """Attach an interface to the instance."""
        self._vmops.attach_interface(instance, image_meta, vif)
    def detach_interface(self, instance, vif):
        """Detach an interface from the instance."""
        self._vmops.detach_interface(instance, vif)
class VMwareAPISession(api.VMwareAPISession):
    """Sets up a session with the VC/ESX host and handles all
    the calls made to the host.
    """
    # NOTE(review): the CONF.* defaults below are bound once, when this
    # class body is executed at import time; configuration changed after
    # import will not be reflected in the defaults -- confirm intended.
    def __init__(self, host_ip=CONF.vmware.host_ip,
                 host_port=CONF.vmware.host_port,
                 username=CONF.vmware.host_username,
                 password=CONF.vmware.host_password,
                 retry_count=CONF.vmware.api_retry_count,
                 scheme="https"):
        super(VMwareAPISession, self).__init__(
            host=host_ip,
            port=host_port,
            server_username=username,
            server_password=password,
            api_retry_count=retry_count,
            task_poll_interval=CONF.vmware.task_poll_interval,
            scheme=scheme,
            create_session=True,
            wsdl_loc=CONF.vmware.wsdl_location
        )
    def _is_vim_object(self, module):
        """Check if the module is a VIM Object instance."""
        return isinstance(module, vim.Vim)
    def _call_method(self, module, method, *args, **kwargs):
        """Calls a method within the module specified with
        args provided.
        """
        # Helpers that are not the Vim object itself need the Vim handle
        # passed through explicitly as the first API argument.
        if not self._is_vim_object(module):
            return self.invoke_api(module, method, self.vim, *args, **kwargs)
        else:
            return self.invoke_api(module, method, *args, **kwargs)
    def _wait_for_task(self, task_ref):
        """Return a Deferred that will give the result of the given task.
        The task is polled until it completes.
        """
        return self.wait_for_task(task_ref)
|
[
"[email protected]"
] | |
653fdc5ad9a6a125e22f78463105e985e68a1d81
|
79a61715a94e0a78e3268a514f97e5211c3e770b
|
/processors/notes_to_comments.py
|
4653bc214790c24cf0339ee01b7a1289fbb470ce
|
[
"MIT",
"ISC"
] |
permissive
|
eads/desapariciones
|
2f120c18316e9ee3416b4c9eae1d68f72ec00e9c
|
6069b21f26cc5175e78af54efb5cda0a64a2d9c5
|
refs/heads/master
| 2023-03-09T09:16:30.321455 | 2022-05-15T18:40:16 | 2022-05-15T18:40:16 | 188,893,244 | 5 | 0 |
MIT
| 2023-03-03T00:20:30 | 2019-05-27T18:42:01 |
R
|
UTF-8
|
Python
| false | false | 577 |
py
|
import click
import csv
from jinja2 import Template
from slugify import slugify
# SQL template; the Jinja placeholders must match the CSV header names
# exactly, including the accented 'descripción' column.
TEMPLATE = """
comment on column processed.cenapi.{{nombre_variable}} is '{{descripción}}';
"""
@click.command()
@click.argument('input', type=click.File('r', encoding='utf-8-sig'))
@click.argument('output', type=click.File('w'))
def generate(input, output):
    """Generate COMMENT ON COLUMN statements from a CSV.

    INPUT is read with utf-8-sig so a leading BOM (common in exported
    CSVs) is stripped; each row is rendered through TEMPLATE and the
    resulting SQL is written to OUTPUT.
    """
    t = Template(TEMPLATE)
    reader = csv.DictReader(input)
    for row in reader:
        comment = t.render(**row)
        output.write(comment)
if __name__ == '__main__':
    generate()
|
[
"[email protected]"
] | |
2cff4d3069d8f77d32a1cdbe93772331360cc8ba
|
89dedd7f3c7acc81d12e2bcb2e716f9af9e5fa04
|
/third_party/libwebp/libwebp.gyp
|
67c03685dd3d9b42591b4f5d3b52fc8671446cb8
|
[
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-google-patent-license-webm",
"LGPL-2.0-or-later",
"GPL-1.0-or-later",
"MIT",
"Apache-2.0"
] |
permissive
|
bino7/chromium
|
8d26f84a1b6e38a73d1b97fea6057c634eff68cb
|
4666a6bb6fdcb1114afecf77bdaa239d9787b752
|
refs/heads/master
| 2022-12-22T14:31:53.913081 | 2016-09-06T10:05:11 | 2016-09-06T10:05:11 | 67,410,510 | 1 | 3 |
BSD-3-Clause
| 2022-12-17T03:08:52 | 2016-09-05T10:11:59 | null |
UTF-8
|
Python
| false | false | 7,956 |
gyp
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'target_defaults': {
'conditions': [
['os_posix==1 and (target_arch=="arm" or target_arch=="arm64")', {
'cflags!': [ '-Os' ],
'cflags': [ '-O2' ],
}],
],
},
'targets': [
{
'target_name': 'libwebp_dec',
'type': 'static_library',
'dependencies' : [
'libwebp_dsp',
'libwebp_dsp_neon',
'libwebp_utils',
],
'include_dirs': ['.'],
'sources': [
'dec/alpha.c',
'dec/buffer.c',
'dec/frame.c',
'dec/idec.c',
'dec/io.c',
'dec/quant.c',
'dec/tree.c',
'dec/vp8.c',
'dec/vp8l.c',
'dec/webp.c',
],
},
{
'target_name': 'libwebp_demux',
'type': 'static_library',
'include_dirs': ['.'],
'sources': [
'demux/demux.c',
],
'dependencies' : [
'libwebp_utils',
],
},
{
'target_name': 'libwebp_dsp',
'type': 'static_library',
'include_dirs': ['.'],
'sources': [
'dsp/alpha_processing.c',
'dsp/alpha_processing_mips_dsp_r2.c',
'dsp/argb.c',
'dsp/argb_mips_dsp_r2.c',
'dsp/cost.c',
'dsp/cost_mips32.c',
'dsp/cost_mips_dsp_r2.c',
'dsp/cpu.c',
'dsp/dec.c',
'dsp/dec_clip_tables.c',
'dsp/dec_mips32.c',
'dsp/dec_mips_dsp_r2.c',
'dsp/dec_msa.c',
'dsp/enc.c',
'dsp/enc_avx2.c',
'dsp/enc_mips32.c',
'dsp/enc_mips_dsp_r2.c',
'dsp/filters.c',
'dsp/filters_mips_dsp_r2.c',
'dsp/lossless.c',
'dsp/lossless_enc.c',
'dsp/lossless_enc_mips32.c',
'dsp/lossless_enc_mips_dsp_r2.c',
'dsp/lossless_mips_dsp_r2.c',
'dsp/rescaler.c',
'dsp/rescaler_mips32.c',
'dsp/rescaler_mips_dsp_r2.c',
'dsp/upsampling.c',
'dsp/upsampling_mips_dsp_r2.c',
'dsp/yuv.c',
'dsp/yuv_mips32.c',
'dsp/yuv_mips_dsp_r2.c',
],
'dependencies' : [
'libwebp_dsp_sse2',
'libwebp_dsp_sse41',
'libwebp_utils',
],
'conditions': [
['OS == "android"', {
'dependencies': [ '../../build/android/ndk.gyp:cpu_features' ],
}],
# iOS uses the same project to generate build project for both device
# and simulator and do not use "target_arch" variable. Other platform
# set it correctly.
['OS!="ios" and (target_arch=="ia32" or target_arch=="x64")', {
'defines': [ 'WEBP_HAVE_SSE2', 'WEBP_HAVE_SSE41' ],
}],
],
},
{
'target_name': 'libwebp_dsp_sse2',
'type': 'static_library',
'include_dirs': ['.'],
'sources': [
'dsp/alpha_processing_sse2.c',
'dsp/argb_sse2.c',
'dsp/cost_sse2.c',
'dsp/dec_sse2.c',
'dsp/enc_sse2.c',
'dsp/filters_sse2.c',
'dsp/lossless_enc_sse2.c',
'dsp/lossless_sse2.c',
'dsp/rescaler_sse2.c',
'dsp/upsampling_sse2.c',
'dsp/yuv_sse2.c',
],
'conditions': [
# iOS uses the same project to generate build project for both device
# and simulator and do not use "target_arch" variable. Other platform
# set it correctly.
['OS!="ios" and (target_arch=="ia32" or target_arch=="x64") and msan==0', {
'cflags': [ '-msse2', ],
'xcode_settings': { 'OTHER_CFLAGS': [ '-msse2' ] },
}],
],
},
{
'target_name': 'libwebp_dsp_sse41',
'type': 'static_library',
'include_dirs': ['.'],
'sources': [
'dsp/alpha_processing_sse41.c',
'dsp/dec_sse41.c',
'dsp/enc_sse41.c',
'dsp/lossless_enc_sse41.c',
],
'conditions': [
['OS=="win" and clang==1', {
# cl.exe's /arch flag doesn't have a setting for SSSE3/4, and cl.exe
# doesn't need it for intrinsics. clang-cl does need it, though.
'msvs_settings': {
'VCCLCompilerTool': { 'AdditionalOptions': [ '-msse4.1' ] },
},
}],
# iOS uses the same project to generate build project for both device
# and simulator and do not use "target_arch" variable. Other platform
# set it correctly.
['OS!="ios" and (target_arch=="ia32" or target_arch=="x64") and msan==0', {
'cflags': [ '-msse4.1', ],
'xcode_settings': { 'OTHER_CFLAGS': [ '-msse4.1' ] },
}],
],
},
{
'target_name': 'libwebp_dsp_neon',
'includes' : [
# Disable LTO due to Neon issues.
# crbug.com/408997
'../../build/android/disable_gcc_lto.gypi',
],
'conditions': [
# iOS uses the same project to generate build project for both device
# and simulator and do not use "target_arch" variable. Other platform
# set it correctly.
['OS == "ios" or (target_arch == "arm" and arm_version >= 7 and (arm_neon == 1 or arm_neon_optional == 1)) or (target_arch == "arm64")', {
'type': 'static_library',
'include_dirs': ['.'],
'sources': [
'dsp/dec_neon.c',
'dsp/enc_neon.c',
'dsp/lossless_enc_neon.c',
'dsp/lossless_neon.c',
'dsp/rescaler_neon.c',
'dsp/upsampling_neon.c',
],
'conditions': [
['target_arch == "arm" and arm_version >= 7 and (arm_neon == 1 or arm_neon_optional == 1)', {
# behavior similar to *.c.neon in an Android.mk
'cflags!': [ '-mfpu=vfpv3-d16' ],
'cflags': [ '-mfpu=neon' ],
}],
['target_arch == "arm64" and clang != 1', {
# avoid an ICE with gcc-4.9: b/15574841
'cflags': [ '-frename-registers' ],
}],
]
}, {
'type': 'none',
}],
],
},
{
'target_name': 'libwebp_enc',
'type': 'static_library',
'include_dirs': ['.'],
'sources': [
'enc/alpha.c',
'enc/analysis.c',
'enc/backward_references.c',
'enc/config.c',
'enc/cost.c',
'enc/delta_palettization.c',
'enc/filter.c',
'enc/frame.c',
'enc/histogram.c',
'enc/iterator.c',
'enc/near_lossless.c',
'enc/picture.c',
'enc/picture_csp.c',
'enc/picture_psnr.c',
'enc/picture_rescale.c',
'enc/picture_tools.c',
'enc/quant.c',
'enc/syntax.c',
'enc/token.c',
'enc/tree.c',
'enc/vp8l.c',
'enc/webpenc.c',
],
'dependencies' : [
'libwebp_utils',
],
},
{
'target_name': 'libwebp_utils',
'type': 'static_library',
'include_dirs': ['.'],
'sources': [
'utils/bit_reader.c',
'utils/bit_writer.c',
'utils/color_cache.c',
'utils/filters.c',
'utils/huffman.c',
'utils/huffman_encode.c',
'utils/quant_levels.c',
'utils/quant_levels_dec.c',
'utils/random.c',
'utils/rescaler.c',
'utils/thread.c',
'utils/utils.c',
],
'variables': {
'clang_warning_flags': [
# See https://code.google.com/p/webp/issues/detail?id=253.
'-Wno-incompatible-pointer-types',
]
},
},
{
'target_name': 'libwebp',
'type': 'none',
'dependencies' : [
'libwebp_dec',
'libwebp_demux',
'libwebp_dsp',
'libwebp_dsp_neon',
'libwebp_enc',
'libwebp_utils',
],
'direct_dependent_settings': {
'include_dirs': ['.'],
},
'conditions': [
['OS!="win"', {'product_name': 'webp'}],
],
},
],
}
|
[
"[email protected]"
] | |
c64d298bed0f1f677a17ca3bac15570c447556a7
|
48e124e97cc776feb0ad6d17b9ef1dfa24e2e474
|
/sdk/python/pulumi_azure_native/resources/v20190801/get_resource_group.py
|
2f0d4bd30068643085cd612cac2f5e047a4d6f33
|
[
"BSD-3-Clause",
"Apache-2.0"
] |
permissive
|
bpkgoud/pulumi-azure-native
|
0817502630062efbc35134410c4a784b61a4736d
|
a3215fe1b87fba69294f248017b1591767c2b96c
|
refs/heads/master
| 2023-08-29T22:39:49.984212 | 2021-11-15T12:43:41 | 2021-11-15T12:43:41 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 5,102 |
py
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetResourceGroupResult',
'AwaitableGetResourceGroupResult',
'get_resource_group',
'get_resource_group_output',
]
@pulumi.output_type
class GetResourceGroupResult:
    """
    Resource group information.

    NOTE: generated by the Pulumi SDK Generator (see file header); do not
    hand-edit beyond documentation.
    """
    def __init__(__self__, id=None, location=None, managed_by=None, name=None, properties=None, tags=None, type=None):
        # Each argument is type-checked and stored via pulumi.set so the
        # @pulumi.getter properties below can retrieve it.
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if location and not isinstance(location, str):
            raise TypeError("Expected argument 'location' to be a str")
        pulumi.set(__self__, "location", location)
        if managed_by and not isinstance(managed_by, str):
            raise TypeError("Expected argument 'managed_by' to be a str")
        pulumi.set(__self__, "managed_by", managed_by)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if properties and not isinstance(properties, dict):
            raise TypeError("Expected argument 'properties' to be a dict")
        pulumi.set(__self__, "properties", properties)
        if tags and not isinstance(tags, dict):
            raise TypeError("Expected argument 'tags' to be a dict")
        pulumi.set(__self__, "tags", tags)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)
    @property
    @pulumi.getter
    def id(self) -> str:
        """
        The ID of the resource group.
        """
        return pulumi.get(self, "id")
    @property
    @pulumi.getter
    def location(self) -> str:
        """
        The location of the resource group. It cannot be changed after the resource group has been created. It must be one of the supported Azure locations.
        """
        return pulumi.get(self, "location")
    @property
    @pulumi.getter(name="managedBy")
    def managed_by(self) -> Optional[str]:
        """
        The ID of the resource that manages this resource group.
        """
        return pulumi.get(self, "managed_by")
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        The name of the resource group.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter
    def properties(self) -> 'outputs.ResourceGroupPropertiesResponse':
        """
        The resource group properties.
        """
        return pulumi.get(self, "properties")
    @property
    @pulumi.getter
    def tags(self) -> Optional[Mapping[str, str]]:
        """
        The tags attached to the resource group.
        """
        return pulumi.get(self, "tags")
    @property
    @pulumi.getter
    def type(self) -> str:
        """
        The type of the resource group.
        """
        return pulumi.get(self, "type")
class AwaitableGetResourceGroupResult(GetResourceGroupResult):
    """Awaitable wrapper so the result can be used with ``await``.

    ``__await__`` never actually suspends: the ``if False: yield`` makes
    this a generator that immediately returns a plain copy of the result.
    """
    # pylint: disable=using-constant-test
    def __await__(self):
        if False:
            yield self
        return GetResourceGroupResult(
            id=self.id,
            location=self.location,
            managed_by=self.managed_by,
            name=self.name,
            properties=self.properties,
            tags=self.tags,
            type=self.type)
def get_resource_group(resource_group_name: Optional[str] = None,
                       opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetResourceGroupResult:
    """
    Resource group information.


    :param str resource_group_name: The name of the resource group to get. The name is case insensitive.
    :param opts: invoke options; a default with the SDK version filled in
        is created when none is supplied.
    """
    __args__ = dict()
    __args__['resourceGroupName'] = resource_group_name
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    # Synchronous invoke against the Azure Native provider for API
    # version 2019-08-01.
    __ret__ = pulumi.runtime.invoke('azure-native:resources/v20190801:getResourceGroup', __args__, opts=opts, typ=GetResourceGroupResult).value

    return AwaitableGetResourceGroupResult(
        id=__ret__.id,
        location=__ret__.location,
        managed_by=__ret__.managed_by,
        name=__ret__.name,
        properties=__ret__.properties,
        tags=__ret__.tags,
        type=__ret__.type)
@_utilities.lift_output_func(get_resource_group)
def get_resource_group_output(resource_group_name: Optional[pulumi.Input[str]] = None,
                              opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetResourceGroupResult]:
    """
    Resource group information.

    Output-typed variant; the body is intentionally ``...`` because the
    ``lift_output_func`` decorator supplies the implementation.

    :param str resource_group_name: The name of the resource group to get. The name is case insensitive.
    """
    ...
|
[
"[email protected]"
] | |
3621b7bff933e2d4959271d79594fd29094aa68c
|
1698fe3ff15a6737c70501741b32b24fe68052f4
|
/django-request-master/request/models.py
|
102d2270e1323708bb13dd219052f6e37150f524
|
[] |
no_license
|
menhswu/djangoapps
|
4f3718244c8678640af2d2a095d20a405e337884
|
039a42aa9d1537e7beb4071d86bea7a42253d8b3
|
refs/heads/master
| 2023-03-04T03:56:01.070921 | 2021-01-28T07:35:02 | 2021-01-28T07:35:02 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 4,049 |
py
|
from socket import gethostbyaddr
from django.conf import settings
from django.contrib.auth import get_user_model
from django.db import models
from django.utils import timezone
from django.utils.translation import gettext_lazy as _
from . import settings as request_settings
from .managers import RequestManager
from .utils import HTTP_STATUS_CODES, browsers, engines, request_is_ajax
AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
class Request(models.Model):
    """One logged HTTP request/response pair."""
    # Response information.
    response = models.SmallIntegerField(_('response'), choices=HTTP_STATUS_CODES, default=200)
    # Request information.
    method = models.CharField(_('method'), default='GET', max_length=7)
    path = models.CharField(_('path'), max_length=255)
    time = models.DateTimeField(_('time'), default=timezone.now, db_index=True)
    is_secure = models.BooleanField(_('is secure'), default=False)
    is_ajax = models.BooleanField(
        _('is ajax'),
        default=False,
        help_text=_('Wheather this request was used via javascript.'),
    )
    # User information.
    ip = models.GenericIPAddressField(_('ip address'))
    user = models.ForeignKey(AUTH_USER_MODEL, blank=True, null=True, verbose_name=_('user'), on_delete=models.SET_NULL)
    referer = models.URLField(_('referer'), max_length=255, blank=True, null=True)
    user_agent = models.CharField(_('user agent'), max_length=255, blank=True, null=True)
    language = models.CharField(_('language'), max_length=255, blank=True, null=True)
    objects = RequestManager()
    class Meta:
        app_label = 'request'
        verbose_name = _('request')
        verbose_name_plural = _('requests')
        ordering = ('-time',)
    def __str__(self):
        return '[{0}] {1} {2} {3}'.format(self.time, self.method, self.path, self.response)
    def get_user(self):
        """Fetch the related user afresh from the configured user model."""
        return get_user_model().objects.get(pk=self.user_id)
    def from_http_request(self, request, response=None, commit=True):
        """Populate this record from a Django request/response pair.

        Long header values are truncated to the 255-char column limits.
        """
        # Request information.
        self.method = request.method
        self.path = request.path[:255]
        self.is_secure = request.is_secure()
        self.is_ajax = request_is_ajax(request)
        # User information.
        self.ip = request.META.get('REMOTE_ADDR', '')
        self.referer = request.META.get('HTTP_REFERER', '')[:255]
        self.user_agent = request.META.get('HTTP_USER_AGENT', '')[:255]
        self.language = request.META.get('HTTP_ACCEPT_LANGUAGE', '')[:255]
        if hasattr(request, 'user') and hasattr(request.user, 'is_authenticated'):
            is_authenticated = request.user.is_authenticated
            if is_authenticated:
                self.user = request.user
        if response:
            self.response = response.status_code
            if (response.status_code == 301) or (response.status_code == 302):
                # NOTE(review): 'redirect' is not a field declared on this
                # model above, so this is a plain attribute and is not
                # persisted by save() -- confirm intended.
                self.redirect = response['Location']
        if commit:
            self.save()
    @property
    def browser(self):
        # Resolved lazily and memoised on the instance.
        if not self.user_agent:
            return
        if not hasattr(self, '_browser'):
            self._browser = browsers.resolve(self.user_agent)
        return self._browser[0]
    @property
    def keywords(self):
        # Search keywords extracted from the referer, memoised per instance.
        if not self.referer:
            return
        if not hasattr(self, '_keywords'):
            self._keywords = engines.resolve(self.referer)
        if self._keywords:
            return ' '.join(self._keywords[1]['keywords'].split('+'))
    @property
    def hostname(self):
        try:
            return gethostbyaddr(self.ip)[0]
        except Exception:  # socket.gaierror, socket.herror, etc
            return self.ip
    def save(self, *args, **kwargs):
        # Apply the privacy settings before persisting.
        if not request_settings.LOG_IP:
            self.ip = request_settings.IP_DUMMY
        elif request_settings.ANONYMOUS_IP:
            # NOTE(review): this dotted-quad anonymisation assumes IPv4;
            # a colon-separated IPv6 address would pass through mangled
            # -- confirm upstream guarantees IPv4 here.
            parts = self.ip.split('.')[0:-1]
            parts.append('1')
            self.ip = '.'.join(parts)
        if not request_settings.LOG_USER:
            self.user = None
        super().save(*args, **kwargs)
|
[
"[email protected]"
] | |
dbe246c5716bc5805439e941fe6ceb98c1161194
|
e9539de5b8832e2a09365917fe201a945bf5d99b
|
/leetcode16.py
|
c6fc3a8fc977080489f30d083f334013a3f341b1
|
[] |
no_license
|
JoshuaW1990/leetcode-session1
|
56d57df30b21ccade3fe54e3fd56a2b3383bd793
|
6fc170c04fadec6966fb7938a07474d4ee107b61
|
refs/heads/master
| 2021-09-20T16:18:15.640839 | 2018-08-12T09:40:51 | 2018-08-12T09:40:51 | 76,912,955 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 874 |
py
|
class Solution(object):
    def threeSumClosest(self, nums, target):
        """Return the sum of three elements of nums closest to target.

        Sorts nums in place, then for each anchor element runs a
        two-pointer scan over the remaining suffix, tracking the sum with
        the smallest absolute difference from target.  Returns early on
        an exact match.  O(n^2) time, O(1) extra space.

        Fix: the original used Python-2-only ``xrange``, which is a
        NameError on Python 3; ``range`` behaves identically here on
        both versions.

        :type nums: List[int]
        :type target: int
        :rtype: int
        """
        best = None
        min_diff = float('inf')
        nums.sort()  # NOTE: mutates the caller's list
        for i in range(len(nums) - 2):
            # Skip duplicate anchor values: only the first of a run of
            # equal elements can produce a new candidate sum.
            if i == 0 or nums[i] > nums[i - 1]:
                left = i + 1
                right = len(nums) - 1
                while left < right:
                    diff = target - (nums[left] + nums[right] + nums[i])
                    if abs(diff) < min_diff:
                        best = nums[left] + nums[right] + nums[i]
                        min_diff = abs(diff)
                    if diff == 0:
                        return best
                    elif diff > 0:
                        left += 1
                    else:
                        right -= 1
        return best
|
[
"[email protected]"
] | |
0c28ae315ae4aca9b257d2bda00cbfc798cdca4e
|
092a13e08cc412d85f2115b9efaad17e1afdfc1a
|
/common/models/food/Food.py
|
068207c4915f3f47faa1deb6364f04b1ec590403
|
[] |
no_license
|
Willanzhang/flask_mvc
|
283196e1850f8676f1db52fe6361aa8706276e9d
|
408470329494cd40691e4014b85ccdc9ba11711d
|
refs/heads/master
| 2022-12-12T11:13:06.202893 | 2019-08-04T14:21:48 | 2019-08-04T14:21:48 | 173,304,987 | 0 | 0 | null | 2022-12-08T04:54:05 | 2019-03-01T13:18:14 |
JavaScript
|
UTF-8
|
Python
| false | false | 1,501 |
py
|
# coding: utf-8
from sqlalchemy import Column, DateTime, Integer, Numeric, String
from sqlalchemy.schema import FetchedValue
from application import db
class Food(db.Model):
    """SQLAlchemy model for the 'food' table.

    Every non-key column declares ``server_default=db.FetchedValue()``,
    i.e. the database supplies the default and SQLAlchemy re-fetches it
    after INSERT rather than setting it client-side.
    """
    __tablename__ = 'food'
    id = db.Column(db.Integer, primary_key=True)
    cat_id = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue())
    name = db.Column(db.String(100), nullable=False, server_default=db.FetchedValue())
    price = db.Column(db.Numeric(10, 2), nullable=False, server_default=db.FetchedValue())
    main_image = db.Column(db.String(100), nullable=False, server_default=db.FetchedValue())
    summary = db.Column(db.String(10000), nullable=False, server_default=db.FetchedValue())
    stock = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue())
    tags = db.Column(db.String(200), nullable=False, server_default=db.FetchedValue())
    status = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue())
    month_count = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue())
    total_count = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue())
    view_count = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue())
    comment_count = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue())
    updated_time = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue())
    created_time = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue())
|
[
"[email protected]"
] | |
9514c4458b97cb367130adb69501908c8ee29532
|
51a37b7108f2f69a1377d98f714711af3c32d0df
|
/src/leetcode/P5664.py
|
146ff2962cd3355bf82250c46bf4e1657269de8d
|
[] |
no_license
|
stupidchen/leetcode
|
1dd2683ba4b1c0382e9263547d6c623e4979a806
|
72d172ea25777980a49439042dbc39448fcad73d
|
refs/heads/master
| 2022-03-14T21:15:47.263954 | 2022-02-27T15:33:15 | 2022-02-27T15:33:15 | 55,680,865 | 7 | 1 | null | null | null | null |
UTF-8
|
Python
| false | false | 610 |
py
|
class Solution:
    def minimumBoxes(self, n: int):
        """Minimum number of floor boxes needed to stack n boxes
        (LeetCode 1739).

        Grows a full "tetrahedron" of stacked triangular layers until it
        can hold at least n boxes, then, if that overshoots, steps back
        one layer and adds boxes to the floor one at a time.
        """
        # Find the smallest full pyramid whose total capacity reaches n.
        total = 0
        height = 0
        while True:
            height += 1
            total += height * (height + 1) // 2
            if total >= n:
                break
        if total == n:
            # Exact fit: the floor of an h-layer pyramid has h*(h+1)/2 boxes.
            return height * (height + 1) // 2
        # Overshot: drop the top-most (largest) layer and refill greedily.
        total -= height * (height + 1) // 2
        height -= 1
        floor_boxes = height * (height + 1) // 2
        extra = 0
        while total < n:
            extra += 1
            total += extra
        return floor_boxes + extra
if __name__ == '__main__':
    for i in range(20):
        print(Solution().minimumBoxes(i + 1))
|
[
"[email protected]"
] | |
bca93284647db372a5236cc8a447f4654434f78c
|
9743d5fd24822f79c156ad112229e25adb9ed6f6
|
/xai/brain/wordbase/nouns/_grout.py
|
989f969bf4769d1c0640642da950d88a9f05f5b4
|
[
"MIT"
] |
permissive
|
cash2one/xai
|
de7adad1758f50dd6786bf0111e71a903f039b64
|
e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6
|
refs/heads/master
| 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 302 |
py
|
#calss header
class _GROUT():
def __init__(self,):
self.name = "GROUT"
self.definitions = [u'mortar used for grouting']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'nouns'
def run(self, obj1 = [], obj2 = []):
return self.jsondata
|
[
"[email protected]"
] | |
f421e5d3c9490ad52705ea07bdcf8dd9763a729b
|
ec551303265c269bf1855fe1a30fdffe9bc894b6
|
/old/t20190416_divide/divide.py
|
19521d6d57481134378c263ec0fe597333aca22a
|
[] |
no_license
|
GongFuXiong/leetcode
|
27dbda7a5ced630ae2ae65e19d418ebbc65ae167
|
f831fd9603592ae5bee3679924f962a3ebce381c
|
refs/heads/master
| 2023-06-25T01:05:45.683510 | 2021-07-26T10:05:25 | 2021-07-26T10:05:25 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,694 |
py
|
#!/usr/bin/env python
# encoding: utf-8
'''
@author: KM
@license: (C) Copyright 2013-2017, Node Supply Chain Manager Corporation Limited.
@contact: [email protected]
@software: garner
@file: divide.py
@time: 2019/4/16 11:56
@desc:
22. 括号生成
给定两个整数,被除数 dividend 和除数 divisor。将两数相除,要求不使用乘法、除法和 mod 运算符。
返回被除数 dividend 除以除数 divisor 得到的商。
示例 1:
输入: dividend = 10, divisor = 3
输出: 3
示例 2:
输入: dividend = 7, divisor = -3
输出: -2
说明:
被除数和除数均为 32 位有符号整数。
除数不为 0。
假设我们的环境只能存储 32 位有符号整数,其数值范围是 [−2^31, 2^31 − 1]。
本题中,如果除法结果溢出,则返回 2^31 − 1。
'''
import math
class Solution:
    def __init__(self):
        # 32-bit signed bounds; math.pow returns floats, matching the
        # clamping arithmetic used below.
        self.MIN_VALUE = - math.pow(2, 31)
        self.MAX_VALUE = math.pow(2, 31) - 1

    def divide(self, dividend, divisor):
        """Integer-divide without *, / or % using shift-and-subtract.

        Works on absolute values, reconstructing the quotient bit by bit
        from the high bit down, then reapplies the sign and clamps any
        overflow to 2^31 - 1.  Returns an int.
        """
        if divisor == 0:
            return 0
        negative = (dividend < 0) != (divisor < 0)
        dividend = abs(dividend)
        divisor = abs(divisor)
        quotient = 0
        for shift in range(31, -1, -1):
            # If divisor * 2^shift still fits, that power of two belongs
            # in the quotient.
            if (dividend >> shift) >= divisor:
                quotient = quotient + (1 << shift)
                dividend = dividend - (divisor << shift)
        if negative:
            quotient = -quotient
        if quotient < - math.pow(2, 31) or quotient > math.pow(2, 31) - 1:
            quotient = math.pow(2, 31) - 1
        return int(quotient)

    def divide1(self, dividend, divisor):
        """Naive repeated-subtraction variant with the same sign and
        overflow-clamping rules as divide()."""
        if divisor == 0:
            return 0
        negative = (dividend < 0) != (divisor < 0)
        dividend = abs(dividend)
        divisor = abs(divisor)
        count = 0
        while dividend >= divisor:
            dividend = dividend - divisor
            count = count + 1
        if negative:
            count = -count
        if count < - math.pow(2, 31) or count > math.pow(2, 31) - 1:
            count = math.pow(2, 31) - 1
        return count
if __name__ == "__main__":
    solution = Solution()
    res = solution.divide(-2147483648, -1)
    print("res:{0}".format(res))
|
[
"[email protected]"
] | |
c9e8f678bb4ebbb9ba7388c4c1ddb3c1ffd42804
|
e37a4775935435eda9f176c44005912253a720d8
|
/datadriven/python/uq/quadrature/marginalization/__init__.py
|
36cc5a9cbcc3e8cf29631596630f546a4e697170
|
[] |
no_license
|
JihoYang/SGpp
|
b1d90d2d9e8f8be0092e1a9fa0f37a5f49213c29
|
7e547110584891beed194d496e23194dd90ccd20
|
refs/heads/master
| 2020-04-25T10:27:58.081281 | 2018-09-29T19:33:13 | 2018-09-29T19:33:13 | null | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 42 |
py
|
from marginalization import doMarginalize
|
[
"[email protected]"
] | |
899c7ea862d1368e04b45d33aabd03d943fa2d16
|
1b845de8123c750e8735ccf4297bf1e5861cbb4b
|
/katas/5kyu/kata11.py
|
4c378777c4c279724b5df14f510e2318865c2f63
|
[] |
no_license
|
jorgemira/codewars
|
3ef2b05fa90722cdd48bb8afb0f7536627bcfec9
|
701a756f3d466dbfe93f228b2e294cf49a7af2ae
|
refs/heads/master
| 2020-12-19T19:07:51.485286 | 2020-01-23T15:54:52 | 2020-01-23T15:54:52 | 235,824,468 | 3 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 941 |
py
|
"""
Codewars 5 kyu kata: Finding an appointment
URL: https://www.codewars.com/kata/525f277c7103571f47000147/python
"""
def t2n(time):
    """Convert an 'HH:MM' string to minutes elapsed since 09:00."""
    hours, minutes = [int(part) for part in time.split(':')]
    return (hours - 9) * 60 + minutes
def n2t(num):
    """Convert minutes-since-09:00 back to a zero-padded 'HH:MM' string.

    Fixes two defects in the original:
    - it used true division (``num / 60``), which yields floats on
      Python 3 and garbles the output;
    - nonzero single-digit minutes were not zero-padded (e.g. '09:5'
      instead of '09:05'), because the pad was only applied when the
      remainder was falsy (zero).
    """
    hours, minutes = divmod(num, 60)
    return '{:02d}:{:02d}'.format(hours + 9, minutes)
def overlaps(period1, period2):
    """Return True when one period's start lies inside the other.

    Periods are (start, end) pairs in minutes; ends are exclusive, so
    back-to-back periods do not overlap.
    """
    a_start, a_end = period1
    b_start, b_end = period2
    if a_start <= b_start < a_end:
        return True
    return b_start <= a_start < b_end
def get_start_time(schedules, duration):
    """Find the earliest slot of ``duration`` minutes (within 09:00-19:00)
    that is free in every schedule, or None when no such slot exists.

    Candidate start times are 09:00 itself plus the end of every existing
    appointment, since the earliest free slot must begin at one of those.
    """
    busy = [[t2n(start), t2n(end)]
            for appointments in schedules
            for start, end in appointments]
    candidates = {0} | {end for _, end in busy}
    for begin in sorted(candidates):
        slot = (begin, begin + duration)
        # 600 minutes past 09:00 is the 19:00 closing time.
        if slot[1] <= 600 and not any(overlaps(b, slot) for b in busy):
            return n2t(begin)
    return None
|
[
"[email protected]"
] | |
15a699a1ba9a4aff30701aeada95d57169f75f68
|
fc5708b8f291c314dad85f53e23c5728d7b9627f
|
/week3/excercise_3.py
|
81fb9030a46a6251b5d5e74230201b77b1756328
|
[] |
no_license
|
OnestoneW/UZH
|
d271beeb9cfa31c67a5ce9e6f8a7b9ec5d3b8658
|
9c243a6e92c504f83d5fc091a5d67bd05e79d905
|
refs/heads/master
| 2021-05-07T05:03:58.526573 | 2017-11-22T19:55:46 | 2017-11-22T19:55:46 | 111,398,660 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 904 |
py
|
import pylab as plb
import numpy as np
import matplotlib
def calc_average_radiation(data_set):
    """Compute the inverse-variance weighted mean radiation per year.

    ``data_set`` is a 2-column array: column 0 holds per-hour measurement
    values, column 1 their errors. Both are scaled to per-year units before
    averaging. Returns ``(average, error_on_average)``.
    """
    hours_per_year = 365 * 24
    # Rescale the hourly measurements and their errors to "per year".
    values = data_set[:, 0] * hours_per_year
    errors = data_set[:, 1] * hours_per_year
    # Inverse-variance weights: precise measurements count more.
    weights = 1.0 / errors ** 2
    average = (weights * values).sum() / weights.sum()
    error_on_average = 1.0 / (np.sqrt(weights.sum()) * np.sqrt(len(weights)))
    return average, error_on_average
if __name__ == "__main__":
data = plb.loadtxt("radiation.dat")
average, error = calc_average_radiation(data)
result = "({} +/- {}) mSv/year".format(round(average, 3), round(error, 3))
print("Average radiation per year:",result)
'''Taking a look onto the natural radiation of 2.4 mSv/year, our result is about (0.191 +/- 0.012) mSv/year higher.
Since this is only about 8% higher as the natural background radiation, it is compatible.'''
|
[
"[email protected]"
] | |
45f25cb7ac03ba31a0808b28ae688e323ba4c5d7
|
1d928c3f90d4a0a9a3919a804597aa0a4aab19a3
|
/python/youtube-dl/2016/12/amcnetworks.py
|
87c803e948fd2e04cde6b0b43251d3f804b952a0
|
[] |
no_license
|
rosoareslv/SED99
|
d8b2ff5811e7f0ffc59be066a5a0349a92cbb845
|
a062c118f12b93172e31e8ca115ce3f871b64461
|
refs/heads/main
| 2023-02-22T21:59:02.703005 | 2021-01-28T19:40:51 | 2021-01-28T19:40:51 | 306,497,459 | 1 | 1 | null | 2020-11-24T20:56:18 | 2020-10-23T01:18:07 | null |
UTF-8
|
Python
| false | false | 4,211 |
py
|
# coding: utf-8
from __future__ import unicode_literals
from .theplatform import ThePlatformIE
from ..utils import (
update_url_query,
parse_age_limit,
int_or_none,
)
class AMCNetworksIE(ThePlatformIE):
    """Extractor for AMC Networks sites (amc.com, bbcamerica.com, ifc.com,
    wetv.com), all of which serve media through thePlatform."""
    _VALID_URL = r'https?://(?:www\.)?(?:amc|bbcamerica|ifc|wetv)\.com/(?:movies/|shows/[^/]+/(?:full-episodes/)?[^/]+/episode-\d+(?:-(?:[^/]+/)?|/))(?P<id>[^/?#]+)'
    _TESTS = [{
        'url': 'http://www.ifc.com/shows/maron/season-04/episode-01/step-1',
        'md5': '',
        'info_dict': {
            'id': 's3MX01Nl4vPH',
            'ext': 'mp4',
            'title': 'Maron - Season 4 - Step 1',
            'description': 'In denial about his current situation, Marc is reluctantly convinced by his friends to enter rehab. Starring Marc Maron and Constance Zimmer.',
            'age_limit': 17,
            'upload_date': '20160505',
            'timestamp': 1462468831,
            'uploader': 'AMCN',
        },
        'params': {
            # m3u8 download
            'skip_download': True,
        },
        'skip': 'Requires TV provider accounts',
    }, {
        'url': 'http://www.bbcamerica.com/shows/the-hunt/full-episodes/season-1/episode-01-the-hardest-challenge',
        'only_matching': True,
    }, {
        'url': 'http://www.amc.com/shows/preacher/full-episodes/season-01/episode-00/pilot',
        'only_matching': True,
    }, {
        'url': 'http://www.wetv.com/shows/million-dollar-matchmaker/season-01/episode-06-the-dumped-dj-and-shallow-hal',
        'only_matching': True,
    }, {
        'url': 'http://www.ifc.com/movies/chaos',
        'only_matching': True,
    }, {
        'url': 'http://www.bbcamerica.com/shows/doctor-who/full-episodes/the-power-of-the-daleks/episode-01-episode-1-color-version',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        """Resolve the page to a thePlatform media URL, handle TV-provider
        (Adobe Pass) auth when the page demands it, and return the info dict."""
        display_id = self._match_id(url)
        webpage = self._download_webpage(url, display_id)
        # Request an HLS (m3u8) manifest from thePlatform.
        query = {
            'mbr': 'true',
            'manifest': 'm3u',
        }
        media_url = self._search_regex(r'window\.platformLinkURL\s*=\s*[\'"]([^\'"]+)', webpage, 'media url')
        theplatform_metadata = self._download_theplatform_metadata(self._search_regex(
            r'https?://link.theplatform.com/s/([^?]+)', media_url, 'theplatform_path'), display_id)
        info = self._parse_theplatform_metadata(theplatform_metadata)
        video_id = theplatform_metadata['pid']
        title = theplatform_metadata['title']
        rating = theplatform_metadata['ratings'][0]['rating']
        auth_required = self._search_regex(r'window\.authRequired\s*=\s*(true|false);', webpage, 'auth required')
        if auth_required == 'true':
            # Gated content: fetch an MVPD auth token before building the URL.
            requestor_id = self._search_regex(r'window\.requestor_id\s*=\s*[\'"]([^\'"]+)', webpage, 'requestor id')
            resource = self._get_mvpd_resource(requestor_id, title, video_id, rating)
            query['auth'] = self._extract_mvpd_auth(url, video_id, requestor_id, resource)
        media_url = update_url_query(media_url, query)
        formats, subtitles = self._extract_theplatform_smil(media_url, video_id)
        self._sort_formats(formats)
        info.update({
            'id': video_id,
            'subtitles': subtitles,
            'formats': formats,
            # Fixed: parse_age_limit was applied twice; a single call suffices
            # (the second call was at best a no-op on the already-parsed int).
            'age_limit': parse_age_limit(rating),
        })
        # Episode/series details live under a site-specific XML namespace key.
        ns_keys = theplatform_metadata.get('$xmlns', {}).keys()
        if ns_keys:
            ns = list(ns_keys)[0]
            series = theplatform_metadata.get(ns + '$show')
            season_number = int_or_none(theplatform_metadata.get(ns + '$season'))
            episode = theplatform_metadata.get(ns + '$episodeTitle')
            episode_number = int_or_none(theplatform_metadata.get(ns + '$episode'))
            if season_number:
                title = 'Season %d - %s' % (season_number, title)
            if series:
                title = '%s - %s' % (series, title)
            info.update({
                'title': title,
                'series': series,
                'season_number': season_number,
                'episode': episode,
                'episode_number': episode_number,
            })
        return info
|
[
"[email protected]"
] | |
5df31c267567c47d939a5162d5348aab8c0045f5
|
e91f9c9688e348a2374d42d0df2f22319073e266
|
/venv/bin/python-config
|
6a0e1e93d04a6a93d5c9ecb17e8a1d021cd8656a
|
[] |
no_license
|
jobmassaro/motocustom
|
8e5be423356e8dfbdc46806b1dfb3dfb3c409e3e
|
a644317a016244049c6835e33497c79b0207060c
|
refs/heads/master
| 2021-01-13T09:14:46.586785 | 2016-11-05T17:19:13 | 2016-11-05T17:19:13 | 72,467,393 | 0 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 2,356 |
#!/home/developer/tutflask/ciaoline/venv/bin/python
# python-config helper: prints compile/link flags for embedding or extending
# this Python installation. Flag availability depends on the Python version.
import sys
import getopt
import sysconfig
# Options supported on every version; '--help' is deliberately kept last.
valid_opts = ['prefix', 'exec-prefix', 'includes', 'libs', 'cflags',
              'ldflags', 'help']
if sys.version_info >= (3, 2):
    # insert(-1) keeps '--help' as the final entry in the usage line
    valid_opts.insert(-1, 'extension-suffix')
    valid_opts.append('abiflags')
if sys.version_info >= (3, 3):
    valid_opts.append('configdir')
def exit_with_usage(code=1):
    """Write a usage line listing every supported flag to stderr, then exit
    the process with *code* (1 by default, 0 when help was requested)."""
    options = '|'.join('--' + opt for opt in valid_opts)
    sys.stderr.write("Usage: {0} [{1}]\n".format(sys.argv[0], options))
    sys.exit(code)
try:
    opts, args = getopt.getopt(sys.argv[1:], '', valid_opts)
except getopt.error:
    exit_with_usage()
if not opts:
    exit_with_usage()
pyver = sysconfig.get_config_var('VERSION')
getvar = sysconfig.get_config_var
# Keep only the flag names; getopt long options carry empty values here.
opt_flags = [flag for (flag, val) in opts]
if '--help' in opt_flags:
    exit_with_usage(code=0)
# Emit one output line per requested flag, in the order given.
for opt in opt_flags:
    if opt == '--prefix':
        print(sysconfig.get_config_var('prefix'))
    elif opt == '--exec-prefix':
        print(sysconfig.get_config_var('exec_prefix'))
    elif opt in ('--includes', '--cflags'):
        # Both platform-independent and platform-specific include dirs.
        flags = ['-I' + sysconfig.get_path('include'),
                 '-I' + sysconfig.get_path('platinclude')]
        if opt == '--cflags':
            flags.extend(getvar('CFLAGS').split())
        print(' '.join(flags))
    elif opt in ('--libs', '--ldflags'):
        abiflags = getattr(sys, 'abiflags', '')
        libs = ['-lpython' + pyver + abiflags]
        libs += getvar('LIBS').split()
        libs += getvar('SYSLIBS').split()
        # add the prefix/lib/pythonX.Y/config dir, but only if there is no
        # shared library in prefix/lib/.
        if opt == '--ldflags':
            if not getvar('Py_ENABLE_SHARED'):
                libs.insert(0, '-L' + getvar('LIBPL'))
            if not getvar('PYTHONFRAMEWORK'):
                libs.extend(getvar('LINKFORSHARED').split())
        print(' '.join(libs))
    elif opt == '--extension-suffix':
        # EXT_SUFFIX replaced the older 'SO' config var; fall back for
        # interpreters that only define the legacy name.
        ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')
        if ext_suffix is None:
            ext_suffix = sysconfig.get_config_var('SO')
        print(ext_suffix)
    elif opt == '--abiflags':
        if not getattr(sys, 'abiflags', None):
            exit_with_usage()
        print(sys.abiflags)
    elif opt == '--configdir':
        print(sysconfig.get_config_var('LIBPL'))
|
[
"="
] |
=
|
|
8c3cc63178cb6a0d72b3a1b00ef7ca7fcc050cf5
|
523f8f5febbbfeb6d42183f2bbeebc36f98eadb5
|
/140.py
|
8533e0a38d71ce3286ff6fbbbf15ed5d61c38e6e
|
[] |
no_license
|
saleed/LeetCode
|
655f82fdfcc3000400f49388e97fc0560f356af0
|
48b43999fb7e2ed82d922e1f64ac76f8fabe4baa
|
refs/heads/master
| 2022-06-15T21:54:56.223204 | 2022-05-09T14:05:50 | 2022-05-09T14:05:50 | 209,430,056 | 2 | 0 | null | null | null | null |
UTF-8
|
Python
| false | false | 1,708 |
py
|
class Solution(object):
    """Word Break II (LeetCode 140): list every sentence obtainable by
    segmenting ``s`` into a space-separated sequence of dictionary words.

    Cleanups vs. the original: removed a leftover debug ``print(pre)``,
    an unused local, and commented-out dead code; renamed a parameter that
    shadowed the ``id`` builtin.
    """

    def wordBreak(self, s, wordDict):
        """
        :type s: str
        :type wordDict: List[str]
        :rtype: List[str]
        """
        if len(s) == 0 or len(wordDict) == 0:
            return []
        words = set(wordDict)  # O(1) membership tests
        # dp[i] is True when s[:i] can be segmented into dictionary words.
        dp = [False for _ in range(len(s) + 1)]
        # pre[i] lists every split point j with dp[j] True and s[j:i] a word.
        pre = [[] for _ in range(len(s) + 1)]
        dp[0] = True
        for i in range(1, len(s) + 1):
            for j in range(i):
                if dp[j] and s[j:i] in words:
                    dp[i] = True
                    pre[i].append(j)
        if not dp[len(s)]:
            return []
        sentences = []
        for path in self.generatePath(pre, s):
            # Paths are collected back-to-front; reverse before joining.
            sentences.append(" ".join(reversed(path)))
        return sentences

    def generatePath(self, pre, s):
        """Expand the predecessor table into word paths; each path holds the
        words of one segmentation, ordered from the end of ``s`` backwards."""
        res = []
        self.recursiveSearch(res, pre, len(s), s, [])
        return res

    def recursiveSearch(self, res, pre, pos, s, curpath):
        """Backtracking walk over ``pre`` from position ``pos`` toward 0,
        accumulating the words of one segmentation in ``curpath``."""
        if len(pre[pos]) == 0:
            # Reached the start of the string: record a finished path.
            res.append(curpath[:])
            return
        for j in pre[pos]:
            curpath.append(s[j:pos])
            self.recursiveSearch(res, pre, j, s, curpath)
            curpath.pop()
# Ad-hoc smoke tests, executed whenever this file is run or imported.
a=Solution()
s = "catsanddog"
wordDict = ["cat", "cats", "and", "sand", "dog"]
print(a.wordBreak(s,wordDict))
s = "pineapplepenapple"
wordDict = ["apple", "pen", "applepen", "pine", "pineapple"]
print(a.wordBreak(s,wordDict))
# Demonstrates how a word list is joined into a single sentence string.
test=['pine', 'applepen', 'apple']
print(" ".join(test))
|
[
"[email protected]"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.