code (string, 12–2.05k chars) | label_name (string, 5 classes) | label (int64, 0–4) |
---|---|---|
def setup_db(cls, config_calibre_dir, app_db_path):
cls.dispose()
if not config_calibre_dir:
cls.config.invalidate()
return False
dbpath = os.path.join(config_calibre_dir, "metadata.db")
if not os.path.exists(dbpath):
cls.config.invalidate()
return False
try:
cls.engine = create_engine('sqlite://',
echo=False,
isolation_level="SERIALIZABLE",
connect_args={'check_same_thread': False},
poolclass=StaticPool)
with cls.engine.begin() as connection:
connection.execute(text("attach database '{}' as calibre;".format(dbpath)))
connection.execute(text("attach database '{}' as app_settings;".format(app_db_path)))
conn = cls.engine.connect()
# conn.text_factory = lambda b: b.decode(errors = 'ignore') possible fix for #1302
except Exception as ex:
cls.config.invalidate(ex)
return False
cls.config.db_configured = True
if not cc_classes:
try:
cc = conn.execute(text("SELECT id, datatype FROM custom_columns"))
cls.setup_db_cc_classes(cc)
except OperationalError as e:
log.error_or_exception(e)
cls.session_factory = scoped_session(sessionmaker(autocommit=False,
autoflush=True,
bind=cls.engine))
for inst in cls.instances:
inst.initSession()
cls._init = True
return True | Base | 1 |
def get_image(path):
def get_absolute_path(path):
import os
script_dir = os.path.dirname(__file__) # <-- absolute dir the script is in
rel_path = path
abs_file_path = os.path.join(script_dir, rel_path)
return abs_file_path
return send_file(
get_absolute_path(f"./images/{path}"),
mimetype='image/png',
attachment_filename='snapshot.png',
cache_timeout=0
) | Base | 1 |
def create(request, comment_id):
comment = get_object_or_404(
Comment.objects.exclude(user=request.user),
pk=comment_id)
form = LikeForm(
user=request.user,
comment=comment,
data=post_data(request))
if is_post(request) and form.is_valid():
like = form.save()
like.comment.increase_likes_count()
if is_ajax(request):
return json_response({'url_delete': like.get_delete_url()})
return redirect(request.POST.get('next', comment.get_absolute_url()))
return render(
request=request,
template_name='spirit/comment/like/create.html',
context={
'form': form,
'comment': comment}) | Base | 1 |
def check_auth(username, password):
try:
username = username.encode('windows-1252')
except UnicodeEncodeError:
username = username.encode('utf-8')
user = ub.session.query(ub.User).filter(func.lower(ub.User.name) ==
username.decode('utf-8').lower()).first()
if bool(user and check_password_hash(str(user.password), password)):
return True
else:
ip_Address = request.headers.get('X-Forwarded-For', request.remote_addr)
log.warning('OPDS Login failed for user "%s" IP-address: %s', username.decode('utf-8'), ip_Address)
return False | Base | 1 |
def item_to_bm(self, item):
return cPickle.loads(bytes(item.data(Qt.UserRole))) | Base | 1 |
def whitelist(f):
"""Decorator: Whitelist method to be called remotely via REST API."""
f.whitelisted = True
return f | Base | 1 |
def render_prepare_search_form(cc):
# prepare data for search-form
tags = calibre_db.session.query(db.Tags)\
.join(db.books_tags_link)\
.join(db.Books)\
.filter(calibre_db.common_filters()) \
.group_by(text('books_tags_link.tag'))\
.order_by(db.Tags.name).all()
series = calibre_db.session.query(db.Series)\
.join(db.books_series_link)\
.join(db.Books)\
.filter(calibre_db.common_filters()) \
.group_by(text('books_series_link.series'))\
.order_by(db.Series.name)\
.filter(calibre_db.common_filters()).all()
shelves = ub.session.query(ub.Shelf)\
.filter(or_(ub.Shelf.is_public == 1, ub.Shelf.user_id == int(current_user.id)))\
.order_by(ub.Shelf.name).all()
extensions = calibre_db.session.query(db.Data)\
.join(db.Books)\
.filter(calibre_db.common_filters()) \
.group_by(db.Data.format)\
.order_by(db.Data.format).all()
if current_user.filter_language() == u"all":
languages = calibre_db.speaking_language()
else:
languages = None
return render_title_template('search_form.html', tags=tags, languages=languages, extensions=extensions,
series=series,shelves=shelves, title=_(u"Advanced Search"), cc=cc, page="advsearch") | Base | 1 |
def _inject_net_into_fs(net, fs, execute=None):
"""Inject /etc/network/interfaces into the filesystem rooted at fs.
net is the contents of /etc/network/interfaces.
"""
netdir = os.path.join(os.path.join(fs, 'etc'), 'network')
utils.execute('mkdir', '-p', netdir, run_as_root=True)
utils.execute('chown', 'root:root', netdir, run_as_root=True)
utils.execute('chmod', 755, netdir, run_as_root=True)
netfile = os.path.join(netdir, 'interfaces')
utils.execute('tee', netfile, process_input=net, run_as_root=True) | Base | 1 |
def edit_book_comments(comments, book):
modif_date = False
if comments:
comments = clean_html(comments)
if len(book.comments):
if book.comments[0].text != comments:
book.comments[0].text = comments
modif_date = True
else:
if comments:
book.comments.append(db.Comments(text=comments, book=book.id))
modif_date = True
return modif_date | Base | 1 |
def cookies(self) -> RequestsCookieJar:
jar = RequestsCookieJar()
for name, cookie_dict in self['cookies'].items():
jar.set_cookie(create_cookie(
name, cookie_dict.pop('value'), **cookie_dict))
jar.clear_expired_cookies()
return jar | Class | 2 |
def test_reset_nonexistent_extension(self):
url = reverse('reset_due_date', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {
'student': self.user1.username,
'url': self.week1.location.to_deprecated_string(),
})
self.assertEqual(response.status_code, 400, response.content) | Compound | 4 |
def set_admins(self) -> None:
name = self.results["deploy"]["func-name"]["value"]
key = self.results["deploy"]["func-key"]["value"]
table_service = TableService(account_name=name, account_key=key)
if self.admins:
update_admins(table_service, self.application_name, self.admins) | Class | 2 |
def parse_soap_enveloped_saml_thingy(text, expected_tags):
"""Parses a SOAP enveloped SAML thing and returns the thing as
a string.
:param text: The SOAP object as XML string
:param expected_tags: What the tag of the SAML thingy is expected to be.
:return: SAML thingy as a string
"""
envelope = ElementTree.fromstring(text)
# Make sure it's a SOAP message
assert envelope.tag == '{%s}Envelope' % soapenv.NAMESPACE
assert len(envelope) >= 1
body = None
for part in envelope:
if part.tag == '{%s}Body' % soapenv.NAMESPACE:
assert len(part) == 1
body = part
break
if body is None:
return ""
saml_part = body[0]
if saml_part.tag in expected_tags:
return ElementTree.tostring(saml_part, encoding="UTF-8")
else:
raise WrongMessageType("Was '%s' expected one of %s" % (saml_part.tag,
expected_tags)) | Base | 1 |
def get(self):
if not current_user.is_authenticated():
return "Must be logged in to log out", 200
logout_user()
return "Logged Out", 200 | Base | 1 |
def validate_request(self, request):
"""If configured for webhook basic auth, validate request has correct auth."""
if self.basic_auth:
basic_auth = get_request_basic_auth(request)
if basic_auth is None or basic_auth not in self.basic_auth:
# noinspection PyUnresolvedReferences
raise AnymailWebhookValidationFailure(
"Missing or invalid basic auth in Anymail %s webhook" % self.esp_name) | Class | 2 |
def test_dir(self, tmpdir):
url = QUrl.fromLocalFile(str(tmpdir))
req = QNetworkRequest(url)
reply = filescheme.handler(req)
# The URL will always use /, even on Windows - so we force this here
# too.
tmpdir_path = str(tmpdir).replace(os.sep, '/')
assert reply.readAll() == filescheme.dirbrowser_html(tmpdir_path) | Compound | 4 |
def test_manage_pools(self) -> None:
user1 = uuid4()
user2 = uuid4()
# by default, any can modify
self.assertIsNone(
check_can_manage_pools_impl(
InstanceConfig(allow_pool_management=True), UserInfo()
)
)
# with oid, but no admin
self.assertIsNone(
check_can_manage_pools_impl(
InstanceConfig(allow_pool_management=True), UserInfo(object_id=user1)
)
)
# is admin
self.assertIsNone(
check_can_manage_pools_impl(
InstanceConfig(allow_pool_management=False, admins=[user1]),
UserInfo(object_id=user1),
)
)
# no user oid set
self.assertIsNotNone(
check_can_manage_pools_impl(
InstanceConfig(allow_pool_management=False, admins=[user1]), UserInfo()
)
)
# not an admin
self.assertIsNotNone(
check_can_manage_pools_impl(
InstanceConfig(allow_pool_management=False, admins=[user1]),
UserInfo(object_id=user2),
)
) | Class | 2 |
def _parse_cache_control(headers):
retval = {}
if "cache-control" in headers:
parts = headers["cache-control"].split(",")
parts_with_args = [
tuple([x.strip().lower() for x in part.split("=", 1)])
for part in parts
if -1 != part.find("=")
]
parts_wo_args = [
(name.strip().lower(), 1) for name in parts if -1 == name.find("=")
]
retval = dict(parts_with_args + parts_wo_args)
return retval | Class | 2 |
def test_open_with_filename(self):
tmpname = mktemp('', 'mmap')
fp = memmap(tmpname, dtype=self.dtype, mode='w+',
shape=self.shape)
fp[:] = self.data[:]
del fp
os.unlink(tmpname) | Class | 2 |
def _sanitize(value: str) -> str:
return re.sub(r"[^\w _-]+", "", value) | Base | 1 |
def main():
app = create_app()
init_errorhandler()
app.register_blueprint(web)
app.register_blueprint(opds)
app.register_blueprint(jinjia)
app.register_blueprint(about)
app.register_blueprint(shelf)
app.register_blueprint(admi)
app.register_blueprint(remotelogin)
app.register_blueprint(meta)
app.register_blueprint(gdrive)
app.register_blueprint(editbook)
if kobo_available:
app.register_blueprint(kobo)
app.register_blueprint(kobo_auth)
if oauth_available:
app.register_blueprint(oauth)
success = web_server.start()
sys.exit(0 if success else 1) | Base | 1 |
def test_removes_expired_cookies_from_session_obj(self, initial_cookie, expired_cookie, httpbin):
session = Session(self.config_dir)
session['cookies'] = initial_cookie
session.remove_cookies([expired_cookie])
assert expired_cookie not in session.cookies | Class | 2 |
def test_modify_access_with_fake_user(self):
url = reverse('modify_access', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {
'unique_student_identifier': 'GandalfTheGrey',
'rolename': 'staff',
'action': 'revoke',
})
self.assertEqual(response.status_code, 200)
expected = {
'unique_student_identifier': 'GandalfTheGrey',
'userDoesNotExist': True,
}
res_json = json.loads(response.content)
self.assertEqual(res_json, expected) | Compound | 4 |
def get_user_profile(access_token):
headers = {"Authorization": "OAuth {}".format(access_token)}
response = requests.get(
"https://www.googleapis.com/oauth2/v1/userinfo", headers=headers
)
if response.status_code == 401:
logger.warning("Failed getting user profile (response code 401).")
return None
return response.json() | Base | 1 |
def auth_user_registration(self):
return self.appbuilder.get_app.config["AUTH_USER_REGISTRATION"] | Class | 2 |
def test_bad_host_header(self):
# https://corte.si/posts/code/pathod/pythonservers/index.html
to_send = "GET / HTTP/1.0\n" " Host: 0\n\n"
to_send = tobytes(to_send)
self.connect()
self.sock.send(to_send)
fp = self.sock.makefile("rb", 0)
line, headers, response_body = read_http(fp)
self.assertline(line, "400", "Bad Request", "HTTP/1.0")
self.assertEqual(headers.get("server"), "waitress")
self.assertTrue(headers.get("date")) | Base | 1 |
def delete_user_session(user_id, session_key):
try:
log.info("Deleted session_key : " + session_key)
session.query(User_Sessions).filter(User_Sessions.user_id==user_id,
User_Sessions.session_key==session_key).delete()
session.commit()
except (exc.OperationalError, exc.InvalidRequestError):
session.rollback()
log.exception(e) | Base | 1 |
def sentences_stats(self, type, vId = None):
return self.sql_execute(self.prop_sentences_stats(type, vId)) | Base | 1 |
def set_admins(self) -> None:
name = self.results["deploy"]["func-name"]["value"]
key = self.results["deploy"]["func-key"]["value"]
table_service = TableService(account_name=name, account_key=key)
if self.admins:
update_admins(table_service, self.application_name, self.admins) | Class | 2 |
async def _has_watch_regex_match(self, text: str) -> Tuple[Union[bool, re.Match], Optional[str]]:
"""
Return True if `text` matches any regex from `word_watchlist` or `token_watchlist` configs.
`word_watchlist`'s patterns are placed between word boundaries while `token_watchlist` is
matched as-is. Spoilers are expanded, if any, and URLs are ignored.
Second return value is a reason written to database about blacklist entry (can be None).
"""
if SPOILER_RE.search(text):
text = self._expand_spoilers(text)
text = self.clean_input(text)
# Make sure it's not a URL
if URL_RE.search(text):
return False, None
watchlist_patterns = self._get_filterlist_items('filter_token', allowed=False)
for pattern in watchlist_patterns:
match = re.search(pattern, text, flags=re.IGNORECASE)
if match:
return match, self._get_filterlist_value('filter_token', pattern, allowed=False)['comment']
return False, None | Class | 2 |
def create_class_from_xml_string(target_class, xml_string):
"""Creates an instance of the target class from a string.
:param target_class: The class which will be instantiated and populated
with the contents of the XML. This class must have a c_tag and a
c_namespace class variable.
:param xml_string: A string which contains valid XML. The root element
of the XML string should match the tag and namespace of the desired
class.
:return: An instance of the target class with members assigned according to
the contents of the XML - or None if the root XML tag and namespace did
not match those of the target class.
"""
if not isinstance(xml_string, six.binary_type):
xml_string = xml_string.encode('utf-8')
tree = ElementTree.fromstring(xml_string)
return create_class_from_element_tree(target_class, tree) | Base | 1 |
def store_user_session():
if flask_session.get('_user_id', ""):
try:
if not check_user_session(flask_session.get('_user_id', ""), flask_session.get('_id', "")):
user_session = User_Sessions(flask_session.get('_user_id', ""), flask_session.get('_id', ""))
session.add(user_session)
session.commit()
log.info("Login and store session : " + flask_session.get('_id', ""))
else:
log.info("Found stored session : " + flask_session.get('_id', ""))
except (exc.OperationalError, exc.InvalidRequestError) as e:
session.rollback()
log.exception(e)
else:
log.error("No user id in session") | Base | 1 |
def unarchive(byte_array: bytes, directory: Text) -> Text:
"""Tries to unpack a byte array interpreting it as an archive.
Tries to use tar first to unpack, if that fails, zip will be used."""
try:
tar = tarfile.open(fileobj=IOReader(byte_array))
tar.extractall(directory)
tar.close()
return directory
except tarfile.TarError:
zip_ref = zipfile.ZipFile(IOReader(byte_array))
zip_ref.extractall(directory)
zip_ref.close()
return directory | Base | 1 |
def mysql_ends_with(field: Field, value: str) -> Criterion:
return functions.Cast(field, SqlTypes.CHAR).like(f"%{value}") | Base | 1 |
def test_can_read_token_from_headers(self):
"""Tests that Sydent correct extracts an auth token from request headers"""
self.sydent.run()
request, _ = make_request(
self.sydent.reactor, "GET", "/_matrix/identity/v2/hash_details"
)
request.requestHeaders.addRawHeader(
b"Authorization", b"Bearer " + self.test_token.encode("ascii")
)
token = tokenFromRequest(request)
self.assertEqual(token, self.test_token) | Class | 2 |
def test_get_students_features(self):
"""
Test that some minimum of information is formatted
correctly in the response to get_students_features.
"""
for student in self.students:
student.profile.city = "Mos Eisley {}".format(student.id)
student.profile.save()
url = reverse('get_students_features', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {})
res_json = json.loads(response.content)
self.assertIn('students', res_json)
for student in self.students:
student_json = [
x for x in res_json['students']
if x['username'] == student.username
][0]
self.assertEqual(student_json['username'], student.username)
self.assertEqual(student_json['email'], student.email)
self.assertEqual(student_json['city'], student.profile.city)
self.assertEqual(student_json['country'], "") | Compound | 4 |
def test_stopped_typing(self):
self.room_members = [U_APPLE, U_BANANA, U_ONION]
# Gut-wrenching
from synapse.handlers.typing import RoomMember
member = RoomMember(ROOM_ID, U_APPLE.to_string())
self.handler._member_typing_until[member] = 1002000
self.handler._room_typing[ROOM_ID] = {U_APPLE.to_string()}
self.assertEquals(self.event_source.get_current_key(), 0)
self.get_success(
self.handler.stopped_typing(
target_user=U_APPLE,
requester=create_requester(U_APPLE),
room_id=ROOM_ID,
)
)
self.on_new_event.assert_has_calls([call("typing_key", 1, rooms=[ROOM_ID])])
put_json = self.hs.get_http_client().put_json
put_json.assert_called_once_with(
"farm",
path="/_matrix/federation/v1/send/1000000",
data=_expect_edu_transaction(
"m.typing",
content={
"room_id": ROOM_ID,
"user_id": U_APPLE.to_string(),
"typing": False,
},
),
json_data_callback=ANY,
long_retries=True,
backoff_on_404=True,
try_trailing_slash_on_400=True,
)
self.assertEquals(self.event_source.get_current_key(), 1)
events = self.get_success(
self.event_source.get_new_events(room_ids=[ROOM_ID], from_key=0)
)
self.assertEquals(
events[0],
[{"type": "m.typing", "room_id": ROOM_ID, "content": {"user_ids": []}}],
) | Base | 1 |
def _on_load_started(self) -> None:
self._progress = 0
self._has_ssl_errors = False
self.data.viewing_source = False
self._set_load_status(usertypes.LoadStatus.loading)
self.load_started.emit() | Class | 2 |
def test_send_with_body(self):
to_send = "GET / HTTP/1.0\n" "Content-Length: 5\n\n"
to_send += "hello"
to_send = tobytes(to_send)
self.connect()
self.sock.send(to_send)
fp = self.sock.makefile("rb", 0)
line, headers, echo = self._read_echo(fp)
self.assertline(line, "200", "OK", "HTTP/1.0")
self.assertEqual(echo.content_length, "5")
self.assertEqual(echo.body, b"hello") | Base | 1 |
def load(doc):
code = config.retrieveBoilerplateFile(doc, "bs-extensions")
exec(code, globals()) | Base | 1 |
def authorize_and_redirect(request):
if not request.GET.get("redirect"):
return HttpResponse("You need to pass a url to ?redirect=", status=401)
if not request.META.get("HTTP_REFERER"):
return HttpResponse('You need to make a request that includes the "Referer" header.', status=400)
referer_url = urlparse(request.META["HTTP_REFERER"])
redirect_url = urlparse(request.GET["redirect"])
if referer_url.hostname != redirect_url.hostname:
return HttpResponse(f"Can only redirect to the same domain as the referer: {referer_url.hostname}", status=400)
if referer_url.scheme != redirect_url.scheme:
return HttpResponse(f"Can only redirect to the same scheme as the referer: {referer_url.scheme}", status=400)
if referer_url.port != redirect_url.port:
return HttpResponse(
f"Can only redirect to the same port as the referer: {referer_url.port or 'no port in URL'}", status=400
)
return render_template(
"authorize_and_redirect.html",
request=request,
context={"domain": redirect_url.hostname, "redirect_url": request.GET["redirect"]},
) | Base | 1 |
def post_json_get_nothing(self, uri, post_json, opts):
"""Make a POST request to an endpoint returning JSON and parse result
:param uri: The URI to make a POST request to.
:type uri: unicode
:param post_json: A Python object that will be converted to a JSON
string and POSTed to the given URI.
:type post_json: dict[any, any]
:param opts: A dictionary of request options. Currently only opts.headers
is supported.
:type opts: dict[str,any]
:return: a response from the remote server.
:rtype: twisted.internet.defer.Deferred[twisted.web.iweb.IResponse]
"""
json_bytes = json.dumps(post_json).encode("utf8")
headers = opts.get('headers', Headers({
b"Content-Type": [b"application/json"],
}))
logger.debug("HTTP POST %s -> %s", json_bytes, uri)
response = yield self.agent.request(
b"POST",
uri.encode("utf8"),
headers,
bodyProducer=FileBodyProducer(BytesIO(json_bytes))
)
# Ensure the body object is read otherwise we'll leak HTTP connections
# as per
# https://twistedmatrix.com/documents/current/web/howto/client.html
yield readBody(response)
defer.returnValue(response) | Base | 1 |
def feed_booksindex():
shift = 0
off = int(request.args.get("offset") or 0)
entries = calibre_db.session.query(func.upper(func.substr(db.Books.sort, 1, 1)).label('id'))\
.filter(calibre_db.common_filters()).group_by(func.upper(func.substr(db.Books.sort, 1, 1))).all()
elements = []
if off == 0:
elements.append({'id': "00", 'name':_("All")})
shift = 1
for entry in entries[
off + shift - 1:
int(off + int(config.config_books_per_page) - shift)]:
elements.append({'id': entry.id, 'name': entry.id})
pagination = Pagination((int(off) / (int(config.config_books_per_page)) + 1), config.config_books_per_page,
len(entries) + 1)
return render_xml_template('feed.xml',
letterelements=elements,
folder='opds.feed_letter_books',
pagination=pagination) | Base | 1 |
def _inject_admin_password_into_fs(admin_passwd, fs, execute=None):
"""Set the root password to admin_passwd
admin_password is a root password
fs is the path to the base of the filesystem into which to inject
the key.
This method modifies the instance filesystem directly,
and does not require a guest agent running in the instance.
"""
# The approach used here is to copy the password and shadow
# files from the instance filesystem to local files, make any
# necessary changes, and then copy them back.
admin_user = 'root'
fd, tmp_passwd = tempfile.mkstemp()
os.close(fd)
fd, tmp_shadow = tempfile.mkstemp()
os.close(fd)
utils.execute('cp', os.path.join(fs, 'etc', 'passwd'), tmp_passwd,
run_as_root=True)
utils.execute('cp', os.path.join(fs, 'etc', 'shadow'), tmp_shadow,
run_as_root=True)
_set_passwd(admin_user, admin_passwd, tmp_passwd, tmp_shadow)
utils.execute('cp', tmp_passwd, os.path.join(fs, 'etc', 'passwd'),
run_as_root=True)
os.unlink(tmp_passwd)
utils.execute('cp', tmp_shadow, os.path.join(fs, 'etc', 'shadow'),
run_as_root=True)
os.unlink(tmp_shadow) | Base | 1 |
def setUp(self, request):
self.db_path = tempfile.NamedTemporaryFile(
prefix='tmp_b2_tests_%s__' % (request.node.name,), delete=True
).name
try:
os.unlink(self.db_path)
except OSError:
pass
self.home = tempfile.mkdtemp()
yield
for cleanup_method in [lambda: os.unlink(self.db_path), lambda: shutil.rmtree(self.home)]:
try:
cleanup_method
except OSError:
pass | Base | 1 |
def as_const(self, eval_ctx=None):
eval_ctx = get_eval_context(self, eval_ctx)
if eval_ctx.volatile:
raise Impossible()
obj = self.node.as_const(eval_ctx)
# don't evaluate context functions
args = [x.as_const(eval_ctx) for x in self.args]
if isinstance(obj, _context_function_types):
if getattr(obj, 'contextfunction', False):
raise Impossible()
elif getattr(obj, 'evalcontextfunction', False):
args.insert(0, eval_ctx)
elif getattr(obj, 'environmentfunction', False):
args.insert(0, self.environment)
kwargs = dict(x.as_const(eval_ctx) for x in self.kwargs)
if self.dyn_args is not None:
try:
args.extend(self.dyn_args.as_const(eval_ctx))
except Exception:
raise Impossible()
if self.dyn_kwargs is not None:
try:
kwargs.update(self.dyn_kwargs.as_const(eval_ctx))
except Exception:
raise Impossible()
try:
return obj(*args, **kwargs)
except Exception:
raise Impossible() | Base | 1 |
def ratings_list():
if current_user.check_visibility(constants.SIDEBAR_RATING):
if current_user.get_view_property('ratings', 'dir') == 'desc':
order = db.Ratings.rating.desc()
order_no = 0
else:
order = db.Ratings.rating.asc()
order_no = 1
entries = calibre_db.session.query(db.Ratings, func.count('books_ratings_link.book').label('count'),
(db.Ratings.rating / 2).label('name')) \
.join(db.books_ratings_link).join(db.Books).filter(calibre_db.common_filters()) \
.group_by(text('books_ratings_link.rating')).order_by(order).all()
return render_title_template('list.html', entries=entries, folder='web.books_list', charlist=list(),
title=_(u"Ratings list"), page="ratingslist", data="ratings", order=order_no)
else:
abort(404) | Base | 1 |
async def message(self, ctx, *, message: str):
"""Set the message that is shown at the start of each ticket channel.\n\nUse ``{user.mention}`` to mention the person who created the ticket."""
try:
message.format(user=ctx.author)
await self.config.guild(ctx.guild).message.set(message)
await ctx.send(f"The message has been set to `{message}`.")
except KeyError:
await ctx.send(
"Setting the message failed. Please make sure to only use supported variables in `\{\}`"
) | Class | 2 |
def merge_list_book():
vals = request.get_json().get('Merge_books')
to_file = list()
if vals:
# load all formats from target book
to_book = calibre_db.get_book(vals[0])
vals.pop(0)
if to_book:
for file in to_book.data:
to_file.append(file.format)
to_name = helper.get_valid_filename(to_book.title, chars=96) + ' - ' + \
helper.get_valid_filename(to_book.authors[0].name, chars=96)
for book_id in vals:
from_book = calibre_db.get_book(book_id)
if from_book:
for element in from_book.data:
if element.format not in to_file:
# create new data entry with: book_id, book_format, uncompressed_size, name
filepath_new = os.path.normpath(os.path.join(config.config_calibre_dir,
to_book.path,
to_name + "." + element.format.lower()))
filepath_old = os.path.normpath(os.path.join(config.config_calibre_dir,
from_book.path,
element.name + "." + element.format.lower()))
copyfile(filepath_old, filepath_new)
to_book.data.append(db.Data(to_book.id,
element.format,
element.uncompressed_size,
to_name))
delete_book_from_table(from_book.id,"", True)
return json.dumps({'success': True})
return "" | Base | 1 |
def test_constant_initializer_with_numpy(self):
initializer = initializers.Constant(np.ones((3, 2)))
model = sequential.Sequential()
model.add(layers.Dense(2, input_shape=(3,), kernel_initializer=initializer))
model.add(layers.Dense(3))
model.compile(
loss='mse',
optimizer='sgd',
metrics=['acc'],
run_eagerly=testing_utils.should_run_eagerly())
json_str = model.to_json()
models.model_from_json(json_str)
if yaml is not None:
yaml_str = model.to_yaml()
models.model_from_yaml(yaml_str) | Base | 1 |
def _build_ssl_context(
disable_ssl_certificate_validation, ca_certs, cert_file=None, key_file=None,
maximum_version=None, minimum_version=None, key_password=None, | Class | 2 |
def _load_yamlconfig(self, configfile):
yamlconfig = None
try:
if self._recent_pyyaml():
# https://github.com/yaml/pyyaml/wiki/PyYAML-yaml.load(input)-Deprecation
# only for 5.1+
yamlconfig = yaml.load(open(configfile), Loader=yaml.FullLoader)
else:
yamlconfig = yaml.load(open(configfile))
except yaml.YAMLError as exc:
logger.error("Error in configuration file {0}:".format(configfile))
if hasattr(exc, 'problem_mark'):
mark = exc.problem_mark
raise PystemonConfigException("error position: (%s:%s)" % (mark.line + 1, mark.column + 1))
for includes in yamlconfig.get("includes", []):
try:
logger.debug("loading include '{0}'".format(includes))
yamlconfig.update(yaml.load(open(includes)))
except Exception as e:
raise PystemonConfigException("failed to load '{0}': {1}".format(includes, e))
return yamlconfig | Base | 1 |
def _get_shared_models(self, args: "DictConfig") -> Dict[str, dict]:
with open(args.blueprint.model_opt_path) as f:
all_model_opts = yaml.load(f.read())
active_model_opts = {
model: opt
for model, opt in all_model_opts.items()
if self.conversations_needed[model] > 0
}
return TurkLikeAgent.get_bot_agents(args=args, model_opts=active_model_opts) | Base | 1 |
def should_deny_admin(self):
return self.totp_status != TOTPStatus.ENABLED and config.get(
"enable_force_admin_2fa"
) | Class | 2 |
def _copy_file(self, in_path, out_path):
if not os.path.exists(in_path):
raise errors.AnsibleFileNotFound("file or module does not exist: %s" % in_path)
try:
shutil.copyfile(in_path, out_path)
except shutil.Error:
traceback.print_exc()
raise errors.AnsibleError("failed to copy: %s and %s are the same" % (in_path, out_path))
except IOError:
traceback.print_exc()
raise errors.AnsibleError("failed to transfer file to %s" % out_path) | Base | 1 |
def feed_read_books():
off = request.args.get("offset") or 0
result, pagination = render_read_books(int(off) / (int(config.config_books_per_page)) + 1, True, True)
return render_xml_template('feed.xml', entries=result, pagination=pagination) | Base | 1 |
def _should_decode(typ):
# either a basetype which needs to be clamped
# or a complex type which contains something that
# needs to be clamped.
if isinstance(typ, BaseType):
return typ.typ not in ("int256", "uint256", "bytes32")
if isinstance(typ, (ByteArrayLike, DArrayType)):
return True
if isinstance(typ, SArrayType):
return _should_decode(typ.subtype)
if isinstance(typ, TupleLike):
return any(_should_decode(t) for t in typ.tuple_members())
raise CompilerPanic(f"_should_decode({typ})") # pragma: notest | Base | 1 |
def testInvalidSparseTensor(self):
with test_util.force_cpu():
shape = [2, 2]
val = [0]
dense = constant_op.constant(np.zeros(shape, dtype=np.int32))
for bad_idx in [
[[-1, 0]], # -1 is invalid.
[[1, 3]], # ...so is 3.
]:
sparse = sparse_tensor.SparseTensorValue(bad_idx, val, shape)
s = sparse_ops.sparse_add(sparse, dense)
with self.assertRaisesRegex(errors_impl.InvalidArgumentError,
"invalid index"):
self.evaluate(s) | Class | 2 |
def __init__(
self,
proxy_type,
proxy_host,
proxy_port,
proxy_rdns=True,
proxy_user=None,
proxy_pass=None,
proxy_headers=None, | Class | 2 |
def get_cc_columns(filter_config_custom_read=False):
tmpcc = calibre_db.session.query(db.Custom_Columns)\
.filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
cc = []
r = None
if config.config_columns_to_ignore:
r = re.compile(config.config_columns_to_ignore)
for col in tmpcc:
if filter_config_custom_read and config.config_read_column and config.config_read_column == col.id:
continue
if r and r.match(col.name):
continue
cc.append(col)
return cc | Base | 1 |
def builtin_roles(self):
return self._builtin_roles | Class | 2 |
def feed_get_cover(book_id):
return get_book_cover(book_id) | Base | 1 |
def make_homeserver(self, reactor, clock):
hs = self.setup_test_homeserver(
http_client=None, homeserver_to_use=GenericWorkerServer
)
return hs | Base | 1 |
def insensitive_ends_with(field: Term, value: str) -> Criterion:
return Upper(field).like(Upper(f"%{value}")) | Base | 1 |
def load_djpeg(self):
# ALTERNATIVE: handle JPEGs via the IJG command line utilities
import tempfile, os
file = tempfile.mktemp()
os.system("djpeg %s >%s" % (self.filename, file))
try:
self.im = Image.core.open_ppm(file)
finally:
try: os.unlink(file)
except: pass
self.mode = self.im.mode
self.size = self.im.size
self.tile = [] | Base | 1 |
def all(cls, **kwargs):
"""Return a `Page` of instances of this `Resource` class from
its general collection endpoint.
Only `Resource` classes with specified `collection_path`
endpoints can be requested with this method. Any provided
keyword arguments are passed to the API endpoint as query
parameters.
"""
url = urljoin(recurly.base_uri(), cls.collection_path)
if kwargs:
url = '%s?%s' % (url, urlencode(kwargs))
return Page.page_for_url(url) | Base | 1 |
async def send_transaction(self, account, to, selector_name, calldata, nonce=None, max_fee=0):
return await self.send_transactions(account, [(to, selector_name, calldata)], nonce, max_fee) | Class | 2 |
def test_proxy_headers(self):
to_send = (
"GET / HTTP/1.0\n"
"Content-Length: 0\n"
"Host: www.google.com:8080\n"
"X-Forwarded-For: 192.168.1.1\n"
"X-Forwarded-Proto: https\n"
"X-Forwarded-Port: 5000\n\n"
)
to_send = tobytes(to_send)
self.connect()
self.sock.send(to_send)
fp = self.sock.makefile("rb", 0)
line, headers, echo = self._read_echo(fp)
self.assertline(line, "200", "OK", "HTTP/1.0")
self.assertEqual(headers.get("server"), "waitress")
self.assertTrue(headers.get("date"))
self.assertIsNone(echo.headers.get("X_FORWARDED_PORT"))
self.assertEqual(echo.headers["HOST"], "www.google.com:8080")
self.assertEqual(echo.scheme, "https")
self.assertEqual(echo.remote_addr, "192.168.1.1")
self.assertEqual(echo.remote_host, "192.168.1.1") | Base | 1 |
def test_change_response_class_to_text():
mw = _get_mw()
req = SplashRequest('http://example.com/', magic_response=True)
req = mw.process_request(req, None)
# Such response can come when downloading a file,
# or returning splash:html(): the headers say it's binary,
# but it can be decoded so it becomes a TextResponse.
resp = TextResponse('http://mysplash.example.com/execute',
headers={b'Content-Type': b'application/pdf'},
body=b'ascii binary data',
encoding='utf-8')
resp2 = mw.process_response(req, resp, None)
assert isinstance(resp2, TextResponse)
assert resp2.url == 'http://example.com/'
assert resp2.headers == {b'Content-Type': [b'application/pdf']}
assert resp2.body == b'ascii binary data' | Class | 2 |
def test_notfilelike_http11(self):
to_send = "GET /notfilelike HTTP/1.1\n\n"
to_send = tobytes(to_send)
self.connect()
for t in range(0, 2):
self.sock.send(to_send)
fp = self.sock.makefile("rb", 0)
line, headers, response_body = read_http(fp)
self.assertline(line, "200", "OK", "HTTP/1.1")
cl = int(headers["content-length"])
self.assertEqual(cl, len(response_body))
ct = headers["content-type"]
self.assertEqual(ct, "image/jpeg")
self.assertTrue(b"\377\330\377" in response_body) | Base | 1 |
def login():
form = forms.UserForm()
if form.validate_on_submit():
db = get_db()
user = db.search(
(Query().username == form.username.data) & (Query().type == "user")
)
if user and check_password_hash(user[0]["hashed_password"], form.password.data):
user = User.from_db(user[0])
login_user(user, remember=True)
flash("Login successful!", "success")
next_url = request.args.get("next")
return redirect(next_url or "/")
flash("Invalid credentials", "error")
return redirect("/login")
return render_template("users/login.html", form=form, title="Login") | Base | 1 |
def authenticate(self, username, password):
child = None
try:
child = pexpect.spawn('/bin/sh', ['-c', '/bin/su -c "/bin/echo SUCCESS" - %s' % username], timeout=5)
child.expect('.*:')
child.sendline(password)
result = child.expect(['su: .*', 'SUCCESS'])
except Exception as err:
if child and child.isalive():
child.close()
logging.error('Error checking password: %s', err)
return False
if result == 0:
return False
else:
return True | Base | 1 |
async def send_transactions(self, account, calls, nonce=None, max_fee=0):
if nonce is None:
execution_info = await account.get_nonce().call()
nonce, = execution_info.result
build_calls = []
for call in calls:
build_call = list(call)
build_call[0] = hex(build_call[0])
build_calls.append(build_call)
(call_array, calldata, sig_r, sig_s) = self.signer.sign_transaction(hex(account.contract_address), build_calls, nonce, max_fee)
return await account.__execute__(call_array, calldata, nonce).invoke(signature=[sig_r, sig_s]) | Class | 2 |
def render_search_results(term, offset=None, order=None, limit=None):
join = db.books_series_link, db.Books.id == db.books_series_link.c.book, db.Series
entries, result_count, pagination = calibre_db.get_search_results(term,
offset,
order,
limit,
False,
config.config_read_column,
*join)
return render_title_template('search.html',
searchterm=term,
pagination=pagination,
query=term,
adv_searchterm=term,
entries=entries,
result_count=result_count,
title=_(u"Search"),
page="search",
order=order[1]) | Base | 1 |
def test_modify_access_revoke_self(self):
"""
Test that an instructor cannot remove instructor privelages from themself.
"""
url = reverse('modify_access', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url, {
'unique_student_identifier': self.instructor.email,
'rolename': 'instructor',
'action': 'revoke',
})
self.assertEqual(response.status_code, 200)
# check response content
expected = {
'unique_student_identifier': self.instructor.username,
'rolename': 'instructor',
'action': 'revoke',
'removingSelfAsInstructor': True,
}
res_json = json.loads(response.content)
self.assertEqual(res_json, expected) | Compound | 4 |
def feed_unread_books():
off = request.args.get("offset") or 0
result, pagination = render_read_books(int(off) / (int(config.config_books_per_page)) + 1, False, True)
return render_xml_template('feed.xml', entries=result, pagination=pagination) | Base | 1 |
def kebab_case(value: str) -> str:
return stringcase.spinalcase(group_title(_sanitize(value))) | Base | 1 |
def gravatar(context, user, size=None):
"""
Outputs the HTML for displaying a user's gravatar.
This can take an optional size of the image (defaults to 80 if not
specified).
This is also influenced by the following settings:
GRAVATAR_SIZE - Default size for gravatars
GRAVATAR_RATING - Maximum allowed rating (g, pg, r, x)
GRAVATAR_DEFAULT - Default image set to show if the user hasn't
specified a gravatar (identicon, monsterid, wavatar)
See http://www.gravatar.com/ for more information.
"""
url = get_gravatar_url(context['request'], user, size)
if url:
return ('<img src="%s" width="%s" height="%s" alt="%s" '
' class="gravatar"/>' %
(url, size, size, user.get_full_name() or user.username))
else:
return '' | Base | 1 |
def test_get_student_progress_url_noparams(self):
""" Test that the endpoint 404's without the required query params. """
url = reverse('get_student_progress_url', kwargs={'course_id': self.course.id.to_deprecated_string()})
response = self.client.get(url)
self.assertEqual(response.status_code, 400) | Compound | 4 |
def import_bookmarks(self):
files = choose_files(self, 'export-viewer-bookmarks', _('Import bookmarks'),
filters=[(_('Saved bookmarks'), ['pickle'])], all_files=False, select_only_single_file=True)
if not files:
return
filename = files[0]
imported = None
with open(filename, 'rb') as fileobj:
imported = cPickle.load(fileobj)
if imported is not None:
bad = False
try:
for bm in imported:
if 'title' not in bm:
bad = True
break
except Exception:
pass
if not bad:
bookmarks = self.get_bookmarks()
for bm in imported:
if bm not in bookmarks:
bookmarks.append(bm)
self.set_bookmarks([bm for bm in bookmarks if bm['title'] != 'calibre_current_page_bookmark'])
self.edited.emit(self.get_bookmarks()) | Base | 1 |
def _cnonce():
dig = _md5(
"%s:%s"
% (time.ctime(), ["0123456789"[random.randrange(0, 9)] for i in range(20)])
).hexdigest()
return dig[:16] | Class | 2 |
def test_file_position_after_tofile(self):
# gh-4118
sizes = [io.DEFAULT_BUFFER_SIZE//8,
io.DEFAULT_BUFFER_SIZE,
io.DEFAULT_BUFFER_SIZE*8]
for size in sizes:
err_msg = "%d" % (size,)
f = open(self.filename, 'wb')
f.seek(size-1)
f.write(b'\0')
f.seek(10)
f.write(b'12')
np.array([0], dtype=np.float64).tofile(f)
pos = f.tell()
f.close()
assert_equal(pos, 10 + 2 + 8, err_msg=err_msg)
f = open(self.filename, 'r+b')
f.read(2)
f.seek(0, 1) # seek between read&write required by ANSI C
np.array([0], dtype=np.float64).tofile(f)
pos = f.tell()
f.close()
assert_equal(pos, 10, err_msg=err_msg)
os.unlink(self.filename) | Class | 2 |
def test_file(self, tmpdir):
filename = tmpdir / 'foo'
filename.ensure()
url = QUrl.fromLocalFile(str(filename))
req = QNetworkRequest(url)
reply = filescheme.handler(req)
assert reply is None | Compound | 4 |
def _handle_carbon_sent(self, msg):
self.xmpp.event('carbon_sent', msg) | Class | 2 |
def check_username(username):
username = username.strip()
if ub.session.query(ub.User).filter(func.lower(ub.User.name) == username.lower()).scalar():
log.error(u"This username is already taken")
raise Exception (_(u"This username is already taken"))
return username | Base | 1 |
def test_request_body_too_large_chunked_encoding(self):
control_line = "20;\r\n" # 20 hex = 32 dec
s = "This string has 32 characters.\r\n"
to_send = "GET / HTTP/1.1\nTransfer-Encoding: chunked\n\n"
repeat = control_line + s
to_send += repeat * ((self.toobig // len(repeat)) + 1)
to_send = tobytes(to_send)
self.connect()
self.sock.send(to_send)
fp = self.sock.makefile("rb", 0)
line, headers, response_body = read_http(fp)
# body bytes counter caught a max_request_body_size overrun
self.assertline(line, "413", "Request Entity Too Large", "HTTP/1.1")
cl = int(headers["content-length"])
self.assertEqual(cl, len(response_body))
self.assertEqual(headers["content-type"], "text/plain")
# connection has been closed
self.send_check_error(to_send)
self.assertRaises(ConnectionClosed, read_http, fp) | Base | 1 |
def __post_init__(self, title: str) -> None: # type: ignore
super().__post_init__()
reference = Reference.from_ref(title)
dedup_counter = 0
while reference.class_name in _existing_enums:
existing = _existing_enums[reference.class_name]
if self.values == existing.values:
break # This is the same Enum, we're good
dedup_counter += 1
reference = Reference.from_ref(f"{reference.class_name}{dedup_counter}")
self.reference = reference
inverse_values = {v: k for k, v in self.values.items()}
if self.default is not None:
self.default = f"{self.reference.class_name}.{inverse_values[self.default]}"
_existing_enums[self.reference.class_name] = self | Base | 1 |
def parse_env_variables(cls: Type["pl.Trainer"], template: str = "PL_%(cls_name)s_%(cls_argument)s") -> Namespace:
"""Parse environment arguments if they are defined.
Examples:
>>> from pytorch_lightning import Trainer
>>> parse_env_variables(Trainer)
Namespace()
>>> import os
>>> os.environ["PL_TRAINER_GPUS"] = '42'
>>> os.environ["PL_TRAINER_BLABLABLA"] = '1.23'
>>> parse_env_variables(Trainer)
Namespace(gpus=42)
>>> del os.environ["PL_TRAINER_GPUS"]
"""
cls_arg_defaults = get_init_arguments_and_types(cls)
env_args = {}
for arg_name, _, _ in cls_arg_defaults:
env = template % {"cls_name": cls.__name__.upper(), "cls_argument": arg_name.upper()}
val = os.environ.get(env)
if not (val is None or val == ""):
# todo: specify the possible exception
with suppress(Exception):
# converting to native types like int/float/bool
val = eval(val)
env_args[arg_name] = val
return Namespace(**env_args) | Base | 1 |
async def on_PUT(self, origin, content, query, room_id):
content = await self.handler.on_exchange_third_party_invite_request(
room_id, content
)
return 200, content | Class | 2 |
def test_expect_continue(self):
# specifying Connection: close explicitly
data = "I have expectations"
to_send = tobytes(
"GET / HTTP/1.1\n"
"Connection: close\n"
"Content-Length: %d\n"
"Expect: 100-continue\n"
"\n"
"%s" % (len(data), data)
)
self.connect()
self.sock.send(to_send)
fp = self.sock.makefile("rb", 0)
line = fp.readline() # continue status line
version, status, reason = (x.strip() for x in line.split(None, 2))
self.assertEqual(int(status), 100)
self.assertEqual(reason, b"Continue")
self.assertEqual(version, b"HTTP/1.1")
fp.readline() # blank line
line = fp.readline() # next status line
version, status, reason = (x.strip() for x in line.split(None, 2))
headers = parse_headers(fp)
length = int(headers.get("content-length")) or None
response_body = fp.read(length)
self.assertEqual(int(status), 200)
self.assertEqual(length, len(response_body))
self.assertEqual(response_body, tobytes(data)) | Base | 1 |
def test_login_post(self):
login_code = LoginCode.objects.create(user=self.user, code='foobar', next='/private/')
response = self.client.post('/accounts/login/code/', {
'code': login_code.code,
})
self.assertEqual(response.status_code, 302)
self.assertEqual(response['Location'], '/private/')
self.assertEqual(response.wsgi_request.user, self.user)
self.assertFalse(LoginCode.objects.filter(pk=login_code.pk).exists()) | Base | 1 |
def parse_configuration_file(config_path):
"""
Read the config file for an experiment to get ParlAI settings.
:param config_path:
path to config
:return:
parsed configuration dictionary
"""
result = {}
result["configs"] = {}
with open(config_path) as f:
cfg = yaml.load(f.read(), Loader=yaml.FullLoader)
# get world path
result["world_path"] = cfg.get("world_module")
if not result["world_path"]:
raise ValueError("Did not specify world module")
result["overworld"] = cfg.get("overworld")
if not result["overworld"]:
raise ValueError("Did not specify overworld")
result["max_workers"] = cfg.get("max_workers")
if not result["max_workers"]:
raise ValueError("Did not specify max_workers")
result["task_name"] = cfg.get("task_name")
if not result["task_name"]:
raise ValueError("Did not specify task name")
task_world = cfg.get("tasks")
if task_world is None or len(task_world) == 0:
raise ValueError("task not in config file")
# get task file
for task_name, configuration in task_world.items():
if "task_world" not in configuration:
raise ValueError("{} does not specify a task".format(task_name))
result["configs"][task_name] = WorldConfig(
world_name=task_name,
onboarding_name=configuration.get("onboard_world"),
task_name=configuration.get("task_world"),
max_time_in_pool=configuration.get("timeout") or 300,
agents_required=configuration.get("agents_required") or 1,
backup_task=configuration.get("backup_task"),
)
# get world options, additional args
result["world_opt"] = cfg.get("opt", {})
result["additional_args"] = cfg.get("additional_args", {})
return result | Base | 1 |
def _normalize_path(self, path, prefix):
if not path.startswith(os.path.sep):
path = os.path.join(os.path.sep, path)
normpath = os.path.normpath(path)
return os.path.join(prefix, normpath[1:]) | Base | 1 |
def exec_command(self, cmd, tmp_path, become_user=None, sudoable=False, executable='/bin/sh', in_data=None):
''' run a command on the chroot '''
if sudoable and self.runner.become and self.runner.become_method not in self.become_methods_supported:
raise errors.AnsibleError("Internal Error: this module does not support running commands via %s" % self.runner.become_method)
if in_data:
raise errors.AnsibleError("Internal Error: this module does not support optimized module pipelining")
# We enter chroot as root so we ignore privlege escalation?
if executable:
local_cmd = [self.chroot_cmd, self.chroot, executable, '-c', cmd]
else:
local_cmd = '%s "%s" %s' % (self.chroot_cmd, self.chroot, cmd)
vvv("EXEC %s" % (local_cmd), host=self.chroot)
p = subprocess.Popen(local_cmd, shell=isinstance(local_cmd, basestring),
cwd=self.runner.basedir,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
return (p.returncode, '', stdout, stderr) | Base | 1 |
public void testEqualsInsertionOrderDifferentHeaderNames() {
final HttpHeadersBase h1 = newEmptyHeaders();
h1.add("a", "b");
h1.add("c", "d");
final HttpHeadersBase h2 = newEmptyHeaders();
h2.add("c", "d");
h2.add("a", "b");
assertThat(h1).isEqualTo(h2);
} | Class | 2 |
static XmlSchema createSchemaInstance(ThreadContext context, RubyClass klazz, Source source) {
Ruby runtime = context.getRuntime();
XmlSchema xmlSchema = (XmlSchema) NokogiriService.XML_SCHEMA_ALLOCATOR.allocate(runtime, klazz);
xmlSchema.setInstanceVariable("@errors", runtime.newEmptyArray());
try {
SchemaErrorHandler error_handler = new SchemaErrorHandler(context.getRuntime(), (RubyArray)xmlSchema.getInstanceVariable("@errors"));
Schema schema = xmlSchema.getSchema(source, context.getRuntime().getCurrentDirectory(), context.getRuntime().getInstanceConfig().getScriptFileName(), error_handler);
xmlSchema.setValidator(schema.newValidator());
return xmlSchema;
} catch (SAXException ex) {
throw context.getRuntime().newRuntimeError("Could not parse document: " + ex.getMessage());
}
} | Base | 1 |
protected SymbolContext getContextLegacy() {
throw new UnsupportedOperationException();
} | Base | 1 |
public static SocketFactory getSocketFactory(Properties info) throws PSQLException {
// Socket factory
String socketFactoryClassName = PGProperty.SOCKET_FACTORY.get(info);
if (socketFactoryClassName == null) {
return SocketFactory.getDefault();
}
try {
return (SocketFactory) ObjectFactory.instantiate(socketFactoryClassName, info, true,
PGProperty.SOCKET_FACTORY_ARG.get(info));
} catch (Exception e) {
throw new PSQLException(
GT.tr("The SocketFactory class provided {0} could not be instantiated.",
socketFactoryClassName),
PSQLState.CONNECTION_FAILURE, e);
}
} | Class | 2 |
public void canMixConvertedAndNormalValues() {
final HttpHeadersBase headers = newEmptyHeaders();
headers.add("name", "value");
headers.addInt("name", 100);
assertThat(headers.size()).isEqualTo(2);
assertThat(headers.contains("name")).isTrue();
assertThat(headers.contains("name", "value")).isTrue();
assertThat(headers.containsInt("name", 100)).isTrue();
} | Class | 2 |
public int encryptWithAd(byte[] ad, byte[] plaintext, int plaintextOffset,
byte[] ciphertext, int ciphertextOffset, int length)
throws ShortBufferException {
int space;
if (ciphertextOffset > ciphertext.length)
space = 0;
else
space = ciphertext.length - ciphertextOffset;
if (keySpec == null) {
// The key is not set yet - return the plaintext as-is.
if (length > space)
throw new ShortBufferException();
if (plaintext != ciphertext || plaintextOffset != ciphertextOffset)
System.arraycopy(plaintext, plaintextOffset, ciphertext, ciphertextOffset, length);
return length;
}
if (space < 16 || length > (space - 16))
throw new ShortBufferException();
try {
setup(ad);
int result = cipher.update(plaintext, plaintextOffset, length, ciphertext, ciphertextOffset);
cipher.doFinal(ciphertext, ciphertextOffset + result);
} catch (InvalidKeyException e) {
// Shouldn't happen.
throw new IllegalStateException(e);
} catch (InvalidAlgorithmParameterException e) {
// Shouldn't happen.
throw new IllegalStateException(e);
} catch (IllegalBlockSizeException e) {
// Shouldn't happen.
throw new IllegalStateException(e);
} catch (BadPaddingException e) {
// Shouldn't happen.
throw new IllegalStateException(e);
}
ghash.update(ciphertext, ciphertextOffset, length);
ghash.pad(ad != null ? ad.length : 0, length);
ghash.finish(ciphertext, ciphertextOffset + length, 16);
for (int index = 0; index < 16; ++index)
ciphertext[ciphertextOffset + length + index] ^= hashKey[index];
return length + 16;
} | Base | 1 |