<|file_name|>_configuration_async.py<|end_file_name|>
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from .._version import VERSION
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
class ArtifactsClientConfiguration(Configuration):
"""Configuration for ArtifactsClient.
Note that all parameters used to create this instance are saved as instance
attributes.
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param endpoint: The workspace development endpoint, for example https://myworkspace.dev.azuresynapse.net.
:type endpoint: str
"""
def __init__(
self,
credential: "AsyncTokenCredential",
endpoint: str,
**kwargs: Any
) -> None:
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
if endpoint is None:
raise ValueError("Parameter 'endpoint' must not be None.")
super(ArtifactsClientConfiguration, self).__init__(**kwargs)
self.credential = credential
self.endpoint = endpoint
self.api_version = "2019-06-01-preview"
self.credential_scopes = ['https://dev.azuresynapse.net/.default']
self.credential_scopes.extend(kwargs.pop('credential_scopes', []))
kwargs.setdefault('sdk_moniker', 'synapse/{}'.format(VERSION))
self._configure(**kwargs)
def _configure(
self,
**kwargs: Any
) -> None:
self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
        self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
        self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
        self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
        self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
        self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
        self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
        self.authentication_policy = kwargs.get('authentication_policy')
        if self.credential and not self.authentication_policy:
            self.authentication_policy = policies.AsyncBearerTokenCredentialPolicy(self.credential, *self.credential_scopes, **kwargs)
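
A minimal usage sketch for the configuration above, assuming the azure-identity
package for the credential (any AsyncTokenCredential works); the workspace URL
is a placeholder:

import asyncio

from azure.identity.aio import DefaultAzureCredential


async def main() -> None:
    # Construct the configuration; the generated defaults are fixed at init time.
    credential = DefaultAzureCredential()
    config = ArtifactsClientConfiguration(
        credential,
        endpoint="https://myworkspace.dev.azuresynapse.net",  # placeholder endpoint
    )
    assert config.api_version == "2019-06-01-preview"
    assert config.credential_scopes == ['https://dev.azuresynapse.net/.default']
    await credential.close()


asyncio.run(main())
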
<|file_name|>abilities.rs<|end_file_name|>
// Copyright (c) 2018 Chef Software Inc. and/or applicable contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Provides functions for determining if the current user has certain
//! abilities based on querying the available Linux capabilities; the
//! functioning of the Supervisor may change depending on the answer.
pub use self::imp::*;
#[cfg(target_os = "linux")]
mod imp {
use caps::{self, CapSet, Capability};
/// This is currently the "master check" for whether the Supervisor
/// can behave "as root".
///
/// All capabilities must be present. If we can run processes as other
/// users, but can't change ownership, then the processes won't be
/// able to access their files. Similar logic holds for the reverse.
pub fn can_run_services_as_svc_user() -> bool {
has(Capability::CAP_SETUID) && has(Capability::CAP_SETGID) && has(Capability::CAP_CHOWN)
}
/// Helper function; does the current thread have `cap` in its
/// effective capability set?
fn has(cap: Capability) -> bool {
caps::has_cap(None, CapSet::Effective, cap).unwrap_or(false)
}
}
#[cfg(target_os = "macos")]
mod imp {
pub fn can_run_services_as_svc_user() -> bool {
true
}
}
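
For comparison, a rough Linux-only Python sketch of the same effective-capability
check (an approximation, not part of the crate): it parses the CapEff hex bitmask
from /proc/self/status and tests the same three bits the Rust module requires.

# Capability bit numbers from linux/capability.h.
CAP_CHOWN, CAP_SETGID, CAP_SETUID = 0, 6, 7
REQUIRED = (1 << CAP_CHOWN) | (1 << CAP_SETGID) | (1 << CAP_SETUID)


def can_run_services_as_svc_user() -> bool:
    # Read the effective capability set of the current process.
    try:
        with open("/proc/self/status") as status:
            for line in status:
                if line.startswith("CapEff:"):
                    return int(line.split()[1], 16) & REQUIRED == REQUIRED
    except OSError:
        pass
    return False


print(can_run_services_as_svc_user())
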
<|file_name|>hedgewars_ar.ts<|end_file_name|>
<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.0" language="fr">
<context>
<name>AmmoSchemeModel</name>
<message>
<source>new</source>
<translation>جديد</translation>
</message>
<message>
<source>copy of</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>FreqSpinBox</name>
<message>
<source>Never</source>
<translation>ابدا</translation>
</message>
<message numerus="yes">
<source>Every %1 turn</source>
<translation type="unfinished">
<numerusform>كل %1 دور
</numerusform>
<numerusform></numerusform>
</translation>
</message>
</context>
<context>
<name>GameCFGWidget</name>
<message>
<source>Edit weapons</source>
<translation>تغيير سلاح</translation>
</message>
<message>
<source>Error</source>
<translation>خطأ</translation>
</message>
<message>
<source>Illegal ammo scheme</source>
<translation>نظام اسلحة غير صحيح</translation>
</message>
<message>
<source>Edit schemes</source>
<translation>Edit schemes</translation>
</message>
<message>
<source>When this option is enabled selecting a game scheme will auto-select a weapon</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>HWChatWidget</name>
<message>
<source>%1 *** %2 has been removed from your ignore list</source>
<translation>%1 *** %2 تم حذفة من قائمة الترك</translation>
</message>
<message>
<source>%1 *** %2 has been added to your ignore list</source>
<translation>%1 *** %2 تم اضافته الى قائمة النرك</translation>
</message>
<message>
<source>%1 *** %2 has been removed from your friends list</source>
<translation>%1 *** %2 تم حذقه الى قائمة الاصدقاء</translation>
</message>
<message>
<source>%1 *** %2 has been added to your friends list</source>
<translation>%1 *** %2 تم حذفة من قائمة الاصدقاء</translation>
</message>
</context>
<context>
<name>HWForm</name>
<message>
<source>new</source>
<translation type="obsolete">جديد</translation>
</message>
<message>
<source>Error</source>
<translation>خطا</translation>
</message>
<message>
<source>OK</source>
<translation>OK</translation>
</message>
<message>
<source>Unable to start the server</source>
<translation>لم اتمكن من بدا الخادم</translation>
</message>
<message>
<source>Cannot save record to file %1</source>
<translation>لم اتمكن من حقظ الملف %1</translation>
</message>
<message>
<source>Please select record from the list above</source>
<translation>اختار من القائمة</translation>
</message>
<message>
<source>DefaultTeam</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Hedgewars Demo File</source>
<comment>File Types</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>Hedgewars Save File</source>
<comment>File Types</comment>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>HWGame</name>
<message>
<source>en.txt</source>
<translation type="unfinished">ar.txt</translation>
</message>
<message>
<source>Cannot open demofile %1</source>
<translation>لم اتمكن من حفظ ملف اللعب %1</translation>
</message>
</context>
<context>
<name>HWMapContainer</name>
<message>
<source>Map</source>
<translation>خارطة</translation>
</message>
<message>
<source>Themes</source>
<translation>نمط</translation>
</message>
<message>
<source>Filter</source>
<translation>فلنر</translation>
</message>
<message>
<source>All</source>
<translation>كل</translation>
</message>
<message>
<source>Small</source>
<translation>صغير</translation>
</message>
<message>
<source>Medium</source>
<translation>متوسط</translation>
</message>
<message>
<source>Large</source>
<translation>كبير</translation>
</message>
<message>
<source>Cavern</source>
<translation>كهف</translation>
</message>
<message>
<source>Wacky</source>
<translation>تعبان</translation>
</message>
<message>
<source>Type</source>
<translation type="unfinished">نوع</translation>
</message>
<message>
<source>Small tunnels</source>
<translation type="unfinished">انقاق صغيرة</translation>
</message>
<message>
<source>Medium tunnels</source>
<translation type="unfinished">انفاق متوسطة</translation>
</message>
<message>
<source>Large tunnels</source>
<translation type="unfinished">انفاق كبيرة</translation>
</message>
<message>
<source>Small floating islands</source>
<translation type="unfinished">جزر طائفة صغيرة</translation>
</message>
<message>
<source>Medium floating islands</source>
<translation type="unfinished">جزر طائفة متوسطة</translation>
</message>
<message>
<source>Large floating islands</source>
<translation type="unfinished">جزر طائفة كبيرة</translation>
</message>
<message>
<source>Seed</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Set</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>HWNetServersModel</name>
<message>
<source>Title</source>
<translation>عنوان</translation>
</message>
<message>
<source>IP</source>
<translation>IP</translation>
</message>
<message>
<source>Port</source>
<translation>Port</translation>
</message>
</context>
<context>
<name>HWNewNet</name>
<message>
<source>The host was not found. Please check the host name and port settings.</source>
<translation type="unfinished">الحاسوب لم يوجد. تأكد من الاعدادات</translation>
</message>
<message>
<source>Connection refused</source>
<translation>الاتصال رفض</translation>
</message>
<message>
<source>Room destroyed</source>
<translation>الغرفة اغلقت</translation>
</message>
<message>
<source>Quit reason: </source>
<translation type="unfinished">سبب الخروج</translation>
</message>
<message>
<source>You got kicked</source>
<translation>تم طردك</translation>
</message>
<message>
<source>Password</source>
<translation>كلمة السر</translation>
</message>
<message>
<source>Your nickname %1 is
registered on Hedgewars.org
Please provide your password
or pick another nickname:</source>
<translation type="obsolete">اسمك %1
سجلت على Hedgewars.org
اعطي كلمة السر
او اختر اسم ثاني</translation>
</message>
<message>
<source>%1 *** %2 has joined the room</source>
<translation>%1 *** %2 انضم للغرفة</translation>
</message>
<message>
<source>%1 *** %2 has joined</source>
        <translation>%1 *** %2 انضم</translation>
    </message>
    <message>
        <source>%1 *** %2 has left (%3)</source>
<translation>%1 *** %2 خرج (%3)</translation>
</message>
<message>
<source>%1 *** %2 has left</source>
<translation>%1 *** %2 خرج</translation>
</message>
<message>
<source>Your nickname %1 is
registered on Hedgewars.org
Please provide your password below
or pick another nickname in game config:</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>KB</name>
<message>
<source>SDL_ttf returned error while rendering text, most propably it is related to the bug in freetype2. It's recommended to update your freetype lib.</source>
<translation>SDL_ttf returned error while rendering text, most propably it is related to the bug in freetype2. It's recommended to update your freetype lib.</translation>
</message>
</context>
<context>
<name>PageAdmin</name>
<message>
<source>Server message:</source>
<translation type="obsolete">Server message:</translation>
</message>
<message>
<source>Set message</source>
<translation type="obsolete">Set message</translation>
</message>
<message>
<source>Clear Accounts Cache</source>
<translation>Clear Accounts Cache</translation>
</message>
<message>
<source>Fetch data</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Server message for latest version:</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Server message for previous versions:</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Latest version protocol number:</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>MOTD preview:</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Set data</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>PageConnecting</name>
<message>
<source>Connecting...</source>
<translation type="unfinished">جاري الاتصال</translation>
</message>
</context>
<context>
<name>PageDrawMap</name>
<message>
<source>Undo</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Clear</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Load</source>
<translation type="unfinished">تحميل</translation>
</message>
<message>
<source>Save</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Load drawn map</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Drawn Maps (*.hwmap);;All files (*.*)</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Save drawn map</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>PageEditTeam</name>
<message>
<source>General</source>
<translation>عام</translation>
</message>
<message>
<source>Advanced</source>
<translation>متقدم</translation>
</message>
</context>
<context>
<name>PageGameStats</name>
<message>
<source><p>The best shot award was won by <b>%1</b> with <b>%2</b> pts.</p></source>
<translation type="obsolete"><p>افضل ضربة كانت من قبل <b>%1</b> with <b>%2</b> pts.</p></translation>
</message>
<message>
<source><p>The best killer is <b>%1</b> with <b>%2</b> kills in a turn.</p></source>
<translation type="obsolete"><p>افضل لاعب هو <b>%1</b> with <b>%2</b> kills in a turn.</p>
</translation>
</message>
<message>
<source><p>A total of <b>%1</b> hedgehog(s) were killed during this round.</p></source>
<translation type="obsolete"><p>المجموع<b>%1</b> من اللاعبين قضوا في اللعبة.</p>
</translation>
</message>
<message>
<source>Details</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Health graph</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Ranking</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>The best shot award was won by <b>%1</b> with <b>%2</b> pts.</source>
<translation type="unfinished"></translation>
</message>
<message numerus="yes">
<source>The best killer is <b>%1</b> with <b>%2</b> kills in a turn.</source>
<translation type="unfinished">
<numerusform></numerusform>
<numerusform></numerusform>
</translation>
</message>
<message numerus="yes">
<source>A total of <b>%1</b> hedgehog(s) were killed during this round.</source>
<translation type="unfinished">
<numerusform></numerusform>
<numerusform></numerusform>
</translation>
</message>
<message numerus="yes">
<source>(%1 kill)</source>
<translation type="unfinished">
<numerusform></numerusform>
<numerusform></numerusform>
</translation>
</message>
<message numerus="yes">
<source><b>%1</b> thought it's good to shoot his own hedgehogs with <b>%2</b> pts.</source>
<translation type="unfinished">
<numerusform></numerusform>
<numerusform></numerusform>
</translation>
</message>
<message numerus="yes">
<source><b>%1</b> killed <b>%2</b> of his own hedgehogs.</source>
<translation type="unfinished">
<numerusform></numerusform>
<numerusform></numerusform>
</translation>
</message>
<message numerus="yes">
<source><b>%1</b> was scared and skipped turn <b>%2</b> times.</source>
<translation type="unfinished">
<numerusform></numerusform>
<numerusform></numerusform>
</translation>
</message>
</context>
<context>
<name>PageMain</name>
<message>
<source>Local Game (Play a game on a single computer)</source>
<translation>لعبة محلية</translation>
</message>
<message>
<source>Network Game (Play a game across a network)</source>
<translation>لعبة شبكية (عن طريق شبكة اتصال)</translation>
</message>
<message>
<source>Simply pick the same color as a friend to play together as a team. Each of you will still control his or her own hedgehogs but they'll win or lose together.</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>Some weapons might do only low damage but they can be a lot more devastating in the right situation. Try to use the Desert Eagle to knock multiple hedgehogs into the water.</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>If you're unsure what to do and don't want to waste ammo, skip one round. But don't let too much time pass as there will be Sudden Death!</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>If you'd like to keep others from using your preferred nickname on the official server, register an account at http://www.hedgewars.org/.</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>You're bored of default gameplay? Try one of the missions - they'll offer different gameplay depending on the one you picked.</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>By default the game will always record the last game played as a demo. Select 'Local Game' and pick the 'Demos' button on the lower right corner to play or manage them.</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>Hedgewars is Open Source and Freeware we create in our spare time. If you've got problems, ask on our forums but please don't expect 24/7 support!</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>Hedgewars is Open Source and Freeware we create in our spare time. If you like it, help us with a small donation or contribute your own work!</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>Hedgewars is Open Source and Freeware we create in our spare time. Share it with your family and friends as you like!</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>Hedgewars is Open Source and Freeware we create in our spare time. If someone sold you the game, you should try get a refund!</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>From time to time there will be official tournaments. Upcoming events will be announced at http://www.hedgewars.org/ some days in advance.</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>Hedgewars is available in many languages. If the translation in your language seems to be missing or outdated, feel free to contact us!</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>Hedgewars can be run on lots of different operating systems including Microsoft Windows, Mac OS X and Linux.</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>Always remember you're able to set up your own games in local and network/online play. You're not restricted to the 'Simple Game' option.</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>Connect one or more gamepads before starting the game to be able to assign their controls to your teams.</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>Create an account on %1 to keep others from using your most favourite nickname while playing on the official server.</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>While playing you should give yourself a short break at least once an hour.</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>If your graphics card isn't able to provide hardware accelerated OpenGL, try to enable the low quality mode to improve performance.</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>If your graphics card isn't able to provide hardware accelerated OpenGL, try to update the associated drivers.</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>We're open to suggestions and constructive feedback. If you don't like something or got a great idea, let us know!</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>Especially while playing online be polite and always remember there might be some minors playing with or against you as well!</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>Special game modes such as 'Vampirism' or 'Karma' allow you to develop completely new tactics. Try them in a custom game!</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>You should never install Hedgewars on computers you don't own (school, university, work, etc.). Please ask the responsible person instead!</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>Hedgewars can be perfect for short games during breaks. Just ensure you don't add too many hedgehogs or use an huge map. Reducing time and health might help as well.</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>No hedgehogs were harmed in making this game.</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>There are three different jumps available. Tap [high jump] twice to do a very high/backwards jump.</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>Afraid of falling off a cliff? Hold down [precise] to turn [left] or [right] without actually moving.</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>Some weapons require special strategies or just lots of training, so don't give up on a particular tool if you miss an enemy once.</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>Most weapons won't work once they touch the water. The Homing Bee as well as the Cake are exceptions to this.</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>The Old Limbuger only causes a small explosion. However the wind affected smelly cloud can poison lots of hogs at once.</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>The Piano Strike is the most damaging air strike. You'll lose the hedgehog performing it, so there's a huge downside as well.</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>Sticky Mines are a perfect tool to create small chain reactions knocking enemy hedgehogs into dire situations ... or water.</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>The Hammer is most effective when used on bridges or girders. Hit hogs will just break through the ground.</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>If you're stuck behind an enemy hedgehog, use the Hammer to free yourself without getting damaged by an explosion.</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>The Cake's maximum walking distance depends on the ground it has to pass. Use [attack] to detonate it early.</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>The Flame Thrower is a weapon but it can be used for tunnel digging as well.</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>Want to know who's behind the game? Click on the Hedgewars logo in the main menu to see the credits.</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>Like Hedgewars? Become a fan on %1 or follow us on %2!</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>Feel free to draw your own graves, hats, flags or even maps and themes! But note that you'll have to share them somewhere to use them online.</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>Really want to wear a specific hat? Donate to us and receive an exclusive hat of your choice!</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>Keep your video card drivers up to date to avoid issues playing the game.</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>You can find your Hedgewars configuration files under "My Documents\Hedgewars". Create backups or take the files with you, but don't edit them by hand.</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>You're able to associate Hedgewars related files (savegames and demo recordings) with the game to launch them right from your favorite file or internet browser.</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>Want to save ropes? Release the rope in mid air and then shoot again. As long as you don't touch the ground you'll reuse your rope without wasting ammo!</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>You can find your Hedgewars configuration files under "Library/Application Support/Hedgewars" in your home directory. Create backups or take the files with you, but don't edit them by hand.</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>You can find your Hedgewars configuration files under ".hedgewars" in your home directory. Create backups or take the files with you, but don't edit them by hand.</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>The Windows version of Hedgewars supports Xfire. Make sure to add Hedgewars to its game list so your friends can see you playing.</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>The Homing Bee can be tricky to use. Its turn radius depends on it's velocity, so try to not use full power.</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
<message>
<source>Use the Molotov or Flame Thrower to temporary keep hedgehogs from passing terrain such as tunnels or platforms.</source>
<comment>Tips</comment>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>PageMultiplayer</name>
<message>
<source>Start</source>
<translation>ابدا</translation>
</message>
</context>
<context>
<name>PageNet</name>
<message>
<source>Error</source>
<translation>خطا</translation>
</message>
<message>
<source>Please select server from the list above</source>
<translation>اختار من القائمة</translation>
</message>
</context>
<context>
<name>PageNetGame</name>
<message>
<source>Control</source>
<translation>تحكم</translation>
</message>
</context>
<context>
<name>PageNetType</name>
<message>
<source>LAN game</source>
<translation>لعبة شبكية</translation>
</message>
<message>
<source>Official server</source>
<translation>الخادم الرسمي</translation>
</message>
</context>
<context>
<name>PageOptions</name>
<message>
<source>New team</source>
<translation>فريق جديد</translation>
</message>
<message>
<source>Edit team</source>
<translation>تغيير فريق</translation>
</message>
<message>
<source>Delete team</source>
<translation>حذف فريق</translation>
</message>
<message>
<source>New weapon scheme</source>
<translation type="obsolete">طريقة اسلحة جديدة</translation>
</message>
<message>
<source>Edit weapon scheme</source>
<translation type="obsolete">تغيير طريقة الاسلحة</translation>
</message>
<message>
<source>Delete weapon scheme</source>
<translation type="obsolete">حذف طريقة الاسلحة</translation>
</message>
<message>
<source>You can't edit teams from team selection. Go back to main menu to add, edit or delete teams.</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>New scheme</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Edit scheme</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Delete scheme</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>New weapon set</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Edit weapon set</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Delete weapon set</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>PagePlayDemo</name>
<message>
<source>Error</source>
<translation>خطأ</translation>
</message>
<message>
<source>OK</source>
<translation>OK</translation>
</message>
<message>
<source>Rename dialog</source>
<translation>تغيير الشباك</translation>
</message>
<message>
<source>Enter new file name:</source>
<translation type="unfinished">ادخل اسم الملف</translation>
</message>
<message>
<source>Cannot rename to</source>
<translation>لا استطيع التغيير الى</translation>
</message>
<message>
<source>Cannot delete file</source>
<translation>لا استطيع حذف الملف</translation>
</message>
<message>
<source>Please select record from the list</source>
<translation>اختر المقطع من القائمة</translation>
</message>
</context>
<context>
<name>PageRoomsList</name>
<message>
<source>Create</source>
<translation>اصنع</translation>
</message>
<message>
<source>Join</source>
<translation>انضم</translation>
</message>
<message>
<source>Refresh</source>
<translation>تحديث</translation>
</message>
<message>
<source>Error</source>
<translation>خطأ</translation>
</message>
<message>
<source>OK</source>
<translation>OK</translation>
</message>
<message>
<source>Admin features</source>
<translation>الادارة</translation>
</message>
<message>
<source>Room Name:</source>
<translation type="unfinished">رقم الغرقة</translation>
</message>
<message>
<source>This game is in lobby.
You may join and start playing once the game starts.</source>
<translation>هذه غرقة اللعب
يمكنك الانضمام و بدء اللعب عند الاتضمام الى غرفة
You may join and start playing once the game starts.</translation>
</message>
<message>
<source>This game is in progress.
You may join and spectate now but you'll have to wait for the game to end to start playing.</source>
<translation type="unfinished">اللعبة قيد اللعب
يمكنك الانضمام و المشاهدة</translation>
</message>
<message>
<source>%1 is the host. He may adjust settings and start the game.</source>
<translation type="unfinished">%1هو المضيف الذي يبدا و يغيير اعدادات اللعبة</translation>
</message>
<message>
<source>Random Map</source>
<translation>خارطة عشوائية</translation>
</message>
<message>
<source>Games may be played on precreated or randomized maps.</source>
<translation type="unfinished">اللعبة يمكن ان تكون على خارطة عشوائية او يدوية</translation>
</message>
<message>
<source>The Game Scheme defines general options and preferences like Round Time, Sudden Death or Vampirism.</source>
<translation type="unfinished">طراز اللعية يحدد الخيارات مثل وقت الجولة، الموت المفاجئ و مصاص الدماء</translation>
</message>
<message>
<source>The Weapon Scheme defines available weapons and their ammunition count.</source>
<translation type="unfinished">طراز الاسلحة يحدد المتوفرة منها و عددها</translation>
</message>
<message numerus="yes">
<source>There are %1 clients connected to this room.</source>
<translation type="unfinished">
<numerusform>يوجد %1 مرتبطون بالغرقة
</numerusform>
<numerusform></numerusform>
</translation>
</message>
<message numerus="yes">
<source>There are %1 teams participating in this room.</source>
<translation type="unfinished">
<numerusform>يوجد %1 فريق في الغرفة
</numerusform>
<numerusform></numerusform>
</translation>
</message>
<message>
<source>Please enter room name</source>
<translation>ادخل رقم الغرقة</translation>
</message>
<message>
<source>Please select room from the list</source>
<translation>اختر الغرقة من القائمة</translation>
</message>
<message>
<source>Random Maze</source>
<translation type="unfinished">متاهة عشوائية</translation>
</message>
<message>
<source>State:</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Rules:</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Weapons:</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Search:</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Clear</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Warning</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>The game you are trying to join has started.
Do you still want to join the room?</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>PageScheme</name>
<message>
<source>Defend your fort and destroy the opponents, two team colours max!</source>
<translation type="unfinished">دافع عن القلعة و دمر الاعداء. فريقان الحد الاقصى</translation>
</message>
<message>
<source>Teams will start on opposite sides of the terrain, two team colours max!</source>
<translation type="unfinished">الفرق تبدا في مكانين متقابلين. فريقان الحد الاقصى</translation>
</message>
<message>
<source>Land can not be destroyed!</source>
<translation type="unfinished">الارض لا يمكن ان تدمر</translation>
</message>
<message>
<source>Add an indestructable border around the terrain</source>
<translation>اضف اطار لا يمكن تدميره</translation>
</message>
<message>
<source>Lower gravity</source>
<translation>جاذبية قليلة</translation>
</message>
<message>
<source>Assisted aiming with laser sight</source>
<translation>منظار ليزري</translation>
</message>
<message>
<source>All hogs have a personal forcefield</source>
<translation>كل اللاعبين لهم حقل قوى</translation>
</message>
<message>
<source>Enable random mines</source>
<translation type="obsolete">فعل الالغام العشوائية</translation>
</message>
<message>
<source>Gain 80% of the damage you do back in health</source>
<translation>احصل على 80% من التدمير في صحتك</translation>
</message>
<message>
<source>Share your opponents pain, share their damage</source>
<translation>شارك في صحة عدوك</translation>
</message>
<message>
<source>Your hogs are unable to move, put your artillery skills to the test</source>
<translation>الاعبين لا يمكنهم التحرك</translation>
</message>
<message>
<source>Random</source>
<translation>عشوائي</translation>
</message>
<message>
<source>Seconds</source>
<translation>ثواني</translation>
</message>
<message>
<source>New</source>
<translation>جديد</translation>
</message>
<message>
<source>Delete</source>
<translation>حذف</translation>
</message>
<message>
<source>Order of play is random instead of in room order.</source>
<translation type="unfinished">تسلسل اللعب عشواي</translation>
</message>
<message>
<source>Play with a King. If he dies, your side dies.</source>
<translation type="unfinished">اذا مات الملك، خسر الفريق</translation>
</message>
<message>
<source>Take turns placing your hedgehogs before the start of play.</source>
<translation type="unfinished">ضع لاعبين بالادوار قبل اللعب</translation>
</message>
<message>
<source>Ammo is shared between all teams that share a colour.</source>
<translation type="unfinished">العتاد مشترك</translation>
</message>
<message>
<source>Disable girders when generating random maps.</source>
<translation type="unfinished">ابطال البناء</translation>
</message>
<message>
<source>Disable land objects when generating random maps.</source>
<translation type="unfinished">ابطال الاجسام الساقطة</translation>
</message>
<message>
<source>All (living) hedgehogs are fully restored at the end of turn</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>AI respawns on death.</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Attacking does not end your turn.</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Weapons are reset to starting values each turn.</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Each hedgehog has its own ammo. It does not share with the team.</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>You will not have to worry about wind anymore.</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Wind will affect almost everything.</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Copy</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>PageSelectWeapon</name>
<message>
<source>Default</source>
<translation>التلقائي</translation>
</message>
<message>
<source>Delete</source>
<translation>حذف</translation>
</message>
<message>
<source>New</source>
<translation type="unfinished">جديد</translation>
</message>
<message>
<source>Copy</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>PageSinglePlayer</name>
<message>
<source>Simple Game (a quick game against the computer, settings are chosen for you)</source>
<translation>لعية بسيطة ضد الحاسوب</translation>
</message>
<message>
<source>Multiplayer (play a hotseat game against your friends, or AI teams)</source>
<translation>لعبة متعددة</translation>
</message>
<message>
<source>Training Mode (Practice your skills in a range of training missions). IN DEVELOPMENT</source>
<translation>نمط التدريب، تحت التطوير</translation>
</message>
<message>
<source>Demos (Watch recorded demos)</source>
<translation>عرض</translation>
</message>
<message>
<source>Load (Load a previously saved game)</source>
<translation>تحميل</translation>
</message>
<message>
<source>Campaign Mode (...). IN DEVELOPMENT</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>QAction</name>
<message>
<source>Kick</source>
<translation>ارفس</translation>
</message>
<message>
<source>Info</source>
<translation>معلومات</translation>
</message>
<message>
<source>Start</source>
<translation>ابدا</translation>
</message>
<message>
<source>Restrict Joins</source>
<translation>امنع الانضمام</translation>
</message>
<message>
<source>Restrict Team Additions</source>
<translation>امنع اضافات الفرق</translation>
</message>
<message>
<source>Ban</source>
<translation>امنع</translation>
</message>
<message>
<source>Follow</source>
<translation>اتبع</translation>
</message>
<message>
<source>Ignore</source>
<translation>اهمل</translation>
</message>
<message>
<source>Add friend</source>
<translation>اضف صديق</translation>
</message>
<message>
<source>Unignore</source>
<translation>حذف الاهمال</translation>
</message>
<message>
<source>Remove friend</source>
<translation>امحي صديق</translation>
</message>
</context>
<context>
<name>QCheckBox</name>
<message>
<source>Check for updates at startup</source>
<translation>تحرى عن التحديثات</translation>
</message>
<message>
<source>Fullscreen</source>
<translation type="unfinished">ملء الشاشة</translation>
</message>
<message>
<source>Frontend fullscreen</source>
<translation>شاشة القائمة ملء العرض</translation>
</message>
<message>
<source>Enable sound</source>
<translation>فعل الصوت</translation>
</message>
<message>
<source>Enable music</source>
<translation>فعل الموسيقى</translation>
</message>
<message>
<source>Show FPS</source>
<translation>اضهر عدد الاطارات في الثانية</translation>
</message>
<message>
<source>Alternative damage show</source>
<translation>عرض الدمار</translation>
</message>
<message>
<source>Append date and time to record file name</source>
<translation>اضف التاريخ و اليوم الى الملف</translation>
</message>
<message>
<source>Reduced quality</source>
<translation type="obsolete">قلل الجودة</translation>
</message>
<message>
<source>Show ammo menu tooltips</source>
<translation>اضهر قوائم للعتاد</translation>
</message>
<message>
<source>Enable frontend sounds</source>
<translation>فعل اصوات شاشة المقدمة</translation>
</message>
<message>
<source>Enable frontend music</source>
<translation>فعل موسيقى شاشة المقدمة</translation>
</message>
<message>
<source>Frontend effects</source>
<translation>تأثيرات المقدمة</translation>
</message>
</context>
<context>
<name>QComboBox</name>
<message>
<source>generated map...</source>
<translation type="unfinished">ولد خارطة</translation>
</message>
<message>
<source>Human</source>
<translation>انسان</translation>
</message>
<message>
<source>Level</source>
<translation type="unfinished">مرحلة</translation>
</message>
<message>
<source>(System default)</source>
<translation>نمط النظام</translation>
</message>
<message>
<source>Mission</source>
<translation>مهمة</translation>
</message>
<message>
<source>generated maze...</source>
<translation type="unfinished">ولد متاهة</translation>
</message>
<message>
<source>Community</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Any</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>In lobby</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>In progress</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Default</source>
<translation type="obsolete">التلقائي</translation>
</message>
<message>
<source>hand drawn map...</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Disabled</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Red/Cyan</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Cyan/Red</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Red/Blue</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Blue/Red</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Red/Green</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Green/Red</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Side-by-side</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Top-Bottom</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Wiggle</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>QGroupBox</name>
<message>
<source>Team Members</source>
<translation>اعضاء الفريق</translation>
</message>
<message>
<source>Fort</source>
<translation>حصن</translation>
</message>
<message>
<source>Key binds</source>
<translation>ربط المفاتيج</translation>
</message>
<message>
<source>Teams</source>
<translation>فرق</translation>
</message>
<message>
<source>Weapons</source>
<translation type="obsolete">اسلحة</translation>
</message>
<message>
<source>Audio/Graphic options</source>
<translation>قوائم الصوتيات و المرئيات</translation>
</message>
<message>
<source>Net game</source>
<translation>لعبة شبكية</translation>
</message>
<message>
<source>Playing teams</source>
<translation>فرق اللعب</translation>
</message>
<message>
<source>Game Modifiers</source>
<translation>مغيرات اللعبة</translation>
</message>
<message>
<source>Basic Settings</source>
<translation>اعدادات الاساسية</translation>
</message>
<message>
<source>Team Settings</source>
<translation>اعدادات الفريق</translation>
</message>
<message>
<source>Misc</source>
<translation>متنوعة</translation>
</message>
<message>
<source>Schemes and Weapons</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>QLabel</name>
<message>
<source>Mines Time</source>
<translation>وقت اللغم</translation>
</message>
<message>
<source>Mines</source>
<translation>الغام</translation>
</message>
<message>
<source>Version</source>
<translation>نسخة</translation>
</message>
<message>
<source>This program is distributed under the GNU General Public License</source>
<translation>This program is distributed under the GNU General Public License</translation>
</message>
<message>
<source>Developers:</source>
<translation type="unfinished">المطورون</translation>
</message>
<message>
<source>Art:</source>
<translation type="unfinished">قنون</translation>
</message>
<message>
<source>Sounds:</source>
<translation type="unfinished">الاصوات</translation>
</message>
<message>
<source>Translations:</source>
<translation type="unfinished">المترجمون</translation>
</message>
<message>
<source>Special thanks:</source>
<translation type="unfinished">شكر خاص</translation>
</message>
<message>
<source>Weapons</source>
<translation>اسلحة</translation>
</message>
<message>
<source>Host:</source>
<translation>Host:</translation>
</message>
<message>
<source>Port:</source>
<translation>Port:</translation>
</message>
<message>
<source>Net nick</source>
<translation>اسم اللاعب</translation>
</message>
<message>
<source>Resolution</source>
<translation>الوضوح</translation>
</message>
<message>
<source>FPS limit</source>
<translation>حد الاقصى لعدد الاطر في الثانية</translation>
</message>
<message>
<source>Server name:</source>
<translation type="unfinished">اسم الخادم</translation>
</message>
<message>
<source>Server port:</source>
<translation>Server port:</translation>
</message>
<message>
<source>Initial sound volume</source>
<translation>ارتقاع الصوت</translation>
</message>
<message>
<source>Damage Modifier</source>
<translation>مغير الدمار</translation>
</message>
<message>
<source>Turn Time</source>
<translation>وقت الجولة</translation>
</message>
<message>
<source>Initial Health</source>
<translation>الصحة الاولية</translation>
</message>
<message>
<source>Sudden Death Timeout</source>
<translation>وقت الموت المفاجئ</translation>
</message>
<message>
<source>Scheme Name:</source>
<translation type="unfinished">اسم النمط:</translation>
</message>
<message>
<source>Crate Drops</source>
<translation>المساعدات</translation>
</message>
<message>
<source>Game scheme</source>
<translation>نمط اللعبة</translation>
</message>
<message>
<source>% Dud Mines</source>
<translation>% الغام</translation>
</message>
<message>
<source>Name</source>
<translation>اسم</translation>
</message>
<message>
<source>Type</source>
<translation>نوع</translation>
</message>
<message>
<source>Grave</source>
<translation>تابوت</translation>
</message>
<message>
<source>Flag</source>
<translation>علم</translation>
</message>
<message>
<source>Voice</source>
<translation>صوت</translation>
</message>
<message>
<source>Locale</source>
<translation>محلي</translation>
</message>
<message>
<source>Restart game to apply</source>
<translation>اعد تشغيل اللعبة لتفعيل التغيير</translation>
</message>
<message>
<source>Explosives</source>
<translation>متفجرات</translation>
</message>
<message>
<source>Tip: </source>
<translation type="unfinished"></translation>
</message>
<message>
<source>This development build is 'work in progress' and may not be compatible with other versions of the game. Some features might be broken or incomplete. Use at your own risk!</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Quality</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>% Health Crates</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Health in Crates</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Sudden Death Water Rise</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Sudden Death Health Decrease</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>% Rope Length</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Gameplay</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Stereo rendering</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>QLineEdit</name>
<message>
<source>unnamed</source>
<translation>غير مسمى</translation>
</message>
</context>
<context>
<name>QMainWindow</name>
<message>
<source>Hedgewars %1</source>
<translation>Hedgewars %1</translation>
</message>
</context>
<context>
<name>QMessageBox</name>
<message>
<source>Network</source>
<translation>شبكة</translation>
</message>
<message>
<source>Connection to server is lost</source>
<translation>ضاع الاتصال للخادم</translation>
</message>
<message>
<source>Error</source>
<translation>خطأ</translation>
</message>
<message>
<source>Failed to open data directory:
%1
Please check your installation</source>
<translation>Failed to open data directory:
%1
Please check your installation</translation>
</message>
<message>
<source>Weapons</source>
<translation>اسلحة</translation>
</message>
<message>
<source>Can not edit default weapon set</source>
<translation type="obsolete">Can not edit default weapon set</translation>
</message>
<message>
<source>Can not delete default weapon set</source>
<translation type="obsolete">Can not delete default weapon set</translation>
</message>
<message>
<source>Really delete this weapon set?</source>
<translation type="unfinished">هل تريد حذف قائمة الاسلحة</translation>
</message>
<message>
<source>All file associations have been set.</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>File association failed.</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Can not overwrite default weapon set '%1'!</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Teams</source>
<translation type="unfinished">فرق</translation>
</message>
<message>
<source>Really delete this team?</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Schemes</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Can not delete default scheme '%1'!</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Really delete this game scheme?</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Can not delete default weapon set '%1'!</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>QObject</name>
<message>
<source>Error</source>
<translation>خطأ</translation>
</message>
<message>
<source>Cannot create directory %1</source>
<translation>Cannot create directory %1</translation>
</message>
<message>
<source>OK</source>
<translation>OK</translation>
</message>
<message>
<source>Nickname</source>
<translation>اسم اللاعب</translation>
</message>
<message>
<source>Please enter your nickname</source>
<translation>ادحل اسم اللاعب</translation>
</message>
</context>
<context>
<name>QPushButton</name>
<message>
<source>default</source>
<translation>التلقائي</translation>
</message>
<message>
<source>OK</source>
<translation>OK</translation>
</message>
<message>
<source>Cancel</source>
<translation>الغاء</translation>
</message>
<message>
<source>Start server</source>
<translation>تشغيل الخادم</translation>
</message>
<message>
<source>Connect</source>
<translation>اتصل</translation>
</message>
<message>
<source>Update</source>
<translation type="unfinished">تحديث</translation>
</message>
<message>
<source>Specify</source>
<translation>تحديد</translation>
</message>
<message>
<source>Start</source>
<translation>ابدا</translation>
</message>
<message>
<source>Go!</source>
<translation type="unfinished">ابدا</translation>
</message>
<message>
<source>Play demo</source>
<translation>ابدا العرض</translation>
</message>
<message>
<source>Rename</source>
<translation>تغيير الاسم</translation>
</message>
<message>
<source>Delete</source>
<translation>حذف</translation>
</message>
<message>
<source>Load</source>
<translation>تحميل</translation>
</message>
<message>
<source>Setup</source>
<translation>تنصيب</translation>
</message>
<message>
<source>Ready</source>
<translation>ابدا</translation>
</message>
<message>
<source>Random Team</source>
<translation>فريق عشوائي</translation>
</message>
<message>
<source>Associate file extensions</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>more</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>QTableWidget</name>
<message>
<source>Room Name</source>
<translation>اسم الغرقة</translation>
</message>
<message>
<source>C</source>
<translation>C</translation>
</message>
<message>
<source>T</source>
<translation>T</translation>
</message>
<message>
<source>Owner</source>
<translation>المالك</translation>
</message>
<message>
<source>Map</source>
<translation>خارطة</translation>
</message>
<message>
<source>Rules</source>
<translation>قوانين</translation>
</message>
<message>
<source>Weapons</source>
<translation>اسلحة</translation>
</message>
</context>
<context>
<name>SelWeaponWidget</name>
<message>
<source>Weapon set</source>
<translation>نمط الاسلحة</translation>
</message>
<message>
<source>Probabilities</source>
<translation>احتماليات</translation>
</message>
<message>
<source>Ammo in boxes</source>
<translation>العتاد في الصناديق</translation>
</message>
<message>
<source>Delays</source>
<translation>التأخير</translation>
</message>
<message>
<source>new</source>
<translation type="unfinished">جديد</translation>
</message>
<message>
<source>copy of</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>TCPBase</name>
<message>
<source>Error</source>
<translation>خطأ</translation>
</message>
<message>
<source>Unable to start the server: %1.</source>
<translation>Unable to start the server: %1.</translation>
</message>
<message>
<source>Unable to run engine: %1 (</source>
<translation>Unable to run engine: %1 (</translation>
</message>
</context>
<context>
<name>ToggleButtonWidget</name>
<message>
<source>Vampirism</source>
<translation>مصاص دماء</translation>
</message>
<message>
<source>Karma</source>
<translation>كارما</translation>
</message>
<message>
<source>Artillery</source>
<translation>مدفعية</translation>
</message>
<message>
<source>Fort Mode</source>
<translation type="unfinished">طريقة الحصن</translation>
</message>
<message>
<source>Divide Teams</source>
<translation>قسم الفرق</translation>
</message>
<message>
<source>Solid Land</source>
<translation>ارض صلبة</translation>
</message>
<message>
<source>Add Border</source>
<translation>اضف اطار</translation>
</message>
<message>
<source>Low Gravity</source>
<translation>جاذبية قليلة</translation>
</message>
<message>
<source>Laser Sight</source>
<translation>منظار ليزري</translation>
</message>
<message>
<source>Invulnerable</source>
<translation>غير قابل للتدمير</translation>
</message>
<message>
<source>Add Mines</source>
<translation type="obsolete">اضف الغام</translation>
</message>
<message>
<source>Random Order</source>
<translation>توزيع عشوائي</translation>
</message>
<message>
<source>King</source>
<translation>ملك</translation>
</message>
<message>
<source>Place Hedgehogs</source>
<translation>ضع الاعبين</translation>
</message>
<message>
<source>Clan Shares Ammo</source>
<translation>الفريق يتشارك بالعتاد</translation>
</message>
<message>
<source>Disable Girders</source>
<translation>ابطال البناء</translation>
</message>
<message>
<source>Disable Land Objects</source>
<translation type="unfinished">ابطال الاجسام الارضية</translation>
</message>
<message>
<source>Reset Health</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>AI Survival Mode</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Unlimited Attacks</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Reset Weapons</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Per Hedgehog Ammo</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Disable Wind</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>More Wind</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>binds</name>
<message>
<source>up</source>
<translation>up</translation>
</message>
<message>
<source>left</source>
<translation>left</translation>
</message>
<message>
<source>right</source>
<translation>right</translation>
</message>
<message>
<source>down</source>
<translation>down</translation>
</message>
<message>
<source>attack</source>
<translation>attack</translation>
</message>
<message>
<source>precise aim</source>
<translation>precise aim</translation>
</message>
<message>
<source>put</source>
<translation>put</translation>
</message>
<message>
<source>switch</source>
<translation>switch</translation>
</message>
<message>
<source>find hedgehog</source>
<translation>find hedgehog</translation>
</message>
<message>
<source>ammo menu</source>
<translation>ammo menu</translation>
</message>
<message>
<source>slot 1</source>
<translation>slot 1</translation>
</message>
<message>
<source>slot 2</source>
<translation>slot 2</translation>
</message>
<message>
<source>slot 3</source>
<translation>slot 3</translation>
</message>
<message>
<source>slot 4</source>
<translation>slot 4</translation>
</message>
<message>
<source>slot 5</source>
<translation>slot 5</translation>
</message>
<message>
<source>slot 6</source>
<translation>slot 6</translation>
</message>
<message>
<source>slot 7</source>
<translation>slot 7</translation>
</message>
<message>
<source>slot 8</source>
<translation>slot 8</translation>
</message>
<message>
<source>slot 9</source>
<translation>slot 9</translation>
</message>
<message>
<source>timer 1 sec</source>
<translation>timer 1 sec</translation>
</message>
<message>
<source>timer 2 sec</source>
<translation>timer 2 sec</translation>
</message>
<message>
<source>timer 3 sec</source>
<translation>timer 3 sec</translation>
</message>
<message>
<source>timer 4 sec</source>
<translation>timer 4 sec</translation>
</message>
<message>
<source>timer 5 sec</source>
<translation>timer 5 sec</translation>
</message>
<message>
<source>chat</source>
<translation>chat</translation>
</message>
<message>
<source>chat history</source>
<translation>chat history</translation>
</message>
<message>
<source>pause</source>
<translation>pause</translation>
</message>
<message>
<source>confirmation</source>
<translation>confirmation</translation>
</message>
<message>
<source>volume down</source>
<translation>volume down</translation>
</message>
<message>
<source>volume up</source>
<translation>volume up</translation>
</message>
<message>
<source>change mode</source>
<translation>change mode</translation>
</message>
<message>
<source>capture</source>
<translation>capture</translation>
</message>
<message>
<source>hedgehogs
info</source>
<translation>hedgehogs
info</translation>
</message>
<message>
<source>quit</source>
<translation>quit</translation>
</message>
<message>
<source>zoom in</source>
<translation>zoom in</translation>
</message>
<message>
<source>zoom out</source>
<translation>zoom out</translation>
</message>
<message>
<source>reset zoom</source>
<translation>reset zoom</translation>
</message>
<message>
<source>long jump</source>
<translation>long jump</translation>
</message>
<message>
<source>high jump</source>
<translation>high jump</translation>
</message>
<message>
<source>slot 10</source>
<translation type="unfinished">slot 10</translation>
</message>
</context>
<context>
<name>binds (categories)</name>
<message>
<source>Basic controls</source>
<translation>التحكم الأساسي</translation>
</message>
<message>
<source>Weapon controls</source>
<translation>السيطرة على الاسلحة</translation>
</message>
<message>
<source>Camera and cursor controls</source>
<translation type="unfinished">السيطرة على الكامرة و المؤشر</translation>
</message>
<message>
<source>Other</source>
<translation>اخرى</translation>
</message>
</context>
<context>
<name>binds (descriptions)</name>
<message>
<source>Move your hogs and aim:</source>
<translation type="unfinished">تحريك اللاعب و التصويب</translation>
</message>
<message>
<source>Traverse gaps and obstacles by jumping:</source>
<translation type="unfinished">قفز فوق الحواجز</translation>
</message>
<message>
<source>Fire your selected weapon or trigger an utility item:</source>
<translation type="unfinished">اطلاق السلاح</translation>
</message>
<message>
<source>Pick a weapon or a target location under the cursor:</source>
<translation type="unfinished">أخذ السلاح تحت المؤشر</translation>
</message>
<message>
<source>Switch your currently active hog (if possible):</source>
<translation type="unfinished">تغيير اختيار اللاعب الحالي</translation>
</message>
<message>
<source>Pick a weapon or utility item:</source>
<translation type="unfinished">اختر السلاح</translation>
</message>
<message>
<source>Set the timer on bombs and timed weapons:</source>
<translation type="unfinished">وقت الانفجار</translation>
</message>
<message>
<source>Move the camera to the active hog:</source>
<translation type="unfinished">الكامرة على اللاعب</translation>
</message>
<message>
<source>Move the cursor or camera without using the mouse:</source>
<translation type="unfinished">تحريك الكامرة او اللاعب بلا المؤشر</translation>
</message>
<message>
<source>Modify the camera's zoom level:</source>
<translation type="unfinished">تغيير مدى التقريب البصري</translation>
</message>
<message>
<source>Talk to your team or all participants:</source>
<translation type="unfinished">ارسال رسالة لاعضاء الفريق</translation>
</message>
<message>
<source>Pause, continue or leave your game:</source>
<translation type="unfinished">توقيف اللعبة، الاستمرار او الغائها</translation>
</message>
<message>
<source>Modify the game's volume while playing:</source>
<translation type="unfinished">تغيير ارتقاع الصوت اثناء اللعبة</translation>
</message>
<message>
<source>Toggle fullscreen mode:</source>
<translation type="unfinished">تبديل ملئ الشاشة</translation>
</message>
<message>
<source>Take a screenshot:</source>
<translation type="unfinished">خد صورة</translation>
</message>
<message>
<source>Toggle labels above hedgehogs:</source>
<translation type="unfinished">تغيير العناوبن فوق اللاعبين</translation>
</message>
</context>
<context>
<name>binds (keys)</name>
<message>
<source>Axis</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>(Up)</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>(Down)</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Hat</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>(Left)</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>(Right)</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Button</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Keyboard</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Delete</source>
<translation>Delete</translation>
</message>
<message>
<source>Mouse: Left button</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Mouse: Middle button</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Mouse: Right button</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Mouse: Wheel up</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Mouse: Wheel down</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Backspace</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Tab</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Clear</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Return</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Pause</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Escape</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Space</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Numpad 0</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Numpad 1</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Numpad 2</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Numpad 3</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Numpad 4</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Numpad 5</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Numpad 6</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Numpad 7</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Numpad 8</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Numpad 9</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Numpad .</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Numpad /</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Numpad *</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Numpad -</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Numpad +</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Enter</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Equals</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Up</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Down</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Right</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Left</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Insert</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Home</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>End</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Page up</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Page down</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Num lock</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Caps lock</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Scroll lock</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Right shift</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Left shift</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Right ctrl</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Left ctrl</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Right alt</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Left alt</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Right meta</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Left meta</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>A button</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>B button</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>X button</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Y button</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>LB button</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>RB button</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Back button</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Start button</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Left stick</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Right stick</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Left stick (Right)</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Left stick (Left)</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Left stick (Down)</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Left stick (Up)</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Left trigger</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Right trigger</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Right stick (Down)</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Right stick (Up)</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Right stick (Right)</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Right stick (Left)</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>DPad</source>
<translation type="unfinished"></translation>
</message>
</context>
</TS><|fim▁end|> | </message>
<message> |
<|file_name|>detect_test.go<|end_file_name|><|fim▁begin|>package multigz
import (
"os"
"testing"
)
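// TestIsMultiGzip exercises both directions: a plain gzip file must not be
// detected as multi-gzip, while a multi-member archive must be.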
func TestIsMultiGzip(t *testing.T) {
f, err := os.Open("testdata/divina.txt.gz")
if err != nil {
t.Fatal(err)
}
defer f.Close()
if IsProbablyMultiGzip(f, DefaultPeekSize) {
t.Error("divina.txt.gz detected as multigz but it isn't")
}
<|fim▁hole|> }
defer f2.Close()
if !IsProbablyMultiGzip(f2, DefaultPeekSize) {
t.Error("divina2.txt.gz not detected as multigz but it is")
}
}<|fim▁end|> | f2, err := os.Open("testdata/divina2.txt.gz")
if err != nil {
t.Fatal(err) |
<|file_name|>NameEmitter.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2015 Alexey Andreev.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.teavm.jso.impl;
import java.io.IOException;
/**
*
* @author Alexey Andreev
*/<|fim▁hole|> void emit(int precedence) throws IOException;
}<|fim▁end|> | interface NameEmitter { |
<|file_name|>dailylog.go<|end_file_name|><|fim▁begin|>// Copyright 2013 Ibrahim Ghazal. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package main
import (
"github.com/qedus/nds"
"golang.org/x/net/context"
"google.golang.org/appengine"
"google.golang.org/appengine/datastore"
"google.golang.org/appengine/log"
"fmt"
"net/http"
"net/url"
"time"
)
func init() {
http.HandleFunc("/dailylog", accessHandler(dailylogHandler))
http.HandleFunc("/dailylog/student", accessHandler(dailylogStudentHandler))
http.HandleFunc("/dailylog/edit", accessHandler(dailylogEditHandler))
http.HandleFunc("/dailylog/save", accessHandler(dailylogSaveHandler))
http.HandleFunc("/viewdailylog", accessHandler(viewDailylogHandler))
http.HandleFunc("/viewdailylog/day", accessHandler(viewDailylogDayHandler))
}
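// dailylogType is a single per-student, per-day log entry; entries are keyed
// in the datastore as "<studentID>|<YYYY-MM-DD>".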
type dailylogType struct {
// TODO: per-year?
StudentID string
Date time.Time
Behavior string
Attendance string
Details string
}
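// getDailylog loads one entry by student ID and date string ("2006-01-02" format).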
func getDailylog(c context.Context, studentID, date string) (dailylogType, error) {
key := datastore.NewKey(c, "dailylog", fmt.Sprintf("%s|%s", studentID, date), 0, nil)
var dailylog dailylogType
err := nds.Get(c, key, &dailylog)
if err != nil {
return dailylogType{}, err
}
return dailylog, nil
}
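// getDailylogs returns every entry recorded for the given student.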
func getDailylogs(c context.Context, studentID string) ([]dailylogType, error) {
q := datastore.NewQuery("dailylog").Filter("StudentID =", studentID)
var dailylogs []dailylogType
_, err := q.GetAll(c, &dailylogs)
if err != nil {
return nil, err
}
return dailylogs, nil
}
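// save writes the entry under its "<studentID>|<date>" key, overwriting any existing one.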
func (dl dailylogType) save(c context.Context) error {
keyStr := fmt.Sprintf("%s|%s", dl.StudentID, dl.Date.Format("2006-01-02"))
key := datastore.NewKey(c, "dailylog", keyStr, 0, nil)
_, err := nds.Put(c, key, &dl)
if err != nil {
return err
}
return nil
}
func (dl dailylogType) delete(c context.Context) error {
keyStr := fmt.Sprintf("%s|%s", dl.StudentID, dl.Date.Format("2006-01-02"))
key := datastore.NewKey(c, "dailylog", keyStr, 0, nil)
err := nds.Delete(c, key)
if err != nil {
return err
}
return nil
}
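// dailylogHandler renders the class/section picker and, once a section is
// selected, the list of students in it.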
func dailylogHandler(w http.ResponseWriter, r *http.Request) {
c := appengine.NewContext(r)
sy := getSchoolYear(c)
err := r.ParseForm()
if err != nil {
log.Errorf(c, "Could not parse form: %s", err)
renderError(w, r, http.StatusInternalServerError)
return
}
classSection := r.Form.Get("classsection")
var students []studentClass
if classSection != "" {
students, err = findStudents(c, sy, classSection)
if err != nil {
log.Errorf(c, "Could not retrieve students: %s", err)
renderError(w, r, http.StatusInternalServerError)
return
}
}
classGroups := getClassGroups(c, sy)
data := struct {
S []studentClass
CG []classGroup
ClassSection string
}{
students,
classGroups,
classSection,
}
if err := render(w, r, "dailylog", data); err != nil {
log.Errorf(c, "Could not render template dailylog: %s", err)
renderError(w, r, http.StatusInternalServerError)
return
}
}
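// dailylogStudentHandler shows one student's details along with all of their daily logs.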
func dailylogStudentHandler(w http.ResponseWriter, r *http.Request) {
c := appengine.NewContext(r)
if err := r.ParseForm(); err != nil {
log.Errorf(c, "Could not parse form: %s", err)
renderError(w, r, http.StatusInternalServerError)
return
}
id := r.Form.Get("id")
stu, err := getStudent(c, id)
if err != nil {
log.Errorf(c, "Could not retrieve student details: %s", err)
renderError(w, r, http.StatusInternalServerError)
return
}
sy := getSchoolYear(c)
cs, err := getStudentClass(c, stu.ID, sy)
if err != nil {
log.Errorf(c, "Could not retrieve student class: %s", err)
renderError(w, r, http.StatusInternalServerError)
return
}
class, section := cs.Class, cs.Section
dailylogs, err := getDailylogs(c, id)
if err != nil {
log.Errorf(c, "Could not retrieve daily logs: %s", err)
renderError(w, r, http.StatusInternalServerError)
return
}
data := struct {
S studentType
Class string
Section string
Today time.Time
Dailylogs []dailylogType
}{
stu,
class,
section,
time.Now(),
dailylogs,
}
if err := render(w, r, "dailylogstudent", data); err != nil {
log.Errorf(c, "Could not render template dailylogstudent: %s", err)
renderError(w, r, http.StatusInternalServerError)
return
}
}
func dailylogEditHandler(w http.ResponseWriter, r *http.Request) {
c := appengine.NewContext(r)
if err := r.ParseForm(); err != nil {
log.Errorf(c, "Could not parse form: %s", err)
renderError(w, r, http.StatusInternalServerError)
return
}
id := r.Form.Get("id")
date := r.Form.Get("date")
if id == "" || date == "" {
log.Errorf(c, "Empty student (%s) or daily log (%s)", id, date)
renderError(w, r, http.StatusInternalServerError)
return
}
dailylog, err := getDailylog(c, id, date)
if err == datastore.ErrNoSuchEntity {
d, err := time.Parse("2006-01-02", date)
if err != nil {
d = time.Now()
}
dailylog.StudentID = id
dailylog.Date = d
} else if err != nil {
log.Errorf(c, "Could not retrieve daily log details: %s", err)
renderError(w, r, http.StatusInternalServerError)
return
}
data := struct {
Dailylog dailylogType
}{
dailylog,
}
if err := render(w, r, "dailylogedit", data); err != nil {
log.Errorf(c, "Could not render template dailylogedit: %s", err)
renderError(w, r, http.StatusInternalServerError)
return
}
}
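// dailylogSaveHandler stores or deletes an entry and, after a save, emails the
// student a link to the new log before redirecting back to the student page.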
func dailylogSaveHandler(w http.ResponseWriter, r *http.Request) {
c := appengine.NewContext(r)
if err := r.ParseForm(); err != nil {
log.Errorf(c, "Could not parse form: %s", err)
renderError(w, r, http.StatusInternalServerError)
return
}
f := r.PostForm
id := f.Get("ID")
date, err := time.Parse("2006-01-02", f.Get("Date"))
if err != nil {
log.Errorf(c, "Invalid date: %s", err)
renderError(w, r, http.StatusInternalServerError)
return
}
behavior := f.Get("Behavior")
attendance := f.Get("Attendance")
details := f.Get("Details")
dailylog := dailylogType{
StudentID: id,
Date: date,
Behavior: behavior,
Attendance: attendance,
Details: details,
}
isSave := false
if f.Get("submit") == "Delete" {
err = dailylog.delete(c)
} else {
err = dailylog.save(c)
isSave = true
}
if err != nil {
// TODO: message to user
log.Errorf(c, "Could not store dailylog: %s", err)
renderError(w, r, http.StatusInternalServerError)
return
}
if isSave {
subject := "New Daily Log Added"
body := "A new daily log is added. To view it, go to: https://creativity-private-school-2015.appspot.com/viewdailylog/day?date=" + f.Get("Date")
sendStudentEmails(c, []string{id}, subject, body)
}
// TODO: message of success
urlValues := url.Values{
"id": []string{id},
"date": []string{date.Format("2006-01-02")},
}
redirectURL := fmt.Sprintf("/dailylog/student?%s", urlValues.Encode())
http.Redirect(w, r, redirectURL, http.StatusFound)
}
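// viewDailylogHandler is the student-facing list view; it resolves the
// signed-in student from the request and shows their logs.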
func viewDailylogHandler(w http.ResponseWriter, r *http.Request) {
c := appengine.NewContext(r)
if err := r.ParseForm(); err != nil {
log.Errorf(c, "Could not parse form: %s", err)
renderError(w, r, http.StatusInternalServerError)
return
}
user, err := getUser(c)
if err != nil {
log.Errorf(c, "Could not get user: %s", err)
renderError(w, r, http.StatusInternalServerError)
return
}
if user.Student == nil {
log.Errorf(c, "User is not a student: %s", user.Email)
renderError(w, r, http.StatusInternalServerError)
return
}
stu := *user.Student
sy := getSchoolYear(c)
cs, err := getStudentClass(c, stu.ID, sy)
if err != nil {
log.Errorf(c, "Could not retrieve student class: %s", err)
renderError(w, r, http.StatusInternalServerError)
return
}
class, section := cs.Class, cs.Section
dailylogs, err := getDailylogs(c, stu.ID)
if err != nil {
log.Errorf(c, "Could not retrieve daily logs: %s", err)
renderError(w, r, http.StatusInternalServerError)
return
}
data := struct {
S studentType
Class string
Section string
Today time.Time
Dailylogs []dailylogType
}{
stu,
class,
section,
time.Now(),
dailylogs,
}
if err := render(w, r, "viewdailylog", data); err != nil {
log.Errorf(c, "Could not render template viewdailylog: %s", err)
renderError(w, r, http.StatusInternalServerError)
return
}
}
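// viewDailylogDayHandler shows the signed-in student a single day's entry,
// falling back to an empty entry for that date if none exists.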
func viewDailylogDayHandler(w http.ResponseWriter, r *http.Request) {
c := appengine.NewContext(r)
if err := r.ParseForm(); err != nil {<|fim▁hole|> }
user, err := getUser(c)
if err != nil {
log.Errorf(c, "Could not get user: %s", err)
renderError(w, r, http.StatusInternalServerError)
return
}
if user.Student == nil {
log.Errorf(c, "User is not a student: %s", user.Email)
renderError(w, r, http.StatusInternalServerError)
return
}
stu := *user.Student
id := stu.ID
date := r.Form.Get("date")
if id == "" || date == "" {
log.Errorf(c, "Empty student (%s) or daily log (%s)", id, date)
renderError(w, r, http.StatusInternalServerError)
return
}
dailylog, err := getDailylog(c, id, date)
if err == datastore.ErrNoSuchEntity {
d, err := time.Parse("2006-01-02", date)
if err != nil {
d = time.Now()
}
dailylog.StudentID = id
dailylog.Date = d
} else if err != nil {
log.Errorf(c, "Could not retrieve daily log details: %s", err)
renderError(w, r, http.StatusInternalServerError)
return
}
data := struct {
Dailylog dailylogType
}{
dailylog,
}
if err := render(w, r, "viewdailylogday", data); err != nil {
log.Errorf(c, "Could not render template viewdailylogday: %s", err)
renderError(w, r, http.StatusInternalServerError)
return
}
}<|fim▁end|> | log.Errorf(c, "Could not parse form: %s", err)
renderError(w, r, http.StatusInternalServerError)
return |
<|file_name|>SimpleAction.java<|end_file_name|><|fim▁begin|>package br.com.caelum.rest.server;
import javax.servlet.http.HttpServletRequest;
public class SimpleAction implements Action {
public String getUri() {
return uri;
}
public String getRel() {
return rel;
}
private final String uri;
private final String rel;
public SimpleAction(String rel, String uri) {
this.rel = rel;
this.uri = uri;
}
<|fim▁hole|> this.rel = rel;
this.uri = "http://restful-server.appspot.com" + uri;
// this.uri = "http://" + request.getServerName() + ":" + request.getServerPort() + request.getContextPath() + uri;
}
}<|fim▁end|> | public SimpleAction(String rel, HttpServletRequest request, String uri) { |
<|file_name|>HandDetails.py<|end_file_name|><|fim▁begin|>from triple_draw_poker.model.Pot import Pot
class HandDetails:
def __init__(self):
self.pot = Pot()
self.raised = 0
self.street = 0
self.number_of_streets = 4
self.in_draw = False
self.hands = []
self.dealt_cards_index = 0
def getDealtCardsIndex(self):
return dealt_cards_index
def getHands(self):
return self.hands
def getPot(self):
return self.pot
def getRaised(self):
return self.raised
def getStreet(self):
return self.street
def getStreetPremium(self):
if self.street < 3:
return 2
return 1
def getNumberOfStreets(self):
return self.number_of_streets
def getInDraw(self):
return self.in_draw
def setDealtCardsIndex(self, index):
self.dealt_cards_index = index
def addHand(self, hand):
self.hands.append(hand)
def incrementRaised(self):
self.raised += 1
def incrementStreet(self):<|fim▁hole|><|fim▁end|> | self.street += 1
def changeInDraw(self):
self.in_draw = not self.in_draw |
<|file_name|>de.js<|end_file_name|><|fim▁begin|>H5PEditor.language.core = {
missingTranslation: '[Fehlende Übersetzung :key]',
loading: 'Lädt :type, bitte warten...',
selectLibrary: 'Auswählen der Bibliothek, die für den Inhalt verwendet werden soll.',
unknownFieldPath: '":path" kann nicht gefunden werden.',
notImageField: '":path" ist kein Bild.',
notImageOrDimensionsField: '":path" ist kein Bild oder Dimensionsfeld.',
requiredProperty: 'Die :property wird benötigt und muss einen Wert besitzen.',
onlyNumbers: 'Der :property Wert kann nur Nummern beinhalten.',
exceedsMax: 'Der :property Wert übersteigt das Maximum von :max.',
belowMin: 'Der :property Wert liegt unter dem Minimum von :min.',
outOfStep: 'Der :property Wert kann nur in Schritten von :step geändert werden.',
addFile: 'Datei hinzufügen',
add: 'Hinzuf\u00fcgen',
removeFile: 'Datei entfernen',
confirmRemoval: 'Diesen :type ganz sicher entfernen?',
removeImage: 'Bild entfernen',
confirmImageRemoval: 'Dies wird das Bild entfernen. Ganz sicher fortfahren?',
changeFile: 'Datei ändern',
changeLibrary: 'Inhaltstyp ändern?',
semanticsError: 'Semantischer Fehler: :error',
missingProperty: 'Im Feld :index fehlt :property property.',
expandCollapse: 'Erweitern/Verkleinern',
addEntity: ':entity hinzufügen',
tooLong: 'Wert des Feldes ist zu lang. Es sollte :max Buchstaben oder weniger beinhalten.',
invalidFormat: 'Der Feldwert beinhaltet ein ungültiges Format oder verbotene Zeichen.',
confirmChangeLibrary: 'Wenn dies ausgeführt wird, dann geht alles verloren, was mit dem aktuellen Inhaltstyp erstellt wurde. Ganz sicher den Inhaltstyp wechseln?',
moreLibraries: 'Nach <a href="http://h5p.org/content-types-and-applications" target="_blank">mehr Inhaltstypen</a> auf h5p.org Ausschau halten',
commonFields: 'Einstellungen und Texte',
commonFieldsDescription: 'Hier können Einstellungen bearbeitet oder Texte übersetzt werden, die in diesem Inhalt Verwendung finden.',
uploading: 'Lädt hoch, bitte warten...',
noFollow: 'Dem Feld ":path" kann nicht gefolgt werden.',
editCopyright: 'Urheberrecht bearbeiten',
close: 'Schließen',
tutorialAvailable: 'Anleitung verfügbar',
editMode: 'Bearbeitungsmodus',
listLabel: 'Liste',
uploadError: 'Datenuploadfehler',
fileToLarge: 'Die Datei, die hochgeladen werden soll, könnte zu groß sein.',
noSemantics: 'Fehler, das Formular des Inhaltstypen konnte nicht geladen werden.',
editImage: 'Bild bearbeiten',
saveLabel: 'Speichern',
cancelLabel: 'Abbrechen',
resetToOriginalLabel: 'Auf Original zurücksetzen',
loadingImageEditor: 'Bildeditor lädt, bitte warten...',
selectFiletoUpload: 'Datei zum Hochladen ausw\u00e4hlen',
or: 'oder',
enterAudioUrl: 'URL der Audiodatei eingeben',<|fim▁hole|> enterAudioTitle: 'Link oder URL zu Audiodatei einf\u00fcgen',
enterVideoTitle: 'YouTube-Link oder andere Video-URL einf\u00fcgen',
uploadAudioTitle: 'Audio-Datei hochladen',
uploadVideoTitle: 'Video-Datei hochladen',
addVideoDescription: 'H5P unterst\u00fctzt externe Videodateien im Format mp4, webm oder ogv, wie bei Vimeo Pro, und unterst\u00fctzt YouTube-Links.',
insert: 'Einf\u00fcgen',
cancel: 'Abbrechen',
height: 'H\u00f6he',
width: 'Breite',
textField: 'Textfeld',
numberField: 'Nummernfeld',
orderItemUp: 'Element nach vorne sortieren',
orderItemDown: 'Element nach hinten sortieren',
removeItem: 'Element entfernen',
hubPanelLabel: 'Inhaltstyp ausw\u00e4hlen',
importantInstructions: 'Wichtige Hinweise',
hideImportantInstructions: 'Wichtige Hinweise ausblenden',
hide: 'Ausblenden',
example: 'Beispiel',
createContentTabLabel: 'Inhalt erstellen',
uploadTabLabel: 'Hochladen',
uploadPlaceholder: 'Keine Datei ausgew\u00e4hlt',
uploadInstructionsTitle: 'Eine H5P-Datei hochladen.',
uploadInstructionsContent: 'Du kannst mit Beispielen von <a href="https://h5p.org/content-types-and-applications" target="blank">H5P.org</a> starten.',
uploadFileButtonLabel: 'Datei hochladen',
uploadFileButtonChangeLabel: 'Datei \u00e4ndern',
uploadingThrobber: 'Lade hoch ...',
h5pFileWrongExtensionTitle: 'Die ausgew\u00e4hlte Datei konnte nicht hochgeladen werden',
h5pFileWrongExtensionContent: 'Nur Dateien mit der Endung .h5p sind erlaubt.',
h5pFileValidationFailedTitle: 'Die H5P-Datei konnte nicht \u00fcberpr\u00fcft werden.',
h5pFileValidationFailedContent: 'Stelle sicher, dass die hochgeladene Datei g\u00fcltigen H5P-Inhalt enth\u00e4lt. H5P' +
'-Dateien, die nur Bibliotheken enthalten, sollten \u00fcber die H5P-Bibliothekenseite hochgeladen werden.',
h5pFileUploadServerErrorTitle: 'Die H5P-Datei konnte nicht hochgeladen werden',
h5pFileUploadServerErrorContent: 'Ein unerwarteter Fehler ist aufgetreten. Bitte pr\u00fcfe die Fehlerlogdatei des Servers f\u00fcr' +
' mehr Details.',
contentTypeSectionAll: 'Alle',
contentTypeSectionMine: 'Meine Inhaltstypen',
contentTypeSectionPopular: 'Am beliebtesten',
contentTypeSectionTitle: 'Inhaltstypen durchst\u00f6bern',
contentTypeSearchFieldPlaceholder: 'Nach Inhaltstypen suchen',
contentTypeInstallButtonLabel: 'Installieren',
contentTypeInstallingButtonLabel: 'Installiere',
contentTypeUseButtonLabel: 'Benutzen',
contentTypeUpdateButtonLabel: 'Aktualisieren',
contentTypeUpdatingButtonLabel: 'Aktualisiere',
contentTypeGetButtonLabel: '\u00dcbernehmen',
contentTypeBackButtonLabel: 'Zurück',
contentTypeIconAltText: 'Symbolbild',
contentTypeInstallSuccess: ':contentType erfolgreich installiert!',
contentTypeUpdateSuccess: ':contentType erfolgreich aktualisiert!',
contentTypeInstallError: ':contentType konnte nicht installiert werden. Kontaktiere bitte deinen Administrator.',
contentTypeLicensePanelTitle: 'Lizenz',
contentTypeDemoButtonLabel: 'Inhalts-Demo',
readMore: 'Mehr lesen',
readLess: 'Weniger lesen',
contentTypeOwner: 'Von :owner',
contentTypeUnsupportedApiVersionTitle: 'Dieser Inhaltstyp erfordert eine neuere Version des H5P-Kerns.',
contentTypeUnsupportedApiVersionContent: 'Kontaktiere bitte deinen Systemadministrator, um die notwendigen Aktualisierungen zu erhalten.',
contentTypeUpdateAvailable: 'Aktualisierung verf\u00fcgbar',
theContentType: 'dem Inhaltstyp',
currentMenuSelected: 'aktuelle Auswahl',
errorCommunicatingHubTitle: 'Es ist keine Verbindung zum Hub m\u00f6glich.',
errorCommunicatingHubContent: 'Ein Fehler ist aufgetreten. Bitte versuche es noch einmal.',
warningNoContentTypesInstalled: 'Du hast keine Inhaltstypen installiert.',
warningChangeBrowsingToSeeResults: 'Klicke <em>Alle</em>, um eine Liste aller installierbaren Inhaltstypen zu erhalten.',
warningUpdateAvailableTitle: 'Eine neuere Version von :contentType ist verf\u00fcgbar.',
warningUpdateAvailableBody: 'Aktualisiere auf die neueste Version, um von allen Verbesserungen zu profitieren.',
licenseDescription : 'Einige der Lizenzmerkmale sind unten aufgef\u00fchrt. Klicke auf das Info-Symbolbild, um den Originallizenztext zu lesen.',
licenseModalTitle: 'Lizenzdetails',
licenseModalSubtitle: 'W\u00e4hle eine Lizenz aus, um zu erfahren, welche Auflagen sie umfasst.',
licenseUnspecified: 'Nicht näher angegeben',
licenseCanUseCommercially: 'Darf kommerziell genutzt werden',
licenseCanModify: 'Darf ver\u00e4ndert werden',
licenseCanDistribute: 'Darf weitergegeben werden',
licenseCanSublicense: 'Unterlizenzvertrag ist m\u00f6glich',
licenseCanHoldLiable: 'Haftung wird \u00fcbernommen',
licenseCannotHoldLiable: 'Haftung wird nicht \u00fcbernommen',
licenseMustIncludeCopyright: 'Muss Urheberrechtshinweis enthalten',
licenseMustIncludeLicense: 'Muss Lizenztext enthalten',
licenseFetchDetailsFailed: 'Lizenzdetails konnten nicht geladen werden',
imageLightboxTitle: 'Bilder',
imageLightBoxProgress: ':num von :total',
nextImage: 'N\u00e4chstes Bild',
previousImage: 'Vorheriges Bild',
screenshots: 'Bildschirmfotos',
reloadButtonLabel: 'Neu laden',
videoQuality: 'Videoaufl\u00f6sung',
videoQualityDescription: 'Dieses Label hilft dem Benutzer, die aktuelle Videoaufl\u00f6sung zu erkennen. Z.B.. 1080p, 720p, HD der Mobile',
videoQualityDefaultLabel: 'Aufl\u00f6sung :index'
};<|fim▁end|> | enterVideoUrl: 'URL der Videodatei oder YouTube-Link eingeben', |
<|file_name|>systemjs.config.js<|end_file_name|><|fim▁begin|><|fim▁hole|>(function (global) {
System.config({
paths: {
// paths serve as alias
'npm:': 'lib/'
},
// map tells the System loader where to look for things
map: {
// our app is within the app folder
app: 'app',
//Testing libraries
//'@angular/core/testing': 'npm:@angular/core/bundles/core-testing.umd.js',
//'@angular/common/testing': 'npm:@angular/common/bundles/common-testing.umd.js',
//'@angular/compiler/testing': 'npm:@angular/compiler/bundles/compiler-testing.umd.js',
//'@angular/platform-browser/testing': 'npm:@angular/platform-browser/bundles/platform-browser-testing.umd.js',
//'@angular/platform-browser-dynamic/testing': 'npm:@angular/platform-browser-dynamic/bundles/platform-browser-dynamic-testing.umd.js',
//'@angular/http/testing': 'npm:@angular/http/bundles/http-testing.umd.js',
//'@angular/router/testing': 'npm:@angular/router/bundles/router-testing.umd.js',
//'@angular/forms/testing': 'npm:@angular/forms/bundles/forms-testing.umd.js',
// angular bundles
'@angular/core': 'npm:@angular/core/bundles/core.umd.js',
'@angular/common': 'npm:@angular/common/bundles/common.umd.js',
'@angular/compiler': 'npm:@angular/compiler/bundles/compiler.umd.js',
'@angular/platform-browser': 'npm:@angular/platform-browser/bundles/platform-browser.umd.js',
'@angular/platform-browser-dynamic': 'npm:@angular/platform-browser-dynamic/bundles/platform-browser-dynamic.umd.js',
'@angular/http': 'npm:@angular/http/bundles/http.umd.js',
'@angular/router': 'npm:@angular/router/bundles/router.umd.js',
'@angular/forms': 'npm:@angular/forms/bundles/forms.umd.js',
// other libraries
'rxjs': 'npm:rxjs',
'angular-in-memory-web-api': 'npm:angular-in-memory-web-api/bundles/in-memory-web-api.umd.js'
},
// packages tells the System loader how to load when no filename and/or no extension
packages: {
app: {
main: './main.js',
defaultExtension: 'js'
},
rxjs: {
defaultExtension: 'js'
}
}
});
})(this);<|fim▁end|> | /**
* System configuration for Angular 2 samples
* Adjust as necessary for your application needs.
*/ |
<|file_name|>tweet.go<|end_file_name|><|fim▁begin|>package twitterstream
type Hashtag struct {
Text string `json:"text"`
Indices IndexPair `json:"indices"`
}
type Size struct {
Width int `json:"w"`
Height int `json:"h"`
Resize string `json:"resize"`
}
type Sizes struct {
Large Size `json:"large"`
Medium Size `json:"medium"`
Small Size `json:"small"`
Thumb Size `json:"thumb"`<|fim▁hole|>}
type Medium struct {
Id int64 `json:"id"`
IdStr string `json:"id_str"`
Type string `json:"type"`
MediaUrl string `json:"media_url"`
SecureMediaUrl string `json:"media_url_https"`
Url string `json:"url"`
DisplayUrl string `json:"display_url"`
ExpandedUrl *string `json:"expanded_url"`
Sizes Sizes `json:"sizes"`
Indices IndexPair `json:"indices"`
}
type Mention struct {
Id int64 `json:"id"`
IdStr string `json:"id_str"`
ScreenName string `json:"screen_name"`
FullName string `json:"full_name"`
Indices IndexPair `json:"indices"`
}
type Url struct {
Url string `json:"url"`
DisplayUrl string `json:"display_url"`
ExpandedUrl *string `json:"expanded_url"`
Indices IndexPair `json:"indices"`
}
type Entities struct {
Hashtags []Hashtag `json:"hashtags"`
Media []Medium `json:"media"`
Mentions []Mention `json:"user_mentions"`
Urls []Url `json:"urls"`
}
type Contributor struct {
Id int64 `json:"id"`
IdString string `json:"id_str"`
ScreenName string `json:"screen_name"`
}
type Tweet struct {
// The integer representation of the unique identifier for this Tweet. This number is greater than 53 bits and some programming languages may have difficulty/silent defects in interpreting it. Using a signed 64 bit integer for storing this identifier is safe. Use id_str for fetching the identifier to stay on the safe side. See Twitter IDs, JSON and Snowflake.
Id int64 `json:"id"`
// The string representation of the unique identifier for this Tweet. Implementations should use this rather than the large integer in id.
IdString string `json:"id_str"`
// An collection of brief user objects (usually only one) indicating users who contributed to the authorship of the tweet, on behalf of the official tweet author.
Contributors []Contributor `json:"contributors"`
// Represents the geographic location of this Tweet as reported by the user or client application.
Coordinates *Point `json:"coordinates"`
// Time when this Tweet was created.
CreatedAt Time `json:"created_at"`
// Entities which have been parsed out of the text of the Tweet.
Entities Entities `json:"entities"`
// Perspectival. Indicates whether this Tweet has been favorited by the authenticating user.
Favorited *bool `json:"favorited"`
// If the represented Tweet is a reply, this field will contain the screen name of the original Tweet's author.
InReplyToScreenName *string `json:"in_reply_to_screen_name"`
// If the represented Tweet is a reply, this field will contain the integer representation of the original Tweet's ID.
InReplyToStatusId *int64 `json:"in_reply_to_status_id"`
// If the represented Tweet is a reply, this field will contain the string representation of the original Tweet's ID.
InReplyToStatusIdStr *string `json:"in_reply_to_status_id_str"`
// If the represented Tweet is a reply, this field will contain the integer representation of the original Tweet's author ID.
InReplyToUserId *int64 `json:"in_reply_to_user_id"`
// If the represented Tweet is a reply, this field will contain the string representation of the original Tweet's author ID.
InReplyToUserIdStr *string `json:"in_reply_to_user_id_str"`
// When present, indicates a BCP 47 language identifier corresponding to the machine-detected language of the Tweet text, or “und” if no language could be detected.
Lang string `json:"lang"`
// When present, indicates that the tweet is associated (but not necessarily originating from) a Place.
Place *Place `json:"place"`
// This field only surfaces when a tweet contains a link. The meaning of the field doesn't pertain to the tweet content itself, but instead it is an indicator that the URL contained in the tweet may contain content or media identified as sensitive content.
PossiblySensitive *bool `json:"possibly_sensitive"`
// Number of times this Tweet has been retweeted. This field is no longer capped at 99 and will not turn into a String for "100+"
RetweetCount int `json:"retweet_count"`
// Perspectival. Indicates whether this Tweet has been retweeted by the authenticating user.
Retweeted bool `json:"retweeted"`
// If Retweet the original Tweet can be found here.
RetweetedStatus *Tweet `json:"retweeted_status"`
// Utility used to post the Tweet, as an HTML-formatted string. Tweets from the Twitter website have a source value of web.
Source string `json:"source"`
// The actual UTF-8 text of the status update.
Text string `json:"text"`
// Indicates whether the value of the text parameter was truncated, for example, as a result of a retweet exceeding the 140 character Tweet length. Truncated text will end in ellipsis, like this ...
Truncated bool `json:"truncated"`
// When present, indicates a textual representation of the two-letter country codes this content is withheld from. See New Withheld Content Fields in API Responses.
WithheldInCountries string `json:"withheld_in_countries"`
// When present, indicates whether the content being withheld is the "status" or a "user." See New Withheld Content Fields in API Responses.
WithheldScope string `json:"withheld_scope"`
// The user who posted this Tweet.
User User `json:"user"`
}<|fim▁end|> | |
<|file_name|>traits.rs<|end_file_name|><|fim▁begin|>// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Disk-backed `HashDB` implementation.
use {Bytes, H256, UtilError};
use hashdb::*;
use kvdb::{self, DBTransaction};
use std::sync::Arc;
/// A `HashDB` which can manage a short-term journal potentially containing many forks of mutually
/// exclusive actions.
pub trait JournalDB: HashDB {
/// Return a copy of ourself, in a box.
fn boxed_clone(&self) -> Box<JournalDB>;
/// Returns heap memory size used
fn mem_used(&self) -> usize;
/// Returns the size of journalled state in memory.
/// This function has a considerable speed requirement --
/// it must be fast enough to call several times per block imported.
fn journal_size(&self) -> usize {
0<|fim▁hole|>
/// Check if this database has any commits
fn is_empty(&self) -> bool;
/// Get the earliest era in the DB. None if there isn't yet any data in there.
fn earliest_era(&self) -> Option<u64> {
None
}
/// Get the latest era in the DB. None if there isn't yet any data in there.
fn latest_era(&self) -> Option<u64>;
/// Journal recent database operations as being associated with a given era and id.
// TODO: give the overlay to this function so journaldbs don't manage the overlays themeselves.
fn journal_under(&mut self, batch: &mut DBTransaction, now: u64, id: &H256) -> Result<u32, UtilError>;
/// Mark a given block as canonical, indicating that competing blocks' states may be pruned out.
fn mark_canonical(&mut self, batch: &mut DBTransaction, era: u64, id: &H256) -> Result<u32, UtilError>;
/// Commit all queued insert and delete operations without affecting any journalling -- this requires that all insertions
/// and deletions are indeed canonical and will likely lead to an invalid database if that assumption is violated.
///
/// Any keys or values inserted or deleted must be completely independent of those affected
/// by any previous `commit` operations. Essentially, this means that `inject` can be used
/// either to restore a state to a fresh database, or to insert data which may only be journalled
/// from this point onwards.
fn inject(&mut self, batch: &mut DBTransaction) -> Result<u32, UtilError>;
/// State data query
fn state(&self, _id: &H256) -> Option<Bytes>;
/// Whether this database is pruned.
fn is_pruned(&self) -> bool {
true
}
/// Get backing database.
fn backing(&self) -> &Arc<kvdb::KeyValueDB>;
/// Clear internal strucutres. This should called after changes have been written
/// to the backing strage
fn flush(&self) {}
/// Consolidate all the insertions and deletions in the given memory overlay.
fn consolidate(&mut self, overlay: ::memorydb::MemoryDB);
/// Commit all changes in a single batch
#[cfg(test)]
fn commit_batch(&mut self, now: u64, id: &H256, end: Option<(u64, H256)>) -> Result<u32, UtilError> {
let mut batch = self.backing().transaction();
let mut ops = self.journal_under(&mut batch, now, id)?;
if let Some((end_era, canon_id)) = end {
ops += self.mark_canonical(&mut batch, end_era, &canon_id)?;
}
let result = self.backing().write(batch).map(|_| ops).map_err(Into::into);
self.flush();
result
}
/// Inject all changes in a single batch.
#[cfg(test)]
fn inject_batch(&mut self) -> Result<u32, UtilError> {
let mut batch = self.backing().transaction();
let res = self.inject(&mut batch)?;
self.backing().write(batch).map(|_| res).map_err(Into::into)
}
}<|fim▁end|> | } |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.shortcuts import render, get_object_or_404
from django.http import HttpResponse
from .models import *
import requests
import json
user_id = '139169754@N02'
api_key = '41dd3aff041c00c52febdef9786a9ca0'
api_secret = '0f5a3b5047f760f7'
def index(request):
context = {}
context['photos'] = []
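# Build the flickr.people.getPublicPhotos REST call; the url_z extra requests
# a direct URL for each photo's medium-size image.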
method = 'flickr.people.getPublicPhotos'
query = 'https://api.flickr.com/services/rest/?&method=%s&api_key=%s&user_id=%s&format=json&nojsoncallback=1'%(method, api_key, user_id)
query += '&extras=url_z'
response = requests.get(query)
if response.ok:<|fim▁hole|>
for link in response['photos']['photo']:
context['photos'].append(str(link['url_z']))
return render(request, 'photos/index.html', context)<|fim▁end|> | response = json.loads(response.text) |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# b3j0f.sync documentation build configuration file, created by
# sphinx-quickstart on Sun Jun 14 17:29:14 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
from b3j0f.sync import __version__
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.pngmath',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
source_suffix = ['.rst']
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'b3j0f.sync'
copyright = u'2015, b3j0f'
author = u'b3j0f'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = __version__
# The full version, including alpha/beta/rc tags.
release = __version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'b3j0fsyncdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment<|fim▁hole|># Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'b3j0fsync.tex', u'b3j0f.sync Documentation',
u'b3j0f', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'b3j0fsync', u'b3j0f.sync Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'b3j0fsync', u'b3j0f.sync Documentation',
author, 'b3j0fsync', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# The basename for the epub file. It defaults to the project name.
#epub_basename = project
# The HTML theme for the epub output. Since the default themes are not optimized
# for small screen space, using the same theme for HTML and epub output is
# usually not wise. This defaults to 'epub', a theme designed to save visual
# space.
#epub_theme = 'epub'
# The language of the text. It defaults to the language option
# or 'en' if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be an ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Choose between 'default' and 'includehidden'.
#epub_tocscope = 'default'
# Fix unsupported image types using Pillow.
#epub_fix_images = False
# Scale large images.
#epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#epub_show_urls = 'inline'
# If false, no index is generated.
#epub_use_index = True
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}<|fim▁end|> | #'figure_align': 'htbp',
}
|
<|file_name|>test_therm.py<|end_file_name|><|fim▁begin|>import RPi.GPIO as GPIO
import time
import utils
import therm
GPIO.setmode(GPIO.BOARD)
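# BOARD numbering addresses pins by their physical header position, so the
# pin numbers used below are independent of the Pi revision's BCM channel map.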
#pwr = utils.PSU(13, 15)
#pwr.on()
#pwr.off()
addresses = therm.get_adr()
samples = 5
therms = []
now = time.time()
t_amb = therm.Therm('28-000004e08693')
t_c_b = therm.Therm('28-000004e0f7cc')
t_c_m = therm.Therm('28-000004e0840a')
t_c_t = therm.Therm('28-000004e08e26')
t_hs = therm.Therm('28-000004e0804f')
print time.time() - now
now = time.time()
for i in range(samples):
temp_row = [t_amb.read_temp(), t_c_b.read_temp(), t_c_m.read_temp(), t_c_t.read_temp(), t_hs.read_temp()]<|fim▁hole|> now = time.time()
print therms
#GPIO.cleanup()<|fim▁end|> | print temp_row
therms.append(temp_row)
print time.time() - now |
<|file_name|>bridge_default.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import os
import os.path
from StringIO import StringIO
from time import time
__all__ = ['Parser', 'IncrementalParser', 'DispatchParser']
import xml.dom as xd
import xml.dom.minidom as xdm
import xml.sax as xs
import xml.sax.handler as xsh
import xml.sax.saxutils as xss
from xml.sax.saxutils import quoteattr, escape, unescape
from bridge import Element, ENCODING, Attribute, PI, Comment, Document
from bridge.common import ANY_NAMESPACE
class Parser(object):
def __init__(self):
self.buffer = []
def __deserialize_fragment(self, current, parent):
if current.attributes:
for key in iter(current.attributes.keys()):
attr = current.attributes[key]
Attribute(attr.localName, attr.value,
attr.prefix, attr.namespaceURI, parent)
children_num = len(current.childNodes)
children = iter(current.childNodes)
for child in children:
nt = child.nodeType
if nt == xd.Node.TEXT_NODE:
data = escape(child.data)
if children_num == 1:
parent.xml_text = data
else:
parent.xml_children.append(data)
elif nt == xd.Node.CDATA_SECTION_NODE:
parent.as_cdata = True
data = child.data
if children_num == 1:
parent.xml_text = data
else:
parent.xml_children.append(data)
elif nt == xd.Node.COMMENT_NODE:
Comment(data=unicode(child.data), parent=parent)
elif nt == xd.Node.PROCESSING_INSTRUCTION_NODE:
PI(target=unicode(child.target), data=unicode(child.data), parent=parent)
elif nt == xd.Node.ELEMENT_NODE:
element = Element(name=child.localName, prefix=child.prefix,
namespace=child.namespaceURI, parent=parent)
self.__deserialize_fragment(child, element)
def __qname(self, name, prefix=None):
if prefix:
return "%s:%s" % (prefix, name)
return name
def __attrs(self, node):
for attr_ns, attr_name in iter(node.xml_attributes):
if attr_ns == xd.XMLNS_NAMESPACE and attr_name == 'xmlns':
continue
attr = node.xml_attributes[(attr_ns, attr_name)]
ns = attr.xml_ns
prefix = attr.xml_prefix
name = attr.xml_name
yield ns, name, prefix, attr.xml_text or ''
def __append_namespace(self, prefix, ns):
if prefix:
self.buffer.append(' xmlns:%s="%s"' % (prefix, ns))
elif ns is not None:
self.buffer.append(' xmlns="%s"' % (ns, ))
def __build_ns_map(self, ns_map, element):
for child in element.xml_children:
if isinstance(child, Element):
if child.xml_ns not in ns_map:
ns_map[child.xml_prefix] = child.xml_ns
for attr_ns, attr_name in child.xml_attributes:
if attr_ns not in ns_map:
ns_map[attr_ns] = child.xml_attributes[(attr_ns, attr_name)].xml_prefix
def __is_known(self, ns_map, prefix, ns):
if prefix in ns_map and ns_map[prefix] == ns:
return True
ns_map[prefix] = ns
return False
def __append_text(self, text, as_cdata):
if as_cdata:
self.buffer.append('<![CDATA[')
self.buffer.append(text)
if as_cdata:
self.buffer.append(']]>')
def __serialize_element(self, element, parent_ns_map=None):
for child in iter(element.xml_children):
if isinstance(child, basestring):
child = child.strip().strip('\n').strip('\r\n')
if not child:
continue
self.__append_text(child, element.as_cdata)
elif isinstance(child, Element):
ns_map = {}
ns_map.update(parent_ns_map or {})
prefix = ns = name = None
if child.xml_prefix:
prefix = child.xml_prefix
if child.xml_ns:
ns = child.xml_ns
name = child.xml_name
qname = self.__qname(name, prefix=prefix)
self.buffer.append('<%s' % qname)
if not self.__is_known(ns_map, prefix, ns):
self.__append_namespace(prefix, ns)
for ns, name, prefix, value in self.__attrs(child):
if ns is None:
pass
elif ns == xd.XML_NAMESPACE:
name = 'xml:%s' % name
elif ns == xd.XMLNS_NAMESPACE:
if not self.__is_known(ns_map, name, value):
self.__append_namespace(name, value)
continue
else:
name = '%s:%s' % (prefix, name)
if not self.__is_known(ns_map, prefix, ns):
self.__append_namespace(prefix, ns)
self.buffer.append(' %s=%s' % (name, quoteattr(value)))
if child.xml_text or child.xml_children:
self.buffer.append('>')
if child.xml_text:
self.__append_text(child.xml_text, child.as_cdata)
if child.xml_children:<|fim▁hole|>
self.buffer.append('</%s>' % (qname, ))
else:
self.buffer.append(' />')
elif isinstance(child, Comment):
self.buffer.append('<!--%s-->\n' % (child.data,))
elif isinstance(child, PI):
self.buffer.append('<?%s %s?>\n' % (child.target, child.data))
def serialize(self, document, indent=False, encoding=ENCODING, prefixes=None, omit_declaration=False):
if not isinstance(document, Document):
root = document
document = Document()
document.xml_children.append(root)
self.__serialize_element(document)
if not omit_declaration:
self.buffer.insert(0, '<?xml version="1.0" encoding="%s"?>%s' % (encoding, os.linesep))
content = ''.join(self.buffer)
self.buffer = []
if indent:
return content.rstrip(os.linesep).encode(encoding)
return content.encode(encoding)
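    # Illustrative round trip (a sketch, not part of the original API docs):
    #   parser = Parser()
    #   doc = parser.deserialize('<root><child/></root>')
    #   xml_bytes = parser.serialize(doc)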
def deserialize(self, source, prefixes=None, strict=False):
doc = None
if isinstance(source, basestring):
if os.path.exists(source):
doc = xdm.parse(source)
else:
doc = xdm.parseString(source)
elif hasattr(source, 'read'):
doc = xdm.parse(source)
document = Document()
self.__deserialize_fragment(doc, document)
if doc:
try:
doc.unlink()
except KeyError:
pass
return document
import xml.sax as xs
import xml.sax.saxutils as xss
from xml.parsers import expat
try:
import cStringIO as StringIO
except ImportError:
import StringIO
from time import time
class IncrementalHandler(xss.XMLGenerator):
def __init__(self, out, encoding=ENCODING):
xss.XMLGenerator.__init__(self, out, encoding)
self._root = Document()
self._current_el = self._root
self._current_level = 0
self._as_cdata = False
def reset(self):
if self._root:
self._root.forget()
self._root = None
if self._current_el:
self._current_el.forget()
self._current_el = None
self._root = Document()
self._current_el = self._root
self._current_level = 0
def startDocument(self):
self._root = Document()
self._current_el = self._root
self._current_level = 0
self._as_cdata = False
# see http://www.xml.com/pub/a/2003/03/10/python.html
def _split_qname(self, qname):
qname_split = qname.split(':')
if len(qname_split) == 2:
prefix, local = qname_split
else:
prefix = None
            local = qname_split[0]  # no prefix present; the whole qname is the local name
return prefix, local
def processingInstruction(self, target, data):
PI(target, data, self._current_el)
def startElementNS(self, name, qname, attrs):
#print "$%s%s: %f" % (" " * self._current_level, name, time())
uri, local_name = name
prefix = None
if uri and uri in self._current_context:
prefix = self._current_context[uri]
#print "$$%s%s: %f" % (" " * self._current_level, name, time())
e = Element(local_name, prefix=prefix, namespace=uri, parent=self._current_el)
#print "$$$%s%s: %f" % (" " * self._current_level, name, time())
for name, value in iter(attrs.items()):
(namespace, local_name) = name
qname = attrs.getQNameByName(name)
prefix = self._split_qname(qname)[0]
Attribute(local_name, value, prefix, namespace, e)
#print "$$$$%s%s: %f" % (" " * self._current_level, name, time())
self._current_el = e
self._current_level = self._current_level + 1
#print "$$$$$%s%s: %f" % (" " * self._current_level, name, time())
def endElementNS(self, name, qname):
        self._current_level -= 1
self._current_el = self._current_el.xml_parent
def characters(self, content):
self._current_el.as_cdata = self._as_cdata
if not self._as_cdata and not self._current_el.xml_text:
self._current_el.xml_text = content
else:
self._current_el.xml_children.append(content)
self._as_cdata = False
def comment(self, data):
Comment(data, self._current_el)
def startCDATA(self):
self._as_cdata = True
def endCDATA(self):
pass
def startDTD(self, name, public_id, system_id):
pass
def endDTD(self):
pass
def doc(self):
"""Returns the root Document instance of the parsed
document. You have to call the close() method of the
parser first.
"""
return self._root
class IncrementalParser(object):
def __init__(self, out=None, encoding=ENCODING):
self.parser = xs.make_parser()
self.parser.setFeature(xs.handler.feature_namespaces, True)
if not out:
out = StringIO.StringIO()
self.out = out
self.handler = IncrementalHandler(self.out, encoding)
self.parser.setContentHandler(self.handler)
self.parser.setProperty(xs.handler.property_lexical_handler, self.handler)
def feed(self, chunk):
self.parser.feed(chunk)
def reset(self):
self.handler.reset()
self.parser.reset()
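# Sketch of feed-based use of IncrementalParser (illustrative; chunk
# boundaries are arbitrary):
#   p = IncrementalParser()
#   p.feed('<root>')
#   p.feed('<child/></root>')
#   doc = p.handler.doc()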
class DispatchHandler(IncrementalHandler):
def __init__(self, out, encoding='UTF-8'):
        IncrementalHandler.__init__(self, out=out, encoding=encoding)
"""This handler allows the incremental parsing of an XML document
        while providing simple ways to dispatch at precise points of the
parsing back to the caller.
Here's an example:
>>> from parser import DispatchParser
>>> p = DispatchParser()
>>> def dispatch(e):
... print e.xml()
...
        >>> p.register_at_level(1, dispatch)
>>> p.feed('<r')
>>> p.feed('><b')
>>> p.feed('/></r>')
<?xml version="1.0" encoding="UTF-8"?>
<b xmlns=""></b>
Alternatively this can even be used as a generic parser. If you
don't need dispatching you simply call ``disable_dispatching``.
>>> from parser import DispatchParser
>>> p = DispatchParser()
        >>> p.disable_dispatching()
>>> p.feed('<r><b/></r>')
>>> h.doc()
<r element at 0xb7ca99ccL />
>>> h.doc().xml(omit_declaration=True)
'<r xmlns=""><b></b></r>'
Note that this handler has limitations as it doesn't
manage DTDs.
Note also that this class is not thread-safe.
"""
self._level_dispatchers = {}
self._element_dispatchers = {}
self._element_level_dispatchers = {}
self._path_dispatchers = {}
self.default_dispatcher = None
self.default_dispatcher_start_element = None
self.disable_dispatching()
def register_default(self, handler):
self.default_dispatcher = handler
def unregister_default(self):
self.default_dispatcher = None
def register_default_start_element(self, handler):
self.default_dispatcher_start_element = handler
def unregister_default_start_element(self):
self.default_dispatcher_start_element = None
def disable_dispatching(self):
self.default_dispatcher = None
self.default_dispatcher_start_element = None
self.enable_level_dispatching = False
self.enable_element_dispatching = False
self.enable_element_by_level_dispatching = False
self.enable_dispatching_by_path = False
def enable_dispatching(self):
self.enable_level_dispatching = True
self.enable_element_dispatching = True
self.enable_element_by_level_dispatching = True
self.enable_dispatching_by_path = True
def register_at_level(self, level, dispatcher):
"""Registers a dispatcher at a given level within the
XML tree of elements being built.
The ``level``, an integer, is zero-based. So the root
element of the XML tree is 0 and its direct children
are at level 1.
The ``dispatcher`` is a callable object only taking
one parameter, a Element instance.
"""
self.enable_level_dispatching = True
self._level_dispatchers[level] = dispatcher
def unregister_at_level(self, level):
"""Unregisters a dispatcher at a given level
"""
if level in self._level_dispatchers:
del self._level_dispatchers[level]
if len(self._level_dispatchers) == 0:
self.enable_level_dispatching = False
def register_on_element(self, local_name, dispatcher, namespace=None):
"""Registers a dispatcher on a given element met during
the parsing.
The ``local_name`` is the local name of the element. This
element can be namespaced if you provide the ``namespace``
parameter.
The ``dispatcher`` is a callable object only taking
one parameter, a Element instance.
"""
self.enable_element_dispatching = True
self._element_dispatchers[(namespace, local_name)] = dispatcher
def unregister_on_element(self, local_name, namespace=None):
"""Unregisters a dispatcher for a specific element.
"""
key = (namespace, local_name)
if key in self._element_dispatchers:
del self._element_dispatchers[key]
if len(self._element_dispatchers) == 0:
self.enable_element_dispatching = False
def register_on_element_per_level(self, local_name, level, dispatcher, namespace=None):
"""Registers a dispatcher at a given level within the
XML tree of elements being built as well as for a
specific element.
The ``level``, an integer, is zero-based. So the root
element of the XML tree is 0 and its direct children
are at level 1.
The ``local_name`` is the local name of the element. This
element can be namespaced if you provide the ``namespace``
parameter.
The ``dispatcher`` is a callable object only taking
one parameter, a Element instance.
"""
self.enable_element_by_level_dispatching = True
self._element_level_dispatchers[(level, (namespace, local_name))] = dispatcher
def unregister_on_element_per_level(self, local_name, level, namespace=None):
"""Unregisters a dispatcher at a given level for a specific
element.
"""
key = (level, (namespace, local_name))
if key in self._element_level_dispatchers:
del self._element_level_dispatchers[key]
if len(self._element_level_dispatchers) == 0:
self.enable_element_by_level_dispatching = False
def register_by_path(self, path, dispatcher):
self.enable_dispatching_by_path = True
self._path_dispatchers[path] = dispatcher
def unregister_by_path(self, path):
if path in self._path_dispatchers:
del self._path_dispatchers[path]
if len(self._path_dispatchers) == 0:
self.enable_dispatching_by_path = False
def startElementNS(self, name, qname, attrs):
#print "%s: %f" % (name, time())
IncrementalHandler.startElementNS(self, name, qname, attrs)
if self.default_dispatcher_start_element:
self.default_dispatcher_start_element(self._current_el)
def endElementNS(self, name, qname):
        self._current_level -= 1
if not self._current_el:
return
current_element = self._current_el
parent_element = self._current_el.xml_parent
dispatched = False
if self.enable_element_dispatching:
pattern = (current_element.xml_ns, current_element.xml_name)
if pattern in self._element_dispatchers:
self._element_dispatchers[pattern](current_element)
dispatched = True
if not dispatched and self.default_dispatcher:
self.default_dispatcher(current_element)
self._current_el = parent_element
class DispatchParser(object):
def __init__(self, out=None, encoding=ENCODING):
self.parser = xs.make_parser()
self.parser.setFeature(xs.handler.feature_namespaces, True)
if not out:
out = StringIO.StringIO()
self.out = out
self.handler = DispatchHandler(self.out, encoding)
self.parser.setContentHandler(self.handler)
self.parser.setProperty(xs.handler.property_lexical_handler, self.handler)
def feed(self, chunk):
self.parser.feed(chunk)
def register_default(self, handler):
self.handler.register_default(handler)
def unregister_default(self):
self.handler.unregister_default()
def register_default_start_element(self, handler):
self.handler.register_default_start_element(handler)
def unregister_default_start_element(self):
self.handler.unregister_default_start_element()
def reset(self):
self.handler.reset()
self.parser.reset()
def disable_dispatching(self):
self.handler.disable_dispatching()
def enable_dispatching(self):
self.handler.enable_dispatching()
def register_at_level(self, level, dispatcher):
"""Registers a dispatcher at a given level within the
XML tree of elements being built.
The ``level``, an integer, is zero-based. So the root
element of the XML tree is 0 and its direct children
are at level 1.
The ``dispatcher`` is a callable object only taking
one parameter, a Element instance.
"""
self.handler.register_at_level(level, dispatcher)
def unregister_at_level(self, level):
"""Unregisters a dispatcher at a given level
"""
        self.handler.unregister_at_level(level)
def register_on_element(self, local_name, dispatcher, namespace=None):
"""Registers a dispatcher on a given element met during
the parsing.
The ``local_name`` is the local name of the element. This
element can be namespaced if you provide the ``namespace``
parameter.
The ``dispatcher`` is a callable object only taking
one parameter, a Element instance.
"""
self.handler.register_on_element(local_name, dispatcher, namespace)
def unregister_on_element(self, local_name, namespace=None):
"""Unregisters a dispatcher for a specific element.
"""
self.handler.unregister_on_element(local_name, namespace)
def register_on_element_per_level(self, local_name, level, dispatcher, namespace=None):
"""Registers a dispatcher at a given level within the
XML tree of elements being built as well as for a
specific element.
The ``level``, an integer, is zero-based. So the root
element of the XML tree is 0 and its direct children
are at level 1.
The ``local_name`` is the local name of the element. This
element can be namespaced if you provide the ``namespace``
parameter.
The ``dispatcher`` is a callable object only taking
one parameter, a Element instance.
"""
self.handler.register_on_element_per_level(local_name, level, dispatcher, namespace)
def unregister_on_element_per_level(self, local_name, level, namespace=None):
"""Unregisters a dispatcher at a given level for a specific
element.
"""
self.handler.unregister_on_element_per_level(local_name, level, namespace)
def register_by_path(self, path, dispatcher):
self.handler.register_by_path(path, dispatcher)
def unregister_by_path(self, path):
self.handler.unregister_by_path(path)<|fim▁end|> | self.__serialize_element(child, ns_map) |
<|file_name|>oc_pvc.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # pylint: disable=too-many-lines
# ___ ___ _ _ ___ ___ _ _____ ___ ___
# / __| __| \| | __| _ \ /_\_ _| __| \
# | (_ | _|| .` | _|| / / _ \| | | _|| |) |
# \___|___|_|\_|___|_|_\/_/_\_\_|_|___|___/_ _____
# | \ / _ \ | \| |/ _ \_ _| | __| \_ _|_ _|
# | |) | (_) | | .` | (_) || | | _|| |) | | | |
# |___/ \___/ |_|\_|\___/ |_| |___|___/___| |_|
'''
OpenShiftCLI class that wraps the oc commands in a subprocess
'''
# pylint: disable=too-many-lines
import atexit
import json
import os
import re
import shutil
import subprocess
import ruamel.yaml as yaml
#import yaml
#
## This is here because of a bug that causes yaml
## to incorrectly handle timezone info on timestamps
#def timestamp_constructor(_, node):
# '''return timestamps as strings'''
# return str(node.value)
#yaml.add_constructor(u'tag:yaml.org,2002:timestamp', timestamp_constructor)
class OpenShiftCLIError(Exception):
'''Exception class for openshiftcli'''
pass
# pylint: disable=too-few-public-methods
class OpenShiftCLI(object):
''' Class to wrap the command line tools '''
def __init__(self,
namespace,
kubeconfig='/etc/origin/master/admin.kubeconfig',
verbose=False,
all_namespaces=False):
''' Constructor for OpenshiftCLI '''
self.namespace = namespace
self.verbose = verbose
self.kubeconfig = kubeconfig
self.all_namespaces = all_namespaces
# Pylint allows only 5 arguments to be passed.
# pylint: disable=too-many-arguments
def _replace_content(self, resource, rname, content, force=False, sep='.'):
''' replace the current object with the content '''
res = self._get(resource, rname)
if not res['results']:
return res
fname = '/tmp/%s' % rname
yed = Yedit(fname, res['results'][0], separator=sep)
changes = []
for key, value in content.items():
changes.append(yed.put(key, value))
if any([change[0] for change in changes]):
yed.write()
atexit.register(Utils.cleanup, [fname])
return self._replace(fname, force)
return {'returncode': 0, 'updated': False}
def _replace(self, fname, force=False):
        '''replace the named resource using the given file'''
cmd = ['-n', self.namespace, 'replace', '-f', fname]
if force:
cmd.append('--force')
return self.openshift_cmd(cmd)
def _create_from_content(self, rname, content):
        '''create a resource from the given content'''
fname = '/tmp/%s' % rname
yed = Yedit(fname, content=content)
yed.write()
atexit.register(Utils.cleanup, [fname])
return self._create(fname)
def _create(self, fname):
        '''create a resource from a file'''
return self.openshift_cmd(['create', '-f', fname, '-n', self.namespace])
def _delete(self, resource, rname, selector=None):
        '''delete the named resource'''
cmd = ['delete', resource, rname, '-n', self.namespace]
if selector:
cmd.append('--selector=%s' % selector)
return self.openshift_cmd(cmd)
def _process(self, template_name, create=False, params=None, template_data=None):
        '''process a template, optionally creating the result'''
cmd = ['process', '-n', self.namespace]
if template_data:
cmd.extend(['-f', '-'])
else:
cmd.append(template_name)
if params:
param_str = ["%s=%s" % (key, value) for key, value in params.items()]
cmd.append('-v')
cmd.extend(param_str)
results = self.openshift_cmd(cmd, output=True, input_data=template_data)
if results['returncode'] != 0 or not create:
return results
fname = '/tmp/%s' % template_name
yed = Yedit(fname, results['results'])
yed.write()
atexit.register(Utils.cleanup, [fname])
return self.openshift_cmd(['-n', self.namespace, 'create', '-f', fname])
def _get(self, resource, rname=None, selector=None):
'''return a resource by name '''
cmd = ['get', resource]
if selector:
cmd.append('--selector=%s' % selector)
if self.all_namespaces:
cmd.extend(['--all-namespaces'])
elif self.namespace:
cmd.extend(['-n', self.namespace])
cmd.extend(['-o', 'json'])
if rname:
cmd.append(rname)
rval = self.openshift_cmd(cmd, output=True)
        # Ensure results are returned in an array
if rval.has_key('items'):
rval['results'] = rval['items']
elif not isinstance(rval['results'], list):
rval['results'] = [rval['results']]
return rval
def _schedulable(self, node=None, selector=None, schedulable=True):
        ''' perform oadm manage-node --schedulable '''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector=%s' % selector)
cmd.append('--schedulable=%s' % schedulable)
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
def _list_pods(self, node=None, selector=None, pod_selector=None):
        ''' perform oadm manage-node --list-pods '''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector=%s' % selector)
if pod_selector:
cmd.append('--pod-selector=%s' % pod_selector)
cmd.extend(['--list-pods', '-o', 'json'])
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
#pylint: disable=too-many-arguments
def _evacuate(self, node=None, selector=None, pod_selector=None, dry_run=False, grace_period=None, force=False):
''' perform oadm manage-node evacuate '''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector=%s' % selector)
if dry_run:
cmd.append('--dry-run')
if pod_selector:
cmd.append('--pod-selector=%s' % pod_selector)
if grace_period:
cmd.append('--grace-period=%s' % int(grace_period))
if force:
cmd.append('--force')
cmd.append('--evacuate')
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
def _import_image(self, url=None, name=None, tag=None):
''' perform image import '''
cmd = ['import-image']
image = '{0}'.format(name)
if tag:
image += ':{0}'.format(tag)
cmd.append(image)
if url:
cmd.append('--from={0}/{1}'.format(url, image))
cmd.append('-n{0}'.format(self.namespace))
cmd.append('--confirm')
return self.openshift_cmd(cmd)
#pylint: disable=too-many-arguments
def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
'''Base command for oc '''
cmds = []
if oadm:
cmds = ['/usr/bin/oadm']
else:
cmds = ['/usr/bin/oc']
cmds.extend(cmd)
rval = {}
results = ''
err = None
if self.verbose:
print ' '.join(cmds)
proc = subprocess.Popen(cmds,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env={'KUBECONFIG': self.kubeconfig})
stdout, stderr = proc.communicate(input_data)
rval = {"returncode": proc.returncode,
"results": results,
"cmd": ' '.join(cmds),
}
if proc.returncode == 0:
if output:
if output_type == 'json':
try:
rval['results'] = json.loads(stdout)
except ValueError as err:
if "No JSON object could be decoded" in err.message:
err = err.message
elif output_type == 'raw':
rval['results'] = stdout
if self.verbose:
print stdout
print stderr
if err:
rval.update({"err": err,
"stderr": stderr,
"stdout": stdout,
"cmd": cmds
})
else:
rval.update({"stderr": stderr,
"stdout": stdout,
"results": {},
})
return rval
class Utils(object):
''' utilities for openshiftcli modules '''
@staticmethod
def create_file(rname, data, ftype='yaml'):
''' create a file in tmp with name and contents'''
path = os.path.join('/tmp', rname)
with open(path, 'w') as fds:
if ftype == 'yaml':
fds.write(yaml.dump(data, Dumper=yaml.RoundTripDumper))
elif ftype == 'json':
fds.write(json.dumps(data))
else:
fds.write(data)
# Register cleanup when module is done
atexit.register(Utils.cleanup, [path])
return path
@staticmethod
def create_files_from_contents(content, content_type=None):
'''Turn an array of dict: filename, content into a files array'''
if not isinstance(content, list):
content = [content]
files = []
for item in content:
path = Utils.create_file(item['path'], item['data'], ftype=content_type)
files.append({'name': os.path.basename(path), 'path': path})
return files
@staticmethod
def cleanup(files):
'''Clean up on exit '''
for sfile in files:
if os.path.exists(sfile):
if os.path.isdir(sfile):
shutil.rmtree(sfile)
elif os.path.isfile(sfile):
os.remove(sfile)
@staticmethod
def exists(results, _name):
''' Check to see if the results include the name '''
if not results:
return False
if Utils.find_result(results, _name):
return True
return False
@staticmethod
def find_result(results, _name):
''' Find the specified result by name'''
rval = None
for result in results:
if result.has_key('metadata') and result['metadata']['name'] == _name:
rval = result
break
return rval
@staticmethod
def get_resource_file(sfile, sfile_type='yaml'):
''' return the service file '''
contents = None
with open(sfile) as sfd:
contents = sfd.read()
if sfile_type == 'yaml':
contents = yaml.load(contents, yaml.RoundTripLoader)
elif sfile_type == 'json':
contents = json.loads(contents)
return contents
# Disabling too-many-branches. This is a yaml dictionary comparison function
# pylint: disable=too-many-branches,too-many-return-statements,too-many-statements
@staticmethod
def check_def_equal(user_def, result_def, skip_keys=None, debug=False):
''' Given a user defined definition, compare it with the results given back by our query. '''
# Currently these values are autogenerated and we do not need to check them
skip = ['metadata', 'status']
if skip_keys:
skip.extend(skip_keys)
for key, value in result_def.items():
if key in skip:
continue
# Both are lists
if isinstance(value, list):
if not user_def.has_key(key):
if debug:
print 'User data does not have key [%s]' % key
print 'User data: %s' % user_def
return False
if not isinstance(user_def[key], list):
if debug:
print 'user_def[key] is not a list key=[%s] user_def[key]=%s' % (key, user_def[key])
return False
if len(user_def[key]) != len(value):
if debug:
print "List lengths are not equal."
print "key=[%s]: user_def[%s] != value[%s]" % (key, len(user_def[key]), len(value))
print "user_def: %s" % user_def[key]
print "value: %s" % value
return False
for values in zip(user_def[key], value):
if isinstance(values[0], dict) and isinstance(values[1], dict):
if debug:
print 'sending list - list'
print type(values[0])
print type(values[1])
result = Utils.check_def_equal(values[0], values[1], skip_keys=skip_keys, debug=debug)
if not result:
print 'list compare returned false'
return False
elif value != user_def[key]:
if debug:
print 'value should be identical'
print value
print user_def[key]
return False
# recurse on a dictionary
elif isinstance(value, dict):
if not user_def.has_key(key):
if debug:
print "user_def does not have key [%s]" % key
return False
if not isinstance(user_def[key], dict):
if debug:
print "dict returned false: not instance of dict"
return False
# before passing ensure keys match
api_values = set(value.keys()) - set(skip)
user_values = set(user_def[key].keys()) - set(skip)
if api_values != user_values:
if debug:
print "keys are not equal in dict"
print api_values
print user_values
return False
result = Utils.check_def_equal(user_def[key], value, skip_keys=skip_keys, debug=debug)
if not result:
if debug:
print "dict returned false"
print result
return False
# Verify each key, value pair is the same
else:
if not user_def.has_key(key) or value != user_def[key]:
if debug:
print "value not equal; user_def does not have key"
print key
print value
if user_def.has_key(key):
print user_def[key]
return False
if debug:
print 'returning true'
return True
class OpenShiftCLIConfig(object):
'''Generic Config'''
def __init__(self, rname, namespace, kubeconfig, options):
self.kubeconfig = kubeconfig
self.name = rname
self.namespace = namespace
self._options = options
@property
def config_options(self):
''' return config options '''
return self._options
def to_option_list(self):
'''return all options as a string'''
return self.stringify()
def stringify(self):
''' return the options hash as cli params in a string '''
rval = []
for key, data in self.config_options.items():
if data['include'] \
and (data['value'] or isinstance(data['value'], int)):
rval.append('--%s=%s' % (key.replace('_', '-'), data['value']))
return rval
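    # Illustrative (assuming an option entry of this shape):
    #   {'volume_capacity': {'value': '5G', 'include': True}}
    # stringifies to ['--volume-capacity=5G'].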
class YeditException(Exception):
''' Exception class for Yedit '''
pass
class Yedit(object):
''' Class to modify yaml files '''
re_valid_key = r"(((\[-?\d+\])|([0-9a-zA-Z%s/_-]+)).?)+$"
re_key = r"(?:\[(-?\d+)\])|([0-9a-zA-Z%s/_-]+)"
com_sep = set(['.', '#', '|', ':'])
# pylint: disable=too-many-arguments
def __init__(self, filename=None, content=None, content_type='yaml', separator='.', backup=False):
self.content = content
self._separator = separator
self.filename = filename
self.__yaml_dict = content
self.content_type = content_type
self.backup = backup
self.load(content_type=self.content_type)
if self.__yaml_dict == None:
self.__yaml_dict = {}
@property
def separator(self):
''' getter method for yaml_dict '''
return self._separator
    @separator.setter
    def separator(self, value):
        ''' setter method for separator '''
        self._separator = value
@property
def yaml_dict(self):
''' getter method for yaml_dict '''
return self.__yaml_dict
@yaml_dict.setter
def yaml_dict(self, value):
''' setter method for yaml_dict '''
self.__yaml_dict = value
@staticmethod
def parse_key(key, sep='.'):
'''parse the key allowing the appropriate separator'''
common_separators = list(Yedit.com_sep - set([sep]))
return re.findall(Yedit.re_key % ''.join(common_separators), key)
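    # Illustrative, with the default '.' separator:
    #   Yedit.parse_key('a.b[0].c') -> [('', 'a'), ('', 'b'), ('0', ''), ('', 'c')]
    # Each tuple is (list_index, dict_key) with the unused slot empty.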
@staticmethod
def valid_key(key, sep='.'):
'''validate the incoming key'''
common_separators = list(Yedit.com_sep - set([sep]))
if not re.match(Yedit.re_valid_key % ''.join(common_separators), key):
return False
return True
@staticmethod
def remove_entry(data, key, sep='.'):
''' remove data at location key '''
if key == '' and isinstance(data, dict):
data.clear()
return True
elif key == '' and isinstance(data, list):
del data[:]
return True
if not (key and Yedit.valid_key(key, sep)) and isinstance(data, (list, dict)):
return None
key_indexes = Yedit.parse_key(key, sep)
for arr_ind, dict_key in key_indexes[:-1]:
if dict_key and isinstance(data, dict):
data = data.get(dict_key, None)
elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1:
data = data[int(arr_ind)]
else:
return None
# process last index for remove
# expected list entry
if key_indexes[-1][0]:
if isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1:
del data[int(key_indexes[-1][0])]
return True
# expected dict entry
elif key_indexes[-1][1]:
if isinstance(data, dict):
del data[key_indexes[-1][1]]
return True
@staticmethod
def add_entry(data, key, item=None, sep='.'):
        ''' Add or set an item in a dictionary using key notation a.b.c
            d = {'a': {'b': 'c'}}
            key = a.b, item = 'x'
            leaves d as {'a': {'b': 'x'}}
        '''
if key == '':
pass
elif not (key and Yedit.valid_key(key, sep)) and isinstance(data, (list, dict)):
return None
key_indexes = Yedit.parse_key(key, sep)
for arr_ind, dict_key in key_indexes[:-1]:
if dict_key:
if isinstance(data, dict) and data.has_key(dict_key) and data[dict_key]:
data = data[dict_key]
continue
elif data and not isinstance(data, dict):
return None
data[dict_key] = {}
data = data[dict_key]
elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1:
data = data[int(arr_ind)]
else:
return None
if key == '':
data = item
# process last index for add
# expected list entry
elif key_indexes[-1][0] and isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1:
data[int(key_indexes[-1][0])] = item
# expected dict entry
elif key_indexes[-1][1] and isinstance(data, dict):
data[key_indexes[-1][1]] = item
return data
@staticmethod
def get_entry(data, key, sep='.'):
        ''' Get an item from a dictionary using key notation a.b.c
            d = {'a': {'b': 'c'}}
            key = a.b
            return c
        '''
if key == '':
pass
elif not (key and Yedit.valid_key(key, sep)) and isinstance(data, (list, dict)):
return None
key_indexes = Yedit.parse_key(key, sep)
for arr_ind, dict_key in key_indexes:
if dict_key and isinstance(data, dict):
data = data.get(dict_key, None)
elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1:
data = data[int(arr_ind)]
else:
return None
return data
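    # Illustrative: Yedit.get_entry({'a': {'b': {'c': 'd'}}}, 'a.b.c') -> 'd'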
def write(self):
''' write to file '''
if not self.filename:
raise YeditException('Please specify a filename.')
if self.backup and self.file_exists():
shutil.copy(self.filename, self.filename + '.orig')
tmp_filename = self.filename + '.yedit'
try:
with open(tmp_filename, 'w') as yfd:
# pylint: disable=no-member
if hasattr(self.yaml_dict, 'fa'):
self.yaml_dict.fa.set_block_style()
yfd.write(yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
except Exception as err:
raise YeditException(err.message)
os.rename(tmp_filename, self.filename)
return (True, self.yaml_dict)
def read(self):
''' read from file '''
# check if it exists
if self.filename == None or not self.file_exists():
return None
contents = None
with open(self.filename) as yfd:
contents = yfd.read()
return contents
def file_exists(self):
''' return whether file exists '''
if os.path.exists(self.filename):
return True
return False
def load(self, content_type='yaml'):
''' return yaml file '''
contents = self.read()
if not contents and not self.content:
return None
if self.content:
if isinstance(self.content, dict):
self.yaml_dict = self.content
return self.yaml_dict
elif isinstance(self.content, str):
contents = self.content
# check if it is yaml
try:
if content_type == 'yaml' and contents:
self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
# pylint: disable=no-member
if hasattr(self.yaml_dict, 'fa'):
self.yaml_dict.fa.set_block_style()
elif content_type == 'json' and contents:
self.yaml_dict = json.loads(contents)
except yaml.YAMLError as err:
# Error loading yaml or json
raise YeditException('Problem with loading yaml file. %s' % err)
return self.yaml_dict
def get(self, key):
''' get a specified key'''
try:
entry = Yedit.get_entry(self.yaml_dict, key, self.separator)
except KeyError as _:
entry = None
return entry
def pop(self, path, key_or_item):
''' remove a key, value pair from a dict or an item for a list'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError as _:
entry = None
if entry == None:
return (False, self.yaml_dict)
if isinstance(entry, dict):
# pylint: disable=no-member,maybe-no-member
if entry.has_key(key_or_item):
entry.pop(key_or_item)
return (True, self.yaml_dict)
return (False, self.yaml_dict)
elif isinstance(entry, list):
# pylint: disable=no-member,maybe-no-member
ind = None
try:
ind = entry.index(key_or_item)
except ValueError:
return (False, self.yaml_dict)
entry.pop(ind)
return (True, self.yaml_dict)
return (False, self.yaml_dict)
def delete(self, path):
''' remove path from a dict'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError as _:
entry = None
if entry == None:
return (False, self.yaml_dict)
result = Yedit.remove_entry(self.yaml_dict, path, self.separator)
if not result:
return (False, self.yaml_dict)
return (True, self.yaml_dict)
def exists(self, path, value):
''' check if value exists at path'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError as _:
entry = None
if isinstance(entry, list):
if value in entry:
return True
return False
elif isinstance(entry, dict):
if isinstance(value, dict):
rval = False
for key, val in value.items():
if entry[key] != val:
rval = False
break
else:
rval = True
return rval
return value in entry
return entry == value
def append(self, path, value):
'''append value to a list'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError as _:
entry = None
if entry is None:
self.put(path, [])
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
if not isinstance(entry, list):
return (False, self.yaml_dict)
# pylint: disable=no-member,maybe-no-member
entry.append(value)
return (True, self.yaml_dict)
# pylint: disable=too-many-arguments
def update(self, path, value, index=None, curr_value=None):
''' put path, value into a dict '''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError as _:
entry = None
if isinstance(entry, dict):
# pylint: disable=no-member,maybe-no-member
if not isinstance(value, dict):
raise YeditException('Cannot replace key, value entry in dict with non-dict type.' \
' value=[%s] [%s]' % (value, type(value)))
entry.update(value)
return (True, self.yaml_dict)
elif isinstance(entry, list):
# pylint: disable=no-member,maybe-no-member
ind = None
if curr_value:
try:
ind = entry.index(curr_value)
except ValueError:
return (False, self.yaml_dict)
elif index != None:
ind = index
if ind != None and entry[ind] != value:
entry[ind] = value
return (True, self.yaml_dict)
# see if it exists in the list
try:
ind = entry.index(value)
except ValueError:
# doesn't exist, append it
entry.append(value)
return (True, self.yaml_dict)
#already exists, return
if ind != None:
return (False, self.yaml_dict)
return (False, self.yaml_dict)
def put(self, path, value):
''' put path, value into a dict '''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError as _:
entry = None
if entry == value:
return (False, self.yaml_dict)
# deepcopy didn't work
tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), yaml.RoundTripLoader)
# pylint: disable=no-member
if hasattr(self.yaml_dict, 'fa'):
tmp_copy.fa.set_block_style()
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if not result:
return (False, self.yaml_dict)
self.yaml_dict = tmp_copy
return (True, self.yaml_dict)
def create(self, path, value):
''' create a yaml file '''
if not self.file_exists():
# deepcopy didn't work
tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), yaml.RoundTripLoader)
# pylint: disable=no-member
if hasattr(self.yaml_dict, 'fa'):
tmp_copy.fa.set_block_style()
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if result:
self.yaml_dict = tmp_copy
return (True, self.yaml_dict)
return (False, self.yaml_dict)
# pylint: disable=too-many-instance-attributes
class PersistentVolumeClaimConfig(object):
''' Handle pvc options '''
# pylint: disable=too-many-arguments
def __init__(self,
sname,
namespace,
kubeconfig,
access_modes=None,
vol_capacity='1G'):
''' constructor for handling pvc options '''
self.kubeconfig = kubeconfig
self.name = sname
self.namespace = namespace
self.access_modes = access_modes
self.vol_capacity = vol_capacity
self.data = {}
self.create_dict()
def create_dict(self):
        ''' build the persistent volume claim as a dict '''
# version
self.data['apiVersion'] = 'v1'
# kind
self.data['kind'] = 'PersistentVolumeClaim'
# metadata
self.data['metadata'] = {}
self.data['metadata']['name'] = self.name
# spec
self.data['spec'] = {}
self.data['spec']['accessModes'] = ['ReadWriteOnce']
if self.access_modes:
self.data['spec']['accessModes'] = self.access_modes
# storage capacity
self.data['spec']['resources'] = {}
self.data['spec']['resources']['requests'] = {}
self.data['spec']['resources']['requests']['storage'] = self.vol_capacity
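        # The assembled claim now resembles:
        #   {'apiVersion': 'v1', 'kind': 'PersistentVolumeClaim',
        #    'metadata': {'name': <name>},
        #    'spec': {'accessModes': [...],
        #             'resources': {'requests': {'storage': <capacity>}}}}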
# pylint: disable=too-many-instance-attributes,too-many-public-methods
class PersistentVolumeClaim(Yedit):
''' Class to wrap the oc command line tools '''
access_modes_path = "spec.accessModes"
volume_capacity_path = "spec.requests.storage"
volume_name_path = "spec.volumeName"
bound_path = "status.phase"
kind = 'PersistentVolumeClaim'
def __init__(self, content):
        '''PersistentVolumeClaim constructor'''
super(PersistentVolumeClaim, self).__init__(content=content)
self._access_modes = None
self._volume_capacity = None
self._volume_name = None
@property
def volume_name(self):
''' volume_name property '''
if self._volume_name == None:
self._volume_name = self.get_volume_name()
return self._volume_name
@volume_name.setter
def volume_name(self, data):
''' volume_name property setter'''
self._volume_name = data
@property
def access_modes(self):
''' access_modes property '''
if self._access_modes == None:
self._access_modes = self.get_access_modes()
return self._access_modes
@access_modes.setter
def access_modes(self, data):
''' access_modes property setter'''
self._access_modes = data
@property
def volume_capacity(self):
''' volume_capacity property '''
if self._volume_capacity == None:
self._volume_capacity = self.get_volume_capacity()
return self._volume_capacity
@volume_capacity.setter
def volume_capacity(self, data):
''' volume_capacity property setter'''
self._volume_capacity = data
def get_access_modes(self):
'''get access_modes'''
return self.get(PersistentVolumeClaim.access_modes_path) or []
def get_volume_capacity(self):
'''get volume_capacity'''
return self.get(PersistentVolumeClaim.volume_capacity_path) or []
def get_volume_name(self):
'''get volume_name'''
return self.get(PersistentVolumeClaim.volume_name_path) or []
    def is_bound(self):
        '''return whether volume is bound'''
        # status.phase stays 'Pending' until the claim binds, so compare explicitly
        return self.get(PersistentVolumeClaim.bound_path) == 'Bound'
#### ADD #####
def add_access_mode(self, inc_mode):
''' add an access_mode'''
if self.access_modes:
self.access_modes.append(inc_mode)<|fim▁hole|> return True
#### /ADD #####
#### Remove #####
def remove_access_mode(self, inc_mode):
''' remove an access_mode'''
try:
self.access_modes.remove(inc_mode)
except ValueError as _:
return False
return True
#### /REMOVE #####
#### UPDATE #####
def update_access_mode(self, inc_mode):
''' update an access_mode'''
try:
index = self.access_modes.index(inc_mode)
except ValueError as _:
return self.add_access_mode(inc_mode)
self.access_modes[index] = inc_mode
return True
#### /UPDATE #####
#### FIND ####
def find_access_mode(self, inc_mode):
        ''' find an access mode '''
index = None
try:
index = self.access_modes.index(inc_mode)
except ValueError as _:
return index
return index
# pylint: disable=too-many-instance-attributes
class OCPVC(OpenShiftCLI):
''' Class to wrap the oc command line tools '''
kind = 'pvc'
# pylint allows 5
# pylint: disable=too-many-arguments
def __init__(self,
config,
verbose=False):
''' Constructor for OCVolume '''
super(OCPVC, self).__init__(config.namespace, config.kubeconfig)
self.config = config
self.namespace = config.namespace
self._pvc = None
@property
def pvc(self):
''' property function pvc'''
if not self._pvc:
self.get()
return self._pvc
@pvc.setter
def pvc(self, data):
''' setter function for yedit var '''
self._pvc = data
def bound(self):
'''return whether the pvc is bound'''
if self.pvc.get_volume_name():
return True
return False
def exists(self):
''' return whether a pvc exists '''
if self.pvc:
return True
return False
def get(self):
'''return pvc information '''
result = self._get(self.kind, self.config.name)
if result['returncode'] == 0:
self.pvc = PersistentVolumeClaim(content=result['results'][0])
elif '\"%s\" not found' % self.config.name in result['stderr']:
result['returncode'] = 0
result['results'] = [{}]
return result
def delete(self):
'''delete the object'''
return self._delete(self.kind, self.config.name)
def create(self):
'''create the object'''
return self._create_from_content(self.config.name, self.config.data)
def update(self):
'''update the object'''
        # replace the existing claim with the configured definition
return self._replace_content(self.kind, self.config.name, self.config.data)
def needs_update(self):
''' verify an update is needed '''
if self.pvc.get_volume_name() or self.pvc.is_bound():
return False
skip = []
return not Utils.check_def_equal(self.config.data, self.pvc.yaml_dict, skip_keys=skip, debug=True)
#pylint: disable=too-many-branches
def main():
'''
ansible oc module for pvc
'''
module = AnsibleModule(
argument_spec=dict(
kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'),
state=dict(default='present', type='str',
choices=['present', 'absent', 'list']),
debug=dict(default=False, type='bool'),
name=dict(default=None, required=True, type='str'),
namespace=dict(default=None, required=True, type='str'),
volume_capacity=dict(default='1G', type='str'),
access_modes=dict(default=None, type='list'),
),
supports_check_mode=True,
)
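    # Illustrative playbook task for this module (names are examples only):
    #   - oc_pvc:
    #       name: mypvc
    #       namespace: default
    #       volume_capacity: 5G
    #       access_modes: ReadWriteMany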
pconfig = PersistentVolumeClaimConfig(module.params['name'],
module.params['namespace'],
module.params['kubeconfig'],
module.params['access_modes'],
module.params['volume_capacity'],
)
oc_pvc = OCPVC(pconfig, verbose=module.params['debug'])
state = module.params['state']
api_rval = oc_pvc.get()
#####
# Get
#####
if state == 'list':
module.exit_json(changed=False, results=api_rval['results'], state="list")
########
# Delete
########
if state == 'absent':
if oc_pvc.exists():
if module.check_mode:
module.exit_json(changed=False, msg='Would have performed a delete.')
api_rval = oc_pvc.delete()
module.exit_json(changed=True, results=api_rval, state="absent")
module.exit_json(changed=False, state="absent")
if state == 'present':
########
# Create
########
if not oc_pvc.exists():
if module.check_mode:
module.exit_json(changed=False, msg='Would have performed a create.')
# Create it here
api_rval = oc_pvc.create()
if api_rval['returncode'] != 0:
module.fail_json(msg=api_rval)
# return the created object
api_rval = oc_pvc.get()
if api_rval['returncode'] != 0:
module.fail_json(msg=api_rval)
module.exit_json(changed=True, results=api_rval, state="present")
########
# Update
########
if oc_pvc.pvc.is_bound() or oc_pvc.pvc.get_volume_name():
api_rval['msg'] = '##### - This volume is currently bound. Will not update - ####'
module.exit_json(changed=False, results=api_rval, state="present")
if oc_pvc.needs_update():
api_rval = oc_pvc.update()
if api_rval['returncode'] != 0:
module.fail_json(msg=api_rval)
# return the created object
api_rval = oc_pvc.get()
if api_rval['returncode'] != 0:
module.fail_json(msg=api_rval)
module.exit_json(changed=True, results=api_rval, state="present")
module.exit_json(changed=False, results=api_rval, state="present")
module.exit_json(failed=True,
changed=False,
results='Unknown state passed. %s' % state,
state="unknown")
# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
# import module snippets. This are required
from ansible.module_utils.basic import *
main()<|fim▁end|> | else:
self.put(PersistentVolumeClaim.access_modes_path, [inc_mode])
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>import setuptools
import os
# This will add the __version__ to the globals
with open("src/lsi/__init__.py") as f:
exec(f.read())
setuptools.setup(
name='lsi',
version=__version__,
author="Narrative Science",<|fim▁hole|> author_email="[email protected]",
url="https://github.com/NarrativeScience/lsi",
package_dir={'': 'src'},
packages=setuptools.find_packages('src'),
provides=setuptools.find_packages('src'),
install_requires=open('requirements.txt').readlines(),
entry_points={
'console_scripts': ['lsi = lsi.lsi:main']
}
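        # console_scripts wires an 'lsi' executable to main() in src/lsi/lsi.py.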
)<|fim▁end|> | |
<|file_name|>error.rs<|end_file_name|><|fim▁begin|>// Copyright 2015-2017 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! General error types for use in ethcore.
use util::*;
use io::*;
use header::BlockNumber;
use basic_types::LogBloom;
use client::Error as ClientError;
use ipc::binary::{BinaryConvertError, BinaryConvertable};
use types::block_import_error::BlockImportError;
use snapshot::Error as SnapshotError;
use engines::EngineError;
use ethkey::Error as EthkeyError;
use account_provider::SignError as AccountsError;
pub use types::executed::{ExecutionError, CallError};
#[derive(Debug, PartialEq, Clone, Copy)]
/// Errors concerning transaction processing.
pub enum TransactionError {
/// Transaction is already imported to the queue
AlreadyImported,
/// Transaction is not valid anymore (state already has higher nonce)
Old,
/// Transaction has too low fee
/// (there is already a transaction with the same sender-nonce but higher gas price)
TooCheapToReplace,
/// Transaction was not imported to the queue because limit has been reached.
LimitReached,
/// Transaction's gas price is below threshold.
InsufficientGasPrice {
/// Minimal expected gas price
minimal: U256,
/// Transaction gas price
got: U256,
},
/// Transaction's gas is below currently set minimal gas requirement.
InsufficientGas {
/// Minimal expected gas
minimal: U256,
/// Transaction gas
got: U256,
},
/// Sender doesn't have enough funds to pay for this transaction
InsufficientBalance {
/// Senders balance
balance: U256,
/// Transaction cost
cost: U256,
},
	/// Transaction's gas is higher than the current gas limit
GasLimitExceeded {
/// Current gas limit
limit: U256,
/// Declared transaction gas
got: U256,
},
/// Transaction's gas limit (aka gas) is invalid.
InvalidGasLimit(OutOfBounds<U256>),
/// Transaction sender is banned.
SenderBanned,
	/// Transaction recipient is banned.
RecipientBanned,
/// Contract creation code is banned.
CodeBanned,
/// Invalid network ID given.
InvalidNetworkId,
}
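// Illustrative construction, not taken from the original codebase: a gas-price
// rejection reports both bounds, e.g.
//   TransactionError::InsufficientGasPrice { minimal: required_min, got: offered }
// where both values are U256 amounts.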
impl fmt::Display for TransactionError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use self::TransactionError::*;
let msg = match *self {
AlreadyImported => "Already imported".into(),
Old => "No longer valid".into(),
TooCheapToReplace => "Gas price too low to replace".into(),
LimitReached => "Transaction limit reached".into(),
InsufficientGasPrice { minimal, got } =>
format!("Insufficient gas price. Min={}, Given={}", minimal, got),
InsufficientGas { minimal, got } =>
format!("Insufficient gas. Min={}, Given={}", minimal, got),
InsufficientBalance { balance, cost } =>
format!("Insufficient balance for transaction. Balance={}, Cost={}",
balance, cost),
GasLimitExceeded { limit, got } =>
format!("Gas limit exceeded. Limit={}, Given={}", limit, got),
InvalidGasLimit(ref err) => format!("Invalid gas limit. {}", err),
SenderBanned => "Sender is temporarily banned.".into(),
RecipientBanned => "Recipient is temporarily banned.".into(),
CodeBanned => "Contract code is temporarily banned.".into(),
InvalidNetworkId => "Transaction of this network ID is not allowed on this chain.".into(),
};
f.write_fmt(format_args!("Transaction error ({})", msg))
}
}
#[derive(Debug, PartialEq, Clone, Copy, Eq)]
/// Errors concerning block processing.
pub enum BlockError {
/// Block has too many uncles.
TooManyUncles(OutOfBounds<usize>),
/// Extra data is of an invalid length.
ExtraDataOutOfBounds(OutOfBounds<usize>),
/// Seal is incorrect format.
InvalidSealArity(Mismatch<usize>),
/// Block has too much gas used.
TooMuchGasUsed(OutOfBounds<U256>),
/// Uncles hash in header is invalid.
InvalidUnclesHash(Mismatch<H256>),
/// An uncle is from a generation too old.
UncleTooOld(OutOfBounds<BlockNumber>),
/// An uncle is from the same generation as the block.
UncleIsBrother(OutOfBounds<BlockNumber>),
/// An uncle is already in the chain.
UncleInChain(H256),
/// An uncle has a parent not in the chain.
UncleParentNotInChain(H256),
/// State root header field is invalid.
InvalidStateRoot(Mismatch<H256>),
/// Gas used header field is invalid.
InvalidGasUsed(Mismatch<U256>),
/// Transactions root header field is invalid.
InvalidTransactionsRoot(Mismatch<H256>),
	/// Difficulty is out of range; this can be used as a looser error prior to getting a definitive
/// value for difficulty. This error needs only provide bounds of which it is out.
DifficultyOutOfBounds(OutOfBounds<U256>),
/// Difficulty header field is invalid; this is a strong error used after getting a definitive
/// value for difficulty (which is provided).
InvalidDifficulty(Mismatch<U256>),
/// Seal element of type H256 (max_hash for Ethash, but could be something else for
/// other seal engines) is out of bounds.
MismatchedH256SealElement(Mismatch<H256>),
/// Proof-of-work aspect of seal, which we assume is a 256-bit value, is invalid.
InvalidProofOfWork(OutOfBounds<U256>),
/// Some low-level aspect of the seal is incorrect.
InvalidSeal,
/// Gas limit header field is invalid.
InvalidGasLimit(OutOfBounds<U256>),
/// Receipts trie root header field is invalid.
InvalidReceiptsRoot(Mismatch<H256>),
/// Timestamp header field is invalid.
InvalidTimestamp(OutOfBounds<u64>),
/// Log bloom header field is invalid.
InvalidLogBloom(Mismatch<LogBloom>),
/// Parent hash field of header is invalid; this is an invalid error indicating a logic flaw in the codebase.
/// TODO: remove and favour an assert!/panic!.
InvalidParentHash(Mismatch<H256>),
/// Number field of header is invalid.
InvalidNumber(Mismatch<BlockNumber>),
/// Block number isn't sensible.
RidiculousNumber(OutOfBounds<BlockNumber>),
/// Too many transactions from a particular address.
TooManyTransactions(Address),
/// Parent given is unknown.
UnknownParent(H256),
/// Uncle parent given is unknown.
UnknownUncleParent(H256),
/// No transition to epoch number.
UnknownEpochTransition(u64),
}
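// Illustrative sketch (assumes util's OutOfBounds { min, max, found } layout):
//   BlockError::InvalidGasLimit(OutOfBounds { min: Some(min_gas), max: None, found: header_gas })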
impl fmt::Display for BlockError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use self::BlockError::*;
let msg = match *self {
TooManyUncles(ref oob) => format!("Block has too many uncles. {}", oob),
ExtraDataOutOfBounds(ref oob) => format!("Extra block data too long. {}", oob),
InvalidSealArity(ref mis) => format!("Block seal in incorrect format: {}", mis),
TooMuchGasUsed(ref oob) => format!("Block has too much gas used. {}", oob),
InvalidUnclesHash(ref mis) => format!("Block has invalid uncles hash: {}", mis),
UncleTooOld(ref oob) => format!("Uncle block is too old. {}", oob),
UncleIsBrother(ref oob) => format!("Uncle from same generation as block. {}", oob),
UncleInChain(ref hash) => format!("Uncle {} already in chain", hash),
UncleParentNotInChain(ref hash) => format!("Uncle {} has a parent not in the chain", hash),
InvalidStateRoot(ref mis) => format!("Invalid state root in header: {}", mis),
InvalidGasUsed(ref mis) => format!("Invalid gas used in header: {}", mis),
InvalidTransactionsRoot(ref mis) => format!("Invalid transactions root in header: {}", mis),
DifficultyOutOfBounds(ref oob) => format!("Invalid block difficulty: {}", oob),
InvalidDifficulty(ref mis) => format!("Invalid block difficulty: {}", mis),
			MismatchedH256SealElement(ref mis) => format!("Seal element mismatch: {}", mis),
InvalidProofOfWork(ref oob) => format!("Block has invalid PoW: {}", oob),
InvalidSeal => "Block has invalid seal.".into(),
InvalidGasLimit(ref oob) => format!("Invalid gas limit: {}", oob),
InvalidReceiptsRoot(ref mis) => format!("Invalid receipts trie root in header: {}", mis),
InvalidTimestamp(ref oob) => format!("Invalid timestamp in header: {}", oob),
			InvalidLogBloom(ref mis) => format!("Invalid log bloom in header: {}", mis),
InvalidParentHash(ref mis) => format!("Invalid parent hash: {}", mis),
InvalidNumber(ref mis) => format!("Invalid number in header: {}", mis),
RidiculousNumber(ref oob) => format!("Implausible block number. {}", oob),
UnknownParent(ref hash) => format!("Unknown parent: {}", hash),
UnknownUncleParent(ref hash) => format!("Unknown uncle parent: {}", hash),
UnknownEpochTransition(ref num) => format!("Unknown transition to epoch number: {}", num),
TooManyTransactions(ref address) => format!("Too many transactions from: {}", address),
};
f.write_fmt(format_args!("Block error ({})", msg))
}
}
#[derive(Debug, Clone, Copy, PartialEq)]
/// Import to the block queue result
pub enum ImportError {
/// Already in the block chain.
AlreadyInChain,
/// Already in the block queue.
AlreadyQueued,
/// Already marked as bad from a previous import (could mean parent is bad).
KnownBad,
}
impl fmt::Display for ImportError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let msg = match *self {
ImportError::AlreadyInChain => "block already in chain",
ImportError::AlreadyQueued => "block already in the block queue",
ImportError::KnownBad => "block known to be bad",
};
f.write_fmt(format_args!("Block import error ({})", msg))
}
}
#[derive(Debug)]
/// General error type which should be capable of representing all errors in ethcore.
pub enum Error {
/// Client configuration error.
Client(ClientError),
/// Error concerning a utility.
Util(UtilError),
/// Error concerning block processing.
Block(BlockError),
/// Unknown engine given.
UnknownEngineName(String),
/// Error concerning EVM code execution.
Execution(ExecutionError),
/// Error concerning transaction processing.
Transaction(TransactionError),
/// Error concerning block import.
Import(ImportError),
/// PoW hash is invalid or out of date.
PowHashInvalid,
	/// The value of the nonce or mix-hash is invalid.
PowInvalid,
/// Error concerning TrieDBs
Trie(TrieError),
/// Io crate error.
Io(IoError),
/// Standard io error.
StdIo(::std::io::Error),
/// Snappy error.
Snappy(::util::snappy::InvalidInput),
/// Snapshot error.
Snapshot(SnapshotError),
/// Consensus vote error.
Engine(EngineError),
/// Ethkey error.
Ethkey(EthkeyError),
/// Account Provider error.
AccountProvider(AccountsError),
}
impl fmt::Display for Error {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Error::Client(ref err) => err.fmt(f),
Error::Util(ref err) => err.fmt(f),
Error::Io(ref err) => err.fmt(f),
Error::Block(ref err) => err.fmt(f),
Error::Execution(ref err) => err.fmt(f),
Error::Transaction(ref err) => err.fmt(f),
Error::Import(ref err) => err.fmt(f),
Error::UnknownEngineName(ref name) =>
f.write_fmt(format_args!("Unknown engine name ({})", name)),
Error::PowHashInvalid => f.write_str("Invalid or out of date PoW hash."),
			Error::PowInvalid => f.write_str("Invalid nonce or mix-hash"),
Error::Trie(ref err) => err.fmt(f),
Error::StdIo(ref err) => err.fmt(f),
Error::Snappy(ref err) => err.fmt(f),
Error::Snapshot(ref err) => err.fmt(f),
Error::Engine(ref err) => err.fmt(f),
Error::Ethkey(ref err) => err.fmt(f),
Error::AccountProvider(ref err) => err.fmt(f),
}
}
}
/// Result of import block operation.
pub type ImportResult = Result<H256, Error>;
impl From<ClientError> for Error {
fn from(err: ClientError) -> Error {
match err {
ClientError::Trie(err) => Error::Trie(err),
_ => Error::Client(err)
}
}
}
impl From<TransactionError> for Error {
fn from(err: TransactionError) -> Error {
Error::Transaction(err)
}
}
impl From<ImportError> for Error {
fn from(err: ImportError) -> Error {
Error::Import(err)
}
}
impl From<BlockError> for Error {
fn from(err: BlockError) -> Error {
Error::Block(err)
}
}
impl From<ExecutionError> for Error {
fn from(err: ExecutionError) -> Error {
Error::Execution(err)
}
}
impl From<::rlp::DecoderError> for Error {
fn from(err: ::rlp::DecoderError) -> Error {
Error::Util(UtilError::Decoder(err))
}
}
impl From<UtilError> for Error {
fn from(err: UtilError) -> Error {
Error::Util(err)
}
}
impl From<IoError> for Error {
fn from(err: IoError) -> Error {
Error::Io(err)
}<|fim▁hole|>impl From<TrieError> for Error {
fn from(err: TrieError) -> Error {
Error::Trie(err)
}
}
impl From<::std::io::Error> for Error {
fn from(err: ::std::io::Error) -> Error {
Error::StdIo(err)
}
}
impl From<BlockImportError> for Error {
fn from(err: BlockImportError) -> Error {
match err {
BlockImportError::Block(e) => Error::Block(e),
BlockImportError::Import(e) => Error::Import(e),
BlockImportError::Other(s) => Error::Util(UtilError::SimpleString(s)),
}
}
}
impl From<snappy::InvalidInput> for Error {
fn from(err: snappy::InvalidInput) -> Error {
Error::Snappy(err)
}
}
impl From<SnapshotError> for Error {
fn from(err: SnapshotError) -> Error {
match err {
SnapshotError::Io(err) => Error::StdIo(err),
SnapshotError::Trie(err) => Error::Trie(err),
SnapshotError::Decoder(err) => err.into(),
other => Error::Snapshot(other),
}
}
}
impl From<EngineError> for Error {
fn from(err: EngineError) -> Error {
Error::Engine(err)
}
}
impl From<EthkeyError> for Error {
fn from(err: EthkeyError) -> Error {
Error::Ethkey(err)
}
}
impl From<AccountsError> for Error {
fn from(err: AccountsError) -> Error {
Error::AccountProvider(err)
}
}
impl<E> From<Box<E>> for Error where Error: From<E> {
fn from(err: Box<E>) -> Error {
Error::from(*err)
}
}
binary_fixed_size!(BlockError);
binary_fixed_size!(ImportError);
binary_fixed_size!(TransactionError);
// TODO: uncomment below once https://github.com/rust-lang/rust/issues/27336 sorted.
/*#![feature(concat_idents)]
macro_rules! assimilate {
($name:ident) => (
impl From<concat_idents!($name, Error)> for Error {
fn from(err: concat_idents!($name, Error)) -> Error {
Error:: $name (err)
}
}
)
}
assimilate!(FromHex);
assimilate!(BaseData);*/<|fim▁end|> | }
|
<|file_name|>video-works.server.models.js<|end_file_name|><|fim▁begin|>'use strict';
var mongoose = require('mongoose'),
Schema = mongoose.Schema;
var videoWorksSchema = new Schema ({
title: {
type: String
},
directedBy: {
type: [String]
},
editedBy: {
type: [String]
},
cast: {
type: [String]
},
videoUrl: {
type: String
},
copyright: {
type: String
},
workInfo: {
type: String
},
coverImageUrl: {
type: String
},
created: {
type: Date,
default: Date.now
}<|fim▁hole|>mongoose.model('videoWorks', videoWorksSchema);<|fim▁end|> |
});
|
<|file_name|>bala.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
# Pilas engine - A video game framework.
#
# Copyright 2010 - Hugo Ruscitti
# License: LGPLv3 (see http://www.gnu.org/licenses/lgpl.html)
#
# Website - http://www.pilas-engine.com.ar
from pilasengine.actores.actor import Actor
class Bala(Actor):
""" Representa una bala que va en línea recta. """
def __init__(self, pilas, x=0, y=0, rotacion=0, velocidad_maxima=9,
angulo_de_movimiento=90):
"""
        Builds the Bala (bullet).
        :param x: X position of the projectile.
        :param y: Y position of the projectile.
        :param rotacion: Initial rotation of the projectile.
        :param velocidad_maxima: Maximum speed the projectile will reach.
        :param angulo_de_movimiento: Angle along which the Actor will move.
"""
super(Bala, self).__init__(pilas=pilas, x=x, y=y)
self.imagen = pilas.imagenes.cargar('disparos/bola_amarilla.png')
self.rotacion = rotacion
self.radio_de_colision = 5<|fim▁hole|> self.hacer(pilas.comportamientos.Proyectil,
velocidad_maxima=velocidad_maxima,
aceleracion=1,
angulo_de_movimiento=angulo_de_movimiento,
gravedad=0)
self.aprender(self.pilas.habilidades.EliminarseSiSaleDePantalla)
self.cuando_se_elimina = None
def eliminar(self):
if self.cuando_se_elimina:
self.cuando_se_elimina(self)
super(Bala, self).eliminar()<|fim▁end|> | |
<|file_name|>tgen-dram-ctrl.py<|end_file_name|><|fim▁begin|># Copyright (c) 2012 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#<|fim▁hole|># Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Andreas Hansson
import m5
from m5.objects import *
# both traffic generator and communication monitor are only available
# if we have protobuf support, so potentially skip this test
require_sim_object("TrafficGen")
require_sim_object("CommMonitor")
# even if this is only a traffic generator, call it cpu to make sure
# the scripts are happy
cpu = TrafficGen(config_file = "tests/quick/se/70.tgen/tgen-dram-ctrl.cfg")
# system simulated
system = System(cpu = cpu, physmem = DDR3_1600_x64(),
membus = NoncoherentBus(width = 16),
clk_domain = SrcClockDomain(clock = '1GHz',
voltage_domain =
VoltageDomain()))
# add a communication monitor
system.monitor = CommMonitor()
# connect the traffic generator to the bus via a communication monitor
system.cpu.port = system.monitor.slave
system.monitor.master = system.membus.slave
# connect the system port even if it is not used in this example
system.system_port = system.membus.slave
# connect memory to the membus
system.physmem.port = system.membus.master
# -----------------------
# run simulation
# -----------------------
root = Root(full_system = False, system = system)
root.system.mem_mode = 'timing'<|fim▁end|> | |
<|file_name|>managers.py<|end_file_name|><|fim▁begin|>import re
import random
import hashlib
from django.db import models
SHA1_RE = re.compile('^[a-f0-9]{40}$')
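# Illustrative note (not in the original module): SHA1_RE matches exactly the
# 40 lowercase hex characters of a SHA1 hexdigest, e.g.
#   bool(SHA1_RE.search('a' * 40))   # True
#   bool(SHA1_RE.search('A' * 40))   # False (uppercase is rejected)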
class RegistrationManager(models.Manager):
"""Custom manager for the ``RegistrationProfile`` model.
The methods defined here provide shortcuts for account creation
and activation (including generation and emailing of activation
keys), and for cleaning out expired inactive accounts.<|fim▁hole|> """Validate an activation key and activate the corresponding
``User`` if valid.
If the key is valid and has not expired, return the ``User``
after activating.
If the key is not valid or has expired, return ``False``.
If the key is valid but the ``User`` is already active,
return ``False``.
"""
# Make sure the key we're trying conforms to the pattern of a
# SHA1 hash; if it doesn't, no point trying to look it up in
# the database.
if SHA1_RE.search(activation_key):
try:
profile = self.get(activation_key=activation_key)
except self.model.DoesNotExist:
                return False
            return profile.activate()
        return False
def create_profile(self, user):
"""Create a ``RegistrationProfile`` for a given ``User``, and return
the ``RegistrationProfile``.
The activation key for the ``RegistrationProfile`` will be a SHA1 hash,
generated from a combination of the ``User``'s username and a random
salt.
"""
salt = hashlib.sha1(str(random.random())).hexdigest()[:5]
username = user.username
if isinstance(username, unicode):
username = username.encode('utf-8')
activation_key = hashlib.sha1(salt+username).hexdigest()
return self.create(user=user, activation_key=activation_key)<|fim▁end|> | """
def activate_user(self, activation_key): |
<|file_name|>tcplog_test.go<|end_file_name|><|fim▁begin|>package tcplog
import (
"net"
"strings"
"sync"
"testing"
"github.com/stretchr/testify/assert"
"github.com/vektra/cypress"
"github.com/vektra/neko"
)
type TestFormatter struct{}
func (tf *TestFormatter) Format(m *cypress.Message) ([]byte, error) {
return []byte(m.KVString()), nil
}
func TestRead(t *testing.T) {
n := neko.Start(t)
var l *Logger
n.Setup(func() {
l = NewLogger("", false, &TestFormatter{})
})
n.It("reads a byte slice", func() {
ok := l.Read([]byte("This is a long line"))
assert.NoError(t, ok)
})
n.It("reads a string", func() {
ok := l.Read("This is a long line")
assert.NoError(t, ok)
})
n.It("reads a cypress.Message", func() {
message := NewMessage(t)
ok := l.Read(message)
assert.NoError(t, ok)
})
n.It("does not read an int", func() {
ok := l.Read(1)
assert.Error(t, ok)
})
n.Meow()
}
func TestWrite(t *testing.T) {
n := neko.Start(t)
var (
l *Logger
line = []byte("This is a log line")
)
n.Setup(func() {
l = NewLogger("", false, &TestFormatter{})
})
n.It("adds a log line to the pump", func() {
l.write(line)
select {
case pumpLine := <-l.Pump:
assert.Equal(t, line, pumpLine)
var zero uint64 = 0
assert.Equal(t, zero, l.PumpDropped)
default:
t.Fail()
}
})
n.It("adds an error line to the pump if lines were dropped", func() {
l.PumpDropped = 1
l.write(line)
select {
case <-l.Pump:
expected := "The tcplog pump dropped 1 log lines"
actual := <-l.Pump
assert.True(t, strings.Index(string(actual), expected) != -1)
var zero uint64 = 0
assert.Equal(t, zero, l.PumpDropped)
default:
t.Fail()
}
})
n.It("does not add a log line and increments dropped counter if pump is full ", func() {
l.Pump = make(chan []byte, 0)
l.write(line)
select {
case <-l.Pump:
t.Fail()
default:
var one uint64 = 1
<|fim▁hole|>
n.Meow()
}
func TestDial(t *testing.T) {
s := NewTcpServer()
go s.Run("127.0.0.1")
l := NewLogger(<-s.Address, false, &TestFormatter{})
conn, _ := l.dial()
_, ok := conn.(net.Conn)
defer conn.Close()
assert.True(t, ok, "returns a connection")
}
func TestSendLogs(t *testing.T) {
n := neko.Start(t)
var (
s *TcpServer
l *Logger
line = []byte("This is a log line")
wg sync.WaitGroup
)
n.Setup(func() {
s = NewTcpServer()
wg.Add(1)
go func() {
defer wg.Done()
s.Run("127.0.0.1")
}()
l = NewLogger(<-s.Address, false, &TestFormatter{})
wg.Add(1)
go func() {
defer wg.Done()
l.sendLogs()
}()
})
n.It("sends line from pipe to tcp server", func() {
l.Pump <- line
close(l.Pump)
wg.Wait()
select {
case message := <-s.Messages:
assert.Equal(t, string(line), string(message))
default:
t.Fail()
}
})
n.Meow()
}<|fim▁end|> | assert.Equal(t, one, l.PumpDropped)
}
}) |
<|file_name|>model_resource_groups.go<|end_file_name|><|fim▁begin|>/*
* CLOUD API
*
* An enterprise-grade Infrastructure is provided as a Service (IaaS) solution that can be managed through a browser-based \"Data Center Designer\" (DCD) tool or via an easy to use API. The API allows you to perform a variety of management tasks such as spinning up additional servers, adding volumes, adjusting networking, and so forth. It is designed to allow users to leverage the same power and flexibility found within the DCD visual tool. Both tools are consistent with their concepts and lend well to making the experience smooth and intuitive.
*
* API version: 5.0
*/
// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT.
package ionossdk
import (
"encoding/json"
)
// ResourceGroups Resources assigned to this group.
type ResourceGroups struct {
// The resource's unique identifier
Id *string `json:"id,omitempty"`
// The type of the resource
Type *Type `json:"type,omitempty"`
// URL to the object representation (absolute path)
Href *string `json:"href,omitempty"`
// Array of items in that collection
Items *[]Resource `json:"items,omitempty"`
}
// GetId returns the Id field value
// If the value is explicit nil, the zero value for string will be returned
func (o *ResourceGroups) GetId() *string {
if o == nil {
return nil
}
return o.Id
}
// GetIdOk returns a tuple with the Id field value
// and a boolean to check if the value has been set.
// NOTE: If the value is an explicit nil, `nil, true` will be returned
func (o *ResourceGroups) GetIdOk() (*string, bool) {
if o == nil {
return nil, false
}
return o.Id, true
}
// SetId sets field value
func (o *ResourceGroups) SetId(v string) {
o.Id = &v
}
// HasId returns a boolean if a field has been set.
func (o *ResourceGroups) HasId() bool {
if o != nil && o.Id != nil {
return true
}
return false
}
// GetType returns the Type field value
// If the value is explicit nil, the zero value for Type will be returned
func (o *ResourceGroups) GetType() *Type {
if o == nil {
return nil
}
return o.Type
}
// GetTypeOk returns a tuple with the Type field value
// and a boolean to check if the value has been set.
// NOTE: If the value is an explicit nil, `nil, true` will be returned
func (o *ResourceGroups) GetTypeOk() (*Type, bool) {
if o == nil {
return nil, false
}
return o.Type, true
}
// SetType sets field value
func (o *ResourceGroups) SetType(v Type) {
o.Type = &v
}
// HasType returns a boolean if a field has been set.
func (o *ResourceGroups) HasType() bool {
if o != nil && o.Type != nil {
return true
}
return false
}
// GetHref returns the Href field value
// If the value is explicit nil, the zero value for string will be returned
func (o *ResourceGroups) GetHref() *string {
if o == nil {
return nil
}
return o.Href
}
// GetHrefOk returns a tuple with the Href field value
// and a boolean to check if the value has been set.
// NOTE: If the value is an explicit nil, `nil, true` will be returned
func (o *ResourceGroups) GetHrefOk() (*string, bool) {
if o == nil {
return nil, false
}
return o.Href, true
}
// SetHref sets field value
func (o *ResourceGroups) SetHref(v string) {
o.Href = &v
}
// HasHref returns a boolean if a field has been set.
func (o *ResourceGroups) HasHref() bool {
if o != nil && o.Href != nil {
return true
}
return false
}
// GetItems returns the Items field value
// If the value is explicit nil, the zero value for []Resource will be returned
func (o *ResourceGroups) GetItems() *[]Resource {
if o == nil {
return nil
}
return o.Items
}
// GetItemsOk returns a tuple with the Items field value
// and a boolean to check if the value has been set.
// NOTE: If the value is an explicit nil, `nil, true` will be returned
func (o *ResourceGroups) GetItemsOk() (*[]Resource, bool) {
if o == nil {
return nil, false
}
return o.Items, true
}
// SetItems sets field value
func (o *ResourceGroups) SetItems(v []Resource) {
o.Items = &v
}
// HasItems returns a boolean if a field has been set.
func (o *ResourceGroups) HasItems() bool {
if o != nil && o.Items != nil {
return true
}
return false
}
func (o ResourceGroups) MarshalJSON() ([]byte, error) {<|fim▁hole|> toSerialize["id"] = o.Id
}
if o.Type != nil {
toSerialize["type"] = o.Type
}
if o.Href != nil {
toSerialize["href"] = o.Href
}
if o.Items != nil {
toSerialize["items"] = o.Items
}
return json.Marshal(toSerialize)
}
type NullableResourceGroups struct {
value *ResourceGroups
isSet bool
}
func (v NullableResourceGroups) Get() *ResourceGroups {
return v.value
}
func (v *NullableResourceGroups) Set(val *ResourceGroups) {
v.value = val
v.isSet = true
}
func (v NullableResourceGroups) IsSet() bool {
return v.isSet
}
func (v *NullableResourceGroups) Unset() {
v.value = nil
v.isSet = false
}
func NewNullableResourceGroups(val *ResourceGroups) *NullableResourceGroups {
return &NullableResourceGroups{value: val, isSet: true}
}
func (v NullableResourceGroups) MarshalJSON() ([]byte, error) {
return json.Marshal(v.value)
}
func (v *NullableResourceGroups) UnmarshalJSON(src []byte) error {
v.isSet = true
return json.Unmarshal(src, &v.value)
}<|fim▁end|> | toSerialize := map[string]interface{}{}
if o.Id != nil { |
<|file_name|>jsdocTemplateClass.ts<|end_file_name|><|fim▁begin|>// @allowJs: true
// @checkJs: true<|fim▁hole|>// @Filename: templateTagOnClasses.js
/**
* @template T
* @typedef {(t: T) => T} Id
*/
/** @template T */
class Foo {
/** @typedef {(t: T) => T} Id2 */
/** @param {T} x */
constructor (x) {
this.a = x
}
/**
*
* @param {T} x
* @param {Id<T>} y
* @param {Id2} alpha
* @return {T}
*/
foo(x, y, alpha) {
return alpha(y(x))
}
}
var f = new Foo(1)
var g = new Foo(false)
f.a = g.a<|fim▁end|> | // @noEmit: true |
<|file_name|>sync-colorselector.js<|end_file_name|><|fim▁begin|>$(function() {
// column select
dw.backend.on('sync-option:base-color', sync);
function sync(args) {
var chart = args.chart,
vis = args.vis,
theme_id = chart.get('theme'),
labels = getLabels(),
$el = $('#'+args.key),
$picker = $('.base-color-picker', $el);
if (dw.theme(theme_id)) themesAreReady();
else dw.backend.one('theme-loaded', themesAreReady);
function themesAreReady() {
var theme = dw.theme(theme_id);
if (!args.option.hideBaseColorPicker) initBaseColorPicker();
if (!args.option.hideCustomColorSelector) initCustomColorSelector();
/*
* initializes the base color dropdown
*/
function initBaseColorPicker() {
var curColor = chart.get('metadata.visualize.'+args.key, 0);
if (!_.isString(curColor)) curColor = theme.colors.palette[curColor];
// update base color picker
$picker
.css('background', curColor)
.click(function() {
$picker.colorselector({
color: curColor,
palette: [].concat(theme.colors.palette, theme.colors.secondary),
change: baseColorChanged
});
});
function baseColorChanged(color) {
$picker.css('background', color);
var palIndex = theme.colors.palette.join(',')
.toLowerCase()
.split(',')
.indexOf(color);
chart.set(
'metadata.visualize.'+args.key,
palIndex < 0 ? color : palIndex
);
curColor = color;
}
}
/*
* initializes the custom color dialog
*/
function initCustomColorSelector() {
var labels = getLabels(),
sel = chart.get('metadata.visualize.custom-colors', {}),
$head = $('.custom-color-selector-head', $el),
$body = $('.custom-color-selector-body', $el),
$customColorBtn = $('.custom', $head),
$labelUl = $('.dataseries', $body),
$resetAll = $('.reset-all-colors', $body);
if (_.isEmpty(labels)) {
$head.hide();
return;
}
$head.show();
$customColorBtn.click(function(e) {
e.preventDefault();
$body.toggle();
$customColorBtn.toggleClass('active');
});
$resetAll.click(resetAllColors);
// populate custom color selector
$.each(labels, addLabelToList);
$('.select-all', $body).click(function() {
$('li', $labelUl).addClass('selected');
customColorSelectSeries();
});
$('.select-none', $body).click(function() {
$('li', $labelUl).removeClass('selected');
customColorSelectSeries();
});
$('.select-invert', $body).click(function() {
$('li', $labelUl).toggleClass('selected');
customColorSelectSeries();
});
function addLabelToList(i, lbl) {
var s = lbl;
if (_.isArray(lbl)) {
s = lbl[0];
lbl = lbl[1];
}
var li = $('<li data-series="'+s+'"></li>')
.append('<div class="color">×</div><label>'+lbl+'</label>')
.appendTo($labelUl)
.click(click);
if (sel[s]) {
$('.color', li).html('').css('background', sel[s]);
li.data('color', sel[s]);
}
function click(e) {
if (!e.shiftKey) $('li', $labelUl).removeClass('selected');
if (e.shiftKey && li.hasClass('selected')) li.removeClass('selected');
else li.addClass('selected');
customColorSelectSeries();
if (e.shiftKey) { // clear selection
if (window.getSelection) {
if (window.getSelection().empty) { // Chrome
window.getSelection().empty();
} else if (window.getSelection().removeAllRanges) { // Firefox
window.getSelection().removeAllRanges();
}
} else if (document.selection) { // IE?
document.selection.empty();
}
}
}
}
// called whenever the user selects a new series
function customColorSelectSeries() {
var li = $('li.selected', $labelUl),
$colPicker = $('.color-picker', $body),
$reset = $('.reset-color', $body);
if (li.length > 0) {
$('.info', $body).hide();<|fim▁hole|> $colPicker.click(function() {
$colPicker.colorselector({
color: li.data('color'),
palette: [].concat(theme.colors.palette, theme.colors.secondary),
change: function(color) {
$colPicker.css('background', color);
update(color);
}
});
}).css('background', li.data('color') || '#fff');
$reset.off('click').on('click', reset);
} else {
$('.info', $body).show();
$('.select', $body).hide();
}
// set a new color and save
function update(color) {
var sel = $.extend({}, chart.get('metadata.visualize.custom-colors', {}));
$('.color', li)
.css('background', color)
.html('');
li.data('color', color);
li.each(function(i, el) {
sel[$(el).data('series')] = color;
});
chart.set('metadata.visualize.custom-colors', sel);
}
// reset selected colors and save
function reset() {
var sel = $.extend({}, chart.get('metadata.visualize.custom-colors', {}));
li.data('color', undefined);
$('.color', li)
.css('background', '')
.html('×');
li.each(function(i, li) {
sel[$(li).data('series')] = '';
});
chart.set('metadata.visualize.custom-colors', sel);
$colPicker.css('background', '#fff');
}
}
function resetAllColors() {
$('li .color', $labelUl).html('×').css('background', '');
$('li', $labelUl).data('color', undefined);
$('.color-picker', $body).css('background', '#fff');
chart.set('metadata.visualize.custom-colors', {});
}
}
}
function getLabels() {
return args.option.axis && vis.axes(true)[args.option.axis] ?
_.unique(vis.axes(true)[args.option.axis].values()) :
(vis.colorKeys ? vis.colorKeys() : vis.keys());
}
}
});<|fim▁end|> | $('.select', $body).show(); |
<|file_name|>jewels-and-stones_test.go<|end_file_name|><|fim▁begin|>package leetcode
import "testing"
func TestNumJewelsInStones(t *testing.T) {
if numJewelsInStones("aA", "aAAbbbb") != 3 {
t.Fatal()
}<|fim▁hole|><|fim▁end|> | if numJewelsInStones("z", "ZZ") != 0 {
t.Fatal()
}
} |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Zinc, the bare metal stack for rust.
// Copyright 2014 Ben Gamari <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::gc::Gc;
use syntax::ast;
use syntax::ast::P;
use syntax::ext::base::ExtCtxt;
use node;
mod utils;
mod setter;
mod getter;
mod union;
mod register;
mod accessors;
pub struct Builder {
items: Vec<Gc<ast::Item>>,
}
impl Builder {
pub fn new() -> Builder {<|fim▁hole|> Builder {items: Vec::new()}
}
pub fn emit_items<'a>(&mut self, cx: &'a ExtCtxt, reg: Gc<node::Reg>)
-> Vec<P<ast::Item>> {
node::visit_reg(&*reg, &mut setter::BuildSetters::new(self, cx));
node::visit_reg(&*reg, &mut getter::BuildGetters::new(self, cx));
node::visit_reg(&*reg, &mut register::BuildRegStructs::new(self, cx));
node::visit_reg(&*reg, &mut union::BuildUnionTypes::new(self, cx));
node::visit_reg(&*reg, &mut accessors::BuildAccessors::new(self, cx));
self.items.clone()
}
pub fn push_item(&mut self, item: Gc<ast::Item>) {
self.items.push(item);
}
}<|fim▁end|> | |
<|file_name|>quadview.cpp<|end_file_name|><|fim▁begin|>/***********************************************************************************
* *
* Voreen - The Volume Rendering Engine *
* *
* Copyright (C) 2005-2013 University of Muenster, Germany. *
* Visualization and Computer Graphics Group <http://viscg.uni-muenster.de> *
* For a list of authors please refer to the file "CREDITS.txt". *
* *
* This file is part of the Voreen software package. Voreen is free software: *
* you can redistribute it and/or modify it under the terms of the GNU General *
* Public License version 2 as published by the Free Software Foundation. *
* *
* Voreen is distributed in the hope that it will be useful, but WITHOUT ANY *
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR *
* A PARTICULAR PURPOSE. See the GNU General Public License for more details. *
* *
* You should have received a copy of the GNU General Public License in the file *
* "LICENSE.txt" along with this file. If not, see <http://www.gnu.org/licenses/>. *
* *
* For non-commercial academic use see the license exception specified in the file *
* "LICENSE-academic.txt". To get information about commercial licensing please *
* contact the authors. *
* *
***********************************************************************************/
#include "quadview.h"
namespace voreen {
QuadView::QuadView()
: RenderProcessor()
, showGrid_("showGrid", "Show grid", true)
, gridColor_("gridColor", "Grid color", tgt::vec4(1.0f, 1.0f, 1.0f, 1.0f))
, maximized_("maximized", "Maximized sub-view", 0, 0, 4)
, maximizeOnDoubleClick_("maximizeOnDoubleClick", "Maximize on double click", true)
, maximizeEventProp_("mouseEvent.maximize", "Maximize Event", this, &QuadView::toggleMaximization,<|fim▁hole|> , inport1_(Port::INPORT, "inport1", "Image1 Input", false, Processor::INVALID_RESULT, RenderPort::RENDERSIZE_ORIGIN)
, inport2_(Port::INPORT, "inport2", "Image2 Input", false, Processor::INVALID_RESULT, RenderPort::RENDERSIZE_ORIGIN)
, inport3_(Port::INPORT, "inport3", "Image3 Input", false, Processor::INVALID_RESULT, RenderPort::RENDERSIZE_ORIGIN)
, inport4_(Port::INPORT, "inport4", "Image4 Input", false, Processor::INVALID_RESULT, RenderPort::RENDERSIZE_ORIGIN)
, currentPort_(-1)
, isDragging_(false)
{
gridColor_.setViews(Property::COLOR);
addProperty(showGrid_);
addProperty(gridColor_);
addProperty(maximized_);
maximized_.setVisible(false);
addProperty(maximizeOnDoubleClick_);
addEventProperty(maximizeEventProp_);
addEventProperty(mouseMoveEventProp_);
addPort(outport_);
addPort(inport1_);
addPort(inport2_);
addPort(inport3_);
addPort(inport4_);
outport_.onSizeReceiveChange<QuadView>(this, &QuadView::portSizeReceiveChanged);
}
QuadView::~QuadView() {
}
Processor* QuadView::create() const {
return new QuadView();
}
bool QuadView::isReady() const {
if (!outport_.isReady())
return false;
if (!inport1_.isReady() && !inport2_.isReady() && !inport3_.isReady() && !inport4_.isReady())
return false;
if(maximized_.get() != 0) {
switch(maximized_.get()) {
case 1: if(!inport1_.isReady())
return false;
break;
case 2: if(!inport2_.isReady())
return false;
break;
case 3: if(!inport3_.isReady())
return false;
break;
case 4: if(!inport4_.isReady())
return false;
break;
}
}
return true;
}
void QuadView::process() {
if (maximized_.get() == 0) {
MatStack.matrixMode(tgt::MatrixStack::MODELVIEW);
outport_.activateTarget();
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
if (inport1_.isReady()) {
inport1_.bindColorTexture(GL_TEXTURE0);
inport1_.getColorTexture()->enable();
glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
MatStack.translate(-0.5f, 0.5f, 0.0f);
MatStack.scale(0.5f, 0.5f, 1.0f);
glDepthFunc(GL_ALWAYS);
renderQuad();
glDepthFunc(GL_LESS);
MatStack.loadIdentity();
inport1_.getColorTexture()->disable();
}
if (inport2_.isReady()) {
inport2_.bindColorTexture(GL_TEXTURE0);
inport2_.getColorTexture()->enable();
glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
MatStack.translate(0.5f, 0.5f, 0.0f);
MatStack.scale(0.5f, 0.5f, 1.0f);
glDepthFunc(GL_ALWAYS);
renderQuad();
glDepthFunc(GL_LESS);
MatStack.loadIdentity();
inport2_.getColorTexture()->disable();
}
if (inport3_.isReady()) {
inport3_.bindColorTexture(GL_TEXTURE0);
inport3_.getColorTexture()->enable();
glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
MatStack.translate(-0.5f, -0.5f, 0.0f);
MatStack.scale(0.5f, 0.5f, 1.0f);
glDepthFunc(GL_ALWAYS);
renderQuad();
glDepthFunc(GL_LESS);
MatStack.loadIdentity();
inport3_.getColorTexture()->disable();
}
if (inport4_.isReady()) {
inport4_.bindColorTexture(GL_TEXTURE0);
inport4_.getColorTexture()->enable();
glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
MatStack.translate(0.5f, -0.5f, 0.0f);
MatStack.scale(0.5f, 0.5f, 1.0f);
glDepthFunc(GL_ALWAYS);
renderQuad();
glDepthFunc(GL_LESS);
MatStack.loadIdentity();
inport4_.getColorTexture()->disable();
}
glActiveTexture(GL_TEXTURE0);
if(showGrid_.get()) {
glDepthFunc(GL_ALWAYS);
glColor4f(gridColor_.get().r, gridColor_.get().g, gridColor_.get().b, gridColor_.get().a);
glBegin(GL_LINES);
glVertex2f(-1.0f, 0.0f);
glVertex2f(1.0f, 0.0f);
glVertex2f(0.0f, 1.0f);
glVertex2f(0.0f, -1.0f);
glEnd();
glDepthFunc(GL_LESS);
}
outport_.deactivateTarget();
MatStack.matrixMode(tgt::MatrixStack::MODELVIEW);
MatStack.loadIdentity();
LGL_ERROR;
}
else {
//maximized:
switch(maximized_.get()) {
case 1: if(!inport1_.isReady())
return;
outport_.activateTarget();
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
inport1_.bindColorTexture(GL_TEXTURE0);
inport1_.getColorTexture()->enable();
glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
glDepthFunc(GL_ALWAYS);
renderQuad();
glDepthFunc(GL_LESS);
MatStack.loadIdentity();
inport1_.getColorTexture()->disable();
outport_.deactivateTarget();
break;
case 2: if(!inport2_.isReady())
return;
outport_.activateTarget();
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
inport2_.bindColorTexture(GL_TEXTURE0);
inport2_.getColorTexture()->enable();
glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
glDepthFunc(GL_ALWAYS);
renderQuad();
glDepthFunc(GL_LESS);
MatStack.loadIdentity();
inport2_.getColorTexture()->disable();
outport_.deactivateTarget();
break;
case 3: if(!inport3_.isReady())
return;
outport_.activateTarget();
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
inport3_.bindColorTexture(GL_TEXTURE0);
inport3_.getColorTexture()->enable();
glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
glDepthFunc(GL_ALWAYS);
renderQuad();
glDepthFunc(GL_LESS);
MatStack.loadIdentity();
inport3_.getColorTexture()->disable();
outport_.deactivateTarget();
break;
case 4: if(!inport4_.isReady())
return;
outport_.activateTarget();
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
inport4_.bindColorTexture(GL_TEXTURE0);
inport4_.getColorTexture()->enable();
glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
glDepthFunc(GL_ALWAYS);
renderQuad();
glDepthFunc(GL_LESS);
MatStack.loadIdentity();
inport4_.getColorTexture()->disable();
outport_.deactivateTarget();
break;
}
}
}
void QuadView::initialize() throw (tgt::Exception) {
RenderProcessor::initialize();
updateSizes();
}
void QuadView::portSizeReceiveChanged() {
updateSizes();
}
void QuadView::updateSizes() {
if (outport_.getSize() == tgt::ivec2(0))
return;
if(maximized_.get() == 0) {
tgt::ivec2 subsize = outport_.getReceivedSize() / 2;
inport1_.requestSize(subsize);
inport2_.requestSize(subsize);
inport3_.requestSize(subsize);
inport4_.requestSize(subsize);
}
else {
switch(maximized_.get()) {
case 1: inport1_.requestSize(outport_.getReceivedSize());
break;
case 2: inport2_.requestSize(outport_.getReceivedSize());
break;
case 3: inport3_.requestSize(outport_.getReceivedSize());
break;
case 4: inport4_.requestSize(outport_.getReceivedSize());
break;
default:;
}
}
}
void QuadView::mouseMove(tgt::MouseEvent* e) {
e->accept();
    int prevCurrentPort = currentPort_;
if (maximized_.get() == 0) {
if ((e->action() & tgt::MouseEvent::EXIT) == tgt::MouseEvent::EXIT)
currentPort_ = -1;
if ((e->action() & tgt::MouseEvent::PRESSED) == tgt::MouseEvent::PRESSED)
isDragging_ = true;
if ((e->action() & tgt::MouseEvent::RELEASED) == tgt::MouseEvent::RELEASED)
isDragging_ = false;
if (!isDragging_) {
if (e->y() < (e->viewport().y/2)) {
if(e->x() < (e->viewport().x/2)) {
currentPort_ = 1;
}
else {
currentPort_ = 2;
}
}
else {
if(e->x() < (e->viewport().x/2)) {
currentPort_ = 3;
}
else {
currentPort_ = 4;
}
}
}
        if (currentPort_ != prevCurrentPort) {
tgt::MouseEvent leaveEvent(1, 1, tgt::MouseEvent::EXIT, e->modifiers(), e->button(), e->viewport() / 2);
tgt::MouseEvent enterEvent(1, 1, tgt::MouseEvent::ENTER, e->modifiers(), e->button(), e->viewport() / 2);
leaveEvent.ignore();
enterEvent.ignore();
            switch(prevCurrentPort) {
case 1:
inport1_.distributeEvent(&leaveEvent);
break;
case 2:
inport2_.distributeEvent(&leaveEvent);
break;
case 3:
inport3_.distributeEvent(&leaveEvent);
break;
case 4:
inport4_.distributeEvent(&leaveEvent);
break;
}
switch(currentPort_) {
case 1:
inport1_.distributeEvent(&enterEvent);
break;
case 2:
inport2_.distributeEvent(&enterEvent);
break;
case 3:
inport3_.distributeEvent(&enterEvent);
break;
case 4:
inport4_.distributeEvent(&enterEvent);
break;
}
}
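        // Remap the event position from full-viewport coordinates into the
        // hovered sub-view's coordinates by taking it modulo half the viewport.
        // Illustrative example (not from the original source): with an 800x600
        // viewport, a motion event at (500, 100) lies in the top-right sub-view
        // and is forwarded as (100, 100) relative to a 400x300 sub-viewport.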
tgt::MouseEvent moveEvent(e->x() % (e->viewport().x/2), e->y() % (e->viewport().y/2), tgt::MouseEvent::MOTION, e->modifiers(), e->button(), e->viewport() / 2);
moveEvent.ignore();
switch(currentPort_) {
case 1:
inport1_.distributeEvent(&moveEvent);
break;
case 2:
inport2_.distributeEvent(&moveEvent);
break;
case 3:
inport3_.distributeEvent(&moveEvent);
break;
case 4:
inport4_.distributeEvent(&moveEvent);
break;
}
}
else {
switch(maximized_.get()) {
case 1: inport1_.distributeEvent(e);
break;
case 2: inport2_.distributeEvent(e);
break;
case 3: inport3_.distributeEvent(e);
break;
case 4: inport4_.distributeEvent(e);
break;
default:;
}
}
}
void QuadView::invalidate(int inv) {
RenderProcessor::invalidate(inv);
}
void QuadView::onEvent(tgt::Event* e) {
tgt::TouchEvent* te = dynamic_cast<tgt::TouchEvent*>(e);
if(te) {
distributeTouchEvent(te);
return;
}
tgt::MouseEvent* me = dynamic_cast<tgt::MouseEvent*>(e);
if (me && !(maximized_.get() == 0 && mouseMoveEventProp_.accepts(me)) && !(maximizeEventProp_.accepts(me) && maximizeOnDoubleClick_.get())) {
distributeMouseEvent(me);
return;
}
RenderProcessor::onEvent(e);
}
// TODO For now, Touch Events are sent to inports only if all touchpoints are located within a single subsection of the viewport
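// Sub-view sections are numbered row-major: 0 = top-left, 1 = top-right,
// 2 = bottom-left, 3 = bottom-right, matching inport1_ through inport4_.
// Illustrative example (not from the original source): with an 800x600
// output, a touch point at (600, 400) falls in section 3, so it is forwarded
// to inport4_ with the offset (-400, -300) applied to its position.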
void QuadView::distributeTouchEvent(tgt::TouchEvent* te) {
if (maximized_.get() == 0) {
const std::deque<tgt::TouchPoint>& tps = te->touchPoints();
const tgt::TouchPoint& first = tps.front();
tgt::vec2 outSize = outport_.getSize();
int section = first.pos().y < outSize.y / 2 ? (first.pos().x < outSize.x / 2 ? 0 : 1) : (first.pos().x < outSize.x / 2 ? 2 : 3);
for(std::deque<tgt::TouchPoint>::const_iterator it = tps.begin() + 1; it != tps.end(); it++) {
const tgt::TouchPoint& tp = *it;
// TODO different sections -> handle instead of doing nothing
if(section != (tp.pos().y < outSize.y / 2 ? (tp.pos().x < outSize.x / 2 ? 0 : 1) : (tp.pos().x < outSize.x / 2 ? 2 : 3)))
return;
}
RenderPort* inport = (section == 0 ? &inport1_ : (section == 1 ? &inport2_ : (section == 2 ? &inport3_ : &inport4_)));
tgt::vec2 offset = section == 0 ? tgt::vec2(0.f) : (section == 1 ? tgt::vec2(-outSize.x / 2.f, 0.f) : (section == 2 ? tgt::vec2(0.f, -outSize.y / 2.f) : -outSize / 2.f));
std::deque<tgt::TouchPoint> tpsTrafo;
for(std::deque<tgt::TouchPoint>::const_iterator it = tps.begin(); it != tps.end(); it++) {
const tgt::TouchPoint& tp = *it;
tgt::TouchPoint newTP = tp;
newTP.setPos(tp.pos() + offset);
tpsTrafo.push_back(newTP);
}
tgt::TouchEvent nte = tgt::TouchEvent(tgt::Event::MODIFIER_NONE, te->touchPointStates(), te->deviceType(), tpsTrafo);
nte.ignore(); // accepted is set to true by default
inport->distributeEvent(&nte);
if(nte.isAccepted())
te->accept();
}
else {
switch(maximized_.get()) {
case 1: inport1_.distributeEvent(te);
break;
case 2: inport2_.distributeEvent(te);
break;
case 3: inport3_.distributeEvent(te);
break;
case 4: inport4_.distributeEvent(te);
break;
default:;
}
}
}
void QuadView::distributeMouseEvent(tgt::MouseEvent* me) {
if (maximized_.get() == 0) {
if (me->y() < (me->viewport().y / 2)) {
if (me->x() < (me->viewport().x / 2)) {
tgt::MouseEvent newme(me->x(), me->y(), me->action(), me->modifiers(), me->button(), me->viewport() / 2);
newme.ignore(); // accepted is set to true by default
inport1_.distributeEvent(&newme);
if (newme.isAccepted())
me->accept();
}
else {
tgt::MouseEvent newme(me->x() - (me->viewport().x / 2), me->y(), me->action(), me->modifiers(), me->button(), me->viewport() / 2);
newme.ignore();
inport2_.distributeEvent(&newme);
if (newme.isAccepted())
me->accept();
}
}
else {
if (me->x() < (me->viewport().x / 2)) {
tgt::MouseEvent newme(me->x(), me->y() - (me->viewport().y / 2), me->action(), me->modifiers(), me->button(), me->viewport() / 2);
newme.ignore();
inport3_.distributeEvent(&newme);
if (newme.isAccepted())
me->accept();
}
else {
tgt::MouseEvent newme(me->x() - (me->viewport().x / 2), me->y() - (me->viewport().y / 2), me->action(), me->modifiers(), me->button(), me->viewport() / 2);
newme.ignore();
inport4_.distributeEvent(&newme);
if (newme.isAccepted())
me->accept();
}
}
}
else {
switch(maximized_.get()) {
case 1: inport1_.distributeEvent(me);
break;
case 2: inport2_.distributeEvent(me);
break;
case 3: inport3_.distributeEvent(me);
break;
case 4: inport4_.distributeEvent(me);
break;
default:;
}
}
}
void QuadView::toggleMaximization(tgt::MouseEvent* me) {
if (maximizeOnDoubleClick_.get()) {
if (maximized_.get() == 0) {
if (me->y() < (me->viewport().y / 2)) {
if (me->x() < (me->viewport().x / 2)) {
maximized_.set(1);
}
else {
maximized_.set(2);
}
}
else {
if (me->x() < (me->viewport().x / 2)) {
maximized_.set(3);
}
else {
maximized_.set(4);
}
}
}
else {
maximized_.set(0);
}
updateSizes();
me->accept();
}
}
} // namespace voreen<|fim▁end|> | tgt::MouseEvent::MOUSE_BUTTON_LEFT, tgt::MouseEvent::DOUBLECLICK, tgt::MouseEvent::MODIFIER_NONE)
, mouseMoveEventProp_("mouseEvent.move", "Move Event", this, &QuadView::mouseMove,
tgt::MouseEvent::MOUSE_BUTTON_NONE, tgt::MouseEvent::MOTION | tgt::MouseEvent::CLICK | tgt::MouseEvent::ENTER_EXIT, tgt::MouseEvent::MODIFIER_NONE)
, outport_(Port::OUTPORT, "outport", "Image Output", true, Processor::INVALID_RESULT, RenderPort::RENDERSIZE_RECEIVER) |
<|file_name|>markers.py<|end_file_name|><|fim▁begin|>#Copyright ReportLab Europe Ltd. 2000-2004
#see license.txt for license details
#history http://www.reportlab.co.uk/cgi-bin/viewcvs.cgi/public/reportlab/trunk/reportlab/graphics/charts/markers.py
"""
This module defines a collection of markers used in charts.
The make* functions return a simple shape or a widget as for
the smiley.
"""
__version__=''' $Id: markers.py 2385 2004-06-17 15:26:05Z rgbecker $ '''
from reportlab.lib import colors
from reportlab.graphics.shapes import Rect, Line, Circle, Polygon
from reportlab.graphics.widgets.signsandsymbols import SmileyFace
def makeEmptySquare(x, y, size, color):
"Make an empty square marker."
d = size/2.0
rect = Rect(x-d, y-d, 2*d, 2*d)
rect.strokeColor = color
rect.fillColor = None
return rect
def makeFilledSquare(x, y, size, color):<|fim▁hole|> d = size/2.0
rect = Rect(x-d, y-d, 2*d, 2*d)
rect.strokeColor = color
rect.fillColor = color
return rect
def makeFilledDiamond(x, y, size, color):
"Make a filled diamond marker."
d = size/2.0
poly = Polygon((x-d,y, x,y+d, x+d,y, x,y-d))
poly.strokeColor = color
poly.fillColor = color
return poly
def makeEmptyCircle(x, y, size, color):
"Make a hollow circle marker."
d = size/2.0
circle = Circle(x, y, d)
circle.strokeColor = color
circle.fillColor = colors.white
return circle
def makeFilledCircle(x, y, size, color):
"Make a hollow circle marker."
d = size/2.0
circle = Circle(x, y, d)
circle.strokeColor = color
circle.fillColor = color
return circle
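# Illustrative usage (not part of the original module): each make* factory
# returns a ReportLab graphics object centred on (x, y), so a marker can be
# added straight to a Drawing:
#   from reportlab.graphics.shapes import Drawing
#   d = Drawing(100, 100)
#   d.add(makeFilledCircle(50, 50, 6, colors.red))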
def makeSmiley(x, y, size, color):
"Make a smiley marker."
d = size
s = SmileyFace()
s.fillColor = color
s.x = x-d
s.y = y-d
s.size = d*2
return s<|fim▁end|> | "Make a filled square marker."
|
<|file_name|>you.create-user.js<|end_file_name|><|fim▁begin|>if (Meteor.isServer) {
var
	//// A lookup-table whose keys are generated each time a ‘you.register’ form is rendered using the `babelslug()` method.
	//// Each key is a babelslug; its value records when the babelslug was issued, plus the Meteor connection ID (like a
	//// session ID for anon users) it was issued to, which allows us to clear out old and unused babelslugs.
	//// Two examples are shown here:
	recentBabelslugs = { // @todo for a multi-server project, move this functionality to a shared mongoDB collection
		// 'MagentaMouse': { now: 1409341347912, cid: 'KukNJw4d4vjGzzrQa' },
		// 'BlueChessCat': { now: 1409341399283, cid: 'YYJWMWTPq7RFWdKr6' }
	}
//// Clear out stale elements in the `recentBabelslugs` lookup-table.
, recentBabelslugsHousekeeping = function () {
var key
, now = Date.now()
;
for (key in recentBabelslugs) {
			if (15 * 60 * 1000 < now - recentBabelslugs[key].now) { // allow a user 15 minutes to fill in the registration form
delete recentBabelslugs[key];
}
}
}
//// Check how many times a given `username` exists in the user database. If all’s well, this should only ever return `0` or `1`.
, usernameCount = function (username) {
return Meteor.users.find({ 'profile.username': username }).count();
}
//// BabelSlug, from Rich Plastow’s work, 2014-02-09.
, ucs2 = [
[ // animal
{ en:'slug' ,es:'babosa' ,ru:'\u0441\u043B\u0438\u0437\u043D\u044F\u043A',fr:'limace' ,zh:'\u86DE\u8753',ar:'\u064A\u0631\u0642\u0627\u0646\u0629' }
, { en:'mouse' ,es:'rat\u00F3n',ru:'\u043C\u044B\u0448\u044C' ,fr:'souris' ,zh:'\u9F20\u6807',ar:'\u0641\u0623\u0631' }
, { en:'cow' ,es:'vaca' ,ru:'\u043A\u043E\u0440\u043E\u0432\u0430' ,fr:'vache' ,zh:'\u725B' ,ar:'\u0628\u0642\u0631\u0629' }
, { en:'cat' ,es:'gato' ,ru:'\u043A\u043E\u0448\u043A\u0430' ,fr:'chat' ,zh:'\u732B' ,ar:'\u0642\u0637' }
, { en:'rabbit',es:'conejo' ,ru:'\u043A\u0440\u043E\u043B\u0438\u043A' ,fr:'lapin' ,zh:'\u5154' ,ar:'\u0623\u0631\u0646\u0628' }
, { en:'deer' ,es:'ciervo' ,ru:'\u043E\u043B\u0435\u043D\u044C' ,fr:'cerf' ,zh:'\u9E7F' ,ar:'\u0623\u064A\u0644' }
, { en:'bear' ,es:'oso' ,ru:'\u043C\u0435\u0434\u0432\u0435\u0434\u044C',fr:'ours' ,zh:'\u718A' ,ar:'\u062F\u064F\u0628\u0651' }
, { en:'frog' ,es:'rana' ,ru:'\u043B\u044F\u0433\u0443\u0448\u043A\u0430',fr:'grenouille',zh:'\u9752\u86D9',ar:'\u0636\u0641\u062F\u0639' }
]
// , [ // texture
// { en:'-' ,es:'-' ,ru:'-' ,fr:'-' ,zh:'-' ,ar:'-' }<|fim▁hole|> // , { en:'dotted' ,es:'punteado',ru:[1087,1091,1085,1082,1090,1080,1088,1085,1099,1081],fr:'pointill\u00E9',zh:[26001,28857],ar:[1605,1606,1602,1591] }
// , { en:'striped',es:'rayas' ,ru:[1087,1086,1083,1086,1089,1072,1090,1099,1081] ,fr:'ray\u00E9' ,zh:[26465,32441],ar:[1605,1602,1604,1605] }
// , { en:'chess' ,es:'ajedrez' ,ru:[1096,1072,1093,1084,1072,1090,1099] ,fr:'\u00E9checs' ,zh:[26827] ,ar:[1588,1591,1585,1606,1580] }
// ]
, [ // color1
{ en:'-' ,es:'-' ,ru:'-' ,fr:'-' ,zh:'-' ,ar:'-' }
, { en:'red' ,es:'rojo' ,ru:[1082,1088,1072,1089,1085,1099,1081] ,fr:'rouge' ,zh:[32418] ,ar:[1571,1581,1605,1585] }
, { en:'orange' ,es:'naranja' ,ru:[1086,1088,1072,1085,1078,1077,1074,1099,1081],fr:'orange' ,zh:[27225] ,ar:[1575,1604,1576,1585,1578,1602,1575,1604,1610] }
, { en:'yellow' ,es:'amarillo',ru:[1078,1077,1083,1090,1099,1081] ,fr:'jaune' ,zh:[40644] ,ar:[1571,1589,1601,1585] }
, { en:'green' ,es:'verde' ,ru:[1079,1077,1083,1077,1085,1099,1081] ,fr:'vert' ,zh:[32511] ,ar:[1571,1582,1590,1585] }
// , { en:'cyan' ,es:'cian' ,ru:[1075,1086,1083,1091,1073,1086,1081] ,fr:'cyan' ,zh:[38738] ,ar:[1587,1605,1575,1608,1610] }
, { en:'blue' ,es:'azul' ,ru:[1089,1080,1085,1080,1081] ,fr:'bleu' ,zh:[34013] ,ar:[1571,1586,1585,1602] }
, { en:'purple' ,es:'magenta' ,ru:[1087,1091,1088,1087,1091,1088,1085,1099,1081],fr:'magenta',zh:[27915,32418],ar:[1571,1585,1580,1608,1575,1606,1610] } // @todo translate purple (these are for magenta)
]
, [ // emotion @todo convert remaining languages from 2014-Work/BabelSlug/README.md
{ en:'-' ,es:'-' ,fr:'-' }
, { en:'happy' ,es:'feliz' ,fr:'heureux' }
// , { en:'sad' ,es:'triste' ,fr:'triste' }
, { en:'laughing' ,es:'risa' ,fr:'rire' }
, { en:'sleepy' ,es:'soñoliento' ,fr:'somnolent' }
, { en:'surprised',es:'sorprendido',fr:'étonné' }
// , { en:'playful' ,es:'juguetón' ,fr:'espiègle' }
, { en:'confused' ,es:'confundido' ,fr:'embrouillé' }
]
]
//// Prepare an empty cache, which may contain HTML entities converted from `ucs2`, if required.
, html = (function () {
var i, j, l, m ,html = {};
for (i=0, l=ucs2.length; i<l; i++) {
html[i] = [];
for (j=0, m=ucs2[i].length; j<m; j++) {
html[i][j] = {};
}
}
return html
} ())
	// @todo tidy up `numberToPhrase()`: `i` becomes `num`, `ucs2` becomes an array, and `i` is the iterator
, numberToPhrase = function (number, options) {
var key, len, rem, word
, num = ~~(number < 0 ? -number : number) // ensure `num` is a positive whole number, or zero
, opt = options || {}
, l18n = ucs2[0][0][opt.l18n] ? opt.l18n : 'en' // set to English if the localization option is invalid or unspecified
, phrase = []
;
for (key in ucs2) {
len = ucs2[key].length;
rem = num % len;
if (! opt.format || 'ucs2' === opt.format) {
word = ucs2[key][rem][l18n];
// console.log('key ' + key + ' format ' + 'ucs2' + ' word ' + word);
} else if ('html' === opt.format) {
word = html[key][rem][l18n];
if (! word) {
word = html[key][rem][l18n] = ucs2[key][rem][l18n].replace(/[\u00A0-\u2666]/g, function(c) {
return '&#' + c.charCodeAt(0) + ';';
});
// console.log('ADD TO CACHE key ' + key + ' format ' + 'html' + ' word ' + word);
} else {
// console.log('GET IN CACHE key ' + key + ' format ' + 'html' + ' word ' + word);
}
} else {
// @todo format error
}
if ('-' !== word) { phrase.unshift(word); }
num = ~~(num / len); // prepare for the next part in `ucs2`
if (0 === num) { break; } // low numbers don't need to step through every section in `ucs2`
}
return phrase.join('-');
}
;
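	//// Worked example (illustrative, derived from the `ucs2` table above): `numberToPhrase()`
	//// decodes `number` as a mixed-radix value, one digit per section, skipping '-' entries.
	//// With the default English localization:
	////   numberToPhrase(1)  === 'mouse'            // 1 % 8 -> 'mouse', then num is 0
	////   numberToPhrase(9)  === 'red-mouse'        // 9 % 8 -> 'mouse', then 1 % 7 -> 'red'
	////   numberToPhrase(65) === 'happy-red-mouse'  // adds 1 % 6 -> 'happy' from the emotion section
	//// The `babelslug` method below joins such a phrase to the caller's connection ID with an
	//// underscore, e.g. 'happy-red-mouse_KukNJw4d4vjGzzrQa'.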
Meteor.methods({
babelslug: function () {
var i, key, babelslug;
if (this.isSimulation) { return; } // clientside stub (return value is ignored)
//// Housekeeping on the `recentBabelslugs` lut.
recentBabelslugsHousekeeping();
//// Try, 200 times, to find a username which has not been taken. @todo this is quite brute-force ... can we come up with a more elegant solution?
for (i=200; i>0; i--) {
babelslug = numberToPhrase( Math.floor(Math.random() * 50000) );
if ( 3 === babelslug.split('-').length && ! recentBabelslugs[babelslug] && ! usernameCount(babelslug) ) { break; } // we are only using three-part usernames at present
}
if (! i) { throw new Meteor.Error(500, "Cannot generate a username! Please email " + Config.about.webmaster); } // @todo check `(! i)` can ever be truthy
recentBabelslugs[babelslug] = { // later, when the form is submitted, we will check that the babelslug value is expected
now: Date.now() // allows `recentBabelslugsHousekeeping()` to find stale babelslugs
, cid: this.connection.id
}
return babelslug + '_' + this.connection.id; // @todo is `this.connection.id` ever some unexpected value, for example `null`?
}
});
Accounts.onCreateUser(function (options, user) {
var babelslug, connectionId;
//// Housekeeping on the `recentBabelslugs` lut.
recentBabelslugsHousekeeping();
//// Validate the value of `<input id="AT_field_you-babelslug" ...>`.
babelslug = options.profile['you-babelslug'].split('_')[0];
connectionId = options.profile['you-babelslug'].split('_')[1];
if (! babelslug || ! connectionId) {
throw new Meteor.Error(500, "The ‘username’ field is invalid."); // @todo better error-code than 500?
}
if (! recentBabelslugs[babelslug]) {
throw new Meteor.Error(500, "Your registration form expired after 15 minutes. Please refresh the browser and try again."); // The ‘username’ value is unexpected, so this may actually be a hack attempt
}
if ( usernameCount(babelslug) ) {
throw new Meteor.Error(500, "The ‘username’ is already in use."); // prevent two `Meteor.user` records having the same username, which could happen on a multi-servo project, until we change `recentBabelslugs` to a shared mongoDB collection @todo
}
//// Remove the babelslug, as it’s not needed any more.
delete recentBabelslugs[babelslug];
//// Record the username (‘[email protected]’ gets a special username).
options.profile = options.profile || {};
options.profile.username = '[email protected]' === options.email ? 'red-cat' : babelslug;
//// Record other registration data.
if (options.profile['you-age-group-code']) { options.profile.agc = options.profile['you-age-group-code']; }
if (options.profile['you-based-in-code']) { options.profile.bic = options.profile['you-based-in-code']; }
if (options.profile['you-hear-about-code']) { options.profile.hac = options.profile['you-hear-about-code']; }
if (options.profile['you-hear-about-text']) { options.profile.hat = options.profile['you-hear-about-text']; }
if (options.profile['you-newsletter-opt']) { options.profile.nlo = options.profile['you-newsletter-opt']; }
//// The registration is valid, so record it as usual. http://docs.meteor.com/#accounts_oncreateuser
user.profile = options.profile;
return user;
});
}<|fim▁end|> | |
<|file_name|>pagerank.rs<|end_file_name|><|fim▁begin|>// extern crate rand;
// extern crate time;
// extern crate columnar;
// extern crate timely;
// extern crate differential_dataflow;
//
// use std::mem;
//
// use std::hash::Hash;
// use timely::example_shared::*;
// use timely::example_shared::operators::*;
// use timely::communication::ThreadCommunicator;
//
// use rand::{Rng, SeedableRng, StdRng};
//
// use differential_dataflow::Collection;
// use differential_dataflow::collection_trace::lookup::UnsignedInt;
// use differential_dataflow::collection_trace::LeastUpperBound;
//
// use differential_dataflow::operators::*;
//
fn main() {
//
// let start = time::precise_time_s();
// let start2 = start.clone();
// let mut computation = GraphRoot::new(ThreadCommunicator);
//
// let mut input = computation.subcomputation(|builder| {
//
// let (input, mut edges) = builder.new_input();
//
// pagerank(&edges).consolidate(|||x| *x)
// .inspect_batch(move |t, x| { println!("{}s:\tobserved at {:?}: {:?} changes",
// ((time::precise_time_s() - start2)) - (t.inner as f64),
// t, x.len()) });
//
// input
// });
//
// // let mut nodes = BufReader::new(File::open("/Users/mcsherry/Projects/Datasets/twitter-dedup.offsets").unwrap());
// // let mut edges = BufReader::new(File::open("/Users/mcsherry/Projects/Datasets/twitter-dedup.targets").unwrap());
//
// // let mut sent = 0;
// // let mut buffer = Vec::new();
// // let mut offset = nodes.read_u64::<LittleEndian>().unwrap();
// // assert!(offset == 0);
// // for node in (0..60000000) {
// // let read = nodes.read_u64::<LittleEndian>().unwrap();
// // for _ in 0.. (read - offset) {
// // let edge = edges.read_u32::<LittleEndian>().unwrap();
// // if node % 2 == 0 && edge % 2 == 0 {
// // buffer.push(((node / 2 as u32, edge / 2 as u32), 1));
// // if buffer.len() > 1000 {
// // sent += buffer.len();
// // input.send_at(0, buffer.drain(..));
// // computation.step();
// // }
// // }
// // }
// // offset = read;
// // }
// //
// // sent += buffer.len();
// // input.send_at(0, buffer.drain(..));
// //
// // println!("sent {} edges", sent);
//
// let nodes = 200_000u32;
// let edges = 4_000_000;
//
// println!("determining pagerank of {} nodes, {} edges:", nodes, edges);
// println!("please note: not actually pagerank yet; don't get excited.");
//
// let seed: &[_] = &[1, 2, 3, 4];
// let mut rng1: StdRng = SeedableRng::from_seed(seed);
// let mut rng2: StdRng = SeedableRng::from_seed(seed);
//
// rng1.gen::<f64>();
// rng2.gen::<f64>();
//
// let mut left = edges;
// while left > 0 {
// let next = if left < 1000 { left } else { 1000 };
// input.send_at(0, (0..next).map(|_| ((rng1.gen_range(0, nodes), rng1.gen_range(0, nodes)), 1)));
// computation.step();
// left -= next;
// }
//
// println!("input ingested after {}", time::precise_time_s() - start);
// //<|fim▁hole|>// // if time::precise_time_s() - start >= round as f64 {
// // let change_count = 1000;
// // for _ in 0..change_count {
// // changes.push(((rng1.gen_range(0, nodes), rng1.gen_range(0, nodes)), 1));
// // changes.push(((rng2.gen_range(0, nodes), rng2.gen_range(0, nodes)),-1));
// // }
// //
// // input.send_at(round, changes.drain(..));
// // input.advance_to(round + 1);
// // round += 1;
// // }
// // }
//
// input.close();
//
// while computation.step() { }
// computation.step(); // shut down
}
//
// fn pagerank<G: GraphBuilder, U: UnsignedInt>(edges: &Collection<G, (U, U)>) -> Collection<G, U>
// where G::Timestamp: LeastUpperBound+Hash {
//
// let degrs = edges.map(|(x,w)| (x.0,w))
//                      .consolidate(|x| *x)
// .group_by_u(|x| (x,()), |k,v| (*k,*v), |_,s,t| t.push((s[0].1, 1)))
// .inspect_batch(|_t, xs| println!("degrees: {:?}", xs.len()))
// ;
//
// // start everyone with 100 units of "rank".
// edges.group_by_u(|x| (x.0,()), |k,_| *k, |_,_,t| { t.push(((), 10000)) })
//          .iterate(u32::max_value(), |x| *x, |ranks| {
//
// let degrs = degrs.enter(&ranks.scope());
// let edges = edges.enter(&ranks.scope());
//
// // pair surfers with the out-degree of their location
//              ranks.join_u(&degrs, |n| (n,()), |nc| nc, |n,_,c| (*n,*c))
// .inspect_batch(|t, xs| println!("join1ed at {:?}: {:?}", t, xs.len()))
// .group_by_u(|x| x, |k,_| *k, |_,s,t| t.push(((), s[0].1 / s[0].0)))
// .inspect_batch(|t, xs| println!("grouped at {:?}: {:?}", t, xs.len()))
// .join_u(&edges, |n| (n,()), |e| e, |_,_,d| *d)
// .inspect_batch(|t, xs| println!("join2ed at {:?}: {:?}\n", t, xs.len()))
// })
// }<|fim▁end|> | // // let mut round = 0 as u32;
// // let mut changes = Vec::new();
// // while computation.step() { |
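// Illustrative note (my addition, not part of the original example): a full
// PageRank step would also scale each surfer's rank by 1/out-degree before
// summing contributions along edges, roughly
//     new_rank(v) = (1 - d) + d * sum over edges (u -> v) of rank(u) / degree(u)
// with damping factor d ~= 0.85. The commented pipeline above only wires up
// the join/group plumbing, which is why it warns "not actually pagerank yet".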
<|file_name|>fn-trait-formatting.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(unboxed_closures)]
#![feature(box_syntax)]
fn needs_fn<F>(x: F) where F: Fn(isize) -> isize {}
fn main() {
let _: () = (box |_: isize| {}) as Box<FnOnce(isize)>;
//~^ ERROR object-safe
//~| ERROR mismatched types
//~| expected `()`
//~| found `Box<core::ops::FnOnce(isize)>`
//~| expected ()
//~| found box
let _: () = (box |_: isize, isize| {}) as Box<Fn(isize, isize)>;
//~^ ERROR mismatched types
//~| expected `()`
//~| found `Box<core::ops::Fn(isize, isize)>`<|fim▁hole|> let _: () = (box || -> isize unimplemented!()) as Box<FnMut() -> isize>;
//~^ ERROR mismatched types
//~| expected `()`
//~| found `Box<core::ops::FnMut() -> isize>`
//~| expected ()
//~| found box
needs_fn(1);
//~^ ERROR `core::ops::Fn<(isize,)>`
//~| ERROR `core::ops::Fn<(isize,)>`
}<|fim▁end|> | //~| expected ()
//~| found box |
<|file_name|>hooks.go<|end_file_name|><|fim▁begin|>/*
Copyright 2019 The Knative Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Package testing includes utilities for testing controllers.
package testing
import (
"errors"
"sync"
"time"
"go.uber.org/atomic"
"k8s.io/apimachinery/pkg/runtime"
kubetesting "k8s.io/client-go/testing"
)
// HookResult is the return value of hook functions.
type HookResult bool
const (
// HookComplete indicates the hook function completed, and WaitForHooks should
// not wait for it.
HookComplete HookResult = true
// HookIncomplete indicates the hook function is incomplete, and WaitForHooks
// should wait for it to complete.
HookIncomplete HookResult = false
)
/*
CreateHookFunc is a function for handling a Create hook. Its runtime.Object
parameter will be the Kubernetes resource created. The resource can be cast
to its actual type like this:
pod := obj.(*v1.Pod)
A return value of true marks the hook as completed. Returning false allows
the hook to run again when the next resource of the requested type is
created.
*/
type CreateHookFunc func(runtime.Object) HookResult
/*
UpdateHookFunc is a function for handling an update hook. Its runtime.Object
parameter will be the Kubernetes resource updated. The resource can be cast
to its actual type like this:
pod := obj.(*v1.Pod)
A return value of true marks the hook as completed. Returning false allows
the hook to run again when the next resource of the requested type is
updated.
*/
type UpdateHookFunc func(runtime.Object) HookResult
/*
DeleteHookFunc is a function for handling a delete hook. Its name parameter will
be the name of the resource deleted. The resource itself is not available to
the reactor.
*/
type DeleteHookFunc func(string) HookResult
/*
Hooks is a utility struct that simplifies controller testing with fake
clients. A Hooks struct allows attaching hook functions to actions (create,
update, delete) on a specified resource type within a fake client and ensuring
that all hooks complete in a timely manner.
*/
type Hooks struct {
completionCh chan int32
completionIndex *atomic.Int32
// Denotes whether or not the registered hooks should no longer be called
// because they have already been waited upon.
// This uses a Mutex over a channel to guarantee that after WaitForHooks
// returns no hooked functions will be called.
closed bool
mutex sync.RWMutex
}
// NewHooks returns a Hooks struct that can be used to attach hooks to one or
// more fake clients and wait for all hooks to complete.
// TODO(grantr): Allow validating that a hook never fires
func NewHooks() *Hooks {
return &Hooks{
completionCh: make(chan int32, 100),
completionIndex: atomic.NewInt32(-1),
}
}
// OnCreate attaches a create hook to the given Fake. The hook function is
// executed every time a resource of the given type is created.
func (h *Hooks) OnCreate(fake *kubetesting.Fake, resource string, rf CreateHookFunc) {
index := h.completionIndex.Inc()
fake.PrependReactor("create", resource, func(a kubetesting.Action) (bool, runtime.Object, error) {
obj := a.(kubetesting.CreateActionImpl).Object
h.mutex.RLock()
defer h.mutex.RUnlock()
if !h.closed && rf(obj) == HookComplete {
h.completionCh <- index
}
return false, nil, nil
})
}
// OnUpdate attaches an update hook to the given Fake. The hook function is
// executed every time a resource of the given type is updated.
func (h *Hooks) OnUpdate(fake *kubetesting.Fake, resource string, rf UpdateHookFunc) {
index := h.completionIndex.Inc()
fake.PrependReactor("update", resource, func(a kubetesting.Action) (bool, runtime.Object, error) {
obj := a.(kubetesting.UpdateActionImpl).Object
h.mutex.RLock()
defer h.mutex.RUnlock()
if !h.closed && rf(obj) == HookComplete {
h.completionCh <- index
}
return false, nil, nil
})
}
// OnDelete attaches a delete hook to the given Fake. The hook function is
// executed every time a resource of the given type is deleted.
func (h *Hooks) OnDelete(fake *kubetesting.Fake, resource string, rf DeleteHookFunc) {
index := h.completionIndex.Inc()
fake.PrependReactor("delete", resource, func(a kubetesting.Action) (bool, runtime.Object, error) {
name := a.(kubetesting.DeleteActionImpl).Name
h.mutex.RLock()
defer h.mutex.RUnlock()
if !h.closed && rf(name) == HookComplete {
h.completionCh <- index
}
return false, nil, nil
})<|fim▁hole|>// WaitForHooks waits until all attached hooks have returned true at least once.
// If the given timeout expires before that happens, an error is returned.
// The registered actions will no longer be executed after WaitForHooks has
// returned.
func (h *Hooks) WaitForHooks(timeout time.Duration) error {
defer func() {
h.mutex.Lock()
defer h.mutex.Unlock()
h.closed = true
}()
ci := int(h.completionIndex.Load())
if ci == -1 {
return nil
}
// Convert index to count.
ci++
timer := time.After(timeout)
hookCompletions := map[int32]HookResult{}
for {
select {
case i := <-h.completionCh:
hookCompletions[i] = HookComplete
if len(hookCompletions) == ci {
h.completionIndex.Dec()
return nil
}
case <-timer:
return errors.New("timed out waiting for hooks to complete")
}
}
}<|fim▁end|> | }
|
<|file_name|>utf8-string.cpp<|end_file_name|><|fim▁begin|>// -*- coding: us-ascii-unix -*-
// Copyright 2012 Lukas Kemmer
//
// Licensed under the Apache License, Version 2.0 (the "License"); you
// may not use this file except in compliance with the License. You
// may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
// implied. See the License for the specific language governing
// permissions and limitations under the License.
#include <cassert>
#include "text/utf8.hh"
#include "text/utf8-string.hh"
namespace faint{
inline bool outside(const std::string& data, size_t pos){
return utf8::num_characters(data) <= pos;
}
utf8_string::utf8_string(size_t n, const utf8_char& ch){
for (size_t i = 0; i != n; i++){
m_data += ch.str();
}
}
utf8_string::utf8_string(const utf8_char& ch)
: utf8_string(1, ch)
{}
utf8_string::utf8_string(const char* str)
: m_data(str)
{}
utf8_string::utf8_string(const std::string& str)
: m_data(str)
{}
utf8_char utf8_string::at(size_t pos) const{
if (outside(m_data, pos)){
throw std::out_of_range("utf8_string::at invalid string position");
}
return operator[](pos);
}
utf8_char utf8_string::back() const{
assert(!m_data.empty());
return operator[](size() - 1);<|fim▁hole|>
utf8_char utf8_string::front() const{
assert(!m_data.empty());
return operator[](0);
}
size_t utf8_string::bytes() const{
return m_data.size();
}
void utf8_string::clear(){
m_data.clear();
}
utf8_string utf8_string::substr(size_t pos, size_t n) const{
if (outside(m_data, pos)){
throw std::out_of_range("utf8_string::substr invalid string position");
}
size_t startByte = utf8::char_num_to_byte_num_checked(pos, m_data);
size_t numBytes = (n == utf8_string::npos) ?
std::string::npos :
utf8::char_num_to_byte_num_clamped(pos + n, m_data) - startByte;
return utf8_string(m_data.substr(startByte, numBytes));
}
const char* utf8_string::c_str() const{
return m_data.c_str();
}
const std::string& utf8_string::str() const{
return m_data;
}
size_t utf8_string::size() const{
return utf8::num_characters(m_data);
}
bool utf8_string::empty() const{
return m_data.empty();
}
utf8_string& utf8_string::erase(size_t pos, size_t n){
if (outside(m_data, pos)){
throw std::out_of_range("utf8_string::erase invalid string position");
}
size_t startByte = utf8::char_num_to_byte_num_clamped(pos, m_data);
size_t numBytes = (n == npos ? npos :
utf8::char_num_to_byte_num_clamped(pos + n, m_data) - startByte);
m_data.erase(startByte, numBytes);
return *this;
}
utf8_string& utf8_string::insert(size_t pos, const utf8_string& inserted){
if (pos > utf8::num_characters(m_data)){
throw std::out_of_range("invalid insertion index");
}
m_data.insert(utf8::char_num_to_byte_num_checked(pos, m_data), inserted.str());
return *this;
}
utf8_string& utf8_string::insert(size_t pos, size_t num, const utf8_char& c){
if (pos > utf8::num_characters(m_data)){
throw std::out_of_range("invalid insertion index");
}
insert(pos, utf8_string(num, c));
return *this;
}
utf8_char utf8_string::operator[](size_t i) const{
size_t pos = utf8::char_num_to_byte_num_checked(i, m_data);
size_t numBytes = faint::utf8::prefix_num_bytes(m_data[pos]);
return utf8_char(m_data.substr(pos, numBytes));
}
size_t utf8_string::find(const utf8_char& ch, size_t start) const{
// Since the leading byte has a unique pattern, using regular
// std::string find should be OK, I think.
size_t pos = m_data.find(ch.str(),
utf8::char_num_to_byte_num_checked(start, m_data));
if (pos == npos){
return pos;
}
return utf8::byte_num_to_char_num(pos, m_data);
}
size_t utf8_string::find_last_of(const utf8_string& s, size_t inPos) const{
const size_t endPos = inPos == npos ? size() : inPos;
for (size_t i = 0; i != endPos; i++){
auto pos = endPos - i - 1;
if (s.find((*this)[pos]) != utf8_string::npos){
return pos;
}
}
return utf8_string::npos;
}
size_t utf8_string::rfind(const utf8_char& ch, size_t start) const{
// Since the leading byte has a unique pattern, using regular
// std::string rfind should be OK, I think.
if (m_data.empty()){
return npos;
}
size_t startByte = (start == npos) ? m_data.size() - 1 :
utf8::char_num_to_byte_num_checked(start, m_data);
size_t pos = m_data.rfind(ch.str(), startByte);
if (pos == npos){
return pos;
}
  return utf8::byte_num_to_char_num(pos, m_data);
}
utf8_string& utf8_string::operator=(const utf8_string& other){
if (&other == this){
return *this;
}
m_data = other.m_data;
return *this;
}
utf8_string& utf8_string::operator+=(const utf8_char& ch){
m_data += ch.str();
return *this;
}
utf8_string& utf8_string::operator+=(const utf8_string& str){
m_data += str.str();
return *this;
}
utf8_string operator+(const utf8_string& lhs, const utf8_char& rhs){
return utf8_string(lhs.str() + rhs.str());
}
utf8_string operator+(const utf8_string& lhs, const utf8_string& rhs){
return utf8_string(lhs.str() + rhs.str());
}
utf8_string operator+(const utf8_char& lhs, const utf8_string& rhs){
return utf8_string(lhs.str() + rhs.str());
}
const size_t utf8_string::npos(std::string::npos);
bool utf8_string::operator<(const utf8_string& s) const{
return m_data < s.m_data;
}
bool is_ascii(const utf8_string& s){
const std::string& bytes = s.str();
for (char ch : bytes){
if (utf8::prefix_num_bytes(ch) != 1){
return false;
}
}
return true;
}
std::ostream& operator<<(std::ostream& o, const utf8_string& s){
o << s.str();
return o;
}
bool operator==(const utf8_string& lhs, const utf8_string& rhs){
return lhs.str() == rhs.str();
}
bool operator!=(const utf8_string& lhs, const utf8_string& rhs){
return !(lhs == rhs);
}
utf8_string_const_iterator begin(const utf8_string& s){
return utf8_string_const_iterator(s, 0);
}
utf8_string_const_iterator end(const utf8_string& s){
return utf8_string_const_iterator(s, s.size());
}
} // namespace<|fim▁end|> | } |
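// Illustrative usage sketch (my addition; the sample string is an assumption):
//   faint::utf8_string s("på");   // 'å' encodes as two bytes in UTF-8
//   s.size()  == 2;               // characters
//   s.bytes() == 3;               // raw bytes
//   s[1];                         // utf8_char holding the two-byte 'å'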
<|file_name|>DiseaseOverview.java<|end_file_name|><|fim▁begin|>/**
* ****************************************************************************
* Copyright 2014 Virginia Polytechnic Institute and State University
* <p/>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ****************************************************************************
*/
package edu.vt.vbi.patric.portlets;
import edu.vt.vbi.patric.beans.Genome;
import edu.vt.vbi.patric.beans.Taxonomy;
import edu.vt.vbi.patric.common.DataApiHandler;
import edu.vt.vbi.patric.common.SiteHelper;
import edu.vt.vbi.patric.common.SolrCore;
import edu.vt.vbi.patric.dao.DBDisease;
import edu.vt.vbi.patric.dao.ResultType;
import org.apache.commons.lang.StringUtils;
import org.apache.solr.client.solrj.SolrQuery;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.ObjectReader;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.portlet.*;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
public class DiseaseOverview extends GenericPortlet {
private static final Logger LOGGER = LoggerFactory.getLogger(DiseaseOverview.class);
private ObjectReader jsonReader;
@Override
public void init() throws PortletException {
super.init();
ObjectMapper objectMapper = new ObjectMapper();
jsonReader = objectMapper.reader(Map.class);
}
@Override
protected void doView(RenderRequest request, RenderResponse response) throws PortletException, IOException {
SiteHelper.setHtmlMetaElements(request, response, "Disease Overview");
response.setContentType("text/html");
response.setTitle("Disease Overview");
String contextType = request.getParameter("context_type");
String contextId = request.getParameter("context_id");
int taxonId;
List<Integer> targetGenusList = Arrays
.asList(1386,773,138,234,32008,194,83553,1485,776,943,561,262,209,1637,1763,780,590,620,1279,1301,662,629);
DataApiHandler dataApi = new DataApiHandler();
if (contextType.equals("genome")) {
Genome genome = dataApi.getGenome(contextId);
taxonId = genome.getTaxonId();
} else {
taxonId = Integer.parseInt(contextId);
}
Taxonomy taxonomy = dataApi.getTaxonomy(taxonId);
List<String> taxonLineageNames = taxonomy.getLineageNames();
List<String> taxonLineageRanks = taxonomy.getLineageRanks();
List<Integer> taxonLineageIds = taxonomy.getLineageIds();
List<Taxonomy> genusList = new LinkedList<>();
for (int i = 0; i < taxonLineageIds.size(); i++) {
if (taxonLineageRanks.get(i).equals("genus") && targetGenusList.contains(taxonLineageIds.get(i))) {
Taxonomy genus = new Taxonomy();
genus.setId(taxonLineageIds.get(i));
genus.setTaxonName(taxonLineageNames.get(i));
genusList.add(genus);
}
}
if (genusList.isEmpty()) {
SolrQuery query = new SolrQuery("lineage_ids:" + taxonId + " AND taxon_rank:genus AND taxon_id:(" + StringUtils.join(targetGenusList, " OR ") + ")");
String apiResponse = dataApi.solrQuery(SolrCore.TAXONOMY, query);
Map resp = jsonReader.readValue(apiResponse);
Map respBody = (Map) resp.get("response");
genusList = dataApi.bindDocuments((List<Map>) respBody.get("docs"), Taxonomy.class);
}
request.setAttribute("contextType", contextType);
request.setAttribute("contextId", contextId);
request.setAttribute("genusList", genusList);
PortletRequestDispatcher prd = getPortletContext().getRequestDispatcher("/WEB-INF/jsp/disease_overview.jsp");
prd.include(request, response);
}
@SuppressWarnings("unchecked")
public void serveResource(ResourceRequest request, ResourceResponse response) throws PortletException, IOException {
response.setContentType("application/json");
String type = request.getParameter("type");
String cId = request.getParameter("cId");
DBDisease conn_disease = new DBDisease();
int count_total;
JSONArray results = new JSONArray();
PrintWriter writer = response.getWriter();
if (type.equals("incidence")) {
JSONObject jsonResult = new JSONObject();
// String cType = request.getParameter("cType");
// sorting
// String sort_field = request.getParameter("sort");
// String sort_dir = request.getParameter("dir");
// Map<String, String> key = new HashMap<>();
// Map<String, String> sort = null;
//
// if (sort_field != null && sort_dir != null) {
// sort = new HashMap<String, String>();
// sort.put("field", sort_field);
// sort.put("direction", sort_dir);
// }
//
// key.put("cId", cId);
// key.put("cType", cType);
count_total = 1;
jsonResult.put("total", count_total);
JSONObject obj = new JSONObject();
obj.put("rownum", "1");
obj.put("pathogen", "Pathogen");
obj.put("disease", "Disease");
obj.put("incidence", "10");
obj.put("infection", "5");
results.add(obj);
jsonResult.put("results", results);
jsonResult.writeJSONString(writer);
}
else if (type.equals("disease_tree")) {
JSONArray jsonResult = new JSONArray();
String tree_node = request.getParameter("node");
List<ResultType> items = conn_disease.getMeshHierarchy(cId, tree_node);
if (items.size() > 0) {
int min = Integer.parseInt(items.get(0).get("lvl"));
try {
for (ResultType item : items) {
if (min == Integer.parseInt(item.get("lvl"))) {
boolean flag = false;
JSONObject obj = DiseaseOverview.encodeNodeJSONObject(item);
String mesh_id = (String) obj.get("tree_node");
for (int j = 0; j < jsonResult.size(); j++) {
JSONObject temp = (JSONObject) jsonResult.get(j);
if (temp.get("tree_node").equals(mesh_id)) {
flag = true;
temp.put("pathogen", temp.get("pathogen") + "<br>" + obj.get("pathogen"));
temp.put("genome", temp.get("genome") + "<br>" + obj.get("genome"));
temp.put("vfdb", temp.get("vfdb") + "<br>" + obj.get("vfdb"));
temp.put("gad", temp.get("gad") + "<br>" + obj.get("gad"));
temp.put("ctd", temp.get("ctd") + "<br>" + obj.get("ctd"));
temp.put("taxon_id", temp.get("taxon_id") + "<br>" + obj.get("taxon_id"));
jsonResult.set(j, temp);
}
}
if (!flag) {
jsonResult.add(obj);
}
}
}
}
catch (Exception ex) {
LOGGER.error(ex.getMessage(), ex);
}
}
jsonResult.writeJSONString(writer);
}
writer.close();
}
@SuppressWarnings("unchecked")
public static JSONObject encodeNodeJSONObject(ResultType rt) {
JSONObject obj = new JSONObject();
obj.putAll(rt);
obj.put("id", rt.get("tree_node"));
obj.put("node", rt.get("tree_node"));
obj.put("expanded", "true");
if (rt.get("leaf").equals("1")) {
obj.put("leaf", "true");
}
else {<|fim▁hole|>}<|fim▁end|> | obj.put("leaf", "false");
}
return obj;
} |
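    // Illustrative note (my addition): encodeNodeJSONObject() adapts one flat
    // ResultType row into the node shape the tree widget expects, e.g. a row
    // with tree_node="D003141" and leaf="1" (a hypothetical MeSH id) becomes
    //   {"id":"D003141","node":"D003141","expanded":"true","leaf":"true",...}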
<|file_name|>Sheet.java<|end_file_name|><|fim▁begin|>package com.lj.learning.dao;
// THIS CODE IS GENERATED BY greenDAO, EDIT ONLY INSIDE THE "KEEP"-SECTIONS
// KEEP INCLUDES - put your custom includes here
// KEEP INCLUDES END
/**
* Entity mapped to table "SHEET".
*/
public class Sheet implements java.io.Serializable {<|fim▁hole|> /** Not-null value. */
/**
     * Sheet tab name
*/
private String sheetname;
/** Not-null value. */
/**
     * Database table name
*/
private String databasename;
// KEEP FIELDS - put your custom fields here
// KEEP FIELDS END
public Sheet() {
}
public Sheet(Long id) {
this.id = id;
}
public Sheet(Long id, String sheetname, String databasename) {
this.id = id;
this.sheetname = sheetname;
this.databasename = databasename;
}
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
/** Not-null value. */
public String getSheetname() {
return sheetname;
}
/** Not-null value; ensure this value is available before it is saved to the database. */
public void setSheetname(String sheetname) {
this.sheetname = sheetname;
}
/** Not-null value. */
public String getDatabasename() {
return databasename;
}
/** Not-null value; ensure this value is available before it is saved to the database. */
public void setDatabasename(String databasename) {
this.databasename = databasename;
}
// KEEP METHODS - put your custom methods here
// KEEP METHODS END
}<|fim▁end|> |
private Long id; |
<|file_name|>transpose_op.cc<|end_file_name|><|fim▁begin|>/* Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#include "paddle/fluid/operators/transpose_op.h"
#include <string>
#include <vector>
#ifdef PADDLE_WITH_MKLDNN
#include "paddle/fluid/platform/mkldnn_helper.h"
#endif
namespace paddle {
namespace operators {
using framework::Tensor;
class TransposeOp : public framework::OperatorWithKernel {
public:
using framework::OperatorWithKernel::OperatorWithKernel;
void InferShape(framework::InferShapeContext *ctx) const override {
PADDLE_ENFORCE(ctx->HasInput("X"), "Input(X) should not be null");
PADDLE_ENFORCE(ctx->HasOutput("Out"), "Output(Out) should not be null");
auto x_dims = ctx->GetInputDim("X");
std::vector<int> axis = ctx->Attrs().Get<std::vector<int>>("axis");
size_t x_rank = x_dims.size();
size_t axis_size = axis.size();
PADDLE_ENFORCE_EQ(x_rank, axis_size,
"The input tensor's rank(%d) "
"should be equal to the axis's size(%d)",
x_rank, axis_size);
std::vector<int> count(axis_size, 0);
for (size_t i = 0; i < axis_size; i++) {
PADDLE_ENFORCE(
axis[i] < static_cast<int>(axis_size) && ++count[axis[i]] == 1,
"Each element of Attribute axis should be a unique value "
"range from 0 to (dims - 1), "
"where the dims is the axis's size");
}
framework::DDim out_dims(x_dims);
for (size_t i = 0; i < axis_size; i++) {
out_dims[i] = x_dims[axis[i]];
}
ctx->SetOutputDim("Out", out_dims);
}
protected:
framework::OpKernelType GetExpectedKernelType(
const framework::ExecutionContext &ctx) const override {
framework::LibraryType library_{framework::LibraryType::kPlain};
std::string data_format = ctx.Attr<std::string>("data_format");
framework::DataLayout layout_ = framework::StringToDataLayout(data_format);
#ifdef PADDLE_WITH_MKLDNN
if (library_ == framework::LibraryType::kPlain &&
platform::CanMKLDNNBeUsed(ctx)) {
library_ = framework::LibraryType::kMKLDNN;
layout_ = framework::DataLayout::kMKLDNN;
}
#endif
return framework::OpKernelType(ctx.Input<Tensor>("X")->type(),
ctx.GetPlace(), layout_, library_);
}
};
class TransposeOpMaker : public framework::OpProtoAndCheckerMaker {
public:
void Make() override {
AddInput(
"X",
"(Tensor) The input tensor, tensors with rank up to 6 are supported.");
AddOutput("Out", "(Tensor)The output tensor.");
AddAttr<std::vector<int>>(
"axis",
"(vector<int>) A list of values, and the size of the list should be "
"the same with the input tensor rank. This operator permutes the input "
"tensor's axes according to the values given.");
AddAttr<bool>("use_mkldnn",
"(bool, default false) Only used in mkldnn kernel")
.SetDefault(false);
AddAttr<std::string>(
"data_format",
"(string, default NCHW) Only used in "
"An optional string from: \"NHWC\", \"NCHW\". "
"Defaults to \"NHWC\". Specify the data format of the output data, "
"the input will be transformed automatically. ")
.SetDefault("AnyLayout");
AddComment(R"DOC(
Transpose Operator.
The input tensor will be permuted according to the axes given.
The behavior of this operator is similar to how `numpy.transpose` works.
- suppose the input `X` is a 2-D tensor:
$$
X = \begin{pmatrix}
0 &1 &2 \\
3 &4 &5
\end{pmatrix}$$
the given `axes` is: $[1, 0]$, and $Y$ = transpose($X$, axis)
then the output $Y$ is:
$$
Y = \begin{pmatrix}
0 &3 \\
1 &4 \\
2 &5
\end{pmatrix}$$
- Given a input tensor with shape $(N, C, H, W)$ and the `axes` is
$[0, 2, 3, 1]$, then shape of the output tensor will be: $(N, H, W, C)$.
)DOC");
}
};
class TransposeOpGrad : public framework::OperatorWithKernel {
public:
using framework::OperatorWithKernel::OperatorWithKernel;
void InferShape(framework::InferShapeContext *ctx) const override {
PADDLE_ENFORCE(ctx->HasInput("X"), "Input(X) should not be null");
PADDLE_ENFORCE(ctx->HasInput(framework::GradVarName("Out")),
"Input(Out@GRAD) should not be null");
auto x_dims = ctx->GetInputDim("X");
if (ctx->HasOutput(framework::GradVarName("X"))) {
ctx->SetOutputDim(framework::GradVarName("X"), x_dims);
}
}
protected:
framework::OpKernelType GetExpectedKernelType(
const framework::ExecutionContext &ctx) const override {
framework::LibraryType library_{framework::LibraryType::kPlain};
std::string data_format = ctx.Attr<std::string>("data_format");
framework::DataLayout layout_ = framework::StringToDataLayout(data_format);
#ifdef PADDLE_WITH_MKLDNN
if (library_ == framework::LibraryType::kPlain &&
platform::CanMKLDNNBeUsed(ctx)) {
library_ = framework::LibraryType::kMKLDNN;
layout_ = framework::DataLayout::kMKLDNN;
}
#endif
return framework::OpKernelType(
ctx.Input<framework::LoDTensor>(framework::GradVarName("Out"))->type(),
ctx.GetPlace(), layout_, library_);
}
};
// FIXME(zcd): transpose2 adds an intermediate output(XShape) based on
// transpose, the XShape is used to carry the shape and lod of X which
// will be used in transpose_grad, in this way, the framework can reuse
// the memory of X immediately the transpose2_op is finished.
// Considering compatibility issues, we could not fix transpose2_op
class Transpose2Op : public TransposeOp {
public:
Transpose2Op(const std::string &type,
const framework::VariableNameMap &inputs,
const framework::VariableNameMap &outputs,
const framework::AttributeMap &attrs)
: TransposeOp(type, inputs, outputs, attrs) {}
void InferShape(framework::InferShapeContext *ctx) const override {
TransposeOp::InferShape(ctx);
PADDLE_ENFORCE(ctx->HasOutput("XShape"),
"Output(XShape) should not be null");
const auto &in_dims = ctx->GetInputDim("X");
std::vector<int64_t> x_shape_dim(in_dims.size() + 1);
x_shape_dim[0] = 0;
for (int i = 0; i < in_dims.size(); ++i) {
x_shape_dim[i + 1] = in_dims[i];
}
ctx->SetOutputDim("XShape", framework::make_ddim(x_shape_dim));
ctx->ShareLoD("X", /*->*/ "XShape");
}
protected:
framework::OpKernelType GetExpectedKernelType(
const framework::ExecutionContext &ctx) const override {
framework::LibraryType library_{framework::LibraryType::kPlain};
std::string data_format = ctx.Attr<std::string>("data_format");
framework::DataLayout layout_ = framework::StringToDataLayout(data_format);
#ifdef PADDLE_WITH_MKLDNN<|fim▁hole|> }
#endif
return framework::OpKernelType(ctx.Input<Tensor>("X")->type(),
ctx.GetPlace(), layout_, library_);
}
};
class Transpose2OpMaker : public TransposeOpMaker {
public:
void Make() override {
TransposeOpMaker::Make();
AddOutput("XShape", "(Tensor)The output tensor.").AsIntermediate();
}
};
class Transpose2GradMaker : public framework::SingleGradOpDescMaker {
public:
using framework::SingleGradOpDescMaker::SingleGradOpDescMaker;
std::unique_ptr<framework::OpDesc> Apply() const override {
auto *grad_op = new framework::OpDesc();
grad_op->SetType("transpose2_grad");
grad_op->SetInput("XShape", Output("XShape"));
grad_op->SetInput(framework::GradVarName("Out"), OutputGrad("Out"));
grad_op->SetOutput(framework::GradVarName("X"), InputGrad("X"));
grad_op->SetAttrMap(Attrs());
return std::unique_ptr<framework::OpDesc>(grad_op);
}
};
class Transpose2OpGrad : public framework::OperatorWithKernel {
public:
using framework::OperatorWithKernel::OperatorWithKernel;
void InferShape(framework::InferShapeContext *ctx) const override {
PADDLE_ENFORCE(ctx->HasInput("XShape"), "Input(XShape) should not be null");
PADDLE_ENFORCE(ctx->HasInput(framework::GradVarName("Out")),
"Input(Out@GRAD) should not be null");
if (ctx->HasOutput(framework::GradVarName("X"))) {
auto xshape_dim = ctx->GetInputDim("XShape");
auto x_shape_dim =
framework::slice_ddim(xshape_dim, 1, xshape_dim.size());
ctx->SetOutputDim(framework::GradVarName("X"), x_shape_dim);
ctx->ShareLoD("XShape", framework::GradVarName("X"));
}
}
protected:
framework::OpKernelType GetExpectedKernelType(
const framework::ExecutionContext &ctx) const override {
framework::LibraryType library_{framework::LibraryType::kPlain};
std::string data_format = ctx.Attr<std::string>("data_format");
framework::DataLayout layout_ = framework::StringToDataLayout(data_format);
#ifdef PADDLE_WITH_MKLDNN
if (library_ == framework::LibraryType::kPlain &&
platform::CanMKLDNNBeUsed(ctx)) {
library_ = framework::LibraryType::kMKLDNN;
layout_ = framework::DataLayout::kMKLDNN;
}
#endif
return framework::OpKernelType(
ctx.Input<framework::LoDTensor>(framework::GradVarName("Out"))->type(),
ctx.GetPlace(), layout_, library_);
}
};
} // namespace operators
} // namespace paddle
namespace ops = paddle::operators;
REGISTER_OPERATOR(transpose, ops::TransposeOp, ops::TransposeOpMaker,
paddle::framework::DefaultGradOpDescMaker<true>);
REGISTER_OPERATOR(transpose_grad, ops::TransposeOpGrad);
REGISTER_OP_CPU_KERNEL(
transpose, ops::TransposeKernel<paddle::platform::CPUDeviceContext, float>,
ops::TransposeKernel<paddle::platform::CPUDeviceContext, double>);
REGISTER_OP_CPU_KERNEL(
transpose_grad,
ops::TransposeGradKernel<paddle::platform::CPUDeviceContext, float>,
ops::TransposeGradKernel<paddle::platform::CPUDeviceContext, double>);
REGISTER_OPERATOR(transpose2, ops::Transpose2Op, ops::Transpose2OpMaker,
ops::Transpose2GradMaker);
REGISTER_OPERATOR(transpose2_grad, ops::Transpose2OpGrad);
REGISTER_OP_CPU_KERNEL(
transpose2, ops::TransposeKernel<paddle::platform::CPUDeviceContext, float>,
ops::TransposeKernel<paddle::platform::CPUDeviceContext, double>);
REGISTER_OP_CPU_KERNEL(
transpose2_grad,
ops::TransposeGradKernel<paddle::platform::CPUDeviceContext, float>,
ops::TransposeGradKernel<paddle::platform::CPUDeviceContext, double>);<|fim▁end|> | if (library_ == framework::LibraryType::kPlain &&
platform::CanMKLDNNBeUsed(ctx)) {
library_ = framework::LibraryType::kMKLDNN;
layout_ = framework::DataLayout::kMKLDNN; |
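// Illustrative note (my addition): the shape rule implemented in
// TransposeOp::InferShape is out_dims[i] = x_dims[axis[i]], so with
// axis = [0, 2, 3, 1] an input of shape (N, C, H, W) yields (N, H, W, C),
// matching the DOC comment embedded above.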
<|file_name|>associated-types-projection-bound-in-supertraits.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>// run-pass
#![allow(unused_variables)]
// Test that we correctly handle projection bounds appearing in the
// supertrait list (and in conjunction with overloaded operators). In
// this case, the `Result=Self` binding in the supertrait listing of
// `Int` was being ignored.
trait Not {
type Result;
fn not(self) -> Self::Result;
}
trait Int: Not<Result=Self> + Sized {
fn count_ones(self) -> usize;
fn count_zeros(self) -> usize {
// neither works
let x: Self = self.not();
0
}
}
fn main() { }<|fim▁end|> | |
<|file_name|>p_roc.py<|end_file_name|><|fim▁begin|>"""Contains the drivers and interface code for pinball machines which use the Multimorphic R-ROC hardware controllers.
This code can be used with P-ROC driver boards, or with Stern SAM, Stern
Whitestar, Williams WPC, or Williams WPC95 driver boards.
Much of this code is from the P-ROC drivers section of the pyprocgame project,
written by Adam Preble and Gerry Stellenberg. It was originally released under
the MIT license and is released here under the MIT License.
More info on the P-ROC hardware platform: http://pinballcontrollers.com/
Original code source on which this module was based:
https://github.com/preble/pyprocgame
"""
from typing import Dict, List
from mpf.core.platform import DmdPlatform, DriverConfig, SwitchConfig, SegmentDisplaySoftwareFlashPlatform
from mpf.devices.segment_display.segment_display_text import ColoredSegmentDisplayText
from mpf.platforms.interfaces.dmd_platform import DmdPlatformInterface
from mpf.platforms.interfaces.segment_display_platform_interface import SegmentDisplaySoftwareFlashPlatformInterface
from mpf.platforms.p_roc_common import PDBConfig, PROCBasePlatform
from mpf.core.utility_functions import Util
from mpf.platforms.p_roc_devices import PROCDriver
MYPY = False
if MYPY: # pragma: no cover
from mpf.core.machine import MachineController # pylint: disable-msg=cyclic-import,unused-import
class PRocHardwarePlatform(PROCBasePlatform, DmdPlatform, SegmentDisplaySoftwareFlashPlatform):
"""Platform class for the P-ROC hardware controller.
Args:
----
machine: The MachineController instance.
"""
__slots__ = ["dmd", "alpha_display", "aux_port", "_use_extended_matrix",
"_use_first_eight_direct_inputs"]
def __init__(self, machine):
"""Initialise P-ROC."""
super().__init__(machine)
# validate config for p_roc
self.config = self.machine.config_validator.validate_config("p_roc", self.machine.config.get('p_roc', {}))
self._configure_device_logging_and_debug('P-Roc', self.config)
if self.config['driverboards']:
self.machine_type = self.pinproc.normalize_machine_type(self.config['driverboards'])
else:
self.machine_type = self.pinproc.normalize_machine_type(self.machine.config['hardware']['driverboards'])
self.dmd = None
self.alpha_display = None
self.aux_port = None
self._use_extended_matrix = False
self._use_first_eight_direct_inputs = False
async def connect(self):
"""Connect to the P-Roc."""
await super().connect()
self.aux_port = AuxPort(self)
self.aux_port.reset()
# Because PDBs can be configured in many different ways, we need to
# traverse the YAML settings to see how many PDBs are being used.
# Then we can configure the P-ROC appropriately to use those PDBs.
# Only then can we relate the YAML coil/light #'s to P-ROC numbers for
# the collections.
if self.machine_type == self.pinproc.MachineTypePDB:
self.debug_log("Configuring P-ROC for PDBs (P-ROC driver boards)")
self.pdbconfig = PDBConfig(self, self.machine.config, self.pinproc.DriverCount)
else:
self.debug_log("Configuring P-ROC for OEM driver boards")
def _get_default_subtype(self):
"""Return default subtype for P-Roc."""
return "matrix"
def __repr__(self):
"""Return string representation."""
return '<Platform.P-ROC>'
def get_info_string(self):
"""Dump infos about boards."""
infos = "Firmware Version: {} Firmware Revision: {} Hardware Board ID: {}\n".format(
self.version, self.revision, self.hardware_version)
return infos
@classmethod
def get_coil_config_section(cls):
"""Return coil config section."""
return "p_roc_coils"
def configure_driver(self, config: DriverConfig, number: str, platform_settings: dict):
"""Create a P-ROC driver.
Typically drivers are coils or flashers, but for the P-ROC this is
also used for matrix-based lights.
Args:
----
config: Dictionary of settings for the driver.
number: Number of this driver
platform_settings: Platform specific setting for this driver.
Returns a reference to the PROCDriver object which is the actual object
you can use to pulse(), patter(), enable(), etc.
"""
# todo need to add Aux Bus support
# todo need to add virtual driver support for driver counts > 256
# Find the P-ROC number for each driver. For P-ROC driver boards, the
# P-ROC number is specified via the Ax-By-C format. For OEM driver
# boards configured via driver numbers, libpinproc's decode() method
# can provide the number.
if self.machine_type == self.pinproc.MachineTypePDB:
proc_num = self.pdbconfig.get_proc_coil_number(str(number))
if proc_num == -1:
raise AssertionError("Driver {} cannot be controlled by the P-ROC. ".format(str(number)))
else:
proc_num = self.pinproc.decode(self.machine_type, str(number))
polarity = platform_settings.get("polarity", None)
driver = PROCDriver(proc_num, config, self, number, polarity)
self._late_init_futures.append(driver.initialise())
return driver
def configure_switch(self, number: str, config: SwitchConfig, platform_config: dict):
"""Configure a P-ROC switch.
Args:
----
number: String number of the switch to configure.
config: SwitchConfig settings.
platform_config: Platform specific settings.
Returns: A configured switch object.
"""
del platform_config
try:
if number.startswith("SD") and 0 <= int(number[2:]) <= 7:
self._use_first_eight_direct_inputs = True
_, y = number.split('/', 2)
if int(y) > 7:
self._use_extended_matrix = True
except ValueError:
pass
if self._use_extended_matrix and self._use_first_eight_direct_inputs:
raise AssertionError(
"P-Roc vannot use extended matrix and the first eight direct inputs at the same "
"time. Either only use SD8 to SD31 or only use matrix X/Y with Y <= 7. Offending "
"switch: {}".format(number))
if self.machine_type == self.pinproc.MachineTypePDB:
proc_num = self.pdbconfig.get_proc_switch_number(str(number))
if proc_num == -1:
raise AssertionError("Switch {} cannot be controlled by the P-ROC. ".format(str(number)))
else:
proc_num = self.pinproc.decode(self.machine_type, str(number))
return self._configure_switch(config, proc_num)
<|fim▁hole|> The P-ROC uses the following values for hw switch states:
1 - closed (debounced)
2 - open (debounced)
3 - closed (not debounced)
4 - open (not debounced)
"""
switch_states = await self.run_proc_cmd("switch_get_states")
states = {}
for switch, state in enumerate(switch_states):
states[switch] = bool(state in (1, 3))
return states
def configure_dmd(self):
"""Configure a hardware DMD connected to a classic P-ROC."""
self.dmd = PROCDMD(self, self.machine)
return self.dmd
async def configure_segment_display(self, number: str, display_size: int, platform_settings) \
-> "SegmentDisplaySoftwareFlashPlatformInterface":
"""Configure display."""
del platform_settings
del display_size
number_int = int(number)
        if not 0 <= number_int <= 3:
raise AssertionError("Number must be between 0 and 3 for p_roc segment display.")
if not self.alpha_display:
self.alpha_display = AuxAlphanumericDisplay(self, self.aux_port)
display = PRocAlphanumericDisplay(self.alpha_display, number_int)
self._handle_software_flash(display)
return display
def process_events(self, events):
"""Process events from the P-Roc."""
for event in events:
event_type = event['type']
event_value = event['value']
if event_type == self.pinproc.EventTypeDMDFrameDisplayed:
# ignore this for now
pass
elif event_type in (self.pinproc.EventTypeSwitchClosedDebounced,
self.pinproc.EventTypeSwitchClosedNondebounced):
self.machine.switch_controller.process_switch_by_num(
state=1, num=event_value, platform=self)
elif event_type in (self.pinproc.EventTypeSwitchOpenDebounced,
self.pinproc.EventTypeSwitchOpenNondebounced):
self.machine.switch_controller.process_switch_by_num(
state=0, num=event_value, platform=self)
else:
self.log.warning("Received unrecognized event from the P-ROC. "
"Type: %s, Value: %s", event_type, event_value)
class PROCDMD(DmdPlatformInterface):
"""Parent class for a physical DMD attached to a P-ROC.
Args:
----
platform: Reference to the MachineController's proc attribute.
machine: Reference to the MachineController
"""
__slots__ = ["machine", "platform"]
def __init__(self, platform, machine):
"""Set up DMD."""
self.platform = platform # type: PROCBasePlatform
self.machine = machine # type: MachineController
# dmd_timing defaults should be 250, 400, 180, 800
if self.machine.config['p_roc']['dmd_timing_cycles']:
dmd_timing = Util.string_to_event_list(
self.machine.config['p_roc']['dmd_timing_cycles'])
self.platform.run_proc_cmd_no_wait("dmd_update_config", dmd_timing)
def set_brightness(self, brightness: float):
"""Set brightness."""
# currently not supported. can be implemented using dmd_timing_cycles
assert brightness == 1.0
def update(self, data):
"""Update the DMD with a new frame.
Args:
----
data: A 4096-byte raw string.
"""
if len(data) == 4096:
self.platform.run_proc_cmd_no_wait("_dmd_send", data)
else:
self.machine.log.warning("Received DMD frame of length %s instead"
"of 4096. Discarding...", len(data))
class AuxPort:
"""Aux port on the P-Roc."""
__slots__ = ["platform", "_commands"]
def __init__(self, platform):
"""Initialise aux port."""
self.platform = platform
self._commands = []
def reset(self):
"""Reset aux port."""
commands = [self.platform.pinproc.aux_command_disable()]
for _ in range(1, 255):
commands += [self.platform.pinproc.aux_command_jump(0)]
self.platform.run_proc_cmd_no_wait("aux_send_commands", 0, commands)
def reserve_index(self):
"""Return index of next free command slot and reserve it."""
self._commands += [[]]
return len(self._commands) - 1
def update(self, index, commands):
"""Update command slot with command."""
self._commands[index] = commands
self._write_commands()
def _write_commands(self):
"""Write commands to hardware."""
# disable program
commands = [self.platform.pinproc.aux_command_disable()]
# build command list
for command_set in self._commands:
commands += command_set
self.platform.run_proc_cmd_no_wait("aux_send_commands", 0, commands)
# jump from slot 0 to slot 1. overwrites the disable
self.platform.run_proc_cmd_no_wait("aux_send_commands", 0, [self.platform.pinproc.aux_command_jump(1)])
class PRocAlphanumericDisplay(SegmentDisplaySoftwareFlashPlatformInterface):
"""Since AuxAlphanumericDisplay updates all four displays wrap it and set the correct offset."""
__slots__ = ["display"]
def __init__(self, display, index):
"""Initialise alpha numeric display."""
super().__init__(index)
self.display = display
def _set_text(self, text: ColoredSegmentDisplayText):
"""Set digits to display."""
# TODO: use DisplayCharacter and intern dots and commas
self.display.set_text(text.convert_to_str(), self.number)
class AuxAlphanumericDisplay:
"""An alpha numeric display connected to the aux port on the P-Roc."""
# Start at ASCII table offset 32: ' '
ascii_segments = [0x0000, # ' '
0x016a, # '!' Random Debris Character 1
0x3014, # '"' Random Debris Character 2
0x5d80, # '#' Random Debris Character 3
0x00a4, # '$' Random Debris Character 4
0x3270, # '%' Random Debris Character 5
0x4640, # '&' Random Debris Character 6
0x0200, # '''
0x1400, # '('
0x4100, # ')'
0x7f40, # '*'
0x2a40, # '+'
0x8080, # ','
0x0840, # '-'
0x8000, # '.'
0x4400, # '/'
0x003f, # '0'
0x0006, # '1'
0x085b, # '2'
0x084f, # '3'
0x0866, # '4'
0x086d, # '5'
0x087d, # '6'
0x0007, # '7'
0x087f, # '8'
0x086f, # '9'
0x0821, # ':' Random Debris Character 7
0x1004, # ';' Random Debris Character 8
0x1c00, # '<' Left Arrow
0x1386, # '=' Random Debris Character 9
0x4140, # '>' Right Arrow
0x0045, # '?' Random Debris Character 10
0x4820, # '@' Random Debris Character 11
0x0877, # 'A'
0x2a4f, # 'B'
0x0039, # 'C'
0x220f, # 'D'
0x0879, # 'E'
0x0871, # 'F'
0x083d, # 'G'
0x0876, # 'H'
0x2209, # 'I'
0x001e, # 'J'
0x1470, # 'K'
0x0038, # 'L'
0x0536, # 'M'
0x1136, # 'N'
0x003f, # 'O'
0x0873, # 'P'
0x103f, # 'Q'
0x1873, # 'R'
0x086d, # 'S'
0x2201, # 'T'
0x003e, # 'U'
0x4430, # 'V'
0x5036, # 'W'
0x5500, # 'X'
0x2500, # 'Y'
0x4409, # 'Z'
0x6004, # '[' Random Debris Character 12
0x6411, # '\' Random Debris Character 13
0x780a, # ']' Random Debris Character 14
0x093a, # '^' Random Debris Character 15
0x0008, # '_'
0x2220, # '`' Random Debris Character 16
0x0c56, # 'a' Broken Letter a
0x684e, # 'b' Broken Letter b
0x081c, # 'c' Broken Letter c
0x380e, # 'd' Broken Letter d
0x1178, # 'e' Broken Letter e
0x4831, # 'f' Broken Letter f
0x083d, # 'g' Broken Letter g NOT CREATED YET
0x0854, # 'h' Broken Letter h
0x2209, # 'i' Broken Letter i NOT CREATED YET
0x001e, # 'j' Broken Letter j NOT CREATED YET
0x1070, # 'k' Broken Letter k
0x0038, # 'l' Broken Letter l NOT CREATED YET
0x0536, # 'm' Broken Letter m NOT CREATED YET
0x1136, # 'n' Broken Letter n NOT CREATED YET
0x085c, # 'o' Broken Letter o
0x0873, # 'p' Broken Letter p NOT CREATED YET
0x103f, # 'q' Broken Letter q NOT CREATED YET
0x1c72, # 'r' Broken Letter r
0x116c, # 's' Broken Letter s
0x2120, # 't' Broken Letter t
0x003e, # 'u' Broken Letter u NOT CREATED YET
0x4430, # 'v' Broken Letter v NOT CREATED YET
0x5036, # 'w' Broken Letter w NOT CREATED YET
0x5500, # 'x' Broken Letter x NOT CREATED YET
0x2500, # 'y' Broken Letter y NOT CREATED YET
0x4409 # 'z' Broken Letter z NOT CREATED YET
]
strobes = [8, 9, 10, 11, 12]
full_intensity_delay = 350 # microseconds
inter_char_delay = 40 # microseconds
__slots__ = ["platform", "aux_controller", "aux_index", "texts"]
def __init__(self, platform, aux_controller):
"""Initialise the alphanumeric display."""
self.platform = platform
self.aux_controller = aux_controller
self.aux_index = aux_controller.reserve_index()
self.texts = [" "] * 4
def set_text(self, text, index):
"""Set text for display."""
if len(text) != 8:
text = text[0:8].rjust(8, ' ')
self.texts[index] = text
# build expected format
input_strings = [self.texts[0] + self.texts[1], self.texts[2] + self.texts[3]]
self.display(input_strings)
def display(self, input_strings, intensities=None):
"""Set display text."""
strings = []
if intensities is None:
intensities = [[1] * 16] * 2
# Make sure strings are at least 16 chars.
# Then convert each string to a list of chars.
for j in range(0, 2):
if len(input_strings[j]) < 16:
input_strings[j] += ' ' * (16 - len(input_strings[j]))
strings += [list(input_strings[j])]
        # Make sure intensities are 1 or less
for i in range(0, 16):
for j in range(0, 2):
if intensities[j][i] > 1:
intensities[j][i] = 1
commands = []
char_on_time = []
char_off_time = []
# Initialize a 2x16 array for segments value
segs = [[0] * 16 for _ in range(2)]
# Loop through each character
for i in range(0, 16):
            # Activate the character position (this goes to both displays)
commands += [self.platform.pinproc.aux_command_output_custom(i, 0, self.strobes[0], False, 0)]
for j in range(0, 2):
segs[j][i] = self.ascii_segments[ord(strings[j][i]) - 32]
# Check for commas or periods.
# If found, squeeze comma into previous character.
# No point checking the last character (plus, this avoids an
# indexing error by not checking i+1 on the 16th char.
if i < 15:
comma_dot = strings[j][i + 1]
if comma_dot in (".", ","):
segs[j][i] |= self.ascii_segments[ord(comma_dot) - 32]
strings[j].remove(comma_dot)
# Append a space to ensure there are enough chars.
strings[j].append(' ')
# character is 16 bits long, characters are loaded in 2 lots of 8 bits,
# for each display (4 enable lines total)
commands += [self.platform.pinproc.aux_command_output_custom(
segs[j][i] & 0xff, 0,
                    self.strobes[j * 2 + 1], False, 0)]  # first 8 bits of character data
commands += [self.platform.pinproc.aux_command_output_custom(
(segs[j][i] >> 8) & 0xff, 0,
                    self.strobes[j * 2 + 2], False, 0)]  # second 8 bits of character data
char_on_time += [intensities[j][i] * self.full_intensity_delay]
char_off_time += [self.inter_char_delay + (self.full_intensity_delay - char_on_time[j])]
if char_on_time[0] < char_on_time[1]:
first = 0
second = 1
else:
first = 1
second = 0
# Determine amount of time to leave the other char on after the
# first is off.
between_delay = char_on_time[second] - char_on_time[first]
# Not sure if the hardware will like a delay of 0
# Use 2 to be extra safe. 2 microseconds won't affect display.
if between_delay == 0:
between_delay = 2
# Delay until it's time to turn off the character with the lowest intensity
commands += [self.platform.pinproc.aux_command_delay(char_on_time[first])]
commands += [self.platform.pinproc.aux_command_output_custom(0, 0, self.strobes[first * 2 + 1], False, 0)]
commands += [self.platform.pinproc.aux_command_output_custom(0, 0, self.strobes[first * 2 + 2], False, 0)]
# Delay until it's time to turn off the other character.
commands += [self.platform.pinproc.aux_command_delay(between_delay)]
commands += [self.platform.pinproc.aux_command_output_custom(0, 0, self.strobes[second * 2 + 1], False, 0)]
commands += [self.platform.pinproc.aux_command_output_custom(0, 0, self.strobes[second * 2 + 2], False, 0)]
# Delay for the inter-digit delay.
commands += [self.platform.pinproc.aux_command_delay(char_off_time[second])]
# Send the new list of commands to the Aux port controller.
self.aux_controller.update(self.aux_index, commands)<|fim▁end|> | async def get_hw_switch_states(self) -> Dict[str, bool]:
"""Read in and set the initial switch state.
|
<|file_name|>trackModel.js<|end_file_name|><|fim▁begin|>var WO = WO || {};
WO.Track = Backbone.Model.extend({
urlRoot: '/api/tracks',
idAttribute: '_id',
defaults: {
notes: "",
title: 'Acoustic Piano',
isMuted: false,
solo: false,
octave: 4,
volume: 0.75,
instrument: "",
type: 'MIDI'
},
initialize : function(){
this.set('notes', []);
this.set('instrument', WO.InstrumentFactory( "Acoustic Piano", this.cid));
WO.instrumentKeyHandler.create(this.get('instrument'));
this.on('changeInstrument', function(instrumentName){this.changeInstrument(instrumentName);}, this);
},
genObjectId: (function() {
function s4() {
return Math.floor((1 + Math.random()) * 0x10000)
.toString(16)
.substring(1);
}
return function() {
return s4() + s4() + s4();
};
})(),
changeInstrument: function(instrumentName) {
var instType = {
'Acoustic Piano': 'MIDI',
'Audio File': 'Audio',
'Microphone': 'Microphone',
'Acoustic Guitar Steel': 'MIDI',
'Alto Sax': 'MIDI',
'Church Organ': 'MIDI',
'Distortion Guitar': 'MIDI',
'Electric Piano 1': 'MIDI',
'Flute': 'MIDI',
'Muted Trumpet': 'MIDI',
'Oboe': 'MIDI',
'Overdriven Guitar': 'MIDI',
'Pad 3 Polysynth': 'MIDI',
'Synth': 'MIDI',
'Synth Bass 1': 'MIDI',
'Synth Strings 2': 'MIDI',
'Viola': 'MIDI',
'Violin': 'MIDI',
'Xylophone': 'MIDI'
};
var previousInstrumentType = this.get('type');
WO.appView.unbindKeys();
this.set('type', instType[instrumentName]);
this.set('title', instrumentName);
if (this.get('type') === 'MIDI') {
this.set('instrument', WO.InstrumentFactory(instrumentName, this));
WO.instrumentKeyHandler.create(this.get('instrument'));
if (previousInstrumentType !== 'MIDI') {
$('.active-track .track-notes').html('');
this.set('mRender', new WO.MidiRender(this.cid + ' .track-notes'));
}
} else {
this.set('notes', []);
$('.active-track .track-notes').html('');
this.set('instrument', WO.InstrumentFactory(instrumentName, this));
}
},
saveTrack: function(){
var instrument = this.get('instrument');
var mRender = this.get('mRender');
this.set('instrument', '');
this.set('mRender', '');
var that = this;
var newlySaveTrack = $.when(that.save()).done(function(){
that.set('instrument', instrument);<|fim▁hole|> return newlySaveTrack;
}
});
// see what type of instrument the current selection is
// midi -> mic => remove svg , add mic
// midi -> audio => remove svg , add audio
// midi -> midi => null
// mic -> audio => remove mic , add audio
//  mic -> midi => remove mic, add svg
// audio -> mic => remove audio, add mic
// audio -> midi => remove audio, add svg
// keep notes only for midi change to hear different instruments.<|fim▁end|> | that.set('mRender', mRender);
return that;
}); |
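// Illustrative note (my addition): the transition table above boils down to
// rebuilding the note view whenever the instrument *type* changes, e.g.
//   if (previousInstrumentType !== this.get('type')) { /* swap svg/mic/audio UI */ }
// MIDI-to-MIDI switches keep the notes so different instruments can be auditioned.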
<|file_name|>solution.py<|end_file_name|><|fim▁begin|>class Solution(object):
def combinationSum(self, candidates, target):
"""
:type candidates: List[int]
:type target: int
:rtype: List[List[int]]
"""
def recurhelper(nums,res,path,target,start):
if target==0:
res.append(path)
return
if target<0:
return
if target>0:
for i in xrange(start,len(nums)):
if nums[i]<=target:
recurhelper(nums,res,path+[nums[i]],target-nums[i],i)
res=[]<|fim▁hole|> return res<|fim▁end|> | candidates.sort()
recurhelper(candidates,res,[],target,0) |
<|file_name|>main.cpp<|end_file_name|><|fim▁begin|>#include "GameCtrl.h"
char title[16][30]=
{
{0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0},
{0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0},
{0,1,1,1,1,1,0,1,0,0,0,1,0,0,1,1,0,0,0,1,0,0,1,0,1,1,1,1,1,0},
{0,1,0,0,0,0,0,1,1,0,0,1,0,1,0,0,1,0,0,1,0,1,0,0,1,0,0,0,0,0},
{0,1,1,1,1,1,0,1,0,1,0,1,0,1,1,1,1,0,0,1,1,0,0,0,1,1,1,1,1,0},
{0,0,0,0,0,1,0,1,0,0,1,1,0,1,0,0,1,0,0,1,0,1,0,0,1,0,0,0,0,0},
{0,1,1,1,1,1,0,1,0,0,0,1,0,1,0,0,1,0,0,1,0,0,1,0,1,1,1,1,1,0},
{0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0},
{0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0},
{0,0,1,1,1,1,1,0,0,0,1,1,0,0,0,1,1,0,0,0,1,1,0,0,1,1,1,1,1,0},
{0,0,1,0,0,0,0,0,0,1,0,0,1,0,0,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0},
{0,0,1,0,0,1,1,0,0,1,1,1,1,0,0,1,0,1,0,1,0,1,0,0,1,1,1,1,1,0},
{0,0,1,1,1,1,1,0,0,1,0,0,1,0,0,1,0,1,0,1,0,1,0,0,1,0,0,0,0,0},
{0,0,0,0,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,1,1,1,1,0},
{0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0},
{0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}
};
int main() {
int menu = 0;
for(int x=0;x<16;x++){
printf("\t");
for(int y=0;y<30;y++){
if(title[x][y] == 1){
printf("¡á");
}
if(title[x][y] == 0){
printf("¡à");
}
}
printf("\n");
}
printf("\n");
printf("\t\t\t\t<MENU>\n");
printf("\t\t\t 1.Snake Game(Manual Mode)\n");
printf("\t\t\t 2.See How to find Shortest Path\n");
printf("\t\t\t 3.See How to find Longest Path\n");
printf("\t\t\t 4.AI Hamiltonian Cycle\n");
printf("\t\t\t 5.AI Graph Search\n");
printf("\t\t\t 6.Exit\n");
printf("\t\t\t =>Input Menu Number : ");
scanf("%d",&menu);
auto game = GameCtrl::getInstance(0);
// Set FPS. Default is 60.0
game->setFPS(60.0);
// Set the interval time between each snake's movement. Default is 30 ms.
// To play classic snake game, set to 150 ms is perfect.
game->setMoveInterval(150);
<|fim▁hole|> // Set whether to record the snake's movements to file. Default is true.
// The movements will be written to a file named "movements.txt".
game->setRecordMovements(true);
// Set map's size(including boundaries). Default is 10*10. Minimum is 5*5.
game->setMapRow(10);
game->setMapCol(10);
if(menu==1){ //Snake Game
// Set whether to enable the snake AI. Default is true.
game->setEnableAI(false);
// Set whether to use a hamiltonian cycle to guide the AI. Default is true.
game->setEnableHamilton(true);
// Set whether to run the test program. Default is false.
// You can select different testing methods by modifying GameCtrl::test().
game->setRunTest(false);
}
else if(menu==2){ //Shortest Path
game->setEnableAI(false);
game->setEnableHamilton(true);
game->setRunTest(true);
game->setMapRow(20);
game->setMapCol(20);
}
else if(menu==3){ //Longest Path
auto game = GameCtrl::getInstance(1);
game->setEnableAI(false);
game->setEnableHamilton(true);
game->setRunTest(true);
game->setMapRow(20);
game->setMapCol(20);
}
else if(menu==4){ //Hamiltonian Cycle
game->setEnableAI(true);
game->setEnableHamilton(true);
game->setRunTest(false);
}
else if(menu==5){ //AI
game->setEnableAI(true);
game->setEnableHamilton(false);
game->setRunTest(false);
}
else
return 0;
return game->run();
}<|fim▁end|> | |
<|file_name|>vmovdqu8.rs<|end_file_name|><|fim▁begin|>use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;
use ::instruction_def::*;
use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
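// Each case below feeds run_test one VMOVDQU8 operand/mask/merge-mode combination
// and asserts the exact EVEX-encoded byte sequence the assembler must emit.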
fn vmovdqu8_1() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQU8, operand1: Some(Direct(XMM2)), operand2: Some(Direct(XMM3)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K2), broadcast: None }, &[98, 241, 127, 138, 111, 211], OperandSize::Dword)
}
fn vmovdqu8_2() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQU8, operand1: Some(Direct(XMM2)), operand2: Some(Indirect(EDX, Some(OperandSize::Xmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K3), broadcast: None }, &[98, 241, 127, 139, 111, 18], OperandSize::Dword)
}
fn vmovdqu8_3() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQU8, operand1: Some(Direct(XMM4)), operand2: Some(Direct(XMM8)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K6), broadcast: None }, &[98, 209, 127, 142, 111, 224], OperandSize::Qword)
}
fn vmovdqu8_4() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQU8, operand1: Some(Direct(XMM6)), operand2: Some(IndirectDisplaced(RSI, 756772570, Some(OperandSize::Xmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K3), broadcast: None }, &[98, 241, 127, 139, 111, 182, 218, 110, 27, 45], OperandSize::Qword)
}
fn vmovdqu8_5() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQU8, operand1: Some(Direct(YMM6)), operand2: Some(Direct(YMM0)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K4), broadcast: None }, &[98, 241, 127, 172, 111, 240], OperandSize::Dword)
}
fn vmovdqu8_6() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQU8, operand1: Some(Direct(YMM6)), operand2: Some(IndirectScaledIndexedDisplaced(EAX, ECX, Two, 1288004266, Some(OperandSize::Ymmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K2), broadcast: None }, &[98, 241, 127, 170, 111, 180, 72, 170, 98, 197, 76], OperandSize::Dword)
}
fn vmovdqu8_7() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQU8, operand1: Some(Direct(YMM27)), operand2: Some(Direct(YMM3)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K4), broadcast: None }, &[98, 97, 127, 172, 111, 219], OperandSize::Qword)
}
fn vmovdqu8_8() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQU8, operand1: Some(Direct(YMM13)), operand2: Some(IndirectDisplaced(RBX, 1332318920, Some(OperandSize::Ymmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K2), broadcast: None }, &[98, 113, 127, 170, 111, 171, 200, 146, 105, 79], OperandSize::Qword)
}
fn vmovdqu8_9() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQU8, operand1: Some(Direct(ZMM2)), operand2: Some(Direct(ZMM1)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K3), broadcast: None }, &[98, 241, 127, 203, 111, 209], OperandSize::Dword)
}
fn vmovdqu8_10() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQU8, operand1: Some(Direct(ZMM0)), operand2: Some(IndirectScaledIndexedDisplaced(ECX, EAX, Four, 1010909766, Some(OperandSize::Zmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K4), broadcast: None }, &[98, 241, 127, 204, 111, 132, 129, 70, 66, 65, 60], OperandSize::Dword)
}
fn vmovdqu8_11() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQU8, operand1: Some(Direct(ZMM8)), operand2: Some(Direct(ZMM21)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K2), broadcast: None }, &[98, 49, 127, 202, 111, 197], OperandSize::Qword)
}
fn vmovdqu8_12() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQU8, operand1: Some(Direct(ZMM14)), operand2: Some(IndirectDisplaced(RAX, 559064875, Some(OperandSize::Zmmword), None)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K4), broadcast: None }, &[98, 113, 127, 204, 111, 176, 43, 167, 82, 33], OperandSize::Qword)
}
fn vmovdqu8_13() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQU8, operand1: Some(Direct(XMM6)), operand2: Some(Direct(XMM7)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K2), broadcast: None }, &[98, 241, 127, 138, 111, 247], OperandSize::Dword)
}
fn vmovdqu8_14() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQU8, operand1: Some(IndirectScaledIndexed(EAX, EDI, Four, Some(OperandSize::Xmmword), None)), operand2: Some(Direct(XMM2)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[98, 241, 127, 8, 127, 20, 184], OperandSize::Dword)
}
fn vmovdqu8_15() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQU8, operand1: Some(Direct(XMM6)), operand2: Some(Direct(XMM31)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K6), broadcast: None }, &[98, 145, 127, 142, 111, 247], OperandSize::Qword)
}
fn vmovdqu8_16() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQU8, operand1: Some(IndirectDisplaced(RBX, 842925213, Some(OperandSize::Xmmword), None)), operand2: Some(Direct(XMM10)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[98, 113, 127, 8, 127, 147, 157, 4, 62, 50], OperandSize::Qword)
}
fn vmovdqu8_17() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQU8, operand1: Some(Direct(YMM2)), operand2: Some(Direct(YMM4)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K4), broadcast: None }, &[98, 241, 127, 172, 111, 212], OperandSize::Dword)
}
fn vmovdqu8_18() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQU8, operand1: Some(IndirectScaledIndexed(EBX, EBX, Eight, Some(OperandSize::Ymmword), None)), operand2: Some(Direct(YMM3)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[98, 241, 127, 40, 127, 28, 219], OperandSize::Dword)
}
fn vmovdqu8_19() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQU8, operand1: Some(Direct(YMM14)), operand2: Some(Direct(YMM21)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K6), broadcast: None }, &[98, 49, 127, 174, 111, 245], OperandSize::Qword)
}
fn vmovdqu8_20() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQU8, operand1: Some(IndirectScaledIndexed(RSI, RCX, Two, Some(OperandSize::Ymmword), None)), operand2: Some(Direct(YMM2)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[98, 241, 127, 40, 127, 20, 78], OperandSize::Qword)
}
fn vmovdqu8_21() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQU8, operand1: Some(Direct(ZMM1)), operand2: Some(Direct(ZMM7)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K5), broadcast: None }, &[98, 241, 127, 205, 111, 207], OperandSize::Dword)
}
fn vmovdqu8_22() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQU8, operand1: Some(Indirect(EDX, Some(OperandSize::Zmmword), None)), operand2: Some(Direct(ZMM7)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[98, 241, 127, 72, 127, 58], OperandSize::Dword)
}
fn vmovdqu8_23() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQU8, operand1: Some(Direct(ZMM26)), operand2: Some(Direct(ZMM18)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K4), broadcast: None }, &[98, 33, 127, 204, 111, 210], OperandSize::Qword)
}
<|fim▁hole|>}<|fim▁end|> | fn vmovdqu8_24() {
run_test(&Instruction { mnemonic: Mnemonic::VMOVDQU8, operand1: Some(Indirect(RDI, Some(OperandSize::Zmmword), None)), operand2: Some(Direct(ZMM19)), operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[98, 225, 127, 72, 127, 31], OperandSize::Qword) |
<|file_name|>lazytox.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
"""
Lazy 'tox' to quickly check if branch is up to PR standards.
This is NOT a tox replacement, only a quick check during development.
"""
import os
import asyncio
import sys
import re
import shlex
from collections import namedtuple
try:
from colorlog.escape_codes import escape_codes
except ImportError:
escape_codes = None
RE_ASCII = re.compile(r"\033\[[^m]*m")
Error = namedtuple("Error", ["file", "line", "col", "msg", "skip"])
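# Error.skip marks findings from tests/*, which are printed but never fail the lint gate.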
PASS = "green"
FAIL = "bold_red"
def printc(the_color, *args):
"""Color print helper."""
msg = " ".join(args)
if not escape_codes:
print(msg)
return
try:
print(escape_codes[the_color] + msg + escape_codes["reset"])
except KeyError:
print(msg)
raise ValueError("Invalid color {}".format(the_color))
def validate_requirements_ok():
"""Validate requirements, returns True of ok."""<|fim▁hole|> from gen_requirements_all import main as req_main
return req_main(True) == 0
async def read_stream(stream, display):
"""Read from stream line by line until EOF, display, and capture lines."""
output = []
while True:
line = await stream.readline()
if not line:
break
output.append(line)
display(line.decode()) # assume it doesn't block
return b"".join(output)
async def async_exec(*args, display=False):
"""Execute, return code & log."""
argsp = []
for arg in args:
if os.path.isfile(arg):
argsp.append("\\\n {}".format(shlex.quote(arg)))
else:
argsp.append(shlex.quote(arg))
printc("cyan", *argsp)
try:
kwargs = {
"loop": LOOP,
"stdout": asyncio.subprocess.PIPE,
"stderr": asyncio.subprocess.STDOUT,
}
if display:
kwargs["stderr"] = asyncio.subprocess.PIPE
proc = await asyncio.create_subprocess_exec(*args, **kwargs)
except FileNotFoundError as err:
printc(
FAIL,
"Could not execute {}. Did you install test requirements?".format(args[0]),
)
raise err
if not display:
        # Read stdout into the log.
stdout, _ = await proc.communicate()
else:
# read child's stdout/stderr concurrently (capture and display)
stdout, _ = await asyncio.gather(
read_stream(proc.stdout, sys.stdout.write),
read_stream(proc.stderr, sys.stderr.write),
)
exit_code = await proc.wait()
stdout = stdout.decode("utf-8")
return exit_code, stdout
async def git():
"""Exec git."""
if len(sys.argv) > 2 and sys.argv[1] == "--":
return sys.argv[2:]
_, log = await async_exec("git", "merge-base", "upstream/dev", "HEAD")
merge_base = log.splitlines()[0]
_, log = await async_exec("git", "diff", merge_base, "--name-only")
return log.splitlines()
async def pylint(files):
"""Exec pylint."""
_, log = await async_exec("pylint", "-f", "parseable", "--persistent=n", *files)
res = []
for line in log.splitlines():
line = line.split(":")
if len(line) < 3:
continue
_fn = line[0].replace("\\", "/")
res.append(Error(_fn, line[1], "", line[2].strip(), _fn.startswith("tests/")))
return res
async def flake8(files):
"""Exec flake8."""
_, log = await async_exec("flake8", "--doctests", *files)
res = []
for line in log.splitlines():
line = line.split(":")
if len(line) < 4:
continue
_fn = line[0].replace("\\", "/")
res.append(Error(_fn, line[1], line[2], line[3].strip(), False))
return res
async def lint(files):
"""Perform lint."""
files = [file for file in files if os.path.isfile(file)]
fres, pres = await asyncio.gather(flake8(files), pylint(files))
res = fres + pres
res.sort(key=lambda item: item.file)
if res:
print("Pylint & Flake8 errors:")
else:
printc(PASS, "Pylint and Flake8 passed")
lint_ok = True
for err in res:
err_msg = "{} {}:{} {}".format(err.file, err.line, err.col, err.msg)
# tests/* does not have to pass lint
if err.skip:
print(err_msg)
else:
printc(FAIL, err_msg)
lint_ok = False
return lint_ok
async def main():
"""Run the main loop."""
# Ensure we are in the homeassistant root
os.chdir(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
files = await git()
if not files:
print(
"No changed files found. Please ensure you have added your "
"changes with git add & git commit"
)
return
pyfile = re.compile(r".+\.py$")
pyfiles = [file for file in files if pyfile.match(file)]
print("=============================")
printc("bold", "CHANGED FILES:\n", "\n ".join(pyfiles))
print("=============================")
skip_lint = len(sys.argv) > 1 and sys.argv[1] == "--skiplint"
if skip_lint:
printc(FAIL, "LINT DISABLED")
elif not await lint(pyfiles):
printc(FAIL, "Please fix your lint issues before continuing")
return
test_files = set()
gen_req = False
for fname in pyfiles:
if fname.startswith("homeassistant/components/"):
gen_req = True # requirements script for components
# Find test files...
if fname.startswith("tests/"):
if "/test_" in fname and os.path.isfile(fname):
# All test helpers should be excluded
test_files.add(fname)
else:
parts = fname.split("/")
parts[0] = "tests"
if parts[-1] == "__init__.py":
parts[-1] = "test_init.py"
elif parts[-1] == "__main__.py":
parts[-1] = "test_main.py"
else:
parts[-1] = "test_" + parts[-1]
fname = "/".join(parts)
if os.path.isfile(fname):
test_files.add(fname)
if gen_req:
print("=============================")
if validate_requirements_ok():
printc(PASS, "script/gen_requirements.py passed")
else:
printc(FAIL, "Please run script/gen_requirements.py")
return
print("=============================")
if not test_files:
print("No test files identified, ideally you should run tox")
return
code, _ = await async_exec(
"pytest", "-vv", "--force-sugar", "--", *test_files, display=True
)
print("=============================")
if code == 0:
printc(PASS, "Yay! This will most likely pass tox")
else:
printc(FAIL, "Tests not passing")
if skip_lint:
printc(FAIL, "LINT DISABLED")
if __name__ == "__main__":
LOOP = (
asyncio.ProactorEventLoop()
if sys.platform == "win32"
else asyncio.get_event_loop()
)
try:
LOOP.run_until_complete(main())
except (FileNotFoundError, KeyboardInterrupt):
pass
finally:
LOOP.close()<|fim▁end|> | |
<|file_name|>template.go<|end_file_name|><|fim▁begin|>// Copyright 2014 The Gogs Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package base
import (
"container/list"
"encoding/json"
"fmt"
"html/template"
"runtime"
"strings"
"time"
"golang.org/x/net/html/charset"
"golang.org/x/text/transform"
"github.com/gogits/chardet"
"github.com/pecastro/gogs/modules/setting"
)
func Safe(raw string) template.HTML {
return template.HTML(raw)
}
func Str2html(raw string) template.HTML {
return template.HTML(Sanitizer.Sanitize(raw))
}
func Range(l int) []int {
return make([]int, l)
}
func List(l *list.List) chan interface{} {
e := l.Front()
c := make(chan interface{})
go func() {
for e != nil {
c <- e.Value
e = e.Next()
}
close(c)
}()
return c
}
func Sha1(str string) string {
return EncodeSha1(str)
}
func ShortSha(sha1 string) string {
if len(sha1) == 40 {
return sha1[:10]
}
return sha1
}
func DetectEncoding(content []byte) (string, error) {
detector := chardet.NewTextDetector()
result, err := detector.DetectBest(content)
if result.Charset != "UTF-8" && len(setting.AnsiCharset) > 0 {
return setting.AnsiCharset, err
}
return result.Charset, err
}
func ToUtf8WithErr(content []byte) (error, string) {
charsetLabel, err := DetectEncoding(content)
if err != nil {
return err, ""
}
if charsetLabel == "UTF-8" {
return nil, string(content)
}
encoding, _ := charset.Lookup(charsetLabel)
if encoding == nil {
return fmt.Errorf("unknow char decoder %s", charsetLabel), string(content)
}
result, n, err := transform.String(encoding.NewDecoder(), string(content))
// If there is an error, we concatenate the nicely decoded part and the
    // original leftover. This way we won't lose data.
if err != nil {
result = result + string(content[n:])
}
return err, result
}
func ToUtf8(content string) string {
_, res := ToUtf8WithErr([]byte(content))
return res
}
// RenderCommitMessage renders commit message with XSS-safe and special links.
func RenderCommitMessage(msg, urlPrefix string) template.HTML {
return template.HTML(string(RenderIssueIndexPattern([]byte(template.HTMLEscapeString(msg)), urlPrefix)))
}
var mailDomains = map[string]string{
"gmail.com": "gmail.com",
}
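// Mail2Domain (registered in TemplateFuncs below) consults this table for known
// providers and falls back to "mail." + domain for everything else.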
var TemplateFuncs template.FuncMap = map[string]interface{}{
"GoVer": func() string {
return strings.Title(runtime.Version())
},
"AppName": func() string {
return setting.AppName
},
"AppSubUrl": func() string {
return setting.AppSubUrl
},
"AppVer": func() string {
return setting.AppVer
},
"AppDomain": func() string {
return setting.Domain
},
"DisableGravatar": func() bool {
return setting.DisableGravatar
},
"LoadTimes": func(startTime time.Time) string {
return fmt.Sprint(time.Since(startTime).Nanoseconds()/1e6) + "ms"
},
"AvatarLink": AvatarLink,
"Safe": Safe,
"Str2html": Str2html,
"TimeSince": TimeSince,
"RawTimeSince": RawTimeSince,
"FileSize": FileSize,
"Subtract": Subtract,
"Add": func(a, b int) int {
return a + b
},
"ActionIcon": ActionIcon,
"DateFmtLong": func(t time.Time) string {
return t.Format(time.RFC1123Z)
},
"DateFmtShort": func(t time.Time) string {
return t.Format("Jan 02, 2006")
},
"List": List,
"Mail2Domain": func(mail string) string {
if !strings.Contains(mail, "@") {
return "try.gogs.io"
}
suffix := strings.SplitN(mail, "@", 2)[1]
domain, ok := mailDomains[suffix]
if !ok {
return "mail." + suffix
}
return domain
},
"SubStr": func(str string, start, length int) string {<|fim▁hole|> if len(str) == 0 {
return ""
}
end := start + length
if length == -1 {
end = len(str)
}
if len(str) < end {
return str
}
return str[start:end]
},
"DiffTypeToStr": DiffTypeToStr,
"DiffLineTypeToStr": DiffLineTypeToStr,
"Sha1": Sha1,
"ShortSha": ShortSha,
"Md5": EncodeMd5,
"ActionContent2Commits": ActionContent2Commits,
"Oauth2Icon": Oauth2Icon,
"Oauth2Name": Oauth2Name,
"ToUtf8": ToUtf8,
"EscapePound": func(str string) string {
return strings.Replace(strings.Replace(str, "%", "%25", -1), "#", "%23", -1)
},
"RenderCommitMessage": RenderCommitMessage,
}
type Actioner interface {
GetOpType() int
GetActUserName() string
GetActEmail() string
GetRepoUserName() string
GetRepoName() string
GetRepoPath() string
GetRepoLink() string
GetBranch() string
GetContent() string
GetCreate() time.Time
GetIssueInfos() []string
}
// ActionIcon accepts an int that represents an action operation type
// and returns an icon class name.
func ActionIcon(opType int) string {
switch opType {
case 1, 8: // Create, transfer repository.
return "repo"
case 5, 9: // Commit repository.
return "git-commit"
case 6: // Create issue.
return "issue-opened"
case 10: // Comment issue.
return "comment"
default:
return "invalid type"
}
}
type PushCommit struct {
Sha1 string
Message string
AuthorEmail string
AuthorName string
}
type PushCommits struct {
Len int
Commits []*PushCommit
CompareUrl string
}
func ActionContent2Commits(act Actioner) *PushCommits {
var push *PushCommits
if err := json.Unmarshal([]byte(act.GetContent()), &push); err != nil {
return nil
}
return push
}
func DiffTypeToStr(diffType int) string {
diffTypes := map[int]string{
1: "add", 2: "modify", 3: "del",
}
return diffTypes[diffType]
}
func DiffLineTypeToStr(diffType int) string {
switch diffType {
case 2:
return "add"
case 3:
return "del"
case 4:
return "tag"
}
return "same"
}
func Oauth2Icon(t int) string {
switch t {
case 1:
return "fa-github-square"
case 2:
return "fa-google-plus-square"
case 3:
return "fa-twitter-square"
case 4:
return "fa-qq"
case 5:
return "fa-weibo"
}
return ""
}
func Oauth2Name(t int) string {
switch t {
case 1:
return "GitHub"
case 2:
return "Google+"
case 3:
return "Twitter"
case 4:
return "腾讯 QQ"
case 5:
return "Weibo"
}
return ""
}<|fim▁end|> | |
<|file_name|>generic-crud-service.ts<|end_file_name|><|fim▁begin|>namespace shared {<|fim▁hole|> private mongoDb: any;
private collectionName: string
private mongo: any;
constructor(collectionName: string) {
this.mongo = require('mongodb');
this.collectionName = collectionName;
        this.bootstrap();
}
    private bootstrap(): void {
var Server = this.mongo.Server;
var MongoClient = this.mongo.MongoClient;
var appSettings = new AppSettings();
var that = this;
MongoClient.connect(appSettings.mongoDbDatabaseUrl, function (err, db) {
that.mongoDb = db;
if (err) {
console.log('CollectionService', "Error when try to connect to database", err);
db.close();
}
else {
console.log('CollectionService', "Connected to database");
db.collection('tst', { strict: true }, function (err, collection) {
if (err) {
db.createCollection('tst');
db.createCollection('warehouse');
db.createCollection('delivery');
db.createCollection('travel');
db.createCollection('customer');
db.createCollection('user');
}
});
}
});
}
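    // findById converts the string id into a Mongo ObjectID before querying and
    // resolves with null when no document matches.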
public findById(id: string, callback: (success: boolean, item?: T, msg?: string) => void): void {
var that = this;
this.mongoDb.collection(this.collectionName, function (err, collection) {
collection
.find({ _id: new that.mongo.ObjectID(id) })
.toArray()
.then((value: T[]) => {
if (value.length > 0)
callback(true, value[0]);
else
callback(true, null);
},
(reason: any) => {
callback(false, null, reason);
});
});
}
public find(query: any, callback: (success: boolean, lst?: T[], msg?: string) => void): void {
this.mongoDb.collection(this.collectionName, function (err, collection) {
collection
.find(query)
.toArray()
.then((value: T[]) => {
callback(true, value);
},
(reason: any) => {
callback(false, null, reason);
});
});
}
    public save(model: T, callback: (success: boolean, fetched: T, msg?: string) => void): void {
this.mongoDb.collection(this.collectionName, function (err, collection) {
collection.save(model, { safe: true }, function (err, result) {
if (err) {
console.error('CollectionService', 'Error when inserted data in collection', err);
callback(false, null, err);
} else {
console.log('CollectionService', 'InsertedModel: ' + JSON.stringify(model));
callback(true, model);
}
});
});
}
}
}<|fim▁end|> | export class GenericCrudService<T> {
private server: any; |
<|file_name|>SignalAbstract.js<|end_file_name|><|fim▁begin|>define(["require", "exports", "./SignalConnection"], function (require, exports, SignalConnection_1) {
"use strict";
var Task = (function () {
function Task(fn) {
this.next = null;
this.fn = fn;
}
return Task;
}());
var SignalAbstract = (function () {
function SignalAbstract(listener) {
if (listener === void 0) { listener = null; }
this._deferredTasks = null;
this.connect = this.connectImpl;
this._head = (listener != null) ? new SignalConnection_1.default(this, listener) : null;
}
SignalAbstract.prototype.hasListeners = function () {
return this._head != null;
};
SignalAbstract.prototype.connectImpl = function (listener, prioritize) {
if (prioritize === void 0) { prioritize = false; }
var _g = this;
var conn = new SignalConnection_1.default(this, listener);
if (this.dispatching()) {
this.defer(function () {
_g.listAdd(conn, prioritize);
});
}
else {
this.listAdd(conn, prioritize);
}
return conn;
};
SignalAbstract.prototype.disconnect = function (conn) {
var _g = this;
if (this.dispatching()) {
this.defer(function () {
_g.listRemove(conn);
});
}
else {
this.listRemove(conn);
}
};
SignalAbstract.prototype.defer = function (fn) {
var tail = null;
var p = this._deferredTasks;
while (p != null) {
tail = p;
p = p.next;
}
var task = new Task(fn);
if (tail != null) {
tail.next = task;
}
else {
this._deferredTasks = task;
}
};
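    // While the signal is emitting, _head is swapped for DISPATCHING_SENTINEL so
    // connect/disconnect requests are deferred until didEmit restores the list.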
SignalAbstract.prototype.willEmit = function () {
var snapshot = this._head;
this._head = SignalAbstract.DISPATCHING_SENTINEL;
return snapshot;
};
SignalAbstract.prototype.didEmit = function (head) {
this._head = head;
var snapshot = this._deferredTasks;
this._deferredTasks = null;
while (snapshot != null) {
snapshot.fn();
snapshot = snapshot.next;
}
};
SignalAbstract.prototype.dispatching = function () {
return this._head == SignalAbstract.DISPATCHING_SENTINEL;
};
SignalAbstract.prototype.listAdd = function (conn, prioritize) {
if (prioritize) {
conn._next = this._head;
this._head = conn;
}
else {
var tail = null;
var p = this._head;
while (p != null) {
tail = p;<|fim▁hole|> p = p._next;
}
if (tail != null) {
tail._next = conn;
}
else {
this._head = conn;
}
}
};
SignalAbstract.prototype.listRemove = function (conn) {
var prev = null;
var p = this._head;
while (p != null) {
if (p == conn) {
var next = p._next;
if (prev == null) {
this._head = next;
}
else {
prev._next = next;
}
return;
}
prev = p;
p = p._next;
}
};
SignalAbstract.DISPATCHING_SENTINEL = new SignalConnection_1.default(null, null);
return SignalAbstract;
}());
Object.defineProperty(exports, "__esModule", { value: true });
exports.default = SignalAbstract;
});<|fim▁end|> | |
<|file_name|>pci.cc<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2014 ARM Limited
* All rights reserved
*
* The license below extends only to copyright in the software and shall
* not be construed as granting a license to any other intellectual
* property including but not limited to intellectual property relating
* to a hardware implementation of the functionality of the software
* licensed hereunder. You may use the software subject to the license
* terms below provided that you ensure that this notice is replicated
* unmodified and in its entirety in all distributions of the software,
* modified or unmodified, in source code or in binary form.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met: redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer;
* redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution;
* neither the name of the copyright holders nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* Authors: Andreas Sandberg
*/
#include "debug/VIOPci.hh"
#include "dev/virtio/pci.hh"
#include "mem/packet_access.hh"
#include "params/PciVirtIO.hh"
PciVirtIO::PciVirtIO(const Params *params)
: PciDevice(params), queueNotify(0), interruptDeliveryPending(false),
vio(*params->vio), callbackKick(this)
{
// Override the subsystem ID with the device ID from VirtIO
config.subsystemID = htole(vio.deviceId);
BARSize[0] = BAR0_SIZE_BASE + vio.configSize;
vio.registerKickCallback(&callbackKick);
}
PciVirtIO::~PciVirtIO()
{
}
Tick
PciVirtIO::read(PacketPtr pkt)
{
const unsigned M5_VAR_USED size(pkt->getSize());
int bar;
Addr offset;
if (!getBAR(pkt->getAddr(), bar, offset))
panic("Invalid PCI memory access to unmapped memory.\n");
assert(bar == 0);
DPRINTF(VIOPci, "Reading offset 0x%x [len: %i]\n", offset, size);
// Forward device configuration writes to the device VirtIO model
if (offset >= OFF_VIO_DEVICE) {
vio.readConfig(pkt, offset - OFF_VIO_DEVICE);
return 0;
}
pkt->makeResponse();
switch(offset) {
case OFF_DEVICE_FEATURES:
DPRINTF(VIOPci, " DEVICE_FEATURES request\n");
assert(size == sizeof(uint32_t));
pkt->set<uint32_t>(vio.deviceFeatures);
break;
case OFF_GUEST_FEATURES:
DPRINTF(VIOPci, " GUEST_FEATURES request\n");
assert(size == sizeof(uint32_t));
pkt->set<uint32_t>(vio.getGuestFeatures());
break;
case OFF_QUEUE_ADDRESS:
DPRINTF(VIOPci, " QUEUE_ADDRESS request\n");
assert(size == sizeof(uint32_t));
pkt->set<uint32_t>(vio.getQueueAddress());
break;
case OFF_QUEUE_SIZE:
DPRINTF(VIOPci, " QUEUE_SIZE request\n");
assert(size == sizeof(uint16_t));
pkt->set<uint16_t>(vio.getQueueSize());
break;
case OFF_QUEUE_SELECT:
DPRINTF(VIOPci, " QUEUE_SELECT\n");
assert(size == sizeof(uint16_t));
pkt->set<uint16_t>(vio.getQueueSelect());
break;
case OFF_QUEUE_NOTIFY:
DPRINTF(VIOPci, " QUEUE_NOTIFY request\n");
assert(size == sizeof(uint16_t));
pkt->set<uint16_t>(queueNotify);
break;
case OFF_DEVICE_STATUS:
DPRINTF(VIOPci, " DEVICE_STATUS request\n");
assert(size == sizeof(uint8_t));
pkt->set<uint8_t>(vio.getDeviceStatus());
break;
case OFF_ISR_STATUS: {
DPRINTF(VIOPci, " ISR_STATUS\n");
assert(size == sizeof(uint8_t));
uint8_t isr_status(interruptDeliveryPending ? 1 : 0);
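        // Reading the ISR register acknowledges the interrupt: the pending flag
        // is cleared as a side effect of the read.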
interruptDeliveryPending = false;
pkt->set<uint8_t>(isr_status);
} break;
default:
panic("Unhandled read offset (0x%x)\n", offset);
}
return 0;
}
Tick
PciVirtIO::write(PacketPtr pkt)
{
const unsigned M5_VAR_USED size(pkt->getSize());
int bar;
Addr offset;
if (!getBAR(pkt->getAddr(), bar, offset))
panic("Invalid PCI memory access to unmapped memory.\n");
assert(bar == 0);
DPRINTF(VIOPci, "Writing offset 0x%x [len: %i]\n", offset, size);
// Forward device configuration writes to the device VirtIO model
if (offset >= OFF_VIO_DEVICE) {
vio.writeConfig(pkt, offset - OFF_VIO_DEVICE);
return 0;
}
pkt->makeResponse();
switch(offset) {
case OFF_DEVICE_FEATURES:
warn("Guest tried to write device features.");
break;
case OFF_GUEST_FEATURES:
DPRINTF(VIOPci, " WRITE GUEST_FEATURES request\n");
assert(size == sizeof(uint32_t));
vio.setGuestFeatures(pkt->get<uint32_t>());
break;
case OFF_QUEUE_ADDRESS:
DPRINTF(VIOPci, " WRITE QUEUE_ADDRESS\n");
assert(size == sizeof(uint32_t));
vio.setQueueAddress(pkt->get<uint32_t>());
break;
case OFF_QUEUE_SIZE:
panic("Guest tried to write queue size.");
break;
case OFF_QUEUE_SELECT:
DPRINTF(VIOPci, " WRITE QUEUE_SELECT\n");
assert(size == sizeof(uint16_t));
vio.setQueueSelect(pkt->get<uint16_t>());
break;
case OFF_QUEUE_NOTIFY:
DPRINTF(VIOPci, " WRITE QUEUE_NOTIFY\n");
assert(size == sizeof(uint16_t));
queueNotify = pkt->get<uint16_t>();
vio.onNotify(queueNotify);
break;
case OFF_DEVICE_STATUS: {
assert(size == sizeof(uint8_t));
uint8_t status(pkt->get<uint8_t>());
DPRINTF(VIOPci, "VirtIO set status: 0x%x\n", status);
vio.setDeviceStatus(status);
} break;
case OFF_ISR_STATUS:
warn("Guest tried to write ISR status.");
break;
default:
panic("Unhandled read offset (0x%x)\n", offset);
}
return 0;
}
void
PciVirtIO::kick()
{
DPRINTF(VIOPci, "kick(): Sending interrupt...\n");
interruptDeliveryPending = true;<|fim▁hole|>
PciVirtIO *
PciVirtIOParams::create()
{
return new PciVirtIO(this);
}<|fim▁end|> | intrPost();
} |
<|file_name|>values.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Helper types and traits for the handling of CSS values.
use app_units::Au;
use cssparser::{ParseError, Parser, Token, UnicodeRange, serialize_string};
use cssparser::ToCss as CssparserToCss;
use servo_arc::Arc;
use std::fmt::{self, Write};
/// Serialises a value according to its CSS representation.
///
/// This trait is implemented for `str` and its friends, serialising the string
/// contents as a CSS quoted string.
///
/// This trait is derivable with `#[derive(ToCss)]`, with the following behaviour:
/// * unit variants get serialised as the `snake-case` representation
/// of their name;
/// * unit variants whose name starts with "Moz" or "Webkit" are prepended
/// with a "-";
/// * if `#[css(comma)]` is found on a variant, its fields are separated by
/// commas, otherwise, by spaces;
/// * if `#[css(function)]` is found on a variant, the variant name gets
/// serialised like unit variants and its fields are surrounded by parentheses;
/// * if `#[css(iterable)]` is found on a function variant, that variant needs
/// to have a single member, and that member needs to be iterable. The
/// iterable will be serialized as the arguments for the function;
/// * an iterable field can also be annotated with `#[css(if_empty = "foo")]`
/// to print `"foo"` if the iterator is empty;
/// * if `#[css(dimension)]` is found on a variant, that variant needs
/// to have a single member. The variant would be serialized as a CSS
/// dimension token, like: <member><identifier>;
/// * if `#[css(skip)]` is found on a field, the `ToCss` call for that field
/// is skipped;
/// * if `#[css(skip_if = "function")]` is found on a field, the `ToCss` call
/// for that field is skipped if `function` returns true. This function is
/// provided the field as an argument;
/// * finally, one can put `#[css(derive_debug)]` on the whole type, to
/// implement `Debug` by a single call to `ToCss::to_css`.
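///
/// For illustration of the rules above, a hypothetical input (the names here
/// are invented):
///
/// ```ignore
/// #[derive(ToCss)]
/// enum Edge {
///     Top,     // serialises as "top"
///     MozLeft, // serialises as "-moz-left"
/// }
/// ```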
pub trait ToCss {
/// Serialize `self` in CSS syntax, writing to `dest`.
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result where W: Write;
/// Serialize `self` in CSS syntax and return a string.
///
/// (This is a convenience wrapper for `to_css` and probably should not be overridden.)
#[inline]
fn to_css_string(&self) -> String {
let mut s = String::new();
self.to_css(&mut CssWriter::new(&mut s)).unwrap();
s
}
}
impl<'a, T> ToCss for &'a T where T: ToCss + ?Sized {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result where W: Write {
(*self).to_css(dest)
}
}
impl ToCss for str {
#[inline]
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result where W: Write {
serialize_string(self, dest)
}
}
impl ToCss for String {
#[inline]
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result where W: Write {
serialize_string(self, dest)
}
}
impl<T> ToCss for Option<T>
where
T: ToCss,
{
#[inline]
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result where W: Write {
self.as_ref().map_or(Ok(()), |value| value.to_css(dest))
}
}
/// A writer tailored for serialising CSS.
///
/// Coupled with SequenceWriter, this allows callers to transparently handle
/// things like comma-separated values etc.
pub struct CssWriter<'w, W: 'w> {
inner: &'w mut W,
prefix: Option<&'static str>,
}
impl<'w, W> CssWriter<'w, W>
where
W: Write,
{
/// Creates a new `CssWriter`.
#[inline]
pub fn new(inner: &'w mut W) -> Self {
Self { inner, prefix: Some("") }
}
}
impl<'w, W> Write for CssWriter<'w, W>
where
W: Write,
{
#[inline]
fn write_str(&mut self, s: &str) -> fmt::Result {
if s.is_empty() {
return Ok(());
}
if let Some(prefix) = self.prefix.take() {
// We are going to write things, but first we need to write
// the prefix that was set by `SequenceWriter::item`.
if !prefix.is_empty() {
self.inner.write_str(prefix)?;
}
}
self.inner.write_str(s)
}
#[inline]
fn write_char(&mut self, c: char) -> fmt::Result {
if let Some(prefix) = self.prefix.take() {
// See comment in `write_str`.
if !prefix.is_empty() {
self.inner.write_str(prefix)?;
}
}
self.inner.write_char(c)
}
}
#[macro_export]
macro_rules! serialize_function {
($dest: expr, $name: ident($( $arg: expr, )+)) => {
serialize_function!($dest, $name($($arg),+))
};
($dest: expr, $name: ident($first_arg: expr $( , $arg: expr )*)) => {
{
$dest.write_str(concat!(stringify!($name), "("))?;
$first_arg.to_css($dest)?;
$(
$dest.write_str(", ")?;
$arg.to_css($dest)?;
)*
$dest.write_char(')')
}
}
}
/// Convenience wrapper to serialise CSS values separated by a given string.
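///
/// A minimal usage sketch (illustrative; the `?` operator assumes a
/// `fmt::Result`-returning context):
///
/// ```ignore
/// let mut out = String::new();
/// {
///     let mut css = CssWriter::new(&mut out);
///     let mut seq = SequenceWriter::new(&mut css, ", ");
///     seq.item(&1i32)?; // writes "1"
///     seq.item(&2i32)?; // writes ", 2"
/// }
/// assert_eq!(out, "1, 2");
/// ```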
pub struct SequenceWriter<'a, 'b: 'a, W: 'b> {
inner: &'a mut CssWriter<'b, W>,
separator: &'static str,
}
impl<'a, 'b, W> SequenceWriter<'a, 'b, W>
where
W: Write + 'b,
{
/// Create a new sequence writer.
#[inline]
pub fn new(inner: &'a mut CssWriter<'b, W>, separator: &'static str) -> Self {
if inner.prefix.is_none() {
// See comment in `item`.
inner.prefix = Some("");
}
Self { inner, separator }
}
#[inline]
fn write_item<F>(&mut self, f: F) -> fmt::Result
where
F: FnOnce(&mut CssWriter<'b, W>) -> fmt::Result
{
let old_prefix = self.inner.prefix;
if old_prefix.is_none() {
// If there is no prefix in the inner writer, a previous
// call to this method produced output, which means we need<|fim▁hole|> }
f(self.inner)?;
match (old_prefix, self.inner.prefix) {
(_, None) => {
// This call produced output and cleaned up after itself.
}
(None, Some(p)) => {
// Some previous call to `item` produced output,
// but this one did not, prefix should be the same as
// the one we set.
debug_assert_eq!(self.separator, p);
// We clean up here even though it's not necessary just
// to be able to do all these assertion checks.
self.inner.prefix = None;
}
(Some(old), Some(new)) => {
// No previous call to `item` produced output, and this one
// either.
debug_assert_eq!(old, new);
}
}
Ok(())
}
/// Serialises a CSS value, writing any separator as necessary.
///
/// The separator is never written before any `item` produces any output,
/// and is written in subsequent calls only if the `item` produces some
/// output on its own again. This lets us handle `Option<T>` fields by
/// just not printing anything on `None`.
#[inline]
pub fn item<T>(&mut self, item: &T) -> fmt::Result
where
T: ToCss,
{
self.write_item(|inner| item.to_css(inner))
}
/// Writes a string as-is (i.e. not escaped or wrapped in quotes)
/// with any separator as necessary.
///
/// See SequenceWriter::item.
#[inline]
pub fn raw_item(&mut self, item: &str) -> fmt::Result {
self.write_item(|inner| inner.write_str(item))
}
}
/// A wrapper type that implements `ToCss` by printing its inner field.
pub struct Verbatim<'a, T>(pub &'a T)
where
T: ?Sized + 'a;
impl<'a, T> ToCss for Verbatim<'a, T>
where
T: AsRef<str> + ?Sized + 'a,
{
#[inline]
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result where W: Write {
dest.write_str(self.0.as_ref())
}
}
/// Type used as the associated type in the `OneOrMoreSeparated` trait on a
/// type to indicate that a serialized list of elements of this type is
/// separated by commas.
pub struct Comma;
/// Type used as the associated type in the `OneOrMoreSeparated` trait on a
/// type to indicate that a serialized list of elements of this type is
/// separated by spaces.
pub struct Space;
/// Type used as the associated type in the `OneOrMoreSeparated` trait on a
/// type to indicate that a serialized list of elements of this type is
/// separated by commas, but spaces without commas are also allowed when
/// parsing.
pub struct CommaWithSpace;
/// A trait satisfied by the types corresponding to separators.
pub trait Separator {
/// The separator string that the satisfying separator type corresponds to.
fn separator() -> &'static str;
/// Parses a sequence of values separated by this separator.
///
/// The given closure is called repeatedly for each item in the sequence.
///
/// Successful results are accumulated in a vector.
///
    /// This method returns `Err(_)` the first time the closure returns an error, or if
/// the separators aren't correct.
fn parse<'i, 't, F, T, E>(
parser: &mut Parser<'i, 't>,
parse_one: F,
) -> Result<Vec<T>, ParseError<'i, E>>
where
F: for<'tt> FnMut(&mut Parser<'i, 'tt>) -> Result<T, ParseError<'i, E>>;
}
impl Separator for Comma {
fn separator() -> &'static str {
", "
}
fn parse<'i, 't, F, T, E>(
input: &mut Parser<'i, 't>,
parse_one: F,
) -> Result<Vec<T>, ParseError<'i, E>>
where
F: for<'tt> FnMut(&mut Parser<'i, 'tt>) -> Result<T, ParseError<'i, E>>
{
input.parse_comma_separated(parse_one)
}
}
impl Separator for Space {
fn separator() -> &'static str {
" "
}
fn parse<'i, 't, F, T, E>(
input: &mut Parser<'i, 't>,
mut parse_one: F,
) -> Result<Vec<T>, ParseError<'i, E>>
where
F: for<'tt> FnMut(&mut Parser<'i, 'tt>) -> Result<T, ParseError<'i, E>>
{
input.skip_whitespace(); // Unnecessary for correctness, but may help try() rewind less.
let mut results = vec![parse_one(input)?];
loop {
input.skip_whitespace(); // Unnecessary for correctness, but may help try() rewind less.
if let Ok(item) = input.try(&mut parse_one) {
results.push(item);
} else {
return Ok(results)
}
}
}
}
impl Separator for CommaWithSpace {
fn separator() -> &'static str {
", "
}
fn parse<'i, 't, F, T, E>(
input: &mut Parser<'i, 't>,
mut parse_one: F,
) -> Result<Vec<T>, ParseError<'i, E>>
where
F: for<'tt> FnMut(&mut Parser<'i, 'tt>) -> Result<T, ParseError<'i, E>>
{
input.skip_whitespace(); // Unnecessary for correctness, but may help try() rewind less.
let mut results = vec![parse_one(input)?];
loop {
input.skip_whitespace(); // Unnecessary for correctness, but may help try() rewind less.
let comma_location = input.current_source_location();
let comma = input.try(|i| i.expect_comma()).is_ok();
input.skip_whitespace(); // Unnecessary for correctness, but may help try() rewind less.
if let Ok(item) = input.try(&mut parse_one) {
results.push(item);
} else if comma {
return Err(comma_location.new_unexpected_token_error(Token::Comma));
} else {
break;
}
}
Ok(results)
}
}
/// Marker trait on T to automatically implement ToCss for Vec<T> when T's are
/// separated by some delimiter `delim`.
pub trait OneOrMoreSeparated {
/// Associated type indicating which separator is used.
type S: Separator;
}
impl OneOrMoreSeparated for UnicodeRange {
type S = Comma;
}
impl<T> ToCss for Vec<T> where T: ToCss + OneOrMoreSeparated {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result where W: Write {
let mut iter = self.iter();
iter.next().unwrap().to_css(dest)?;
for item in iter {
dest.write_str(<T as OneOrMoreSeparated>::S::separator())?;
item.to_css(dest)?;
}
Ok(())
}
}
impl<T> ToCss for Box<T> where T: ?Sized + ToCss {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where W: Write,
{
(**self).to_css(dest)
}
}
impl<T> ToCss for Arc<T> where T: ?Sized + ToCss {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where W: Write,
{
(**self).to_css(dest)
}
}
impl ToCss for Au {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result where W: Write {
self.to_f64_px().to_css(dest)?;
dest.write_str("px")
}
}
macro_rules! impl_to_css_for_predefined_type {
($name: ty) => {
impl<'a> ToCss for $name {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result where W: Write {
::cssparser::ToCss::to_css(self, dest)
}
}
};
}
impl_to_css_for_predefined_type!(f32);
impl_to_css_for_predefined_type!(i32);
impl_to_css_for_predefined_type!(u16);
impl_to_css_for_predefined_type!(u32);
impl_to_css_for_predefined_type!(::cssparser::Token<'a>);
impl_to_css_for_predefined_type!(::cssparser::RGBA);
impl_to_css_for_predefined_type!(::cssparser::Color);
impl_to_css_for_predefined_type!(::cssparser::UnicodeRange);
#[macro_export]
macro_rules! define_css_keyword_enum {
(pub enum $name:ident { $($variant:ident = $css:expr,)+ }) => {
#[allow(missing_docs)]
#[cfg_attr(feature = "servo", derive(Deserialize, Serialize))]
#[derive(Clone, Copy, Debug, Eq, Hash, MallocSizeOf, PartialEq)]
pub enum $name {
$($variant),+
}
impl $name {
/// Parse this property from a CSS input stream.
pub fn parse<'i, 't>(input: &mut ::cssparser::Parser<'i, 't>)
-> Result<$name, $crate::ParseError<'i>> {
use cssparser::Token;
let location = input.current_source_location();
match *input.next()? {
Token::Ident(ref ident) => {
Self::from_ident(ident).map_err(|()| {
location.new_unexpected_token_error(
Token::Ident(ident.clone()),
)
})
}
ref token => {
Err(location.new_unexpected_token_error(token.clone()))
}
}
}
/// Parse this property from an already-tokenized identifier.
pub fn from_ident(ident: &str) -> Result<$name, ()> {
match_ignore_ascii_case! { ident,
$($css => Ok($name::$variant),)+
_ => Err(())
}
}
}
impl $crate::ToCss for $name {
fn to_css<W>(
&self,
dest: &mut $crate::CssWriter<W>,
) -> ::std::fmt::Result
where
W: ::std::fmt::Write,
{
match *self {
$( $name::$variant => ::std::fmt::Write::write_str(dest, $css) ),+
}
}
}
};
}
/// Helper types for the handling of specified values.
pub mod specified {
use ParsingMode;
/// Whether to allow negative lengths or not.
#[repr(u8)]
#[derive(Clone, Copy, Debug, Eq, MallocSizeOf, PartialEq, PartialOrd)]
pub enum AllowedNumericType {
/// Allow all kind of numeric values.
All,
/// Allow only non-negative numeric values.
NonNegative,
/// Allow only numeric values greater or equal to 1.0.
AtLeastOne,
}
impl Default for AllowedNumericType {
#[inline]
fn default() -> Self {
AllowedNumericType::All
}
}
impl AllowedNumericType {
/// Whether the value fits the rules of this numeric type.
#[inline]
pub fn is_ok(&self, parsing_mode: ParsingMode, val: f32) -> bool {
if parsing_mode.allows_all_numeric_values() {
return true;
}
match *self {
AllowedNumericType::All => true,
AllowedNumericType::NonNegative => val >= 0.0,
AllowedNumericType::AtLeastOne => val >= 1.0,
}
}
/// Clamp the value following the rules of this numeric type.
#[inline]
pub fn clamp(&self, val: f32) -> f32 {
match *self {
AllowedNumericType::NonNegative if val < 0. => 0.,
AllowedNumericType::AtLeastOne if val < 1. => 1.,
_ => val,
}
}
}
}<|fim▁end|> | // to write the separator next time we produce output again.
self.inner.prefix = Some(self.separator); |
<|file_name|>redis.go<|end_file_name|><|fim▁begin|>package redis
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/azure"
"github.com/Azure/go-autorest/autorest/validation"
"net/http"
)
// Client is the REST API for Azure Redis Cache Service.
type Client struct {
BaseClient
}
// NewClient creates an instance of the Client client.
func NewClient(subscriptionID string) Client {
return NewClientWithBaseURI(DefaultBaseURI, subscriptionID)
}
// NewClientWithBaseURI creates an instance of the Client client.
func NewClientWithBaseURI(baseURI string, subscriptionID string) Client {
return Client{NewWithBaseURI(baseURI, subscriptionID)}
}
// CheckNameAvailability checks that the redis cache name is valid and is not already in use.
//
// parameters is parameters supplied to the CheckNameAvailability Redis operation.
func (client Client) CheckNameAvailability(ctx context.Context, parameters CheckNameAvailabilityParameters) (result autorest.Response, err error) {
req, err := client.CheckNameAvailabilityPreparer(ctx, parameters)
if err != nil {
err = autorest.NewErrorWithError(err, "redis.Client", "CheckNameAvailability", nil, "Failure preparing request")
return
}
resp, err := client.CheckNameAvailabilitySender(req)
if err != nil {
result.Response = resp
err = autorest.NewErrorWithError(err, "redis.Client", "CheckNameAvailability", resp, "Failure sending request")
return
}
result, err = client.CheckNameAvailabilityResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "redis.Client", "CheckNameAvailability", resp, "Failure responding to request")
}
return
}
// CheckNameAvailabilityPreparer prepares the CheckNameAvailability request.
func (client Client) CheckNameAvailabilityPreparer(ctx context.Context, parameters CheckNameAvailabilityParameters) (*http.Request, error) {
pathParameters := map[string]interface{}{
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2017-10-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsContentType("application/json; charset=utf-8"),
autorest.AsPost(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/providers/Microsoft.Cache/CheckNameAvailability", pathParameters),
autorest.WithJSON(parameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// CheckNameAvailabilitySender sends the CheckNameAvailability request. The method will close the
// http.Response Body if it receives an error.
func (client Client) CheckNameAvailabilitySender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
}
// CheckNameAvailabilityResponder handles the response to the CheckNameAvailability request. The method always
// closes the http.Response Body.
func (client Client) CheckNameAvailabilityResponder(resp *http.Response) (result autorest.Response, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByClosing())
result.Response = resp
return
}
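// Every operation in this client follows the same go-autorest pipeline: a
// Preparer builds the http.Request, a Sender executes it (decorated with retry
// and registration policies), and a Responder validates the status code and
// unmarshals or closes the response body.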
// Create create or replace (overwrite/recreate, with potential downtime) an existing Redis cache.
//
// resourceGroupName is the name of the resource group. name is the name of the Redis cache. parameters is
// parameters supplied to the Create Redis operation.
func (client Client) Create(ctx context.Context, resourceGroupName string, name string, parameters CreateParameters) (result CreateFuture, err error) {
if err := validation.Validate([]validation.Validation{
{TargetValue: parameters,
Constraints: []validation.Constraint{{Target: "parameters.CreateProperties", Name: validation.Null, Rule: true,
Chain: []validation.Constraint{{Target: "parameters.CreateProperties.Sku", Name: validation.Null, Rule: true,
Chain: []validation.Constraint{{Target: "parameters.CreateProperties.Sku.Capacity", Name: validation.Null, Rule: true, Chain: nil}}},
{Target: "parameters.CreateProperties.SubnetID", Name: validation.Null, Rule: false,
Chain: []validation.Constraint{{Target: "parameters.CreateProperties.SubnetID", Name: validation.Pattern, Rule: `^/subscriptions/[^/]*/resourceGroups/[^/]*/providers/Microsoft.(ClassicNetwork|Network)/virtualNetworks/[^/]*/subnets/[^/]*$`, Chain: nil}}},
{Target: "parameters.CreateProperties.StaticIP", Name: validation.Null, Rule: false,
Chain: []validation.Constraint{{Target: "parameters.CreateProperties.StaticIP", Name: validation.Pattern, Rule: `^\d+\.\d+\.\d+\.\d+$`, Chain: nil}}},
}},
{Target: "parameters.Location", Name: validation.Null, Rule: true, Chain: nil}}}}); err != nil {
return result, validation.NewError("redis.Client", "Create", err.Error())
}
req, err := client.CreatePreparer(ctx, resourceGroupName, name, parameters)
if err != nil {
err = autorest.NewErrorWithError(err, "redis.Client", "Create", nil, "Failure preparing request")
return
}
result, err = client.CreateSender(req)
if err != nil {
err = autorest.NewErrorWithError(err, "redis.Client", "Create", result.Response(), "Failure sending request")
return
}
return
}
// CreatePreparer prepares the Create request.
func (client Client) CreatePreparer(ctx context.Context, resourceGroupName string, name string, parameters CreateParameters) (*http.Request, error) {
pathParameters := map[string]interface{}{
"name": autorest.Encode("path", name),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2017-10-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsContentType("application/json; charset=utf-8"),
autorest.AsPut(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cache/Redis/{name}", pathParameters),
autorest.WithJSON(parameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// CreateSender sends the Create request. The method will close the
// http.Response Body if it receives an error.
func (client Client) CreateSender(req *http.Request) (future CreateFuture, err error) {
sender := autorest.DecorateSender(client, azure.DoRetryWithRegistration(client.Client))
future.Future = azure.NewFuture(req)
future.req = req
_, err = future.Done(sender)
if err != nil {
return
}
err = autorest.Respond(future.Response(),
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated))
return
}
// CreateResponder handles the response to the Create request. The method always
// closes the http.Response Body.
func (client Client) CreateResponder(resp *http.Response) (result ResourceType, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusCreated),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// Delete deletes a Redis cache.
//
// resourceGroupName is the name of the resource group. name is the name of the Redis cache.
func (client Client) Delete(ctx context.Context, resourceGroupName string, name string) (result DeleteFuture, err error) {
req, err := client.DeletePreparer(ctx, resourceGroupName, name)
if err != nil {
err = autorest.NewErrorWithError(err, "redis.Client", "Delete", nil, "Failure preparing request")
return
}
result, err = client.DeleteSender(req)
if err != nil {
err = autorest.NewErrorWithError(err, "redis.Client", "Delete", result.Response(), "Failure sending request")
return
}
return
}
// DeletePreparer prepares the Delete request.
func (client Client) DeletePreparer(ctx context.Context, resourceGroupName string, name string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"name": autorest.Encode("path", name),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2017-10-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsDelete(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cache/Redis/{name}", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// DeleteSender sends the Delete request. The method will close the
// http.Response Body if it receives an error.
func (client Client) DeleteSender(req *http.Request) (future DeleteFuture, err error) {
sender := autorest.DecorateSender(client, azure.DoRetryWithRegistration(client.Client))
future.Future = azure.NewFuture(req)
future.req = req
_, err = future.Done(sender)
if err != nil {
return
}
err = autorest.Respond(future.Response(),
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusNoContent))
return
}
// DeleteResponder handles the response to the Delete request. The method always
// closes the http.Response Body.
func (client Client) DeleteResponder(resp *http.Response) (result autorest.Response, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusNoContent),
autorest.ByClosing())
result.Response = resp
return
}
// ExportData export data from the redis cache to blobs in a container.
//
// resourceGroupName is the name of the resource group. name is the name of the Redis cache. parameters is
// parameters for Redis export operation.
func (client Client) ExportData(ctx context.Context, resourceGroupName string, name string, parameters ExportRDBParameters) (result ExportDataFuture, err error) {
if err := validation.Validate([]validation.Validation{
{TargetValue: parameters,
Constraints: []validation.Constraint{{Target: "parameters.Prefix", Name: validation.Null, Rule: true, Chain: nil},
{Target: "parameters.Container", Name: validation.Null, Rule: true, Chain: nil}}}}); err != nil {
return result, validation.NewError("redis.Client", "ExportData", err.Error())
}
req, err := client.ExportDataPreparer(ctx, resourceGroupName, name, parameters)
if err != nil {
err = autorest.NewErrorWithError(err, "redis.Client", "ExportData", nil, "Failure preparing request")
return
}
result, err = client.ExportDataSender(req)
if err != nil {
err = autorest.NewErrorWithError(err, "redis.Client", "ExportData", result.Response(), "Failure sending request")
return
}
return
}
// ExportDataPreparer prepares the ExportData request.
func (client Client) ExportDataPreparer(ctx context.Context, resourceGroupName string, name string, parameters ExportRDBParameters) (*http.Request, error) {
pathParameters := map[string]interface{}{
"name": autorest.Encode("path", name),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2017-10-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsContentType("application/json; charset=utf-8"),
autorest.AsPost(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cache/Redis/{name}/export", pathParameters),
autorest.WithJSON(parameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ExportDataSender sends the ExportData request. The method will close the
// http.Response Body if it receives an error.
func (client Client) ExportDataSender(req *http.Request) (future ExportDataFuture, err error) {
sender := autorest.DecorateSender(client, azure.DoRetryWithRegistration(client.Client))
future.Future = azure.NewFuture(req)
future.req = req
_, err = future.Done(sender)
if err != nil {
return
}
err = autorest.Respond(future.Response(),
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted, http.StatusNoContent))
return
}
// ExportDataResponder handles the response to the ExportData request. The method always
// closes the http.Response Body.
func (client Client) ExportDataResponder(resp *http.Response) (result autorest.Response, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted, http.StatusNoContent),
autorest.ByClosing())
result.Response = resp
return
}
// ForceReboot reboots the specified Redis node(s). This operation requires write permission to the cache resource. There
// can be potential data loss.
//
// resourceGroupName is the name of the resource group. name is the name of the Redis cache. parameters is
// specifies which Redis node(s) to reboot.
func (client Client) ForceReboot(ctx context.Context, resourceGroupName string, name string, parameters RebootParameters) (result ForceRebootResponse, err error) {
req, err := client.ForceRebootPreparer(ctx, resourceGroupName, name, parameters)
if err != nil {
err = autorest.NewErrorWithError(err, "redis.Client", "ForceReboot", nil, "Failure preparing request")
return
}
resp, err := client.ForceRebootSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "redis.Client", "ForceReboot", resp, "Failure sending request")
return
}
result, err = client.ForceRebootResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "redis.Client", "ForceReboot", resp, "Failure responding to request")
}
return
}
// ForceRebootPreparer prepares the ForceReboot request.
func (client Client) ForceRebootPreparer(ctx context.Context, resourceGroupName string, name string, parameters RebootParameters) (*http.Request, error) {
pathParameters := map[string]interface{}{
"name": autorest.Encode("path", name),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2017-10-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsContentType("application/json; charset=utf-8"),
autorest.AsPost(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cache/Redis/{name}/forceReboot", pathParameters),
autorest.WithJSON(parameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ForceRebootSender sends the ForceReboot request. The method will close the
// http.Response Body if it receives an error.
func (client Client) ForceRebootSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
}
// ForceRebootResponder handles the response to the ForceReboot request. The method always
// closes the http.Response Body.
func (client Client) ForceRebootResponder(resp *http.Response) (result ForceRebootResponse, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// Get gets a Redis cache (resource description).
//
// resourceGroupName is the name of the resource group. name is the name of the Redis cache.
func (client Client) Get(ctx context.Context, resourceGroupName string, name string) (result ResourceType, err error) {
req, err := client.GetPreparer(ctx, resourceGroupName, name)
if err != nil {
err = autorest.NewErrorWithError(err, "redis.Client", "Get", nil, "Failure preparing request")
return
}
resp, err := client.GetSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "redis.Client", "Get", resp, "Failure sending request")
return
}
result, err = client.GetResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "redis.Client", "Get", resp, "Failure responding to request")
}
return
}
// GetPreparer prepares the Get request.
func (client Client) GetPreparer(ctx context.Context, resourceGroupName string, name string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"name": autorest.Encode("path", name),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2017-10-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cache/Redis/{name}", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error.
func (client Client) GetSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
}
// GetResponder handles the response to the Get request. The method always
// closes the http.Response Body.
func (client Client) GetResponder(resp *http.Response) (result ResourceType, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
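// Illustrative use of the Get round trip (a sketch only; assumes an
// authorized Client `c` with SubscriptionID set — caller code like this is
// not part of the generated surface):
//
//	cache, err := c.Get(ctx, "myResourceGroup", "myCache")
//	if err == nil && cache.Name != nil {
//		fmt.Println(*cache.Name)
//	}
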
// ImportData imports data into a Redis cache.
//
// resourceGroupName is the name of the resource group. name is the name of the Redis cache. parameters is
// parameters for Redis import operation.
func (client Client) ImportData(ctx context.Context, resourceGroupName string, name string, parameters ImportRDBParameters) (result ImportDataFuture, err error) {
if err := validation.Validate([]validation.Validation{
{TargetValue: parameters,
Constraints: []validation.Constraint{{Target: "parameters.Files", Name: validation.Null, Rule: true, Chain: nil}}}}); err != nil {
return result, validation.NewError("redis.Client", "ImportData", err.Error())
}
req, err := client.ImportDataPreparer(ctx, resourceGroupName, name, parameters)
if err != nil {
err = autorest.NewErrorWithError(err, "redis.Client", "ImportData", nil, "Failure preparing request")
return
}
result, err = client.ImportDataSender(req)
if err != nil {
err = autorest.NewErrorWithError(err, "redis.Client", "ImportData", result.Response(), "Failure sending request")
return
}
return
}
// ImportDataPreparer prepares the ImportData request.
func (client Client) ImportDataPreparer(ctx context.Context, resourceGroupName string, name string, parameters ImportRDBParameters) (*http.Request, error) {
pathParameters := map[string]interface{}{
"name": autorest.Encode("path", name),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2017-10-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsContentType("application/json; charset=utf-8"),
autorest.AsPost(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cache/Redis/{name}/import", pathParameters),
autorest.WithJSON(parameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ImportDataSender sends the ImportData request. The method will close the
// http.Response Body if it receives an error.
func (client Client) ImportDataSender(req *http.Request) (future ImportDataFuture, err error) {
sender := autorest.DecorateSender(client, azure.DoRetryWithRegistration(client.Client))
future.Future = azure.NewFuture(req)
future.req = req
_, err = future.Done(sender)
if err != nil {
return
}
err = autorest.Respond(future.Response(),
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted, http.StatusNoContent))
return
}
// ImportDataResponder handles the response to the ImportData request. The method always
// closes the http.Response Body.
func (client Client) ImportDataResponder(resp *http.Response) (result autorest.Response, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted, http.StatusNoContent),
autorest.ByClosing())
result.Response = resp
return
}
// List gets all Redis caches in the specified subscription.
func (client Client) List(ctx context.Context) (result ListResultPage, err error) {
result.fn = client.listNextResults
req, err := client.ListPreparer(ctx)
if err != nil {
err = autorest.NewErrorWithError(err, "redis.Client", "List", nil, "Failure preparing request")
return
}<|fim▁hole|> result.lr.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "redis.Client", "List", resp, "Failure sending request")
return
}
result.lr, err = client.ListResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "redis.Client", "List", resp, "Failure responding to request")
}
return
}
// ListPreparer prepares the List request.
func (client Client) ListPreparer(ctx context.Context) (*http.Request, error) {
pathParameters := map[string]interface{}{
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2017-10-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/providers/Microsoft.Cache/Redis", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ListSender sends the List request. The method will close the
// http.Response Body if it receives an error.
func (client Client) ListSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
}
// ListResponder handles the response to the List request. The method always
// closes the http.Response Body.
func (client Client) ListResponder(resp *http.Response) (result ListResult, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// listNextResults retrieves the next set of results, if any.
func (client Client) listNextResults(lastResults ListResult) (result ListResult, err error) {
req, err := lastResults.listResultPreparer()
if err != nil {
return result, autorest.NewErrorWithError(err, "redis.Client", "listNextResults", nil, "Failure preparing next results request")
}
if req == nil {
return
}
resp, err := client.ListSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
return result, autorest.NewErrorWithError(err, "redis.Client", "listNextResults", resp, "Failure sending next results request")
}
result, err = client.ListResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "redis.Client", "listNextResults", resp, "Failure responding to next results request")
}
return
}
// ListComplete enumerates all values, automatically crossing page boundaries as required.
func (client Client) ListComplete(ctx context.Context) (result ListResultIterator, err error) {
result.page, err = client.List(ctx)
return
}
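// Illustrative pagination sketch (assumes the generated iterator exposes the
// usual autorest NotDone/Next/Value methods; the caller code is hypothetical):
//
//	it, err := c.ListComplete(ctx)
//	for err == nil && it.NotDone() {
//		_ = it.Value()
//		err = it.Next()
//	}
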
// ListByResourceGroup lists all Redis caches in a resource group.
//
// resourceGroupName is the name of the resource group.
func (client Client) ListByResourceGroup(ctx context.Context, resourceGroupName string) (result ListResultPage, err error) {
result.fn = client.listByResourceGroupNextResults
req, err := client.ListByResourceGroupPreparer(ctx, resourceGroupName)
if err != nil {
err = autorest.NewErrorWithError(err, "redis.Client", "ListByResourceGroup", nil, "Failure preparing request")
return
}
resp, err := client.ListByResourceGroupSender(req)
if err != nil {
result.lr.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "redis.Client", "ListByResourceGroup", resp, "Failure sending request")
return
}
result.lr, err = client.ListByResourceGroupResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "redis.Client", "ListByResourceGroup", resp, "Failure responding to request")
}
return
}
// ListByResourceGroupPreparer prepares the ListByResourceGroup request.
func (client Client) ListByResourceGroupPreparer(ctx context.Context, resourceGroupName string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2017-10-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cache/Redis", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ListByResourceGroupSender sends the ListByResourceGroup request. The method will close the
// http.Response Body if it receives an error.
func (client Client) ListByResourceGroupSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
}
// ListByResourceGroupResponder handles the response to the ListByResourceGroup request. The method always
// closes the http.Response Body.
func (client Client) ListByResourceGroupResponder(resp *http.Response) (result ListResult, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// listByResourceGroupNextResults retrieves the next set of results, if any.
func (client Client) listByResourceGroupNextResults(lastResults ListResult) (result ListResult, err error) {
req, err := lastResults.listResultPreparer()
if err != nil {
return result, autorest.NewErrorWithError(err, "redis.Client", "listByResourceGroupNextResults", nil, "Failure preparing next results request")
}
if req == nil {
return
}
resp, err := client.ListByResourceGroupSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
return result, autorest.NewErrorWithError(err, "redis.Client", "listByResourceGroupNextResults", resp, "Failure sending next results request")
}
result, err = client.ListByResourceGroupResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "redis.Client", "listByResourceGroupNextResults", resp, "Failure responding to next results request")
}
return
}
// ListByResourceGroupComplete enumerates all values, automatically crossing page boundaries as required.
func (client Client) ListByResourceGroupComplete(ctx context.Context, resourceGroupName string) (result ListResultIterator, err error) {
result.page, err = client.ListByResourceGroup(ctx, resourceGroupName)
return
}
// ListKeys retrieves a Redis cache's access keys. This operation requires write permission to the cache resource.
//
// resourceGroupName is the name of the resource group. name is the name of the Redis cache.
func (client Client) ListKeys(ctx context.Context, resourceGroupName string, name string) (result AccessKeys, err error) {
req, err := client.ListKeysPreparer(ctx, resourceGroupName, name)
if err != nil {
err = autorest.NewErrorWithError(err, "redis.Client", "ListKeys", nil, "Failure preparing request")
return
}
resp, err := client.ListKeysSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "redis.Client", "ListKeys", resp, "Failure sending request")
return
}
result, err = client.ListKeysResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "redis.Client", "ListKeys", resp, "Failure responding to request")
}
return
}
// ListKeysPreparer prepares the ListKeys request.
func (client Client) ListKeysPreparer(ctx context.Context, resourceGroupName string, name string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"name": autorest.Encode("path", name),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2017-10-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsPost(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cache/Redis/{name}/listKeys", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ListKeysSender sends the ListKeys request. The method will close the
// http.Response Body if it receives an error.
func (client Client) ListKeysSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
}
// ListKeysResponder handles the response to the ListKeys request. The method always
// closes the http.Response Body.
func (client Client) ListKeysResponder(resp *http.Response) (result AccessKeys, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// ListUpgradeNotifications gets any upgrade notifications for a Redis cache.
//
// resourceGroupName is the name of the resource group. name is the name of the Redis cache. history is how many
// minutes in the past to look for upgrade notifications.
func (client Client) ListUpgradeNotifications(ctx context.Context, resourceGroupName string, name string, history float64) (result NotificationListResponse, err error) {
req, err := client.ListUpgradeNotificationsPreparer(ctx, resourceGroupName, name, history)
if err != nil {
err = autorest.NewErrorWithError(err, "redis.Client", "ListUpgradeNotifications", nil, "Failure preparing request")
return
}
resp, err := client.ListUpgradeNotificationsSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "redis.Client", "ListUpgradeNotifications", resp, "Failure sending request")
return
}
result, err = client.ListUpgradeNotificationsResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "redis.Client", "ListUpgradeNotifications", resp, "Failure responding to request")
}
return
}
// ListUpgradeNotificationsPreparer prepares the ListUpgradeNotifications request.
func (client Client) ListUpgradeNotificationsPreparer(ctx context.Context, resourceGroupName string, name string, history float64) (*http.Request, error) {
pathParameters := map[string]interface{}{
"name": autorest.Encode("path", name),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2017-10-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
"history": autorest.Encode("query", history),
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cache/Redis/{name}/listUpgradeNotifications", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ListUpgradeNotificationsSender sends the ListUpgradeNotifications request. The method will close the
// http.Response Body if it receives an error.
func (client Client) ListUpgradeNotificationsSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
}
// ListUpgradeNotificationsResponder handles the response to the ListUpgradeNotifications request. The method always
// closes the http.Response Body.
func (client Client) ListUpgradeNotificationsResponder(resp *http.Response) (result NotificationListResponse, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// RegenerateKey regenerates a Redis cache's access keys. This operation requires write permission to the cache resource.
//
// resourceGroupName is the name of the resource group. name is the name of the Redis cache. parameters is
// specifies which key to regenerate.
func (client Client) RegenerateKey(ctx context.Context, resourceGroupName string, name string, parameters RegenerateKeyParameters) (result AccessKeys, err error) {
req, err := client.RegenerateKeyPreparer(ctx, resourceGroupName, name, parameters)
if err != nil {
err = autorest.NewErrorWithError(err, "redis.Client", "RegenerateKey", nil, "Failure preparing request")
return
}
resp, err := client.RegenerateKeySender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "redis.Client", "RegenerateKey", resp, "Failure sending request")
return
}
result, err = client.RegenerateKeyResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "redis.Client", "RegenerateKey", resp, "Failure responding to request")
}
return
}
// RegenerateKeyPreparer prepares the RegenerateKey request.
func (client Client) RegenerateKeyPreparer(ctx context.Context, resourceGroupName string, name string, parameters RegenerateKeyParameters) (*http.Request, error) {
pathParameters := map[string]interface{}{
"name": autorest.Encode("path", name),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2017-10-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsContentType("application/json; charset=utf-8"),
autorest.AsPost(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cache/Redis/{name}/regenerateKey", pathParameters),
autorest.WithJSON(parameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// RegenerateKeySender sends the RegenerateKey request. The method will close the
// http.Response Body if it receives an error.
func (client Client) RegenerateKeySender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
}
// RegenerateKeyResponder handles the response to the RegenerateKey request. The method always
// closes the http.Response Body.
func (client Client) RegenerateKeyResponder(resp *http.Response) (result AccessKeys, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// Update updates an existing Redis cache.
//
// resourceGroupName is the name of the resource group. name is the name of the Redis cache. parameters is
// parameters supplied to the Update Redis operation.
func (client Client) Update(ctx context.Context, resourceGroupName string, name string, parameters UpdateParameters) (result ResourceType, err error) {
req, err := client.UpdatePreparer(ctx, resourceGroupName, name, parameters)
if err != nil {
err = autorest.NewErrorWithError(err, "redis.Client", "Update", nil, "Failure preparing request")
return
}
resp, err := client.UpdateSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "redis.Client", "Update", resp, "Failure sending request")
return
}
result, err = client.UpdateResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "redis.Client", "Update", resp, "Failure responding to request")
}
return
}
// UpdatePreparer prepares the Update request.
func (client Client) UpdatePreparer(ctx context.Context, resourceGroupName string, name string, parameters UpdateParameters) (*http.Request, error) {
pathParameters := map[string]interface{}{
"name": autorest.Encode("path", name),
"resourceGroupName": autorest.Encode("path", resourceGroupName),
"subscriptionId": autorest.Encode("path", client.SubscriptionID),
}
const APIVersion = "2017-10-01"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsContentType("application/json; charset=utf-8"),
autorest.AsPatch(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Cache/Redis/{name}", pathParameters),
autorest.WithJSON(parameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// UpdateSender sends the Update request. The method will close the
// http.Response Body if it receives an error.
func (client Client) UpdateSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
azure.DoRetryWithRegistration(client.Client))
}
// UpdateResponder handles the response to the Update request. The method always
// closes the http.Response Body.
func (client Client) UpdateResponder(resp *http.Response) (result ResourceType, err error) {
err = autorest.Respond(
resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}<|fim▁end|> |
resp, err := client.ListSender(req)
if err != nil { |
<|file_name|>types.rs<|end_file_name|><|fim▁begin|>use services::anoncreds::types::{PublicKey, RevocationPublicKey};
use utils::json::{JsonEncodable, JsonDecodable};
use services::ledger::constants::{
NODE,
NYM,
ATTRIB,
SCHEMA,
GET_ATTR,
GET_DDO,
GET_NYM,
GET_SCHEMA,
CLAIM_DEF,
GET_CLAIM_DEF,
STEWARD,
TRUSTEE,
GET_TXN
};
#[derive(Serialize, PartialEq, Debug)]
#[serde(rename_all = "camelCase")]
pub struct Request<T: JsonEncodable> {
pub req_id: u64,
pub identifier: String,
pub operation: T,
#[serde(skip_serializing_if = "Option::is_none")]
pub signature: Option<String>
}
impl<T: JsonEncodable> Request<T> {
pub fn new(req_id: u64, identifier: String, operation: T) -> Request<T> {
Request {
req_id: req_id,
identifier: identifier,
operation: operation,
signature: None
}
}
}
impl<T: JsonEncodable> JsonEncodable for Request<T> {}
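// Minimal request-building sketch (assumes JsonEncodable provides a
// `to_json()` helper, as elsewhere in this crate; the values are hypothetical):
//
//     let op = GetNymOperation::new("VsKV7grR1BUE29mG2Fm2kX".to_string());
//     let req = Request::new(1491566332010860, "identifier".to_string(), op);
//     let msg = req.to_json().unwrap();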
#[derive(Deserialize, Serialize, PartialEq, Debug)]
pub enum Role {
STEWARD = STEWARD,
TRUSTEE = TRUSTEE
}
#[derive(Serialize, PartialEq, Debug)]
pub struct NymOperation {
#[serde(rename = "type")]
pub _type: String,
pub dest: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub verkey: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub alias: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub role: Option<String>
}
impl NymOperation {
pub fn new(dest: String, verkey: Option<String>,
alias: Option<String>, role: Option<String>) -> NymOperation {
NymOperation {
_type: NYM.to_string(),
dest: dest,
verkey: verkey,
alias: alias,
role: role
}
}
}
impl JsonEncodable for NymOperation {}
#[derive(Serialize, PartialEq, Debug)]
pub struct GetNymOperation {
#[serde(rename = "type")]
pub _type: String,
pub dest: String
}
impl GetNymOperation {
pub fn new(dest: String) -> GetNymOperation {
GetNymOperation {
_type: GET_NYM.to_string(),
dest: dest
}
}
}
impl JsonEncodable for GetNymOperation {}
#[derive(Serialize, PartialEq, Debug)]
pub struct AttribOperation {
#[serde(rename = "type")]
pub _type: String,
pub dest: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub hash: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub raw: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub enc: Option<String>
}
impl AttribOperation {
pub fn new(dest: String, hash: Option<String>, raw: Option<String>,
enc: Option<String>) -> AttribOperation {
AttribOperation {
_type: ATTRIB.to_string(),
dest: dest,
hash: hash,
raw: raw,
enc: enc,
}
}
}
impl JsonEncodable for AttribOperation {}
#[derive(Serialize, PartialEq, Debug)]
pub struct GetAttribOperation {
#[serde(rename = "type")]
pub _type: String,
pub dest: String,
pub raw: String
}
impl GetAttribOperation {
pub fn new(dest: String, raw: String) -> GetAttribOperation {
GetAttribOperation {
_type: GET_ATTR.to_string(),
dest: dest,
raw: raw
}
}
}
impl JsonEncodable for GetAttribOperation {}
#[derive(Serialize, PartialEq, Debug)]
pub struct SchemaOperation {
#[serde(rename = "type")]<|fim▁hole|>impl SchemaOperation {
pub fn new(data: String) -> SchemaOperation {
SchemaOperation {
data: data,
_type: SCHEMA.to_string()
}
}
}
impl JsonEncodable for SchemaOperation {}
#[derive(Serialize, PartialEq, Debug, Deserialize)]
pub struct SchemaOperationData {
name: String,
version: String,
keys: Vec<String>
}
impl SchemaOperationData {
pub fn new(name: String, version: String, keys: Vec<String>) -> SchemaOperationData {
SchemaOperationData {
name: name,
version: version,
keys: keys
}
}
}
impl JsonEncodable for SchemaOperationData {}
impl<'a> JsonDecodable<'a> for SchemaOperationData {}
#[derive(Serialize, PartialEq, Debug)]
pub struct GetSchemaOperation {
#[serde(rename = "type")]
pub _type: String,
pub dest: String,
pub data: GetSchemaOperationData
}
impl GetSchemaOperation {
pub fn new(dest: String, data: GetSchemaOperationData) -> GetSchemaOperation {
GetSchemaOperation {
_type: GET_SCHEMA.to_string(),
dest: dest,
data: data
}
}
}
impl JsonEncodable for GetSchemaOperation {}
#[derive(Deserialize, PartialEq, Debug)]
#[serde(rename_all = "camelCase")]
pub struct GetSchemaResultData {
pub attr_names: Vec<String>,
pub name: String,
pub origin: String,
pub seq_no: String,
#[serde(rename = "type")]
pub _type: Option<String>,
pub version: String
}
#[derive(Serialize, PartialEq, Debug, Deserialize)]
pub struct GetSchemaOperationData {
pub name: String,
pub version: String
}
impl GetSchemaOperationData {
pub fn new(name: String, version: String) -> GetSchemaOperationData {
GetSchemaOperationData {
name: name,
version: version
}
}
}
impl JsonEncodable for GetSchemaOperationData {}
impl<'a> JsonDecodable<'a> for GetSchemaOperationData {}
#[derive(Serialize, PartialEq, Debug)]
pub struct ClaimDefOperation {
#[serde(rename = "ref")]
pub _ref: i32,
pub data: String,
#[serde(rename = "type")]
pub _type: String,
pub signature_type: String
}
impl ClaimDefOperation {
pub fn new(_ref: i32, signature_type: String, data: String) -> ClaimDefOperation {
ClaimDefOperation {
_ref: _ref,
signature_type: signature_type,
data: data,
_type: CLAIM_DEF.to_string()
}
}
}
impl JsonEncodable for ClaimDefOperation {}
#[derive(Serialize, PartialEq, Debug, Deserialize)]
pub struct ClaimDefOperationData {
pub primary: PublicKey,
pub revocation: Option<RevocationPublicKey>
}
impl ClaimDefOperationData {
pub fn new(primary: PublicKey, revocation: Option<RevocationPublicKey>) -> ClaimDefOperationData {
ClaimDefOperationData {
primary: primary,
revocation: revocation
}
}
}
impl JsonEncodable for ClaimDefOperationData {}
impl<'a> JsonDecodable<'a> for ClaimDefOperationData {}
#[derive(Serialize, PartialEq, Debug)]
pub struct GetClaimDefOperation {
#[serde(rename = "type")]
pub _type: String,
#[serde(rename = "ref")]
pub _ref: i32,
pub signature_type: String,
pub origin: String
}
impl GetClaimDefOperation {
pub fn new(_ref: i32, signature_type: String, origin: String) -> GetClaimDefOperation {
GetClaimDefOperation {
_type: GET_CLAIM_DEF.to_string(),
_ref: _ref,
signature_type: signature_type,
origin: origin
}
}
}
impl JsonEncodable for GetClaimDefOperation {}
#[derive(Serialize, PartialEq, Debug)]
pub struct NodeOperation {
#[serde(rename = "type")]
pub _type: String,
pub dest: String,
pub data: NodeOperationData
}
impl NodeOperation {
pub fn new(dest: String, data: NodeOperationData) -> NodeOperation {
NodeOperation {
_type: NODE.to_string(),
dest: dest,
data: data
}
}
}
impl JsonEncodable for NodeOperation {}
#[derive(Serialize, PartialEq, Debug, Deserialize)]
pub enum Services {
VALIDATOR,
OBSERVER
}
#[derive(Serialize, PartialEq, Debug, Deserialize)]
pub struct NodeOperationData {
pub node_ip: String,
pub node_port: i32,
pub client_ip: String,
pub client_port: i32,
pub alias: String,
pub services: Vec<Services>
}
impl NodeOperationData {
pub fn new(node_ip: String, node_port: i32, client_ip: String, client_port: i32, alias: String, services: Vec<Services>) -> NodeOperationData {
NodeOperationData {
node_ip: node_ip,
node_port: node_port,
client_ip: client_ip,
client_port: client_port,
alias: alias,
services: services
}
}
}
impl JsonEncodable for NodeOperationData {}
impl<'a> JsonDecodable<'a> for NodeOperationData {}
#[derive(Serialize, PartialEq, Debug)]
pub struct GetDdoOperation {
#[serde(rename = "type")]
pub _type: String,
pub dest: String
}
impl GetDdoOperation {
pub fn new(dest: String) -> GetDdoOperation {
GetDdoOperation {
_type: GET_DDO.to_string(),
dest: dest
}
}
}
impl JsonEncodable for GetDdoOperation {}
#[derive(Serialize, PartialEq, Debug)]
pub struct GetTxnOperation {
#[serde(rename = "type")]
pub _type: String,
pub data: i32
}
impl GetTxnOperation {
pub fn new(data: i32) -> GetTxnOperation {
GetTxnOperation {
_type: GET_TXN.to_string(),
data: data
}
}
}
impl JsonEncodable for GetTxnOperation {}
#[derive(Deserialize, Eq, PartialEq, Debug)]
pub struct Reply<T> {
pub op: String,
pub result: T,
}
impl<'a, T: JsonDecodable<'a>> JsonDecodable<'a> for Reply<T> {}
#[derive(Deserialize, Eq, PartialEq, Debug)]
#[serde(rename_all = "camelCase")]
pub struct GetNymReplyResult {
pub identifier: String,
pub req_id: u64,
#[serde(rename = "type")]
pub _type: String,
pub data: String,
pub dest: String
}
impl<'a> JsonDecodable<'a> for GetNymReplyResult {}
#[derive(Deserialize, Eq, PartialEq, Debug)]
#[serde(rename_all = "camelCase")]
pub struct GetNymResultData {
pub identifier: Option<String>,
pub dest: String,
pub role: Option<String>,
pub verkey: Option<String>
}
impl<'a> JsonDecodable<'a> for GetNymResultData {}<|fim▁end|> | pub _type: String,
pub data: String
}
|
<|file_name|>writer.py<|end_file_name|><|fim▁begin|>import cbor
import hashlib
import logging
import zlib
from .common import * # NOQA
LOG = logging.getLogger(__name__)
default_hashalgo = 'sha256'
state_bos = 0
state_metadata = 1
state_data = 2
state_eos = 3
class MuxError(Exception):
pass
class InvalidState(MuxError):
pass
class StreamWriter(object):
'''Writes one part to a mux stream.
    A mux stream is a series of cbor-encoded dictionaries. Each
    chunk has a 'blktype' attribute that identifies the chunk type. A
part has the following format:
+----------------------------------------+
| beginning-of-stream | blktype_bos |
+----------------------------------------+
| metadata (optional) | blktype_metadata |
+----------------------------------------+
| data0 | blktype_data |
. .
. .
| dataN | blktype_data |
+----------------------------------------+
| end-of-stream | blktype_eos |
+----------------------------------------+
Multiple parts may be concatenated to form a stream.
'''
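    # Usage sketch (hypothetical file handle and payload):
    #
    #     with open('out.mux', 'wb') as fh:
    #         w = StreamWriter(fh, name='part-0', writehash=True)
    #         w.add_metadata('source', 'example')
    #         w.write(b'payload bytes')
    #         w.finish()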
def __init__(self, fh,
name=None, hashalgo=None, writehash=False,
compress=False):
self.fh = fh
self.name = name
self.hashalgo = hashalgo if hashalgo else default_hashalgo
self.writehash = writehash
self.compress = compress
self.state = state_bos
self.metadata = {}
self.byteswritten = 0
if self.writehash:
self.ctx = self._get_hash_context()
def _write_header(self):
'''Writes out a header block. The header block contains
information about the stream:
- version: the mux format version
- name (optional): name of this stream
- hashalgo (optional): hash algorithm used for checksums
- compress (optional): true if data is compressed
'''
if self.state != state_bos:
raise InvalidState()
header = {'version': mux_version}
if self.name:
header['name'] = self.name
if self.writehash:
header['hashalgo'] = self.hashalgo
if self.compress:
header['compress'] = True
self._write_block(blktype_bos, **header)
self.state = state_metadata
def _write_metadata(self):
'''Writes out a metadata block. A metadata block can
        contain arbitrary key/value pairs in the 'metadata' key.'''
if self.state != state_metadata:
raise InvalidState()
if self.metadata:<|fim▁hole|> self.state = state_data
def _write_block(self, blktype, **kwargs):
LOG.debug('writing block: type=%s, content=%s',
blktype, repr(kwargs))
cbor.dump(dict(blktype=blktype, **kwargs), self.fh)
def _get_hash_context(self):
return getattr(hashlib, self.hashalgo)()
def add_metadata(self, k, v):
self.metadata[k] = v
def write(self, data):
'''Write a data block to the mux stream.'''
# Write out the header if we haven't already.
if self.state == state_bos:
self._write_header()
# Write out the metadata if we haven't already.
if self.state == state_metadata:
self._write_metadata()
# Blow up if something is wrong.
if self.state != state_data:
raise InvalidState()
if self.compress:
data = zlib.compress(data)
if self.writehash:
self.ctx.update(data)
self.byteswritten += len(data)
self._write_block(blktype_data, data=data)
def write_iter(self, data):
'''Write data blocks to the mux stream from an iterator.'''
for chunk in data:
self.write(chunk)
def finish(self):
'''Close the stream by writing an end-of-stream block.'''
if self.state == state_bos:
self._write_header()
if self.state == state_metadata:
self._write_metadata()
if self.state != state_data:
raise InvalidState()
hashargs = {}
if self.writehash:
hashargs['digest'] = self.ctx.digest()
self._write_block(blktype_eos,
size=self.byteswritten,
**hashargs)
self.state = state_eos<|fim▁end|> | self._write_block(blktype_metadata,
metadata=self.metadata)
|
<|file_name|>src.js<|end_file_name|><|fim▁begin|>import {
connecting,
connected,
reconnecting,
connectionFailed
} from './actions';
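// Usage sketch (store wiring assumed; the factory name is hypothetical):
//   const socketMiddleware = createSocketMiddleware('ws://localhost:8080');
//   const store = createStore(reducer, applyMiddleware(socketMiddleware));
//   // Actions flagged with meta.remote are mirrored to the server:
//   store.dispatch({type: 'PING', meta: {remote: true}});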
export default uri => store => {
let ws;
let hasEverConnected = false;
const RECONNECT_TIMEOUT_MS = 2000;
const ACTION = 'ACTION';
const connect = () => {
ws = new WebSocket(uri);
    // NOTE: could maybe set a flag 'hasBeenOpenBefore' to help with error states/dispatches and such
ws.onopen = () => {
hasEverConnected = true;
store.dispatch(connected());
};
ws.onclose = function() {
if (hasEverConnected) {
store.dispatch(reconnecting());
setTimeout(connect, RECONNECT_TIMEOUT_MS);
} else {
//TODO: THIS TAKES A LOOOOONG TIME ON CHROME... MAYBE BUILD SOME EXTRA DISPATCHES?
store.dispatch(connectionFailed());
}
};
ws.onmessage = ({data}) => {
const serverAction = JSON.parse(data);
if (serverAction.type == ACTION) {
const localAction = serverAction.payload;
store.dispatch(localAction);
}
};
};
store.dispatch(connecting());
connect();
return next => action => {
if(WebSocket.OPEN === ws.readyState && action.meta && action.meta.remote) {
const serverAction = JSON.stringify({
type: ACTION,
payload: action
});<|fim▁hole|>};<|fim▁end|> | ws.send(serverAction);
}
return next(action);
}; |
<|file_name|>run.py<|end_file_name|><|fim▁begin|>import os
import sys
import drivecasa
import logging
import shlex
import shutil
import subprocess
import yaml
import glob
casa = drivecasa.Casapy(log2term=True, echo_to_stdout=True, timeout=24*3600*10)
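# This runner reads a cab definition from CONFIG, assembles a single CASA task
# call from its parameters, executes it through drivecasa, and finally removes
# any declared junk files from the writable OUTPUT/MSDIR volumes.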
CONFIG = os.environ["CONFIG"]<|fim▁hole|>OUTPUT = os.environ["OUTPUT"]
MSDIR = os.environ["MSDIR"]
with open(CONFIG, "r") as _std:
cab = yaml.safe_load(_std)
junk = cab["junk"]
args = {}
for param in cab['parameters']:
name = param['name']
value = param['value']
if value is None:
continue
args[name] = value
script = ['{0}(**{1})'.format(cab['binary'], args)]
def log2term(result):
if result[1]:
err = '\n'.join(result[1] if result[1] else [''])
failed = err.lower().find('an error occurred running task') >= 0
if failed:
raise RuntimeError('CASA Task failed. See error message above')
sys.stdout.write('WARNING:: SEVERE messages from CASA run')
try:
result = casa.run_script(script, raise_on_severe=False)
log2term(result)
finally:
for item in junk:
for dest in [OUTPUT, MSDIR]: # these are the only writable volumes in the container
items = glob.glob("{dest}/{item}".format(**locals()))
for f in items:
if os.path.isfile(f):
os.remove(f)
elif os.path.isdir(f):
shutil.rmtree(f)<|fim▁end|> | INPUT = os.environ["INPUT"] |
<|file_name|>config.js<|end_file_name|><|fim▁begin|>/**
* Update: 15-5-11
* Editor: qihongye
*/
var fs = require('fs');
var path = require('path');
var fis = require('../lib/fis.js');
var _ = fis.file;
var defaultSettings = (require('../lib/config.js')).DEFALUT_SETTINGS;
var expect = require('chai').expect;
var u = fis.util;
var config = null;
describe('config: config',function(){
beforeEach(function(){
fis.project.setProjectRoot(__dirname);
fis.config.init(defaultSettings);
process.env.NODE_ENV = 'dev';
});
it('set / get', function () {
fis.set('namespace', 'common');
expect(fis.get('namespace')).to.equal('common');
fis.set('obj', {a:'a'});
fis.set('obj.b', 'b');
expect(fis.get('obj')).to.deep.equal({a:'a', b:'b'});
expect(fis.get('obj.c', {c: 'c'})).to.deep.equal({c:'c'});
expect(fis.get('obj.a')).to.equal('a');
expect(fis.get('obj.b')).to.equal('b');
});
it('media', function () {
fis.set('a', 'a');
fis.set('b', 'b');
fis.media('prod').set('a', 'aa');
expect(fis.get('a')).to.equal('a');
expect(fis.media('prod').get('a')).to.equal('aa');
expect(fis.media('prod').get('b')).to.equal('b');
expect(fis.media('prod').get('project.charset')).to.equal('utf8');
});
it('fis.match',function(){
fis.match('**', {
release: 'static/$&'
}); // fis.config.match
fis.match('**/js.js', {
domain: 'www.baidu.com',
useHash: false
}, 1);
path = __dirname+'/file/ext/modular/js.js?__inline';
var f = _.wrap(path);
var url = f.getUrl();
expect(url).to.equal('www.baidu.com/static/file/ext/modular/js.js?__inline');
//without domain
// useDomain 已经去除,所以应该不收其影响了
fis.match('**/js.js', {
useDomain: false
}, 2);
path = __dirname+'/file/ext/modular/js.js?__inline';
var f = _.wrap(path);
var url = f.getUrl();
expect(url).to.equal('www.baidu.com/static/file/ext/modular/js.js?__inline');
fis.match('**/js.js', {
release: null
}, 3);
//without path
path = __dirname+'/file/ext/modular/js.js?__inline';
var f = _.wrap(path);
var url = f.getUrl();
expect(url).to.equal('www.baidu.com/file/ext/modular/js.js?__inline');
// with ()
fis.match('**/v1.0-(*)/(*).html', {
release: '/$1/$2'
});
path = __dirname+'/file/ext/v1.0-layout/test.html?__inline';
var f = _.wrap(path);
var url = f.getUrl();
expect(url).to.equal('/layout/test.html?__inline');
fis.match('!**/js.js', {
release: '/static/$&',
useHash: true,
domain: 'www.baidu.com'
});
//with !
path = __dirname+'/file/ext/modular/js.js?__inline';
var f = _.wrap(path);
var url = f.getUrl();
expect(url).to.equal('www.baidu.com/file/ext/modular/js.js?__inline');
    // with ! but no match
path = __dirname+'/file/ext/modular/js.less?__inline';
var f = _.wrap(path);
var url = f.getUrl();
expect(url).to.equal('www.baidu.com/static/file/ext/modular/js_'+ f.getHash() +'.less?__inline');
});
it('match ${}', function() {
fis.match('**/*.js', {
release: null,
useHash: false
})
fis.set('coffee', 'js');
fis.match('**/js.js', {
release: '/static/$&'
});
path = __dirname+'/file/ext/modular/js.js?__inline';
var f = _.wrap(path);
var url = f.getUrl();
expect(url).to.equal('/static/file/ext/modular/js.js?__inline');
path = __dirname+'/file/ext/modular/j.js?__inline';
var f = _.wrap(path);
var url = f.getUrl();
expect(url).to.equal('/file/ext/modular/j.js?__inline');
});
  it('match mixed usage', function() {
fis.set('ROOT', 'js');
fis.match('**', {
useHash: false
});
fis.match('(**/${ROOT}.js)', {
release: '/static/js/$1'
});
fis.match('(**/${ROOT}.less)', {
release: '/static/js/$1'
});
path = __dirname+'/file/ext/modular/js.js?__inline';
var f = _.wrap(path);
var url = f.getUrl();
expect(url).to.equal('/static/js/file/ext/modular/js.js?__inline');
path = __dirname+'/file/ext/modular/js.less?__inline';
var f = _.wrap(path);
var url = f.getUrl();
expect(url).to.equal('/static/js/file/ext/modular/js.less?__inline');
});
it('del', function(){
fis.config.del();
var origin = fis.config.get();
fis.set('a.b', 'b');
fis.media('pro').set('a.b', 'b');
fis.config.del('a.b');
expect(fis.get('a')).to.deep.equal({});
expect(fis.media('pro').get('a.b')).to.equal('b');
fis.config.del('a');
expect(fis.get()).to.deep.equal(origin);
fis.media('pro').del('a');
expect(fis.media('pro').get()).to.deep.equal({});
});
it('getSortedMatches', function() {
fis.media('prod').match('a', {
name: ''
});
var matches = fis.media('prod')._matches.concat();
var initIndex = matches[matches.length - 1].index;
fis.match('b', {
name: ''
}, 1)
fis.match('c', {
name: ''
}, 2)
fis.media('prod').match('b', {
name: 'prod'
}, 1)
fis.media('prod').match('c', {
name: 'prod'
}, 2);
var result_gl = [
{
raw: 'b',
reg: u.glob('b'),
negate: false,
properties: {name: ''},
media: 'GLOBAL',
weight: 1,
index: initIndex + 1
},
{
raw: 'c',
reg: u.glob('c'),
negate: false,
properties: {name: ''},
media: 'GLOBAL',
weight: 2,
index: initIndex + 2
}
], result_prod = [
{
raw: 'a',
reg: u.glob('a'),
negate: false,
properties: {name: ''},
media: 'prod',
weight: 0,
index: initIndex + 0
},
{
raw: 'b',
reg: u.glob('b'),
negate: false,
properties: {name: ''},
media: 'GLOBAL',
weight: 1,
index: initIndex + 1
},
{
raw: 'b',
reg: u.glob('b'),
negate: false,
properties: {name: 'prod'},
media: 'prod',
weight: 1,
index: initIndex + 3
},
{
raw: 'c',
reg: u.glob('c'),
negate: false,
properties: {name: ''},
media: 'GLOBAL',
weight: 2,
index: initIndex + 2
},
{
raw: 'c',
reg: u.glob('c'),
negate: false,<|fim▁hole|> media: 'prod',
weight: 2,
index: initIndex + 4
},
];
var xp = fis.config.getSortedMatches();
expect(xp).to.deep.equal(result_gl);
var xp2 = fis.media('prod').getSortedMatches();
expect(xp2).to.deep.equal(result_prod);
});
it("hook",function(){
fis.config.hook("module");
expect(fis.env().parent.data.modules.hook[1]['__plugin']).to.equal('module');
});
it("unhook",function(){
fis.config.unhook("module");
expect(fis.env().parent.data.modules.hook.length).to.equal(1);
});
});<|fim▁end|> | properties: {name: 'prod'}, |
<|file_name|>manage.py<|end_file_name|><|fim▁begin|><|fim▁hole|>#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "drf_ember.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)<|fim▁end|> | |
<|file_name|>0002_auto_20160912_2314.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-09-13 04:14
from __future__ import unicode_literals
from django.db import migrations, models
<|fim▁hole|> dependencies = [
('rii_Api', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='location',
name='state',
field=models.CharField(max_length=2),
),
migrations.AlterField(
model_name='location',
name='venueName',
field=models.CharField(max_length=50),
),
migrations.AlterField(
model_name='year',
name='year',
field=models.IntegerField(choices=[(1919, 1919), (1920, 1920), (1921, 1921), (1922, 1922), (1923, 1923), (1924, 1924), (1925, 1925)], default=1919),
),
migrations.AlterField(
model_name='year',
name='yearSummary',
field=models.TextField(default='', max_length=2000),
),
]<|fim▁end|> |
class Migration(migrations.Migration):
|
<|file_name|>utils.rs<|end_file_name|><|fim▁begin|>use iso8601::{Date, DateTime, Time};
use xml::escape::escape_str_pcdata;
use std::borrow::Cow;
use std::fmt::Write;
/// Escape a string for use as XML characters.
///
/// The resulting string is *not* suitable for use in XML attributes, but XML-RPC doesn't use those.
pub fn escape_xml(s: &str) -> Cow<'_, str> {
escape_str_pcdata(s)
}
/// Formats a `DateTime` for use in XML-RPC.
///
/// Note that XML-RPC is extremely underspecified when it comes to datetime values. Apparently,
/// some clients [don't even support timezone information][wp-bug] (we do). For maximum
/// interoperability, this will omit fractional time and time zone if not specified.
///
/// [wp-bug]: https://core.trac.wordpress.org/ticket/1633#comment:4
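///
/// A sketch mirroring the tests below:
///
/// ```ignore
/// let dt = iso8601::datetime("2016-05-02T06:01:05-0830").unwrap();
/// assert_eq!(format_datetime(&dt), "20160502T06:01:05-08:30");
/// ```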
pub fn format_datetime(date_time: &DateTime) -> String {
let Time {
hour,
minute,
second,
millisecond,
tz_offset_hours,
tz_offset_minutes,
} = date_time.time;
match date_time.date {
Date::YMD { year, month, day } => {
// The base format is based directly on the example in the spec and should always work:
let mut string = format!(<|fim▁hole|> );
            // Only append milliseconds when they're > 0. Zero-pad to three
            // digits: format precision is ignored for integers, so "{:.3}"
            // would render 45 ms as ".45" instead of ".045".
            if millisecond > 0 {
                write!(string, ".{:03}", millisecond).unwrap();
}
// Only append time zone info if the offset is specified and not 00:00
if tz_offset_hours != 0 || tz_offset_minutes != 0 {
write!(
string,
"{:+03}:{:02}",
tz_offset_hours,
tz_offset_minutes.abs()
)
.unwrap();
}
string
}
        // Other date formats are just not supported at all:
Date::Week { .. } | Date::Ordinal { .. } => unimplemented!(),
}
}
#[cfg(test)]
mod tests {
use super::*;
use iso8601;
#[test]
fn formats_datetimes() {
let date_time = iso8601::datetime("2016-05-02T06:01:05-0830").unwrap();
let formatted = format_datetime(&date_time);
assert_eq!(formatted, "20160502T06:01:05-08:30");
assert_eq!(iso8601::datetime(&formatted).unwrap(), date_time);
// milliseconds / fraction
let date_time = iso8601::datetime("20160502T06:01:05.400").unwrap();
let formatted = format_datetime(&date_time);
assert_eq!(formatted, "20160502T06:01:05.400");
assert_eq!(iso8601::datetime(&formatted).unwrap(), date_time);
// milliseconds / fraction + time zone
let date_time = iso8601::datetime("20160502T06:01:05.400+01:02").unwrap();
let formatted = format_datetime(&date_time);
assert_eq!(formatted, "20160502T06:01:05.400+01:02");
assert_eq!(iso8601::datetime(&formatted).unwrap(), date_time);
}
}<|fim▁end|> | "{:04}{:02}{:02}T{:02}:{:02}:{:02}",
year, month, day, hour, minute, second |
<|file_name|>common.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.protobuf import duration_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
__protobuf__ = proto.module(
package="google.cloud.gaming.v1",
manifest={
"OperationMetadata",
"OperationStatus",
"LabelSelector",
"RealmSelector",
"Schedule",
"SpecSource",
"TargetDetails",
"TargetState",
"DeployedFleetDetails",
},
)
class OperationMetadata(proto.Message):
r"""Represents the metadata of the long-running operation.
Attributes:
create_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. The time the operation was
created.
end_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. The time the operation finished<|fim▁hole|> Output only. Server-defined resource path for
the target of the operation.
verb (str):
Output only. Name of the verb executed by the
operation.
status_message (str):
Output only. Human-readable status of the
operation, if any.
requested_cancellation (bool):
Output only. Identifies whether the user has requested
cancellation of the operation. Operations that have
successfully been cancelled have [Operation.error][] value
with a [google.rpc.Status.code][google.rpc.Status.code] of
1, corresponding to ``Code.CANCELLED``.
api_version (str):
Output only. API version used to start the
operation.
unreachable (Sequence[str]):
Output only. List of Locations that could not
be reached.
operation_status (Sequence[google.cloud.gaming_v1.types.OperationMetadata.OperationStatusEntry]):
Output only. Operation status for Game
Services API operations. Operation status is in
the form of key-value pairs where keys are
resource IDs and the values show the status of
the operation. In case of failures, the value
includes an error code and error message.
"""
create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,)
end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,)
target = proto.Field(proto.STRING, number=3,)
verb = proto.Field(proto.STRING, number=4,)
status_message = proto.Field(proto.STRING, number=5,)
requested_cancellation = proto.Field(proto.BOOL, number=6,)
api_version = proto.Field(proto.STRING, number=7,)
unreachable = proto.RepeatedField(proto.STRING, number=8,)
operation_status = proto.MapField(
proto.STRING, proto.MESSAGE, number=9, message="OperationStatus",
)
class OperationStatus(proto.Message):
r"""
Attributes:
done (bool):
Output only. Whether the operation is done or
still in progress.
error_code (google.cloud.gaming_v1.types.OperationStatus.ErrorCode):
The error code in case of failures.
error_message (str):
The human-readable error message.
"""
class ErrorCode(proto.Enum):
r""""""
ERROR_CODE_UNSPECIFIED = 0
INTERNAL_ERROR = 1
PERMISSION_DENIED = 2
CLUSTER_CONNECTION = 3
done = proto.Field(proto.BOOL, number=1,)
error_code = proto.Field(proto.ENUM, number=2, enum=ErrorCode,)
error_message = proto.Field(proto.STRING, number=3,)
class LabelSelector(proto.Message):
r"""The label selector, used to group labels on the resources.
Attributes:
labels (Sequence[google.cloud.gaming_v1.types.LabelSelector.LabelsEntry]):
Resource labels for this selector.
"""
labels = proto.MapField(proto.STRING, proto.STRING, number=1,)
class RealmSelector(proto.Message):
r"""The realm selector, used to match realm resources.
Attributes:
realms (Sequence[str]):
List of realms to match.
"""
realms = proto.RepeatedField(proto.STRING, number=1,)
class Schedule(proto.Message):
r"""The schedule of a recurring or one time event. The event's time span
is specified by start_time and end_time. If the scheduled event's
timespan is larger than the cron_spec + cron_job_duration, the event
will be recurring. If only cron_spec + cron_job_duration are
specified, the event is effective starting at the local time
specified by cron_spec, and is recurring.
::
start_time|-------[cron job]-------[cron job]-------[cron job]---|end_time
cron job: cron spec start time + duration
Attributes:
start_time (google.protobuf.timestamp_pb2.Timestamp):
The start time of the event.
end_time (google.protobuf.timestamp_pb2.Timestamp):
The end time of the event.
cron_job_duration (google.protobuf.duration_pb2.Duration):
The duration for the cron job event. The
duration of the event is effective after the
cron job's start time.
cron_spec (str):
The cron definition of the scheduled event.
See https://en.wikipedia.org/wiki/Cron. Cron
spec specifies the local time as defined by the
realm.
"""
start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,)
end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp_pb2.Timestamp,)
cron_job_duration = proto.Field(
proto.MESSAGE, number=3, message=duration_pb2.Duration,
)
cron_spec = proto.Field(proto.STRING, number=4,)
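    # Illustrative construction of a recurring event (a sketch, not part of the
    # generated module; the cron string and duration are placeholder values):
    #
    #   schedule = Schedule(
    #       cron_spec="0 2 * * *",  # 02:00 daily, in the realm's local time
    #       cron_job_duration=duration_pb2.Duration(seconds=2 * 3600),
    #   )
    #
    # Without start_time/end_time bounds the event recurs indefinitely, each
    # occurrence lasting cron_job_duration after its cron trigger.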
class SpecSource(proto.Message):
r"""Encapsulates Agones fleet spec and Agones autoscaler spec
sources.
Attributes:
game_server_config_name (str):
The game server config resource. Uses the form:
``projects/{project}/locations/{location}/gameServerDeployments/{deployment_id}/configs/{config_id}``.
name (str):
            The name of the Agones fleet config or Agones
scaling config used to derive the Agones fleet
or Agones autoscaler spec.
"""
game_server_config_name = proto.Field(proto.STRING, number=1,)
name = proto.Field(proto.STRING, number=2,)
class TargetDetails(proto.Message):
r"""Details about the Agones resources.
Attributes:
game_server_cluster_name (str):
The game server cluster name. Uses the form:
``projects/{project}/locations/{location}/realms/{realm}/gameServerClusters/{cluster}``.
game_server_deployment_name (str):
The game server deployment name. Uses the form:
``projects/{project}/locations/{location}/gameServerDeployments/{deployment_id}``.
fleet_details (Sequence[google.cloud.gaming_v1.types.TargetDetails.TargetFleetDetails]):
Agones fleet details for game server clusters
and game server deployments.
"""
class TargetFleetDetails(proto.Message):
r"""Details of the target Agones fleet.
Attributes:
fleet (google.cloud.gaming_v1.types.TargetDetails.TargetFleetDetails.TargetFleet):
Reference to target Agones fleet.
autoscaler (google.cloud.gaming_v1.types.TargetDetails.TargetFleetDetails.TargetFleetAutoscaler):
Reference to target Agones fleet autoscaling
policy.
"""
class TargetFleet(proto.Message):
r"""Target Agones fleet specification.
Attributes:
name (str):
The name of the Agones fleet.
spec_source (google.cloud.gaming_v1.types.SpecSource):
            Encapsulates the source of the Agones fleet spec.
"""
name = proto.Field(proto.STRING, number=1,)
spec_source = proto.Field(proto.MESSAGE, number=2, message="SpecSource",)
class TargetFleetAutoscaler(proto.Message):
r"""Target Agones autoscaler policy reference.
Attributes:
name (str):
The name of the Agones autoscaler.
spec_source (google.cloud.gaming_v1.types.SpecSource):
            Encapsulates the source of the Agones autoscaler spec.
"""
name = proto.Field(proto.STRING, number=1,)
spec_source = proto.Field(proto.MESSAGE, number=2, message="SpecSource",)
fleet = proto.Field(
proto.MESSAGE,
number=1,
message="TargetDetails.TargetFleetDetails.TargetFleet",
)
autoscaler = proto.Field(
proto.MESSAGE,
number=2,
message="TargetDetails.TargetFleetDetails.TargetFleetAutoscaler",
)
game_server_cluster_name = proto.Field(proto.STRING, number=1,)
game_server_deployment_name = proto.Field(proto.STRING, number=2,)
fleet_details = proto.RepeatedField(
proto.MESSAGE, number=3, message=TargetFleetDetails,
)
class TargetState(proto.Message):
r"""Encapsulates the Target state.
Attributes:
details (Sequence[google.cloud.gaming_v1.types.TargetDetails]):
Details about Agones fleets.
"""
details = proto.RepeatedField(proto.MESSAGE, number=1, message="TargetDetails",)
class DeployedFleetDetails(proto.Message):
r"""Details of the deployed Agones fleet.
Attributes:
deployed_fleet (google.cloud.gaming_v1.types.DeployedFleetDetails.DeployedFleet):
Information about the Agones fleet.
deployed_autoscaler (google.cloud.gaming_v1.types.DeployedFleetDetails.DeployedFleetAutoscaler):
Information about the Agones autoscaler for
that fleet.
"""
class DeployedFleet(proto.Message):
r"""Agones fleet specification and details.
Attributes:
fleet (str):
The name of the Agones fleet.
fleet_spec (str):
The fleet spec retrieved from the Agones
fleet.
spec_source (google.cloud.gaming_v1.types.SpecSource):
The source spec that is used to create the
Agones fleet. The GameServerConfig resource may
no longer exist in the system.
status (google.cloud.gaming_v1.types.DeployedFleetDetails.DeployedFleet.DeployedFleetStatus):
The current status of the Agones fleet.
Includes count of game servers in various
states.
"""
class DeployedFleetStatus(proto.Message):
r"""DeployedFleetStatus has details about the Agones fleets such
as how many are running, how many allocated, and so on.
Attributes:
ready_replicas (int):
The number of GameServer replicas in the
READY state in this fleet.
allocated_replicas (int):
The number of GameServer replicas in the
ALLOCATED state in this fleet.
reserved_replicas (int):
The number of GameServer replicas in the
RESERVED state in this fleet. Reserved instances
won't be deleted on scale down, but won't cause
an autoscaler to scale up.
replicas (int):
The total number of current GameServer
replicas in this fleet.
"""
ready_replicas = proto.Field(proto.INT64, number=1,)
allocated_replicas = proto.Field(proto.INT64, number=2,)
reserved_replicas = proto.Field(proto.INT64, number=3,)
replicas = proto.Field(proto.INT64, number=4,)
fleet = proto.Field(proto.STRING, number=1,)
fleet_spec = proto.Field(proto.STRING, number=2,)
spec_source = proto.Field(proto.MESSAGE, number=3, message="SpecSource",)
status = proto.Field(
proto.MESSAGE,
number=5,
message="DeployedFleetDetails.DeployedFleet.DeployedFleetStatus",
)
class DeployedFleetAutoscaler(proto.Message):
r"""Details about the Agones autoscaler.
Attributes:
autoscaler (str):
The name of the Agones autoscaler.
spec_source (google.cloud.gaming_v1.types.SpecSource):
The source spec that is used to create the
autoscaler. The GameServerConfig resource may no
longer exist in the system.
fleet_autoscaler_spec (str):
The autoscaler spec retrieved from Agones.
"""
autoscaler = proto.Field(proto.STRING, number=1,)
spec_source = proto.Field(proto.MESSAGE, number=4, message="SpecSource",)
fleet_autoscaler_spec = proto.Field(proto.STRING, number=3,)
deployed_fleet = proto.Field(proto.MESSAGE, number=1, message=DeployedFleet,)
deployed_autoscaler = proto.Field(
proto.MESSAGE, number=2, message=DeployedFleetAutoscaler,
)
__all__ = tuple(sorted(__protobuf__.manifest))<|fim▁end|> | running.
target (str): |
<|file_name|>classes.py<|end_file_name|><|fim▁begin|>"""Defines the SMEFT class that provides the main API to smeftrunner."""<|fim▁hole|>from . import rge
from . import io
from . import definitions
from . import beta
from . import smpar
import pylha
from collections import OrderedDict
from math import sqrt
import numpy as np
import ckmutil.phases, ckmutil.diag
class SMEFT(object):
"""Parameter point in the Standard Model Effective Field Theory."""
def __init__(self):
"""Initialize the SMEFT instance."""
self.C_in = None
self.scale_in = None
self.scale_high = None
def set_initial(self, C_in, scale_in, scale_high):
r"""Set the initial values for parameters and Wilson coefficients at
the scale `scale_in`, setting the new physics scale $\Lambda$ to
`scale_high`."""
self.C_in = C_in
self.scale_in = scale_in
self.scale_high = scale_high
def load_initial(self, streams):
"""Load the initial values for parameters and Wilson coefficients from
one or several files.
        `streams` should be a tuple of file-like objects or strings."""
d = {}
for stream in streams:
s = io.load(stream)
if 'BLOCK' not in s:
raise ValueError("No BLOCK found")
d.update(s['BLOCK'])
d = {'BLOCK': d}
C = io.wc_lha2dict(d)
sm = io.sm_lha2dict(d)
C.update(sm)
C = definitions.symmetrize(C)
self.C_in = C
def set_initial_wcxf(self, wc, scale_high=None, get_smpar=False):
"""Load the initial values for Wilson coefficients from a
wcxf.WC instance.
Parameters:
- `scale_high`: since Wilson coefficients are dimensionless in
smeftrunner but not in WCxf, the high scale in GeV has to be provided.
If this parameter is None (default), either a previously defined
value will be used, or the scale attribute of the WC instance will
be used.
        - `get_smpar`: boolean, optional, defaults to False. If True, an attempt
is made to determine the SM parameters from the requirement of
reproducing the correct SM masses and mixings at the electroweak
scale. As approximations are involved, the result might or might not
be reliable, depending on the size of the Wilson coefficients
affecting the SM masses and mixings. If False, Standard Model
parameters have to be provided separately and are assumed to be in
the weak basis used for the Warsaw basis as defined in WCxf,
i.e. in the basis where the down-type and charged lepton mass
matrices are diagonal.
"""
import wcxf
if wc.eft != 'SMEFT':
raise ValueError("Wilson coefficients use wrong EFT.")
if wc.basis != 'Warsaw':
raise ValueError("Wilson coefficients use wrong basis.")
if scale_high is not None:
self.scale_high = scale_high
elif self.scale_high is None:
self.scale_high = wc.scale
C = wcxf.translators.smeft.wcxf2arrays(wc.dict)
keys_dim5 = ['llphiphi']
keys_dim6 = list(set(definitions.WC_keys_0f + definitions.WC_keys_2f + definitions.WC_keys_4f) - set(keys_dim5))
self.scale_in = wc.scale
for k in keys_dim5:
if k in C:
C[k] = C[k]*self.scale_high
for k in keys_dim6:
if k in C:
C[k] = C[k]*self.scale_high**2
C = definitions.symmetrize(C)
# fill in zeros for missing WCs
for k, s in definitions.C_keys_shape.items():
if k not in C and k not in definitions.SM_keys:
if s == 1:
C[k] = 0
else:
C[k] = np.zeros(s)
if self.C_in is None:
self.C_in = C
else:
self.C_in.update(C)
if get_smpar:
self.C_in.update(self._get_sm_scale_in())
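    # Dimensional bookkeeping behind the loops above (sketch; the numbers are
    # illustrative, not from the original source). WCxf stores a coefficient of
    # a dimension-d operator in GeV^(4-d); smeftrunner keeps it dimensionless,
    # so C_smeftrunner = C_wcxf * scale_high**(d - 4):
    #
    #   C_wcxf = 1e-6        # dimension-6 coefficient in GeV^-2
    #   scale_high = 1000.0  # new physics scale in GeV
    #   C_wcxf * scale_high**2  # = 1.0, the dimensionless value used internally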
def load_wcxf(self, stream, get_smpar=True):
"""Load the initial values for Wilson coefficients from
a file-like object or a string in WCxf format.
Note that Standard Model parameters have to be provided separately
and are assumed to be in the weak basis used for the Warsaw basis as
defined in WCxf, i.e. in the basis where the down-type and charged
lepton mass matrices are diagonal."""
import wcxf
wc = wcxf.WC.load(stream)
self.set_initial_wcxf(wc, get_smpar=get_smpar)
def dump(self, C_out, scale_out=None, stream=None, fmt='lha', skip_redundant=True):
"""Return a string representation of the parameters and Wilson
coefficients `C_out` in DSixTools output format. If `stream` is
specified, export it to a file. `fmt` defaults to `lha` (the SLHA-like
DSixTools format), but can also be `json` or `yaml` (see the
pylha documentation)."""
C = OrderedDict()
if scale_out is not None:
C['SCALES'] = {'values': [[1, self.scale_high], [2, scale_out]]}
else:
C['SCALES'] = {'values': [[1, self.scale_high]]}
sm = io.sm_dict2lha(C_out)['BLOCK']
C.update(sm)
wc = io.wc_dict2lha(C_out, skip_redundant=skip_redundant)['BLOCK']
C.update(wc)
return pylha.dump({'BLOCK': C}, fmt=fmt, stream=stream)
def get_wcxf(self, C_out, scale_out):
"""Return the Wilson coefficients `C_out` as a wcxf.WC instance.
Note that the Wilson coefficients are rotated into the Warsaw basis
as defined in WCxf, i.e. to the basis where the down-type and charged
lepton mass matrices are diagonal."""
import wcxf
C = self.rotate_defaultbasis(C_out)
d = wcxf.translators.smeft.arrays2wcxf(C)
basis = wcxf.Basis['SMEFT', 'Warsaw']
d = {k: v for k, v in d.items() if k in basis.all_wcs and v != 0}
keys_dim5 = ['llphiphi']
keys_dim6 = list(set(definitions.WC_keys_0f + definitions.WC_keys_2f
+ definitions.WC_keys_4f) - set(keys_dim5))
for k in d:
if k.split('_')[0] in keys_dim5:
d[k] = d[k] / self.scale_high
for k in d:
if k.split('_')[0] in keys_dim6:
d[k] = d[k] / self.scale_high**2
d = wcxf.WC.dict2values(d)
wc = wcxf.WC('SMEFT', 'Warsaw', scale_out, d)
return wc
def dump_wcxf(self, C_out, scale_out, fmt='yaml', stream=None, **kwargs):
"""Return a string representation of the Wilson coefficients `C_out`
in WCxf format. If `stream` is specified, export it to a file.
`fmt` defaults to `yaml`, but can also be `json`.
Note that the Wilson coefficients are rotated into the Warsaw basis
as defined in WCxf, i.e. to the basis where the down-type and charged
lepton mass matrices are diagonal."""
wc = self.get_wcxf(C_out, scale_out)
return wc.dump(fmt=fmt, stream=stream, **kwargs)
def rgevolve(self, scale_out, **kwargs):
"""Solve the SMEFT RGEs from the initial scale to `scale_out`.
Returns a dictionary with parameters and Wilson coefficients at
`scale_out`. Additional keyword arguments will be passed to
the ODE solver `scipy.integrate.odeint`."""
self._check_initial()
return rge.smeft_evolve(C_in=self.C_in,
scale_high=self.scale_high,
scale_in=self.scale_in,
scale_out=scale_out,
**kwargs)
def rgevolve_leadinglog(self, scale_out):
"""Compute the leading logarithmix approximation to the solution
of the SMEFT RGEs from the initial scale to `scale_out`.
Returns a dictionary with parameters and Wilson coefficients.
        Much faster but less precise than `rgevolve`.
"""
self._check_initial()
return rge.smeft_evolve_leadinglog(C_in=self.C_in,
scale_high=self.scale_high,
scale_in=self.scale_in,
scale_out=scale_out)
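    # End-to-end usage sketch (the file name is hypothetical, not from the
    # original source):
    #
    #   smeft = SMEFT()
    #   smeft.load_wcxf('wc_1TeV.yaml')           # WCxf input, e.g. at 1 TeV
    #   C_mz = smeft.rgevolve(scale_out=91.1876)  # full numerical running
    #   wc_mz = smeft.get_wcxf(C_mz, 91.1876)     # back to WCxf format
    #
    # rgevolve_leadinglog is the drop-in replacement when speed matters more
    # than precision.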
def _check_initial(self):
"""Check if initial values and scale as well as the new physics scale
have been set."""
if self.C_in is None:
raise Exception("You have to specify the initial conditions first.")
if self.scale_in is None:
raise Exception("You have to specify the initial scale first.")
if self.scale_high is None:
raise Exception("You have to specify the high scale first.")
def rotate_defaultbasis(self, C):
"""Rotate all parameters to the basis where the running down-type quark
and charged lepton mass matrices are diagonal and where the running
up-type quark mass matrix has the form V.S, with V unitary and S real
diagonal, and where the CKM and PMNS matrices have the standard
phase convention."""
v = sqrt(2*C['m2'].real/C['Lambda'].real)
Mep = v/sqrt(2) * (C['Ge'] - C['ephi'] * v**2/self.scale_high**2/2)
Mup = v/sqrt(2) * (C['Gu'] - C['uphi'] * v**2/self.scale_high**2/2)
Mdp = v/sqrt(2) * (C['Gd'] - C['dphi'] * v**2/self.scale_high**2/2)
Mnup = -v**2 * C['llphiphi']
UeL, Me, UeR = ckmutil.diag.msvd(Mep)
UuL, Mu, UuR = ckmutil.diag.msvd(Mup)
UdL, Md, UdR = ckmutil.diag.msvd(Mdp)
Unu, Mnu = ckmutil.diag.mtakfac(Mnup)
UuL, UdL, UuR, UdR = ckmutil.phases.rephase_standard(UuL, UdL, UuR, UdR)
Unu, UeL, UeR = ckmutil.phases.rephase_pmns_standard(Unu, UeL, UeR)
return definitions.flavor_rotation(C, Uq=UdL, Uu=UuR, Ud=UdR, Ul=UeL, Ue=UeR)
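    # For orientation (standard relations, stated as a sketch): msvd returns a
    # singular value decomposition M_f = U_fL @ diag(m_f) @ U_fR.conj().T for
    # f = e, u, d, and mtakfac the Takagi factorization of the symmetric
    # neutrino mass matrix, so the CKM matrix is V = UuL.conj().T @ UdL after
    # the standard rephasing.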
def _run_sm_scale_in(self, C_out, scale_sm=91.1876):
"""Get the SM parameters at the EW scale, using an estimate `C_out`
of the Wilson coefficients at that scale, and run them to the
input scale."""
# initialize an empty SMEFT instance
smeft_sm = SMEFT()
C_in_sm = beta.C_array2dict(np.zeros(9999))
# set the SM parameters to the values obtained from smpar.smeftpar
C_SM = smpar.smeftpar(scale_sm, self.scale_high, C_out, basis='Warsaw')
C_SM = {k: v for k, v in C_SM.items() if k in definitions.SM_keys}
# set the Wilson coefficients at the EW scale to C_out
C_in_sm.update(C_out)
C_in_sm.update(C_SM)
smeft_sm.set_initial(C_in_sm, scale_sm, scale_high=self.scale_high)
# run up (with 1% relative precision, ignore running of Wilson coefficients)
C_SM_high = smeft_sm.rgevolve(self.scale_in, newphys=False, rtol=0.01, atol=1)
return {k: v for k, v in C_SM_high.items() if k in definitions.SM_keys}
def _get_sm_scale_in(self, scale_sm=91.1876):
"""Get an estimate of the SM parameters at the input scale by running
them from the EW scale using constant values for the Wilson coefficients
(corresponding to their leading log approximated values at the EW
scale).
Note that this is not guaranteed to work and will fail if some of the
Wilson coefficients (the ones affecting the extraction of SM parameters)
are large."""
        # initialize a copy of ourselves
_smeft = SMEFT()
_smeft.set_initial(self.C_in, self.scale_in, self.scale_high)
# Step 1: run the SM up, using the WCs at scale_input as (constant) estimate
_smeft.C_in.update(self._run_sm_scale_in(self.C_in, scale_sm=scale_sm))
# Step 2: run the WCs down in LL approximation
C_out = _smeft.rgevolve_leadinglog(scale_sm)
# Step 3: run the SM up again, this time using the WCs at scale_sm as (constant) estimate
return self._run_sm_scale_in(C_out, scale_sm=scale_sm)<|fim▁end|> | |
<|file_name|>search.controller.js<|end_file_name|><|fim▁begin|>(function() {
'use strict';
angular.module('columbyApp')
.controller('SearchCtrl', function($log,$rootScope, $scope, SearchSrv) {
/* ---------- SETUP ----------------------------------------------------------------------------- */
$scope.contentLoading = true;
$scope.search = {};
$rootScope.title = 'columby.com | search';
$scope.pagination = {
itemsPerPage: 20,
datasets:{
currentPage: 1
},
accounts:{
currentPage: 1
},
tags:{
currentPage: 1
}
};
/* ---------- SCOPE FUNCTIONS ------------------------------------------------------------------- */
$scope.doSearch = function(){
$scope.search.hits = null;
if ($scope.search.searchTerm.length>2){
$scope.search.message = 'Searching for: ' + $scope.search.searchTerm;<|fim▁hole|> $scope.search.hits = response;
$scope.search.message = null;
$scope.pagination.datasets.numPages = response.datasets.count / $scope.pagination.itemsPerPage;
$scope.pagination.accounts.numPages = response.accounts.count / $scope.pagination.itemsPerPage;
$scope.pagination.tags.numPages = response.tags.count / $scope.pagination.itemsPerPage;
}, function(err){
$scope.search.message = 'Error: ' + err.data.message;
});
} else {
$scope.search.message = 'Type at least 3 characters.';
}
};
/* ---------- INIT ---------------------------------------------------------------------------- */
if ($rootScope.searchTerm){
$scope.search.searchTerm = $rootScope.searchTerm;
$log.debug($scope.search.searchTerm);
delete $rootScope.searchTerm;
$log.debug($scope.search.searchTerm);
$scope.doSearch();
}
});
})();<|fim▁end|> | SearchSrv.query({
text: $scope.search.searchTerm,
limit: $scope.pagination.itemsPerPage
}).then(function (response) { |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
###############################################################################
# #
# Copyright (C) 2015 Trustcode - www.trustcode.com.br #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Affero General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #<|fim▁hole|># (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
###############################################################################
from . import sale_order<|fim▁end|> | |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>use std::int;
use std::comm::stream;
use std::comm::Port;
use std::comm::Chan;
type MaybeInt = Option<int>;
fn fib_generator(max: int, chan: &Chan<MaybeInt>) {
let mut a = 0;
let mut b = 1;
loop {
let next = a + b;
if next > max {
break;
}
b = a;
a = next;
chan.send(Some(next));
}
chan.send(None);
}
fn main() {
let (port, chan): (Port<MaybeInt>, Chan<MaybeInt>) = stream();
do spawn {
fib_generator(4000000, &chan);
}
let mut accum: int = 0;
loop {
let next: MaybeInt = port.recv();
match next {
Some(x) if x % 2 == 0 => accum += x,
Some(_) => loop,<|fim▁hole|> }
println(fmt!("%d", accum));
}<|fim▁end|> | None => break,
}; |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
from setuptools import setup, find_packages
import amqpy
if sys.version_info < (3, 2):
raise Exception('amqpy requires Python 3.2 or higher')
name = 'amqpy'
description = 'an AMQP 0.9.1 client library for Python >= 3.2.0'
keywords = ['amqp', 'rabbitmq', 'qpid']
classifiers = [
'Development Status :: 4 - Beta',
'Programming Language :: Python',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Topic :: Internet',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Networking'
]
package_data = {
'': ['*.rst', '*.ini', 'AUTHORS', 'LICENSE'],
}
def long_description():
if os.path.exists('README.rst'):
with open('README.rst') as f:
return f.read()
else:
return description
setup(
name=name,
description=description,
long_description=long_description(),
version=amqpy.__version__,
author=amqpy.__author__,
author_email=amqpy.__contact__,
maintainer=amqpy.__maintainer__,
url=amqpy.__homepage__,
platforms=['any'],
license='LGPL',
packages=find_packages(exclude=['ez_setup', 'tests', 'tests.*']),
package_data=package_data,
tests_require=['pytest>=2.6'],
classifiers=classifiers,
keywords=keywords
)<|fim▁end|> | #!/usr/bin/env python3
import sys
import os |
<|file_name|>Class_LabExperimBased.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Class_LabExperimBased provides functionalities for data handling of data obtained in lab experiments in the field of (waste)water treatment.
Copyright (C) 2016 Chaim De Mulder
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see http://www.gnu.org/licenses/.
"""
import sys
#import os
#from os import listdir
#import pandas as pd
#import scipy as sp
#import numpy as np
#import datetime as dt
import matplotlib.pyplot as plt #plotten in python
import warnings as wn
from wwdata.Class_HydroData import HydroData
class LabExperimBased(HydroData):
"""
Superclass for a HydroData object, expanding the functionalities with
    specific functions for data gathered in lab experiments.
Attributes
----------
timedata_column : str
name of the column containing the time data
data_type : str
type of the data provided
experiment_tag : str
A tag identifying the experiment; can be a date or a code used by
the producer/owner of the data.
time_unit : str
The time unit in which the time data is given
units : array
The units of the variables in the columns
"""
def __init__(self,data,timedata_column='index',data_type='NAT',
experiment_tag='No tag given',time_unit=None):
"""
initialisation of a LabExperimBased object, based on a previously defined
HydroData object.
"""
HydroData.__init__(self,data,timedata_column=timedata_column,data_type=data_type,
experiment_tag=experiment_tag,time_unit=time_unit)
def hours(self,time_column='index'):
"""
calculates the hours from the relative values
Parameters
----------
time_column : string
column containing the relative time values; default to index
"""
if time_column == 'index':
self.data['index']=self.time.values
            self.data['h']= (self.data['index'])*24 + self.data['index'].shift(1)
self.data['h'].fillna(0,inplace=True)
self.data.drop('index', axis=1, inplace=True)
else:
self.data['h']= (self.data[time_column])*24 + self.data[time_column].shift(1)
self.data['h'].fillna(0,inplace=True)
def add_conc(self,column_name,x,y,new_name='default'):
"""
calculates the concentration values of the given column and adds them as
a new column to the DataFrame.
Parameters
----------
column_name : str
column with values
x : int
...
y : int
...
new_name : str
name of the new column, default to 'column_name + mg/L'
"""
if new_name == 'default':
new_name = column_name + ' ' + 'mg/L'
self.data[new_name] = self.data[column_name].values*x*y
## Instead of this function: define a dataframe/dict with conversion or
## concentration factors, so that you can have a function that automatically
## converts all parameters in the frame to concentrations
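    def add_conc_all(self, factors):
        """
        Sketch of the factor-dict approach suggested in the comment above
        (a hypothetical helper, not part of the released package).

        Parameters
        ----------
        factors : dict
            maps column names to multiplicative concentration factors
        """
        for column, factor in factors.items():
            if column in self.data.columns:
                self.data[column + ' ' + 'mg/L'] = self.data[column].values * factor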
def check_ph(self,ph_column='pH',thresh=0.4):
"""
gives the maximal change in pH
Parameters
----------
ph_column : str
column with pH-values, default to 'pH'
        thresh : float
            threshold value for the warning; default to 0.4
"""
dph = self.data[ph_column].max()-self.data[ph_column].min()
if dph > thresh:
wn.warn('Strong change in pH during experiment!')
else:
self.delta_ph = dph
def in_out(self,columns):
"""
(start_values-end_values)
Parameters
----------
columns : array of strings
"""
inv=0
outv=0
indexes= self.time.values
for column in columns:
inv += self.data[column][indexes[0]]
for column in columns:
outv += self.data[column][indexes[-1]]
in_out = inv-outv
return in_out
def removal(self,columns):
"""
total removal of nitrogen
(1-(end_values/start_values))
Parameters
----------
columns : array of strings
"""
inv=0
outv=0
indexes= self.time.values
for column in columns:
inv += self.data[column][indexes[0]]
for column in columns:
outv += self.data[column][indexes[-1]]
removal = 1-(outv/inv)
return removal
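    # Worked example (illustrative numbers): with a summed start value of
    # 50 mg/L and a summed end value of 5 mg/L, removal = 1 - 5/50 = 0.9,
    # i.e. 90 % of the component was removed over the experiment.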
def calc_slope(self,columns,time_column='h'):
"""
calculates the slope of the selected columns
Parameters
----------
columns : array of strings
columns to calculate the slope for
time_column : str
time used for calculation; default to 'h'
"""
for column in columns:
self.data[column + " " +'slope'] = (self.data[column].shift(1)-self.data[column])\
/(self.data[time_column]-self.data[time_column].shift(1))
def plot(self,columns,time_column='index'):<|fim▁hole|> ----------
columns : array of strings
columns to plot
time_column : str
            time column used for the x-axis; default to 'index'
"""
fig = plt.figure(figsize=(10,6))
ax = fig.add_subplot(111)
if time_column=='index':
for column in columns:
ax.plot(self.time,self.data[column],marker='o')
else:
for column in columns:
ax.plot(self.data[time_column],self.data[column],marker='o')
ax.legend()
return fig,ax
#######################################
def _print_removed_output(original,new,type_):
"""
function printing the output of functions that remove datapoints.
Parameters
----------
original : int
original length of the dataset
new : int
length of the new dataset
type_ : str
'removed' or 'dropped'
"""
print('Original dataset:',original,'datapoints')
print('New dataset:',new,'datapoints')
print(original-new,'datapoints ',type_)
def _log_removed_output(log_file,original,new,type_):
"""
function writing the output of functions that remove datapoints to a log file.
Parameters
----------
log_file : str
string containing the directory to the log file to be written out
original : int
original length of the dataset
new : int
length of the new dataset
type_ : str
'removed' or 'dropped'
"""
log_file = open(log_file,'a')
    log_file.write('\nOriginal dataset: '+str(original)+' datapoints; new dataset: '+
                   str(new)+' datapoints; '+str(original-new)+' datapoints '+type_)
log_file.close()<|fim▁end|> | """
        plots the selected columns against the chosen time column
Parameters |
<|file_name|>widget-footer.js<|end_file_name|><|fim▁begin|>/**
* Widget Footer Directive
*/
angular<|fim▁hole|>function rdWidgetFooter() {
var directive = {
        require: '^rdWidget', // the AngularJS DDO key is 'require'; 'requires' is ignored
transclude: true,
template: '<div class="widget-footer" ng-transclude></div>',
restrict: 'E'
};
return directive;
};<|fim▁end|> | .module('Home')
.directive('rdWidgetFooter', rdWidgetFooter);
|
<|file_name|>D.js<|end_file_name|><|fim▁begin|>package test0438;
<|fim▁hole|>class D {
}<|fim▁end|> | import test0438_a.W;
|
<|file_name|>dev.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
"""
DEV SCRIPT
This is a hacky script meant to be run mostly automatically with the option of
interactions.
dev.py is supposed to be a developer non-gui interface into the IBEIS software.
dev.py runs experiments and serves as a scratchpad for new code and quick scripts
TODO:
Test to find typical "good" descriptor scores. Find nearest neighbors and
    normalizers for each feature in a query image. Based on ground truth and
spatial verification mark feature matches as true or false. Visualize the
feature scores of good matches vs bad matches. Lowe shows the pdf of
correct matches and the PDF for incorrect matches. We should also show the
same thing.
Done:
Cache nearest neighbors so different parameters later in the pipeline dont
take freaking forever.
CommandLine:
python dev.py --wshow -t query --db PZ_MTEST --qaid 110 --cfg score_method:nsum prescore_method:nsum
python dev.py --wshow -t query --db PZ_MTEST --qaid 110
python dev.py --wshow -t query --db PZ_MTEST --qaid 110 --cfg fg_on=True
python dev.py --wshow -t query --db PZ_MTEST --qaid 110 --cfg
"""
# TODO: ADD COPYRIGHT TAG
from __future__ import absolute_import, division, print_function
import multiprocessing
import sys
#from ibeis._devscript import devcmd, DEVCMD_FUNCTIONS, DEVPRECMD_FUNCTIONS, DEVCMD_FUNCTIONS2, devcmd2
from ibeis._devscript import devcmd, DEVCMD_FUNCTIONS, DEVPRECMD_FUNCTIONS
import utool as ut
from utool.util_six import get_funcname
import utool
#from ibeis.algo.hots import smk
import plottool as pt
import ibeis
if __name__ == '__main__':
multiprocessing.freeze_support()
ibeis._preload()
#from ibeis.all_imports import * # NOQA
#utool.util_importer.dynamic_import(__name__, ('_devcmds_ibeis', None),
# developing=True)
from ibeis._devcmds_ibeis import * # NOQA
# IBEIS
from ibeis.init import main_helpers # NOQA
from ibeis.other import dbinfo # NOQA
from ibeis.expt import experiment_configs # NOQA
from ibeis.expt import harness # NOQA
from ibeis import params # NOQA
print, print_, printDBG, rrr, profile = utool.inject(__name__, '[dev]')
#------------------
# DEV DEVELOPMENT
#------------------
# This is where you write all of the functions that will become pristine
# and then go in _devcmds_ibeis.py
"""
./dev.py -e print_results --db PZ_Master1 -a varysize_pzm:dper_name=[1,2],dsize=1500 -t candidacy_k:K=1 --intersect_hack
./dev.py -e draw_rank_cdf -t baseline -a baseline --show --db PZ_Master1
./dev.py -e get_dbinfo --db PZ_Master1 --aid_list=baseline
./dev.py -e get_dbinfo --db PZ_MTEST
./dev.py -e get_dbinfo --db PZ_Master1 --aid_list=baseline --hackshow-unixtime --show
./dev.py -e get_dbinfo --db PZ_Master1 --hackshow-unixtime --show
"""
# Quick interface into specific registered doctests
REGISTERED_DOCTEST_EXPERIMENTS = [
('ibeis.expt.experiment_drawing', 'draw_case_timedeltas', ['timedelta_hist', 'timedelta_pie']),
('ibeis.expt.experiment_drawing', 'draw_match_cases', ['draw_cases', 'cases']),
('ibeis.expt.experiment_drawing', 'draw_casetag_hist', ['taghist']),
('ibeis.expt.old_storage', 'draw_results'),
('ibeis.expt.experiment_drawing', 'draw_rank_cdf', ['rank_cdf']),
('ibeis.other.dbinfo', 'get_dbinfo'),
('ibeis.other.dbinfo', 'latex_dbstats'),
('ibeis.other.dbinfo', 'show_image_time_distributions', ['db_time_hist']),
('ibeis.expt.experiment_drawing', 'draw_rank_surface', ['rank_surface']),
('ibeis.expt.experiment_helpers', 'get_annotcfg_list', ['print_acfg']),
('ibeis.expt.experiment_printres', 'print_results', ['printres', 'print']),
('ibeis.expt.experiment_printres', 'print_latexsum', ['latexsum']),
('ibeis.dbio.export_subset', 'export_annots'),
('ibeis.expt.experiment_drawing', 'draw_annot_scoresep', ['scores', 'scores_good', 'scores_all']),
]
def _exec_doctest_func(modname, funcname):
module = ut.import_modname(modname)
func = module.__dict__[funcname]
testsrc = ut.get_doctest_examples(func)[0][0]
exec(testsrc, globals(), locals())
def _register_doctest_precmds():
from functools import partial
for tup in REGISTERED_DOCTEST_EXPERIMENTS:
modname, funcname = tup[:2]
aliases = tup[2] if len(tup) == 3 else []
aliases += [funcname]
_doctest_func = partial(_exec_doctest_func, modname, funcname)
devprecmd(*aliases)(_doctest_func)
_register_doctest_precmds()
@devcmd('tune', 'autotune')
def tune_flann(ibs, qaid_list, daid_list=None):
r"""
CommandLine:
python dev.py -t tune --db PZ_MTEST
python dev.py -t tune --db GZ_ALL
python dev.py -t tune --db GIR_Tanya
python dev.py -t tune --db PZ_Master0
Example:
>>> # ENABLE_DOCTEST
>>> from ibeis._devscript import * # NOQA
>>> # build test data
>>> # execute function
>>> result = func_wrapper()
>>> # verify results
>>> print(result)
"""
all_aids = ibs.get_valid_aids()
vecs = np.vstack(ibs.get_annot_vecs(all_aids))
    print('Tuning flann for species={species}:'.format(species=ibs.get_database_species(all_aids)))
tuned_params = vt.tune_flann(vecs,
target_precision=.98,
build_weight=0.05,
memory_weight=0.00,
sample_fraction=0.1)
tuned_params
#tuned_params2 = vt.tune_flann(vecs,
# target_precision=.90,
# build_weight=0.001,
# memory_weight=0.00,
# sample_fraction=0.5)
#tuned_params2
@devcmd('incremental', 'inc')
def incremental_test(ibs, qaid_list, daid_list=None):
"""
Adds / queries new images one at a time to a clean test database.
Tests the complete system.
Args:
        ibs (IBEISController): IBEIS controller object
qaid_list (list) : list of annotation-ids to query
CommandLine:
python dev.py -t inc --db PZ_MTEST --qaid 1:30:3 --cmd
python dev.py --db PZ_MTEST --allgt --cmd
python dev.py --db PZ_MTEST --allgt -t inc
python dev.py -t inc --db PZ_MTEST --qaid 1:30:3 --cmd
python dev.py -t inc --db GZ_ALL --ninit 100 --noqcache
python dev.py -t inc --db PZ_MTEST --noqcache --interactive-after 40
python dev.py -t inc --db PZ_Master0 --noqcache --interactive-after 10000 --ninit 400
Example:
>>> from ibeis.all_imports import * # NOQA
>>> ibs = ibeis.opendb('PZ_MTEST')
>>> qaid_list = ibs.get_valid_aids()
>>> daid_list = None
"""
from ibeis.algo.hots import automated_matcher
ibs1 = ibs
num_initial = ut.get_argval('--ninit', type_=int, default=0)
return automated_matcher.incremental_test(ibs1, num_initial)
@devcmd('inspect')
def inspect_matches(ibs, qaid_list, daid_list):
print('<inspect_matches>')
from ibeis.gui import inspect_gui
return inspect_gui.test_review_widget(ibs, qaid_list, daid_list)
def get_ibslist(ibs):
print('[dev] get_ibslist')
ibs_GV = ibs
ibs_RI = ibs.clone_handle(nogravity_hack=True)
ibs_RIW = ibs.clone_handle(nogravity_hack=True, gravity_weighting=True)
ibs_list = [ibs_GV, ibs_RI, ibs_RIW]
return ibs_list
@devcmd('gv_scores')
def compgrav_draw_score_sep(ibs, qaid_list, daid_list):
print('[dev] compgrav_draw_score_sep')
ibs_list = get_ibslist(ibs)
for ibs_ in ibs_list:
draw_annot_scoresep(ibs_, qaid_list)
#--------------------
# RUN DEV EXPERIMENTS
#--------------------
#def run_registered_precmd(precmd_name):
# # Very hacky way to run just a single registered precmd
# for (func_aliases, func) in DEVPRECMD_FUNCTIONS:
# for aliases in func_aliases:
# ret = precmd_name in input_precmd_list
# if ret:
# func()
def run_devprecmds():
"""
Looks for pre-tests specified with the -t flag and runs them
"""
#input_precmd_list = params.args.tests[:]
input_precmd_list = ut.get_argval('-e', type_=list, default=[])
valid_precmd_list = []
def intest(*args, **kwargs):
for precmd_name in args:
valid_precmd_list.append(precmd_name)
ret = precmd_name in input_precmd_list
ret2 = precmd_name in params.unknown # Let unparsed args count towards tests
if ret or ret2:
if ret:
input_precmd_list.remove(precmd_name)
else:
ret = ret2
print('+===================')
print('| running precmd = %s' % (args,))
return ret
return False
ut.start_logging(appname='ibeis')
# Implicit (decorated) test functions
for (func_aliases, func) in DEVPRECMD_FUNCTIONS:
if intest(*func_aliases):
#with utool.Indenter('[dev.' + get_funcname(func) + ']'):
func()
print('Exiting after first precommand')
sys.exit(1)
if len(input_precmd_list) > 0:
raise AssertionError('Unhandled tests: ' + repr(input_precmd_list))
#@utool.indent_func('[dev]')
def run_devcmds(ibs, qaid_list, daid_list, acfg=None):
"""
This function runs tests passed in with the -t flag
"""
print('\n')
#print('[dev] run_devcmds')
print('==========================')
print('[DEV] RUN EXPERIMENTS %s' % ibs.get_dbname())
print('==========================')
input_test_list = params.args.tests[:]
print('input_test_list = %s' % (ut.list_str(input_test_list),))
# fnum = 1
valid_test_list = [] # build list for printing in case of failure
valid_test_helpstr_list = [] # for printing
def mark_test_handled(testname):
input_test_list.remove(testname)
def intest(*args, **kwargs):
helpstr = kwargs.get('help', '')
valid_test_helpstr_list.append(' -t ' + ', '.join(args) + helpstr)
for testname in args:
valid_test_list.append(testname)
ret = testname in input_test_list
ret2 = testname in params.unknown # Let unparsed args count towards tests
if ret or ret2:
if ret:
mark_test_handled(testname)
else:
ret = ret2
print('\n+===================')
print(' [dev] running testname = %s' % (args,))
print('+-------------------\n')
return ret
return False
valid_test_helpstr_list.append(' # --- Simple Tests ---')
# Explicit (simple) test functions
if intest('export'):
export(ibs)
if intest('dbinfo'):
dbinfo.get_dbinfo(ibs)
if intest('headers', 'schema'):
ibs.db.print_schema()
if intest('info'):
print(ibs.get_infostr())
if intest('printcfg'):
printcfg(ibs)
if intest('tables'):
ibs.print_tables()
if intest('imgtbl'):
ibs.print_image_table()
valid_test_helpstr_list.append(' # --- Decor Tests ---')
locals_ = locals()
# Implicit (decorated) test functions
for (func_aliases, func) in DEVCMD_FUNCTIONS:
if intest(*func_aliases):
funcname = get_funcname(func)
#with utool.Indenter('[dev.' + funcname + ']'):
with utool.Timer(funcname):
#print('[dev] qid_list=%r' % (qaid_list,))
# FIXME: , daid_list
if len(ut.get_func_argspec(func).args) == 0:
ret = func()
else:
ret = func(ibs, qaid_list, daid_list)
# Add variables returned by the function to the
# "local scope" (the exec scop)
if hasattr(ret, 'items'):
for key, val in ret.items():
if utool.is_valid_varname(key):
locals_[key] = val
valid_test_helpstr_list.append(' # --- Config Tests ---')
# ------
# RUNS EXPERIMENT HARNESS OVER VALID TESTNAMES SPECIFIED WITH -t
# ------
# Config driven test functions
# Allow any testcfg to be in tests like: vsone_1 or vsmany_3
test_cfg_name_list = []
for test_cfg_name in experiment_configs.TEST_NAMES:
if intest(test_cfg_name):
test_cfg_name_list.append(test_cfg_name)
# Hack to allow for very customized harness tests
for testname in input_test_list[:]:
if testname.startswith('custom:'):
test_cfg_name_list.append(testname)
mark_test_handled(testname)
if len(test_cfg_name_list):
fnum = pt.next_fnum()
# Run Experiments
# backwards compatibility yo
acfgstr_name_list = {'OVERRIDE_HACK': (qaid_list, daid_list)}
assert False, 'This way of running tests no longer works. It may be fixed in the future'
#acfg
harness.test_configurations(ibs, acfgstr_name_list, test_cfg_name_list)
valid_test_helpstr_list.append(' # --- Help ---')
if intest('help'):
print('valid tests are:')
print('\n'.join(valid_test_helpstr_list))
return locals_
if len(input_test_list) > 0:
print('valid tests are: \n')
print('\n'.join(valid_test_list))
raise Exception('Unknown tests: %r ' % input_test_list)
return locals_
#-------------------
# CUSTOM DEV FUNCS
#-------------------
#------------------
# DEV MAIN
#------------------
def dev_snippets(main_locals):
""" Common variables for convineince when interacting with IPython """
print('[dev] dev_snippets')
species = 'zebra_grevys'
quick = True
fnum = 1
# Get reference to IBEIS Controller
ibs = main_locals['ibs']
if 'back' in main_locals:
# Get reference to GUI Backend
back = main_locals['back']
if back is not None:
# Get reference to GUI Frontend
front = getattr(back, 'front', None)
ibswgt = front
view = ibswgt.views['images']
model = ibswgt.models['names_tree']
selection_model = view.selectionModel()
if ibs is not None:
#ibs.dump_tables()
annots = ibs.annots()
images = ibs.images()
aid_list = ibs.get_valid_aids()
gid_list = ibs.get_valid_gids()
#nid_list = ibs.get_valid_nids()
#valid_nid_list = ibs.get_annot_name_rowids(aid_list)
#valid_aid_names = ibs.get_annot_names(aid_list)
#valid_aid_gtrues = ibs.get_annot_groundtruth(aid_list)
return locals()
def get_sortbystr(str_list, key_list, strlbl=None, keylbl=None):
sortx = key_list.argsort()
ndigits = max(len(str(key_list.max())), 0 if keylbl is None else len(keylbl))
keyfmt = '%' + str(ndigits) + 'd'
if keylbl is not None:
header = keylbl + ' --- ' + strlbl
else:
header = None
sorted_strs = ([(keyfmt % key + ' --- ' + str_) for str_, key in zip(str_list[sortx], key_list[sortx])])
def boxjoin(list_, header=None):
topline = '+----------'
botline = 'L__________'
boxlines = []
boxlines.append(topline + '\n')
if header is not None:
boxlines.append(header + '\n')
boxlines.append(topline)
body = utool.indentjoin(list_, '\n | ')
boxlines.append(body + '\n ')
boxlines.append(botline + '\n')
return ''.join(boxlines)
return boxjoin(sorted_strs, header)
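# Example (illustrative values): sort config strings by their keypoint counts
# and render them in an ASCII box with an aligned header.
#
#   cfgstr_list = np.array(['cfgA', 'cfgB'])
#   nKpts_list = np.array([523, 210])
#   print(get_sortbystr(cfgstr_list, nKpts_list, strlbl='cfg', keylbl='nKpts'))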
@devcmd('test_feats')
def test_feats(ibs, qaid_list, daid_list=None):
"""
test_feats shows features using several different parameters
Args:
ibs (IBEISController):
qaid_list (int): query annotation id
CommandLine:
python dev.py -t test_feats --db PZ_MTEST --all --qindex 0 --show -w
Example:
>>> import ibeis
>>> ibs = ibeis.opendb('testdb1')
>>> qaid_list = [1]
"""
from ibeis import viz
from ibeis.expt import experiment_configs
import utool as ut
NUM_PASSES = 1 if not utool.get_argflag('--show') else 2
varyparams_list = [experiment_configs.featparams]
def test_featcfg_combo(ibs, aid, alldictcomb, count, nKpts_list, cfgstr_list):
for dict_ in ut.progiter(alldictcomb, lbl='FeatCFG Combo: '):
# Set ibs parameters to the current config
for key_, val_ in six.iteritems(dict_):
ibs.cfg.feat_cfg[key_] = val_
cfgstr_ = ibs.cfg.feat_cfg.get_cfgstr()
if count == 0:
# On first run just record info
kpts = ibs.get_annot_kpts(aid)
nKpts_list.append(len(kpts))
cfgstr_list.append(cfgstr_)
if count == 1:
kpts = ibs.get_annot_kpts(aid)
# If second run happens display info
cfgpackstr = utool.packstr(cfgstr_, textwidth=80,
breakchars=',', newline_prefix='',
break_words=False, wordsep=',')
title_suffix = (' len(kpts) = %r \n' % len(kpts)) + cfgpackstr
viz.show_chip(ibs, aid, fnum=pt.next_fnum(),
title_suffix=title_suffix, darken=.8,
ell_linewidth=2, ell_alpha=.6)
alldictcomb = utool.flatten(map(utool.all_dict_combinations, varyparams_list))
for count in range(NUM_PASSES):
nKpts_list = []
cfgstr_list = []
for aid in qaid_list:
test_featcfg_combo(ibs, aid, alldictcomb, count, nKpts_list, cfgstr_list)
#for dict_ in alldictcomb:
if count == 0:
nKpts_list = np.array(nKpts_list)
cfgstr_list = np.array(cfgstr_list)
print(get_sortbystr(cfgstr_list, nKpts_list, 'cfg', 'nKpts'))
def devfunc(ibs, qaid_list):
""" Function for developing something """
print('[dev] devfunc')
import ibeis # NOQA
from ibeis.algo import Config # NOQA
#from ibeis.algo.Config import * # NOQA
feat_cfg = Config.FeatureConfig()
#feat_cfg.printme3()
print('\ncfgstr..')
print(feat_cfg.get_cfgstr())
print(utool.dict_str(feat_cfg.get_hesaff_params()))
from ibeis import viz
aid = 1
ibs.cfg.feat_cfg.threshold = 16.0 / 3.0
kpts = ibs.get_annot_kpts(aid)
print('len(kpts) = %r' % len(kpts))
from ibeis.expt import experiment_configs
#varyparams_list = [
# #{
# # 'threshold': [16.0 / 3.0, 32.0 / 3.0], # 8.0 / 3.0
# # 'numberOfScales': [3, 2, 1],
# # 'maxIterations': [16, 32],
# # 'convergenceThreshold': [.05, .1],
# # 'initialSigma': [1.6, 1.2],
# #},
# {
# #'threshold': [16.0 / 3.0, 32.0 / 3.0], # 8.0 / 3.0
# 'numberOfScales': [1],
# #'maxIterations': [16, 32],
# #'convergenceThreshold': [.05, .1],
# #'initialSigma': [6.0, 3.0, 2.0, 1.6, 1.2, 1.1],
# 'initialSigma': [3.2, 1.6, 0.8],
# 'edgeEigenValueRatio': [10, 5, 3],
# },
#]
varyparams_list = [experiment_configs.featparams]
# low threshold = more keypoints
# low initialSigma = more keypoints
nKpts_list = []
cfgstr_list = []
    alldictcomb = utool.flatten([utool.util_dict.all_dict_combinations(varyparams) for varyparams in varyparams_list])
NUM_PASSES = 1 if not utool.get_argflag('--show') else 2
for count in range(NUM_PASSES):
for aid in qaid_list:
#for dict_ in utool.progiter(alldictcomb, lbl='feature param comb: ', total=len(alldictcomb)):
for dict_ in alldictcomb:
for key_, val_ in six.iteritems(dict_):
ibs.cfg.feat_cfg[key_] = val_
cfgstr_ = ibs.cfg.feat_cfg.get_cfgstr()
cfgstr = utool.packstr(cfgstr_, textwidth=80,
breakchars=',', newline_prefix='', break_words=False, wordsep=',')
if count == 0:
kpts = ibs.get_annot_kpts(aid)<|fim▁hole|> #print('___________')
#print('len(kpts) = %r' % len(kpts))
#print(cfgstr)
nKpts_list.append(len(kpts))
cfgstr_list.append(cfgstr_)
if count == 1:
title_suffix = (' len(kpts) = %r \n' % len(kpts)) + cfgstr
viz.show_chip(ibs, aid, fnum=pt.next_fnum(),
title_suffix=title_suffix, darken=.4,
ell_linewidth=2, ell_alpha=.8)
if count == 0:
nKpts_list = np.array(nKpts_list)
cfgstr_list = np.array(cfgstr_list)
print(get_sortbystr(cfgstr_list, nKpts_list, 'cfg', 'nKpts'))
pt.present()
locals_ = locals()
return locals_
def run_dev(ibs):
"""
main developer command
CommandLine:
python dev.py --db PZ_Master0 --controlled --print-rankhist
"""
print('[dev] --- RUN DEV ---')
# Get reference to controller
if ibs is not None:
# Get aids marked as test cases
if not ut.get_argflag('--no-expanded-aids'):
ibs, qaid_list, daid_list = main_helpers.testdata_expanded_aids(ibs=ibs)
#qaid_list = main_helpers.get_test_qaids(ibs, default_qaids=[1])
#daid_list = main_helpers.get_test_daids(ibs, default_daids='all', qaid_list=qaid_list)
            print('[run_dev] Test Annotations:')
#print('[run_dev] * qaid_list = %s' % ut.packstr(qaid_list, 80, nlprefix='[run_dev] '))
else:
qaid_list = []
daid_list = []
try:
assert len(qaid_list) > 0, 'assert!'
assert len(daid_list) > 0, 'daid_list!'
except AssertionError as ex:
utool.printex(ex, 'len(qaid_list) = 0', iswarning=True)
utool.printex(ex, 'or len(daid_list) = 0', iswarning=True)
#qaid_list = ibs.get_valid_aids()[0]
if len(qaid_list) > 0 or True:
# Run the dev experiments
expt_locals = run_devcmds(ibs, qaid_list, daid_list)
# Add experiment locals to local namespace
execstr_locals = utool.execstr_dict(expt_locals, 'expt_locals')
exec(execstr_locals)
if ut.get_argflag('--devmode'):
# Execute the dev-func and add to local namespace
devfunc_locals = devfunc(ibs, qaid_list)
exec(utool.execstr_dict(devfunc_locals, 'devfunc_locals'))
return locals()
#-------------
# EXAMPLE TEXT
#-------------
EXAMPLE_TEXT = '''
### DOWNLOAD A TEST DATABASE (IF REQUIRED) ###
python dev.py --t mtest
python dev.py --t nauts
./resetdbs.sh # FIXME
python ibeis/dbio/ingest_database.py <- see module for usage
### LIST AVAIABLE DATABASES ###
python dev.py -t list_dbs
### CHOOSE A DATABASE ###
python dev.py --db PZ_Master0 --setdb
python dev.py --db GZ_ALL --setdb
python dev.py --db PZ_MTEST --setdb
python dev.py --db NAUT_test --setdb
python dev.py --db testdb1 --setdb
python dev.py --db seals2 --setdb
### DATABASE INFORMATION ###
python dev.py -t dbinfo
### EXPERIMENTS ###
python dev.py --allgt -t best
python dev.py --allgt -t vsone
python dev.py --allgt -t vsmany
python dev.py --allgt -t nsum
# Newstyle experiments
# commmand # annot settings # test settings
python -m ibeis.dev -a default:qaids=allgt -t best
### COMPARE TWO CONFIGS ###
python dev.py --allgt -t nsum vsmany vsone
python dev.py --allgt -t nsum vsmany
python dev.py --allgt -t nsum vsmany vsone smk
### VARY DATABASE SIZE
python -m ibeis.dev -a default:qaids=allgt,dsize=100,qper_name=1,qmin_per_name=1 -t default --db PZ_MTEST
python -m ibeis.dev -a candidacy:qsize=10,dsize=100 -t default --db PZ_MTEST --verbtd
### VIZ A SET OF MATCHES ###
python dev.py --db PZ_MTEST -t query --qaid 72 110 -w
#python dev.py --allgt -t vsone vsmany
#python dev.py --allgt -t vsone --vz --vh
### RUN A SMALL AMOUNT OF VSONE TESTS ###
python dev.py --allgt -t vsone --qindex 0:1 --vz --vh --vf --noqcache
python dev.py --allgt --qindex 0:20 --
### DUMP ANALYSIS FIGURES TO DISK ###
python dev.py --allgt -t best --vf --vz --fig-dname query_analysis_easy
python dev.py --allgt -t best --vf --vh --fig-dname query_analysis_hard
python dev.py --allgt -t best --vf --va --fig-dname query_analysis_all
python dev.py --db PZ_MTEST --set-aids-as-hard 27 28 44 49 50 51 53 54 66 72 89 97 110
python dev.py --hard -t best vsone nsum
>>>
'''
#L______________
#def run_devmain2():
# input_test_list = ut.get_argval(('--tests', '-t',), type_=list, default=[])[:]
# print('input_test_list = %s' % (ut.list_str(input_test_list),))
# # fnum = 1
# valid_test_list = [] # build list for printing in case of failure
# valid_test_helpstr_list = [] # for printing
# def mark_test_handled(testname):
# input_test_list.remove(testname)
# def intest(*args, **kwargs):
# helpstr = kwargs.get('help', '')
# valid_test_helpstr_list.append(' -t ' + ', '.join(args) + helpstr)
# for testname in args:
# valid_test_list.append(testname)
# ret = testname in input_test_list
# ret2 = testname in params.unknown # Let unparsed args count towards tests
# if ret or ret2:
# if ret:
# mark_test_handled(testname)
# else:
# ret = ret2
# print('\n+===================')
# print(' [dev2] running testname = %s' % (args,))
# print('+-------------------\n')
# return ret
# return False
# anynewhit = False
# # Implicit (decorated) test functions
# print('DEVCMD_FUNCTIONS2 = %r' % (DEVCMD_FUNCTIONS2,))
# for (func_aliases, func) in DEVCMD_FUNCTIONS2:
# if intest(*func_aliases):
# funcname = get_funcname(func)
# with utool.Timer(funcname):
# if len(ut.get_func_argspec(func).args) == 0:
# func()
# anynewhit = True
# else:
# func(ibs, qaid_list, daid_list)
# anynewhit = True
# if anynewhit:
# sys.exit(1)
def devmain():
"""
The Developer Script
A command line interface to almost everything
-w # wait / show the gui / figures are visible
--cmd # ipython shell to play with variables
-t # run list of tests
Examples:
"""
helpstr = ut.codeblock(
'''
Dev is meant to be run as an interactive script.
    The dev.py script runs any test you register with @devcmd in any combination
of configurations specified by a Config object.
Dev caches information in order to get quicker results. # FIXME: Provide quicker results # FIXME: len(line)
''')
INTRO_TITLE = 'The dev.py Script'
#INTRO_TEXT = ''.join((ut.bubbletext(INTRO_TITLE, font='cybermedium'), helpstr))
INTRO_TEXT = ut.bubbletext(INTRO_TITLE, font='cybermedium')
INTRO_STR = ut.msgblock('dev.py Intro', INTRO_TEXT)
EXAMPLE_STR = ut.msgblock('dev.py Examples', ut.codeblock(EXAMPLE_TEXT))
if ut.NOT_QUIET:
print(INTRO_STR)
if ut.get_argflag(('--help', '--verbose')):
print(EXAMPLE_STR)
CMD = ut.get_argflag('--cmd')
NOGUI = not ut.get_argflag('--gui')
if len(sys.argv) == 1:
print('Run dev.py with arguments!')
sys.exit(1)
# Run Precommands
run_devprecmds()
#
#
# Run IBEIS Main, create controller, and possibly gui
print('++dev')
main_locals = ibeis.main(gui=ut.get_argflag('--gui'))
#utool.set_process_title('IBEIS_dev')
#
#
# Load snippet variables
SNIPPITS = True and CMD
if SNIPPITS:
snippet_locals = dev_snippets(main_locals)
snippet_execstr = utool.execstr_dict(snippet_locals, 'snippet_locals')
exec(snippet_execstr)
#
#
# Development code
RUN_DEV = True # RUN_DEV = '__IPYTHON__' in vars()
if RUN_DEV:
dev_locals = run_dev(main_locals['ibs'])
dev_execstr = utool.execstr_dict(dev_locals, 'dev_locals')
exec(dev_execstr)
command = ut.get_argval('--eval', type_=str, default=None)
if command is not None:
result = eval(command, globals(), locals())
print('result = %r' % (result,))
#ibs.search_annot_notes('360')
#
#
# Main Loop (IPython interaction, or some exec loop)
#if '--nopresent' not in sys.argv or '--noshow' in sys.argv:
ut.show_if_requested()
if ut.get_argflag(('--show', '--wshow')):
pt.present()
main_execstr = ibeis.main_loop(main_locals, ipy=(NOGUI or CMD))
exec(main_execstr)
#
#
# Memory profile
if ut.get_argflag('--memprof'):
utool.print_resource_usage()
utool.memory_profile()
print('exiting dev')
if __name__ == '__main__':
multiprocessing.freeze_support() # for win32
    # HACK to run tests without specifying ibs first
#run_devmain2()
devmain()
r"""
CurrentExperiments:
# Full best settings run
./dev.py -t custom --db PZ_Master0 --allgt --species=zebra_plains
# Full best settings run without spatial verification
./dev.py -t custom:sv_on=False --db PZ_Master0 --allgt --species=zebra_plains
./dev.py -t custom --db PZ_Master0 --allgt --species=zebra_plains --hs
# Check to see if new spatial verification helps
./dev.py -t custom:full_homog_checks=False custom:full_homog_checks=True --db PZ_Master0 --allgt --species=zebra_plains
# Yay it does
# Look for how many false negatives are in the bottom batch
./dev.py -t custom --db PZ_MTEST --species=zebra_plains --print-rankhist
./dev.py -t custom --db PZ_MTEST --controlled --print-rankhist
./dev.py -t custom --db PZ_Master0 --controlled --print-rankhist
./dev.py -t \
custom \
custom:rotation_invariance=True,affine_invariance=False \
custom:rotation_invariance=True,augment_queryside_hack=True \
--db PZ_Master0 --controlled --print-rankhist --print-bestcfg
./dev.py -t \
custom:rotation_invariance=True,affine_invariance=False \
custom:rotation_invariance=True,augment_queryside_hack=True \
--db NNP_Master3 --controlled --print-rankhist --print-bestcfg
ElephantEarExperiments
--show --vh
./dev.py -t custom:affine_invariance=True --db Elephants_drop1_ears --allgt --print-rankhist
./dev.py -t custom:affine_invariance=False --db Elephants_drop1_ears --allgt --print-rankhist
./dev.py -t custom:affine_invariance=False,histeq=True --db Elephants_drop1_ears --allgt --print-rankhist
./dev.py -t custom:affine_invariance=False,adapteq=True --db Elephants_drop1_ears --allgt --print-rankhist
./dev.py -t custom:affine_invariance=False,fg_on=False --db Elephants_drop1_ears --allgt
./dev.py -t custom:affine_invariance=False,histeq=True,fg_on=False --db Elephants_drop1_ears --allgt
./dev.py -t custom:affine_invariance=False,adapteq=True,fg_on=False --db Elephants_drop1_ears --allgt
./dev.py -t elph --db Elephants_drop1_ears --allgt
Sift vs Siam Experiments
./dev.py -t custom:feat_type=hesaff+siam128,algorithm=linear custom:feat_type=hesaff+sift --db testdb1 --allgt
./dev.py -t custom:feat_type=hesaff+siam128,algorithm=linear custom:feat_type=hesaff+sift --db PZ_MTEST --allgt
./dev.py -t custom:feat_type=hesaff+siam128,lnbnn_on=False,fg_on=False,bar_l2_on=True custom:feat_type=hesaff+sift,fg_on=False --db PZ_MTEST --allgt
./dev.py -t custom:feat_type=hesaff+siam128 custom:feat_type=hesaff+sift --db PZ_MTEST --allgt --print-rankhist
./dev.py -t custom:feat_type=hesaff+siam128 --db PZ_MTEST --allgt --print-rankhist
./dev.py -t custom:feat_type=hesaff+sift --db PZ_MTEST --allgt --print-rankhist
./dev.py -t custom:feat_type=hesaff+siam128 custom:feat_type=hesaff+sift --db PZ_Master0 --allgt
./dev.py -t custom:feat_type=hesaff+siam128 --db testdb1 --allgt
Without SV:
agg rank histogram = {
(0, 1): 2276,
(1, 5): 126,
(5, 50): 99,
(50, 8624): 108,
(8624, 8625): 28,
}
With SV:
agg rank histogram = {
(0, 1): 2300,
(1, 5): 106,
(5, 50): 16,
(50, 8624): 0,
(8624, 8625): 215,
}
Guesses:
0 2 2 2 4 4 4 4 0 0
0 0 4 2 2 4 4 4 2 2
2 4 4 4 1 1 1 2 2 2
0 0 1 1 1 2 0 0 1
"""<|fim▁end|> | |
<|file_name|>curry.spec.ts<|end_file_name|><|fim▁begin|>import { expect } from 'chai';
import { Curry } from './curry';
describe('curry', () => {
it('should curry the method with default arity', () => {
class MyClass {
@Curry()
add(a: any, b?: any) {
return a + b;
}
}
const myClass = new MyClass();
const add5 = myClass.add(5);
expect(add5).to.be.an.instanceOf(Function);
expect(add5(10)).to.equal(15);
});
it('should curry the method with default arity (paramless)', () => {
class MyClass {
@Curry
add(a: any, b?: any) {
return a + b;
}
}
const myClass = new MyClass();
const add5 = myClass.add(5);
<|fim▁hole|> });
it('should curry the method with fixed arity', () => {
class MyClass {
@Curry(2)
add(a: any, b?: any, c?: any) {
return a + b * c;
}
}
const myClass = new MyClass();
const add5 = myClass.add(5);
expect(add5).to.be.an.instanceOf(Function);
expect(add5(10, 2)).to.equal(25);
});
it('should retain the class context', () => {
class MyClass {
value = 10;
@Curry()
add(a: any, b?: any): any {
return (a + b) * this.value;
}
}
const myClass = new MyClass();
const add5 = myClass.add(5);
expect(add5(2)).to.equal(70);
});
});<|fim▁end|> | expect(add5).to.be.an.instanceOf(Function);
expect(add5(10)).to.equal(15); |
<|file_name|>utils.py<|end_file_name|><|fim▁begin|># Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: [email protected]
# Maintained By: [email protected]
import datetime
import json
import sys
import time
from flask import current_app
class DateTimeEncoder(json.JSONEncoder):
"""Custom JSON Encoder to handle datetime objects
from:
`http://stackoverflow.com/questions/12122007/python-json-encoder-to-support-datetime`_
also consider:
`http://hg.tryton.org/2.4/trytond/file/ade5432ac476/trytond/protocols/jsonrpc.py#l53`_
"""
def default(self, obj):
if isinstance(obj, datetime.datetime):
return obj.isoformat()
elif isinstance(obj, datetime.date):
return obj.isoformat()
elif isinstance(obj, datetime.timedelta):
return (datetime.datetime.min + obj).time().isoformat()
else:
return super(DateTimeEncoder, self).default(obj)
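# Illustrative round trip (values hypothetical):
#
#   json.dumps({'at': datetime.datetime(2015, 1, 2, 3, 4, 5)},
#              cls=DateTimeEncoder)
#   # -> '{"at": "2015-01-02T03:04:05"}'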
class UnicodeSafeJsonWrapper(dict):
"""JSON received via POST has keys as unicode. This makes get work with plain
`str` keys.
"""
def __getitem__(self, key):
ret = self.get(key)
if ret is None:
raise KeyError(key)
return ret
def get(self, key, default=None):
return super(UnicodeSafeJsonWrapper, self).get(unicode(key), default) # noqa
<|fim▁hole|> return json.dumps(obj, cls=DateTimeEncoder, **kwargs)
def service_for(obj):
module = sys.modules['ggrc.services']
if type(obj) is str or type(obj) is unicode: # noqa
model_type = obj
else:
model_type = obj.__class__.__name__
return getattr(module, model_type, None)
def url_for(obj, id=None):
service = service_for(obj)
if service is None:
return None
if id is not None:
return service.url_for(id=id)
return service.url_for(obj)
def view_service_for(obj):
module = sys.modules['ggrc.views']
if type(obj) is str or type(obj) is unicode: # noqa
model_type = obj
else:
model_type = obj.__class__.__name__
return getattr(module, model_type, None)
def view_url_for(obj, id=None):
service = view_service_for(obj)
if service is None:
return None
if id is not None:
return service.url_for(id=id)
return service.url_for(obj)
def encoded_dict(in_dict):
# http://stackoverflow.com/questions/6480723/urllib-urlencode-doesnt-like-unicode-values-how-about-this-workaround
out_dict = {}
for k, v in in_dict.iteritems():
if isinstance(v, unicode): # noqa
v = v.encode('utf8')
elif isinstance(v, str):
# Must be encoded in UTF-8
v.decode('utf8')
out_dict[k] = v
return out_dict
def merge_dict(destination, source, path=None):
"""merges source into destination"""
if path is None:
path = []
for key in source:
if key in destination:
if isinstance(destination[key], dict) and isinstance(source[key], dict):
merge_dict(destination[key], source[key], path + [str(key)])
elif destination[key] == source[key]:
pass # same leaf value
else:
raise Exception('Conflict at %s' % '.'.join(path + [str(key)]))
else:
destination[key] = source[key]
return destination
def merge_dicts(*args):
result = {}
for arg in args:
result = merge_dict(result, arg)
return result
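# Worked example (hypothetical values):
#
#   merge_dicts({'a': {'b': 1}}, {'a': {'c': 2}}, {'d': 3})
#   # -> {'a': {'b': 1, 'c': 2}, 'd': 3}
#
# Conflicting leaf values such as {'a': 1} vs {'a': 2} raise an Exception
# naming the dotted path of the conflict.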
class BenchmarkContextManager(object):
def __init__(self, message):
self.message = message
def __enter__(self):
self.start = time.time()
def __exit__(self, exc_type, exc_value, exc_trace):
end = time.time()
current_app.logger.info("{:.4f} {}".format(end - self.start, self.message))
benchmark = BenchmarkContextManager<|fim▁end|> |
def as_json(obj, **kwargs): |
<|file_name|>dispatcher.rs<|end_file_name|><|fim▁begin|>use std::collections::HashMap;
use std::collections::hash_map::Entry;
use std::io::{self, Cursor};
use std::net::SocketAddr;
use std::thread;
use bip_handshake::{DiscoveryInfo, InitiateMessage, Protocol};
use bip_util::bt::PeerId;
use chrono::{DateTime, Duration};
use chrono::offset::Utc;
use futures::future::Either;
use futures::sink::{Wait, Sink};
use nom::IResult;
use rand;
use umio::{ELoopBuilder, Dispatcher, Provider};
use umio::external::{self, Timeout};
use announce::{AnnounceRequest, SourceIP, DesiredPeers};
use client::{ClientToken, ClientRequest, RequestLimiter, ClientMetadata, ClientResponse};
use client::error::{ClientResult, ClientError};
use option::AnnounceOptions;
use request::{self, TrackerRequest, RequestType};
use response::{TrackerResponse, ResponseType};
use scrape::ScrapeRequest;
const EXPECTED_PACKET_LENGTH: usize = 1500;
const CONNECTION_ID_VALID_DURATION_MILLIS: i64 = 60000;
const MAXIMUM_REQUEST_RETRANSMIT_ATTEMPTS: u64 = 8;
/// Internal dispatch timeout.
enum DispatchTimeout {
Connect(ClientToken),
CleanUp,
}
/// Internal dispatch message for clients.
pub enum DispatchMessage {
Request(SocketAddr, ClientToken, ClientRequest),
StartTimer,
Shutdown,
}
/// Create a new background dispatcher to execute request and send responses back.
///
/// Assumes msg_capacity is less than usize::max_value().
pub fn create_dispatcher<H>(bind: SocketAddr,
handshaker: H,
msg_capacity: usize,
limiter: RequestLimiter)
-> io::Result<external::Sender<DispatchMessage>>
where H: Sink + DiscoveryInfo + 'static + Send,
H::SinkItem: From<Either<InitiateMessage, ClientMetadata>>
{
// Timer capacity is plus one for the cache cleanup timer
let builder = ELoopBuilder::new()
.channel_capacity(msg_capacity)
.timer_capacity(msg_capacity + 1)
.bind_address(bind)
.buffer_length(EXPECTED_PACKET_LENGTH);
let mut eloop = try!(builder.build());
let channel = eloop.channel();
let dispatch = ClientDispatcher::new(handshaker, bind, limiter);
thread::spawn(move || {
eloop.run(dispatch).expect("bip_utracker: ELoop Shutdown Unexpectedly...");
});
channel.send(DispatchMessage::StartTimer)
.expect("bip_utracker: ELoop Failed To Start Connect ID Timer...");
Ok(channel)
}
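// Minimal usage sketch (caller names are illustrative, not from this
// crate's documented API); the returned channel drives the event loop:
//
//     let channel = create_dispatcher(bind_addr, handshaker, 64, limiter)?;
//     channel.send(DispatchMessage::Request(tracker_addr, token, request))
//            .expect("bip_utracker: dispatcher channel closed");
//     channel.send(DispatchMessage::Shutdown).unwrap();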
// ----------------------------------------------------------------------------//
/// Dispatcher that executes requests asynchronously.
struct ClientDispatcher<H> {
handshaker: Wait<H>,
pid: PeerId,
port: u16,
bound_addr: SocketAddr,
active_requests: HashMap<ClientToken, ConnectTimer>,
id_cache: ConnectIdCache,
limiter: RequestLimiter,
}
impl<H> ClientDispatcher<H>
where H: Sink + DiscoveryInfo,
H::SinkItem: From<Either<InitiateMessage, ClientMetadata>>
{
/// Create a new ClientDispatcher.
pub fn new(handshaker: H, bind: SocketAddr, limiter: RequestLimiter) -> ClientDispatcher<H> {
let peer_id = handshaker.peer_id();
let port = handshaker.port();
ClientDispatcher {
handshaker: handshaker.wait(),
pid: peer_id,
port: port,
bound_addr: bind,
active_requests: HashMap::new(),
id_cache: ConnectIdCache::new(),
limiter: limiter,
}
}
/// Shutdown the current dispatcher, notifying all pending requests.
pub fn shutdown<'a>(&mut self, provider: &mut Provider<'a, ClientDispatcher<H>>) {
// Notify all active requests with the appropriate error
for token_index in 0..self.active_requests.len() {
let next_token = *self.active_requests.keys().skip(token_index).next().unwrap();
self.notify_client(next_token, Err(ClientError::ClientShutdown));
}
// TODO: Clear active timeouts
self.active_requests.clear();
provider.shutdown();
}
/// Finish a request by sending the result back to the client.
pub fn notify_client(&mut self, token: ClientToken, result: ClientResult<ClientResponse>) {
self.handshaker.send(Either::B(ClientMetadata::new(token, result)).into())
.unwrap_or_else(|_| panic!("NEED TO FIX"));
self.limiter.acknowledge();
}
/// Process a request to be sent to the given address and associated with the given token.
pub fn send_request<'a>(&mut self,
provider: &mut Provider<'a, ClientDispatcher<H>>,
addr: SocketAddr,
token: ClientToken,
request: ClientRequest) {
// Check for IP version mismatch between source addr and dest addr
match (self.bound_addr, addr) {
(SocketAddr::V4(_), SocketAddr::V6(_)) |
(SocketAddr::V6(_), SocketAddr::V4(_)) => {
self.notify_client(token, Err(ClientError::IPVersionMismatch));
return;
}
_ => (),
};
self.active_requests.insert(token, ConnectTimer::new(addr, request));
self.process_request(provider, token, false);
}
/// Process a response received from some tracker and match it up against our sent requests.
pub fn recv_response<'a, 'b>(&mut self,
provider: &mut Provider<'a, ClientDispatcher<H>>,
addr: SocketAddr,
response: TrackerResponse<'b>) {
let token = ClientToken(response.transaction_id());
let conn_timer = if let Some(conn_timer) = self.active_requests.remove(&token) {
if conn_timer.message_params().0 == addr {
conn_timer
} else {
return;
} // TODO: Add Logging (Server Receive Addr Different Than Send Addr)
} else {
return;
}; // TODO: Add Logging (Server Gave Us Invalid Transaction Id)
provider.clear_timeout(conn_timer.timeout_id()
.expect("bip_utracker: Failed To Clear Request Timeout"));
// Check if the response requires us to update the connection timer
if let &ResponseType::Connect(id) = response.response_type() {
self.id_cache.put(addr, id);
self.active_requests.insert(token, conn_timer);
self.process_request(provider, token, false);
} else {
// Match the request type against the response type and update our client
match (conn_timer.message_params().1, response.response_type()) {
(&ClientRequest::Announce(hash, _), &ResponseType::Announce(ref res)) => {
// Forward contact information on to the handshaker
for addr in res.peers().iter() {
self.handshaker.send(Either::A(InitiateMessage::new(Protocol::BitTorrent, hash, addr)).into())
.unwrap_or_else(|_| panic!("NEED TO FIX"));
}
self.notify_client(token, Ok(ClientResponse::Announce(res.to_owned())));
}
(&ClientRequest::Scrape(..), &ResponseType::Scrape(ref res)) => {
self.notify_client(token, Ok(ClientResponse::Scrape(res.to_owned())));
}
(_, &ResponseType::Error(ref res)) => {
self.notify_client(token, Err(ClientError::ServerMessage(res.to_owned())));
}
_ => {
self.notify_client(token, Err(ClientError::ServerError));
}
}
}
}
    /// Process an existing request, either re-requesting a connection id or sending the actual request again.
///
/// If this call is the result of a timeout, that will decide whether to cancel the request or not.
fn process_request<'a>(&mut self,
provider: &mut Provider<'a, ClientDispatcher<H>>,
token: ClientToken,
timed_out: bool) {
let mut conn_timer = if let Some(conn_timer) = self.active_requests.remove(&token) {
conn_timer
} else {
return;
}; // TODO: Add logging
// Resolve the duration of the current timeout to use
let next_timeout = match conn_timer.current_timeout(timed_out) {
Some(timeout) => timeout,
None => {
self.notify_client(token, Err(ClientError::MaxTimeout));
return;
}
};
let addr = conn_timer.message_params().0;
let opt_conn_id = self.id_cache.get(conn_timer.message_params().0);
// Resolve the type of request we need to make
let (conn_id, request_type) = match (opt_conn_id, conn_timer.message_params().1) {
(Some(id), &ClientRequest::Announce(hash, state)) => {
let source_ip = match addr {
SocketAddr::V4(_) => SourceIP::ImpliedV4,
SocketAddr::V6(_) => SourceIP::ImpliedV6,
};
let key = rand::random::<u32>();
(id,
RequestType::Announce(AnnounceRequest::new(hash,
self.pid,
state,
source_ip,
key,
DesiredPeers::Default,
self.port,
AnnounceOptions::new())))
}
(Some(id), &ClientRequest::Scrape(hash)) => {
let mut scrape_request = ScrapeRequest::new();
scrape_request.insert(hash);
(id, RequestType::Scrape(scrape_request))
}
(None, _) => (request::CONNECT_ID_PROTOCOL_ID, RequestType::Connect),
};
let tracker_request = TrackerRequest::new(conn_id, token.0, request_type);
// Try to write the request out to the server
let mut write_success = false;
provider.outgoing(|bytes| {
let mut writer = Cursor::new(bytes);
write_success = tracker_request.write_bytes(&mut writer).is_ok();
if write_success {
Some((writer.position() as usize, addr))
} else {
None
}
});
// If message was not sent (too long to fit) then end the request
if !write_success {
self.notify_client(token, Err(ClientError::MaxLength));
} else {
conn_timer.set_timeout_id(
provider.set_timeout(DispatchTimeout::Connect(token), next_timeout)
.expect("bip_utracker: Failed To Set Timeout For Request"));
self.active_requests.insert(token, conn_timer);
}
}
}
impl<H> Dispatcher for ClientDispatcher<H>
where H: Sink + DiscoveryInfo,
H::SinkItem: From<Either<InitiateMessage, ClientMetadata>>
{
type Timeout = DispatchTimeout;
type Message = DispatchMessage;
fn incoming<'a>(&mut self,
mut provider: Provider<'a, Self>,
message: &[u8],
addr: SocketAddr) {
let response = match TrackerResponse::from_bytes(message) {
IResult::Done(_, rsp) => rsp,
_ => return, // TODO: Add Logging
};
self.recv_response(&mut provider, addr, response);
}
fn notify<'a>(&mut self, mut provider: Provider<'a, Self>, message: DispatchMessage) {
match message {
DispatchMessage::Request(addr, token, req_type) => {
self.send_request(&mut provider, addr, token, req_type);
}
DispatchMessage::StartTimer => self.timeout(provider, DispatchTimeout::CleanUp),
DispatchMessage::Shutdown => self.shutdown(&mut provider),
}
}
fn timeout<'a>(&mut self, mut provider: Provider<'a, Self>, timeout: DispatchTimeout) {
match timeout {
DispatchTimeout::Connect(token) => self.process_request(&mut provider, token, true),
DispatchTimeout::CleanUp => {
self.id_cache.clean_expired();
provider.set_timeout(DispatchTimeout::CleanUp,
CONNECTION_ID_VALID_DURATION_MILLIS as u64)
.expect("bip_utracker: Failed To Restart Connect Id Cleanup Timer");
}
};
}
}
// ----------------------------------------------------------------------------//
/// Contains logic for making sure a valid connection id is present
/// and correctly timing out when sending requests to the server.
struct ConnectTimer {
addr: SocketAddr,
attempt: u64,
request: ClientRequest,
timeout_id: Option<Timeout>,
}
impl ConnectTimer {
/// Create a new ConnectTimer.
pub fn new(addr: SocketAddr, request: ClientRequest) -> ConnectTimer {
ConnectTimer {
addr: addr,
attempt: 0,
request: request,
timeout_id: None,
}
}
/// Yields the current timeout value to use or None if the request should time out completely.
pub fn current_timeout(&mut self, timed_out: bool) -> Option<u64> {
if self.attempt == MAXIMUM_REQUEST_RETRANSMIT_ATTEMPTS {
None
} else {
if timed_out {
self.attempt += 1;
}
Some(calculate_message_timeout_millis(self.attempt))
}
}
<|fim▁hole|> self.timeout_id
}
/// Sets a new timeout id.
pub fn set_timeout_id(&mut self, id: Timeout) {
self.timeout_id = Some(id);
}
/// Yields the message parameters for the current connection.
pub fn message_params(&self) -> (SocketAddr, &ClientRequest) {
(self.addr, &self.request)
}
}
/// Calculates the timeout for the request given the attempt count.
fn calculate_message_timeout_millis(attempt: u64) -> u64 {
(15 * 2u64.pow(attempt as u32)) * 1000
}
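// For reference, the retry schedule this yields for attempts 0..=8 is
// 15s, 30s, 60s, 120s, 240s, 480s, 960s, 1920s, 3840s, i.e. the
// 15 * 2^n second pattern recommended by the UDP tracker spec (BEP 15).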
// ----------------------------------------------------------------------------//
/// Cache for storing connection ids associated with a specific server address.
struct ConnectIdCache {
cache: HashMap<SocketAddr, (u64, DateTime<Utc>)>,
}
impl ConnectIdCache {
/// Create a new connect id cache.
fn new() -> ConnectIdCache {
ConnectIdCache { cache: HashMap::new() }
}
    /// Get an unexpired connection id for the given addr.
fn get(&mut self, addr: SocketAddr) -> Option<u64> {
match self.cache.entry(addr) {
Entry::Vacant(_) => None,
Entry::Occupied(occ) => {
let curr_time = Utc::now();
let prev_time = occ.get().1;
if is_expired(curr_time, prev_time) {
occ.remove();
None
} else {
Some(occ.get().0)
}
}
}
}
    /// Put an unexpired connection id into the cache for the given addr.
fn put(&mut self, addr: SocketAddr, connect_id: u64) {
let curr_time = Utc::now();
self.cache.insert(addr, (connect_id, curr_time));
}
/// Removes all entries that have expired.
    fn clean_expired(&mut self) {
        let curr_time = Utc::now();

        // Drop every cache entry whose connect id has expired
        self.cache.retain(|_, entry| !is_expired(curr_time, entry.1));
    }
}
/// Returns true if the connect id received at prev_time is now expired.
fn is_expired(curr_time: DateTime<Utc>, prev_time: DateTime<Utc>) -> bool {
let valid_duration = Duration::milliseconds(CONNECTION_ID_VALID_DURATION_MILLIS);
    // Time elapsed since the connect id was received
    let difference = curr_time.signed_duration_since(prev_time);

    difference >= valid_duration
}<|fim▁end|> | /// Yields the current timeout id if one is set.
pub fn timeout_id(&self) -> Option<Timeout> { |
<|file_name|>20.d.ts<|end_file_name|><|fim▁begin|>import * as React from "react";
import { CarbonIconProps } from "../../";
declare const FlowData20: React.ForwardRefExoticComponent<
CarbonIconProps & React.RefAttributes<SVGSVGElement><|fim▁hole|>export default FlowData20;<|fim▁end|> | >; |
<|file_name|>test_nova.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# Copyright (c) 2017 Hewlett Packard Enterprise, L.P.
# Copyright (c) 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import libvirt
import os
import time
from tests import data
from tests import utils
def verify_private_key(stdout):
line = [l for l in stdout.split('\n') if l != '']
if ((line[0] == '-----BEGIN PRIVATE KEY-----' and
line[-1] == '-----END PRIVATE KEY-----')):
return stdout
return ''
def cwd(filename):
return os.path.join(os.path.dirname(__file__), 'data', filename)
server1 = data.server['11111111-2222-3333-4444-555555555555']
server1_image = data.image[server1['image_id']]
class DwarfTestCase(utils.TestCase):
def setUp(self):
super(DwarfTestCase, self).setUp()
self.start_dwarf()
def tearDown(self):
self.stop_dwarf()
super(DwarfTestCase, self).tearDown()
def test_nova_flavors(self):
self.exec_verify(['nova', 'flavor-list'],
filename=cwd('nova_flavor-list'))
self.exec_verify(['nova', 'flavor-show', '100'],
filename=cwd('nova_flavor-show'))
self.exec_verify(['nova', 'flavor-delete', '100'],
filename=cwd('nova_flavor-delete'))
self.exec_verify(['nova', 'flavor-create', 'test.flavor', '999',
'1024', '15', '2'],<|fim▁hole|> filename=cwd('nova_flavor-create'))
def test_nova_keypairs(self):
self.exec_verify(['nova', 'keypair-add', 'test key', '--pub-key',
cwd('nova_keypair-add.pub')],
stdout='')
self.exec_verify(['nova', 'keypair-list'],
filename=cwd('nova_keypair-list'))
self.exec_verify(['nova', 'keypair-show', 'test key'],
filename=cwd('nova_keypair-show'))
self.exec_verify(['nova', 'keypair-delete', 'test key'],
stdout='')
self.exec_verify(['nova', 'keypair-add', 'test key'],
callback=verify_private_key)
def test_nova_servers(self):
# Preload an image
self.create_image(server1_image)
self.exec_verify(['nova', 'boot', '--flavor', server1['flavor_id'],
'--image', server1['image_id'], server1['name']],
filename=cwd('nova_boot'))
self.exec_verify(['nova', 'list'],
filename=cwd('nova_list.building'))
libvirt.DOMAIN_STATE = libvirt.VIR_DOMAIN_RUNNING
libvirt.IP_ADDRESS = server1['ip']
time.sleep(3)
# Should show the IP and status 'active'
self.exec_verify(['nova', 'list'],
filename=cwd('nova_list'))
self.exec_verify(['nova', 'show', server1['id']],
filename=cwd('nova_show'))
self.exec_verify(['nova', 'console-log', server1['id']],
stdout='Test server console log\n')
self.exec_verify(['nova', 'stop', server1['id']],
stdout='Request to stop server %s has been '
'accepted.\n' % server1['id'])
# Should show status 'stopped'
self.exec_verify(['nova', 'show', server1['id']],
filename=cwd('nova_show.stopped'))
self.exec_verify(['nova', 'start', server1['id']],
stdout='Request to start server %s has been '
'accepted.\n' % server1['id'])
# Should show status 'active'
self.exec_verify(['nova', 'show', server1['id']],
filename=cwd('nova_show'))
self.exec_verify(['nova', 'reboot', server1['id']],
stdout='Request to reboot server <Server: %s> has '
'been accepted.\n' % server1['name'])<|fim▁end|> | |
<|file_name|>moin_migration_cleanup.py<|end_file_name|><|fim▁begin|>import re
from waliki.signals import page_saved
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from waliki.models import Page
from django.utils.translation import ugettext_lazy as _
from django.utils.text import get_text_list
try:
from waliki.attachments.models import Attachment
except ImportError:
Attachment = None
try:
from sh import pandoc, echo
pandoc = pandoc.bake(_tty_out=False)
echo = echo.bake(_tty_out=False)
except ImportError:
pandoc = None
def clean_meta(rst_content):
"""remove moinmoin metada from the top of the file"""
rst = rst_content.split('\n')
for i, line in enumerate(rst):
if line.startswith('#'):
continue
break
return '\n'.join(rst[i:])
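# For illustration, given MoinMoin output such as:
#
#   #format rst
#   #language en
#   Actual page text ...
#
# clean_meta drops the leading '#' pragma lines and returns everything
# from the first non-pragma line onwards.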
def delete_relative_links(rst_content):
"""remove links relatives. Waliki point them correctly implicitly"""
return re.sub(r'^(\.\. .*: \.\./.*)\n$', '', rst_content, flags=re.MULTILINE)
def attachments(rst_content, slug):
def rep(matchobj):
for filename in matchobj.groups(1):
try:
a = Attachment.objects.filter(file__endswith=filename, page__slug=slug)[0]
except IndexError:
                print("Can't find %s in %s" % (filename, slug))
return None
return '`%s <%s>`_' % (filename, a.get_absolute_url())
return re.sub(r'`attachment:(.*)`_', rep, rst_content, flags=re.MULTILINE)
def directives(rst_content):
for directive in re.findall(r':(\w+):`.*`', rst_content, flags=re.MULTILINE):
rst_content += """
.. role:: {directive}
:class: {directive}
""".format(directive=directive)
return rst_content
def emojis(rst_content):
# require
emojis_map = {
':)': 'smile',
':-)': 'smile',
';)': 'wink',
';-)': 'wink',
':-?': 'smirk',
':?': 'smirk',
':(': 'confused',
':-(': 'confused',
':D': 'laughing',
':-D': 'laughing',
':-P': 'stuck_out_tongue_closed_eyes',
':P': 'stuck_out_tongue_closed_eyes',
":'(": 'cry',
":'-(": 'cry',
}
def replace_emoji(match):
replacement = emojis_map.get(match.groups()[0], '')
if replacement:
return '|%s|' % replacement
return ''
result = re.sub(r'\|((?:\:|;).{1,3})\|', replace_emoji, rst_content, flags=re.MULTILINE)
return result
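# e.g. emojis('|:)| done |;-)|') -> '|smile| done |wink|'; smiley codes
# that have no mapping are replaced with an empty string rather than kept.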
def email(rst_content):
pattern = r'`\[\[MailTo\((.*)\)\]\]`_(?:\.\.)?'
return re.sub(pattern, r'``\1``', rst_content)
def title_level(rst_content):
def dashrepl(matchobj):
return '-' * len(matchobj.group(0))
pattern = r'^~+$'
return re.sub(pattern, dashrepl, rst_content, flags=re.MULTILINE)
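# e.g. title_level('Section\n~~~~~~~\n') -> 'Section\n-------\n': each '~'
# underline is rewritten as a '-' underline of the same length.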
def code(rst_content):
if not pandoc:
return rst_content
pattern = r'^\:\:\n\s+\.\. raw:: html\n\s+(<span class\=\"line\"\>.*?|\s+?<\/span\>)\n\s*$'
def convert(match):
source = match.groups()[0]
source = '\n'.join(l.strip() for l in source.split('\n'))
source = "<pre>%s</pre>" % source
rst_source = pandoc(echo(source), f='html', t='rst').stdout.decode('utf8')
# rst_source = rst_source.strip().replace('\n', '\n ') + '\n'
return rst_source
result = re.sub(pattern, convert, rst_content, flags=re.DOTALL | re.MULTILINE)
return result
class Command(BaseCommand):
help = 'Cleanup filters for a moin2git import'
option_list = (
make_option('--limit-to',
dest='slug',
default='',
help="optional namespace"),
make_option('--filters',
dest='filters',
default='all',
help="comma separated list of filter functions to apply"),
make_option('--message',
dest='message',
default=_("RestructuredText clean up"),
help="log message"),
) + BaseCommand.option_list
def handle(self, *args, **options):
valid_filters = ['meta', 'links',
'attachments', 'directives',
'emojis', 'title', 'email', 'code', 'title_level']
slug = options['slug']
filters = options['filters']
if filters == 'all':
filters = valid_filters
else:
filters = [f.strip() for f in filters.split(',')]
if not set(filters).issubset(valid_filters):
valid = get_text_list(valid_filters, 'and')
raise CommandError("At least one filter is unknown. Valid filters are:\n %s" % valid)
if slug:
pages = Page.objects.filter(slug__startswith=slug)
else:
pages = Page.objects.all()
for page in pages:
title = None
print('\nApplying filter/s %s to %s' % (get_text_list(filters, 'and'), page.slug))
raw = page.raw
if 'meta' in filters:
raw = clean_meta(raw)
if 'links' in filters:
raw = delete_relative_links(raw)
if 'attachments' in filters:
raw = attachments(raw, page.slug)
if 'directives' in filters:
raw = directives(raw)
if 'emojis' in filters:
raw = emojis(raw)
if 'email' in filters:
raw = email(raw)
if 'title_level' in filters:
raw = title_level(raw)
if 'code' in filters:
if not pandoc:
print('The filter "code" need Pandoc installed in your system. Ignoring')
else:
raw = code(raw)
if 'title' in filters and not page.title:
title = page._get_part('get_document_title')
if raw != page.raw or title:
if title:
page.title = title
if raw != page.raw:
page.raw = raw
page.save()
page_saved.send_robust(sender='moin',<|fim▁hole|> else:
print('Nothing changed. Ignoring update')<|fim▁end|> | page=page,
author=None,
message=options['message'],
form_extra_data={}) |
<|file_name|>implem_iterative_no_stack.cpp<|end_file_name|><|fim▁begin|>#include <algorithm>
#include <iterator>
#include <vector>
#include "ihanoi.hpp"
#include "aim.hpp"
// Algorithm to be tested
template <int from, int other, int to>
struct ApplyMapping
{
std::pair<std::size_t, std::size_t> operator()(std::pair<std::size_t, std::size_t> const& move)
{
static_assert(from != other && other != to && to != from, "Sticks must have different IDs");
static_assert(from == 0 || other == 0 || to == 0, "Stick #0 not found");
static_assert(from == 1 || other == 1 || to == 1, "Stick #1 not found");
static_assert(from == 2 || other == 2 || to == 2, "Stick #2 not found");
return std::make_pair(move.first == 0 ? from : (move.first == 1 ? other : to), move.second == 0 ? from : (move.second == 1 ? other : to));
}
};
<|fim▁hole|> {
return;
}
const unsigned int height = tower.height_of(0);
std::vector<std::pair<std::size_t, std::size_t>> moves;
    moves.reserve((1 << height) - 1); // Solution contains 2^height - 1 moves at the end of the execution
for (unsigned int i = 1 ; i != height+1 ; ++i)
{
// At the beginning of this iteration we have:
// - moves: contains the list of moves necessary to move a HanoiTower of size (i-1)
// from 0 to 2 (stick id)
std::size_t size_previous = moves.size();
moves.push_back(std::make_pair(0, 2));
std::copy(moves.begin(), std::next(moves.begin(), size_previous), std::back_inserter(moves));
std::transform(moves.begin(), std::next(moves.begin(), size_previous), moves.begin(), ApplyMapping<0,2,1>()); // we move a tower of size (i-1) from stick #0 to stick #1
std::transform(std::next(moves.begin(), size_previous+1), moves.end(), std::next(moves.begin(), size_previous+1), ApplyMapping<1,0,2>()); // we move a tower of size (i-1) from stick #1 to stick #2
}
for (auto const& move : moves)
{
tower.move(move.second, move.first);
}
}<|fim▁end|> | void hanoi(IHanoi& tower)
{
if (tower.height_of(0) == 0) |
<|file_name|>util_test.py<|end_file_name|><|fim▁begin|>#-*- coding: utf-8 -*-
"""
@author: Rinze de Laat
Copyright © 2013 Rinze de Laat, Delmic
This file is part of Odemis.
Odemis is free software: you can redistribute it and/or modify it under the terms
of the GNU General Public License version 2 as published by the Free Software
Foundation.
Odemis is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
Odemis. If not, see http://www.gnu.org/licenses/.
"""
from __future__ import division
from functools import partial
import gc
import logging
from odemis import util
from odemis.util import limit_invocation, TimeoutError
from odemis.util import timeout
import time
import unittest
import weakref
logging.getLogger().setLevel(logging.DEBUG)
class TestLimitInvocation(unittest.TestCase):
def test_not_too_often(self):
self.count = 0
now = time.time()
end = now + 1.1 # a bit more than 1 s
while time.time() < end:
self.count_max_1s()
time.sleep(0.01)
self.assertLessEqual(self.count, 2, "method was called more than twice in 1 second: %d" % self.count)
time.sleep(2) # wait for the last potential calls to happen
self.assertLessEqual(self.count, 3, "method was called more than three times in 2 seconds: %d" % self.count)
@limit_invocation(1)
def count_max_1s(self):
# never called more than once per second
self.count += 1
time.sleep(0.2)
def test_gc(self):
u = Useless()
wku = weakref.ref(u)
now = time.time()
end = now + 1.1 # a bit more than 1 s
while time.time() < end:
u.doit(time.time(), b=3)
time.sleep(0.01)
# Check the object u has nothing preventing it from being dereferenced
del u
time.sleep(1) # wait for the last potential calls to happen
self.assertIsNone(wku())
class Useless(object):
"""
Independent class for testing limit_invocation decorator
"""
def __del__(self):
print "Useless %r is gone" % self
@limit_invocation(0.1)
def doit(self, a, b=None):
print "doing it %s, %s" % (a, b)
class TestTimeout(unittest.TestCase):
@timeout(1.2)
def test_notimeout(self):
time.sleep(1)
def test_timeout(self):
self.assertRaises(TimeoutError, self.toolong)
@timeout(0.5)<|fim▁hole|> def toolong(self):
# will always timeout
time.sleep(1)
class SortedAccordingTestCase(unittest.TestCase):
def test_simple(self):
in_exp = ((([1, 2, 3], [3, 2, 1]), [3, 2, 1]),
(([1, 2, 3], [4, 2]), [2, 1, 3]),
(([], [4, 2]), []),
((["b", "a"], []), ["b", "a"]),
)
for i, eo in in_exp:
o = util.sorted_according_to(*i)
self.assertEqual(o, eo, "Failed to get correct output for %s" % (i,))
class AlmostEqualTestCase(unittest.TestCase):
def test_simple(self):
in_exp = {(0., 0): True,
(-5, -5.): True,
(1., 1. - 1e-9): True,
(1., 1. - 1e-3): False,
(1., 1. + 1e-3): False,
(-5e-8, -5e-8 + 1e-19): True,
(5e18, 5e18 + 1): True,
}
for i, eo in in_exp.items():
o = util.almost_equal(*i)
self.assertEqual(o, eo, "Failed to get correct output for %s" % (i,))
# Bounding box clipping test data generation
def tp(trans, ps):
""" Translate points ps using trans """
r = []
i = 0
for p in ps:
r.append(p + trans[i])
i = (i + 1) % len(trans)
return tuple(r)
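# e.g. tp((1, 2), (0, 0, 4, 4)) == (1, 2, 5, 6): the translation pair is
# cycled over the coordinates, shifting x values by 1 and y values by 2.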
# First we define bounding boxes at different locations
bounding_boxes = [(-2, -2, 0, 0),
(-1, -1, 1, 1),
(0, 0, 2, 2),
(2, 2, 4, 4)]
# From this, we generate boxes that are situated all around these
# bounding boxes, but that do not touch or overlap them.
def relative_boxes(bb):
t_left = [(-3, i) for i in range(-3, 4)]
to_the_left = [tp(t, bb) for t in t_left]
t_top = [(i, -3) for i in range(-3, 4)]
to_the_top = [tp(t, bb) for t in t_top]
t_right = [(3, i) for i in range(-3, 4)]
to_the_right = [tp(t, bb) for t in t_right]
t_bottom = [(i, 3) for i in range(-3, 4)]
to_the_bottom = [tp(t, bb) for t in t_bottom]
outside_boxes = to_the_left + to_the_top + to_the_right + to_the_bottom
# Selection boxes that touch the outside of the bounding box
touch_left = [tp((1, 0), b) for b in to_the_left[1:-1]]
touch_top = [tp((0, 1), b) for b in to_the_top[1:-1]]
touch_right = [tp((-1, 0), b) for b in to_the_right[1:-1]]
touch_bottom = [tp((0, -1), b) for b in to_the_bottom[1:-1]]
touching_boxes = touch_left + touch_top + touch_right + touch_bottom
# Partial overlapping boxes
overlap_left = [tp((1, 0), b) for b in touch_left[1:-1]]
overlap_top = [tp((0, 1), b) for b in touch_top[1:-1]]
overlap_right = [tp((-1, 0), b) for b in touch_right[1:-1]]
overlap_bottom = [tp((0, -1), b) for b in touch_bottom[1:-1]]
overlap_boxes = overlap_left + overlap_top + overlap_right + overlap_bottom
return outside_boxes, touching_boxes, overlap_boxes
class CanvasTestCase(unittest.TestCase):
def test_clipping(self):
tmp = "{}: {} - {} -> {}"
for bb in bounding_boxes:
outside, touching, overlap = relative_boxes(bb)
for b in outside:
r = util.rect_intersect(b, bb)
msg = tmp.format("outside", b, bb, r)
self.assertIsNone(r, msg)
for b in touching:
r = util.rect_intersect(b, bb)
msg = tmp.format("touching", b, bb, r)
self.assertIsNone(r, msg)
for b in overlap:
r = util.rect_intersect(b, bb)
msg = tmp.format("overlap", b, bb, r)
self.assertIsNotNone(r, msg)
# 'Manual' checks
if bb == (-1, -1, 1, 1):
if b[:2] == (-2, -2):
self.assertEqual(r, (-1, -1, 0, 0), msg)
elif b[:2] == (0, -1):
self.assertEqual(r, (0, -1, 1, 1), msg)
elif b[:2] == (0, 0):
self.assertEqual(r, (0, 0, 1, 1), msg)
# full and exact overlap
b = bb
r = util.rect_intersect(b, bb)
self.assertEqual(r, bb)
# inner overlap
b = (bb[0] + 1, bb[1] + 1, bb[2], bb[3])
r = util.rect_intersect(b, bb)
self.assertEqual(r, b)
# overflowing overlap
b = (bb[0] - 1, bb[1] - 1, bb[2] + 1, bb[2] + 1)
r = util.rect_intersect(b, bb)
self.assertEqual(r, bb)
def test_line_clipping(self):
bounding_box = (0, 4, 4, 0)
clip = partial(util.clip_line, *bounding_box)
# Test lines within bounding box, i.e. no clipping should occur
internal = [
(0, 0, 0, 0),
(2, 2, 2, 2),
(0, 0, 4, 4),
(4, 4, 0, 0),
(0, 2, 2, 0),
(2, 0, 0, 2),
]
for line in internal:
self.assertEqual(line, clip(*line))
# Test clipping for lines originating in the center of the bounding box and ending outside
# of it.
inner_to_outer = [
((2, 2, 2, 6), (2, 2, 2, 4)),
((2, 2, 6, 2), (2, 2, 4, 2)),
((2, 2, 2, -2), (2, 2, 2, 0)),
((2, 2, -2, 2), (2, 2, 0, 2)),
((2, 2, 6, -2), (2, 2, 4, 0)),
((2, 2, -2, -2), (2, 2, 0, 0)),
((2, 2, -2, -2), (2, 2, 0, 0)),
]
for orig, clipped in inner_to_outer:
self.assertEqual(clipped, clip(*orig))
outer_to_inner = [
((2, 6, 2, 2), (2, 4, 2, 2)),
((6, 2, 2, 2), (4, 2, 2, 2)),
((2, -2, 2, 2), (2, 0, 2, 2)),
((-2, 2, 2, 2), (0, 2, 2, 2)),
((6, -2, 2, 2), (4, 0, 2, 2)),
((-2, -2, 2, 2), (0, 0, 2, 2)),
((-2, -2, 2, 2), (0, 0, 2, 2)),
]
for orig, clipped in outer_to_inner:
self.assertEqual(clipped, clip(*orig))
if __name__ == "__main__":
unittest.main()<|fim▁end|> | |
<|file_name|>vaspdata.py<|end_file_name|><|fim▁begin|>"""
Classes::
VASPData -- A collection of functions that wrap bash code to extract
                data from VASP output into manageable .dat (.txt) files.
"""
import numpy as np<|fim▁hole|>from subprocess import call, check_output
from ast import literal_eval
class VASPData(object):
"""
A collection of functions that wrap bash code to extract
data from VASP output into managable .dat (.txt) files.
Variables::
name -- A string containing the path to the
VASP data.
Funtions::
extract_symops_trans -- Get symmetry operations and translations
from OUTCAR -> symops_trans.dat.
extract_kpts_eigenvals -- Get k-points, weights, and eigenvalues
from EIGENVAL -> kpts_eigenvals.dat.
extract_kmax -- Get kmax from KPOINTS -> kmax.dat (details about
what kmax is are given in readdata.py).
"""
def __init__(self, name_of_data_directory, kpts_eigenvals=True,
symops_trans=True, kmax=True):
"""
Arguments::
name_of_data_directory -- See Variables::name.
Keyword Arguments::
kpts_eigenvals, symops_trans, kmax -- All are booleans that
specify if that bit of data should be extracted from the
VASP output files. One may use False if the corresponding
.dat file already exists or is handmade. Default is True for
all three.
"""
self.name = name_of_data_directory
if kpts_eigenvals:
self.extract_kpts_eigenvals()
if symops_trans:
self.extract_symops_trans()
if kmax:
self.extract_kmax()
def extract_symops_trans(self):
"""
Use some bash code to look inside OUTCAR, grab the
symmetry operators and translations, and then write them to a
file called symops_trans.dat. File is written to the same folder
the OUTCAR is in.
"""
name = self.name
call("grep -A 4 -E 'isymop' " + name + "/OUTCAR | cut -c 11-50 > " +
name + "/symops_trans.dat; echo '' >> " + name +
"/symops_trans.dat", shell=True)
def extract_kpts_eigenvals(self):
""""
Use some bash code to look inside EIGENVAL and grab the
k-points, weights, and eigenvalues associated with each band at
each k-point. Write them to a file called kpts_eigenvals.dat.
File is written to the same folder the EIGENVAL is in.
"""
name = self.name
length = check_output('less ' + name + '/EIGENVAL | wc -l', shell=True)
num = str([int(s) for s in length.split() if s.isdigit()][0] - 7)
call('tail -n' + num + ' ' + name +
'/EIGENVAL | cut -c 1-60 > ' + name + '/kpts_eigenvals.dat',
shell=True)
def extract_kmax(self):
"""
Look inside KPOINTS and grab the number of kpoints used in
        one direction. If the grid is not cubic, e.g. 12 12 5, it will
take the smallest. Also assumes the KPOINTS has this format:
nxmxp! comment line
0
Monkhorst
12 12 12
0 0 0
at least as far as what line the 12 12 12 is on. To be concrete
the only requirement is that the grid is specified on
the fourth line. If one wishes to use a different format for the
KPOINTS file they can set the kmax bool to False and generate
their own kmax.dat in the same directory as the VASP data to be
used by readdata.py. GRID SIZE ON FOURTH LINE.
"""
name = self.name
with open(name+'/KPOINTS', 'r') as inf:
line = [literal_eval(x) for x in
inf.readlines()[3].strip().split()]
k = min(line)
kmax = np.ceil(k/(2*np.sqrt(3)))
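        # Worked example: a 12x12x12 grid gives min(line) = 12, so
        # kmax = ceil(12 / (2 * sqrt(3))) = ceil(3.46...) = 4.0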
with open(name+'/kmax.dat', 'w') as outf:
outf.write(str(kmax))<|fim▁end|> | |
<|file_name|>feature.py<|end_file_name|><|fim▁begin|>#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Python package for feature in MLlib.
"""
import sys
import warnings
from py4j.protocol import Py4JJavaError
from pyspark import since
from pyspark.rdd import RDD
from pyspark.mllib.common import callMLlibFunc, JavaModelWrapper
from pyspark.mllib.linalg import Vectors, _convert_to_vector
from pyspark.mllib.util import JavaLoader, JavaSaveable
__all__ = ['Normalizer', 'StandardScalerModel', 'StandardScaler',
'HashingTF', 'IDFModel', 'IDF', 'Word2Vec', 'Word2VecModel',
'ChiSqSelector', 'ChiSqSelectorModel', 'ElementwiseProduct']
class VectorTransformer(object):
"""
Base class for transformation of a vector or RDD of vector
"""
def transform(self, vector):
"""
Applies transformation on a vector.
Parameters
----------
vector : :py:class:`pyspark.mllib.linalg.Vector` or :py:class:`pyspark.RDD`
vector or convertible or RDD to be transformed.
"""
raise NotImplementedError
class Normalizer(VectorTransformer):
r"""
Normalizes samples individually to unit L\ :sup:`p`\ norm
For any 1 <= `p` < float('inf'), normalizes samples using
sum(abs(vector) :sup:`p`) :sup:`(1/p)` as norm.
For `p` = float('inf'), max(abs(vector)) will be used as norm for
normalization.
.. versionadded:: 1.2.0
Parameters
----------
p : float, optional
Normalization in L^p^ space, p = 2 by default.
Examples
--------
>>> from pyspark.mllib.linalg import Vectors
>>> v = Vectors.dense(range(3))
>>> nor = Normalizer(1)
>>> nor.transform(v)
DenseVector([0.0, 0.3333, 0.6667])
>>> rdd = sc.parallelize([v])
>>> nor.transform(rdd).collect()
[DenseVector([0.0, 0.3333, 0.6667])]
>>> nor2 = Normalizer(float("inf"))
>>> nor2.transform(v)
DenseVector([0.0, 0.5, 1.0])
"""
def __init__(self, p=2.0):
        assert p >= 1.0, "p should be greater than or equal to 1.0"
self.p = float(p)
def transform(self, vector):
"""
Applies unit length normalization on a vector.
.. versionadded:: 1.2.0
Parameters
----------
vector : :py:class:`pyspark.mllib.linalg.Vector` or :py:class:`pyspark.RDD`
vector or RDD of vector to be normalized.
Returns
-------
:py:class:`pyspark.mllib.linalg.Vector` or :py:class:`pyspark.RDD`
normalized vector(s). If the norm of the input is zero, it
will return the input vector.
"""
if isinstance(vector, RDD):
vector = vector.map(_convert_to_vector)
else:
vector = _convert_to_vector(vector)
return callMLlibFunc("normalizeVector", self.p, vector)
class JavaVectorTransformer(JavaModelWrapper, VectorTransformer):
"""
Wrapper for the model in JVM
"""
def transform(self, vector):
"""
Applies transformation on a vector or an RDD[Vector].
Parameters
----------
vector : :py:class:`pyspark.mllib.linalg.Vector` or :py:class:`pyspark.RDD`
Input vector(s) to be transformed.
Notes
-----
In Python, transform cannot currently be used within
an RDD transformation or action.
Call transform directly on the RDD instead.
"""
if isinstance(vector, RDD):
vector = vector.map(_convert_to_vector)
else:
vector = _convert_to_vector(vector)
return self.call("transform", vector)
class StandardScalerModel(JavaVectorTransformer):
"""
Represents a StandardScaler model that can transform vectors.
.. versionadded:: 1.2.0
"""
def transform(self, vector):
"""
Applies standardization transformation on a vector.
.. versionadded:: 1.2.0
Parameters
----------
vector : :py:class:`pyspark.mllib.linalg.Vector` or :py:class:`pyspark.RDD`
Input vector(s) to be standardized.
Returns
-------
:py:class:`pyspark.mllib.linalg.Vector` or :py:class:`pyspark.RDD`
Standardized vector(s). If the variance of a column is
zero, it will return default `0.0` for the column with
zero variance.
Notes
-----
In Python, transform cannot currently be used within
an RDD transformation or action.
Call transform directly on the RDD instead.
"""
return JavaVectorTransformer.transform(self, vector)
@since('1.4.0')
def setWithMean(self, withMean):
"""
Setter of the boolean which decides
whether it uses mean or not
"""
self.call("setWithMean", withMean)
return self
@since('1.4.0')
def setWithStd(self, withStd):
"""
Setter of the boolean which decides
whether it uses std or not
"""
self.call("setWithStd", withStd)
return self
@property
@since('2.0.0')
def withStd(self):
"""
Returns if the model scales the data to unit standard deviation.
"""
return self.call("withStd")
@property
@since('2.0.0')
def withMean(self):
"""
Returns if the model centers the data before scaling.
"""
return self.call("withMean")
@property
@since('2.0.0')
def std(self):
"""
Return the column standard deviation values.
"""
return self.call("std")
@property
@since('2.0.0')
def mean(self):
"""
Return the column mean values.
"""
return self.call("mean")
class StandardScaler(object):
"""
Standardizes features by removing the mean and scaling to unit
variance using column summary statistics on the samples in the
training set.
.. versionadded:: 1.2.0
Parameters
----------
withMean : bool, optional
False by default. Centers the data with mean
before scaling. It will build a dense output, so take
care when applying to sparse input.
withStd : bool, optional
True by default. Scales the data to unit
standard deviation.
Examples
--------
>>> vs = [Vectors.dense([-2.0, 2.3, 0]), Vectors.dense([3.8, 0.0, 1.9])]
>>> dataset = sc.parallelize(vs)
>>> standardizer = StandardScaler(True, True)
>>> model = standardizer.fit(dataset)
>>> result = model.transform(dataset)
>>> for r in result.collect(): r
DenseVector([-0.7071, 0.7071, -0.7071])
DenseVector([0.7071, -0.7071, 0.7071])
>>> int(model.std[0])
4
>>> int(model.mean[0]*10)
9
>>> model.withStd
True
>>> model.withMean
True
"""
def __init__(self, withMean=False, withStd=True):
if not (withMean or withStd):
warnings.warn("Both withMean and withStd are false. The model does nothing.")
self.withMean = withMean
self.withStd = withStd
def fit(self, dataset):
"""
Computes the mean and variance and stores as a model to be used
for later scaling.
.. versionadded:: 1.2.0
Parameters
----------
dataset : :py:class:`pyspark.RDD`
The data used to compute the mean and variance
to build the transformation model.
Returns
-------
:py:class:`StandardScalerModel`
"""
dataset = dataset.map(_convert_to_vector)
jmodel = callMLlibFunc("fitStandardScaler", self.withMean, self.withStd, dataset)
return StandardScalerModel(jmodel)
class ChiSqSelectorModel(JavaVectorTransformer):
"""
Represents a Chi Squared selector model.
.. versionadded:: 1.4.0
"""
def transform(self, vector):
"""
Applies transformation on a vector.
.. versionadded:: 1.4.0
Examples
--------
vector : :py:class:`pyspark.mllib.linalg.Vector` or :py:class:`pyspark.RDD`
Input vector(s) to be transformed.
Returns
-------
:py:class:`pyspark.mllib.linalg.Vector` or :py:class:`pyspark.RDD`
transformed vector(s).
"""
return JavaVectorTransformer.transform(self, vector)
class ChiSqSelector(object):
"""
Creates a ChiSquared feature selector.
The selector supports different selection methods: `numTopFeatures`, `percentile`, `fpr`,
`fdr`, `fwe`.
* `numTopFeatures` chooses a fixed number of top features according to a chi-squared test.
* `percentile` is similar but chooses a fraction of all features
instead of a fixed number.
* `fpr` chooses all features whose p-values are below a threshold,
thus controlling the false positive rate of selection.
* `fdr` uses the `Benjamini-Hochberg procedure <https://en.wikipedia.org/wiki/
False_discovery_rate#Benjamini.E2.80.93Hochberg_procedure>`_
to choose all features whose false discovery rate is below a threshold.
* `fwe` chooses all features whose p-values are below a threshold. The threshold is scaled by
1/numFeatures, thus controlling the family-wise error rate of selection.
By default, the selection method is `numTopFeatures`, with the default number of top features
set to 50.
.. versionadded:: 1.4.0
Examples
--------
>>> from pyspark.mllib.linalg import SparseVector, DenseVector
>>> from pyspark.mllib.regression import LabeledPoint
>>> data = sc.parallelize([
... LabeledPoint(0.0, SparseVector(3, {0: 8.0, 1: 7.0})),
... LabeledPoint(1.0, SparseVector(3, {1: 9.0, 2: 6.0})),
... LabeledPoint(1.0, [0.0, 9.0, 8.0]),
... LabeledPoint(2.0, [7.0, 9.0, 5.0]),
... LabeledPoint(2.0, [8.0, 7.0, 3.0])
... ])
>>> model = ChiSqSelector(numTopFeatures=1).fit(data)
>>> model.transform(SparseVector(3, {1: 9.0, 2: 6.0}))
SparseVector(1, {})
>>> model.transform(DenseVector([7.0, 9.0, 5.0]))
DenseVector([7.0])
>>> model = ChiSqSelector(selectorType="fpr", fpr=0.2).fit(data)
>>> model.transform(SparseVector(3, {1: 9.0, 2: 6.0}))
SparseVector(1, {})
>>> model.transform(DenseVector([7.0, 9.0, 5.0]))
DenseVector([7.0])
>>> model = ChiSqSelector(selectorType="percentile", percentile=0.34).fit(data)
>>> model.transform(DenseVector([7.0, 9.0, 5.0]))
DenseVector([7.0])
"""
def __init__(self, numTopFeatures=50, selectorType="numTopFeatures", percentile=0.1, fpr=0.05,
fdr=0.05, fwe=0.05):
self.numTopFeatures = numTopFeatures
self.selectorType = selectorType
self.percentile = percentile
self.fpr = fpr
self.fdr = fdr
self.fwe = fwe
@since('2.1.0')
def setNumTopFeatures(self, numTopFeatures):
"""
set numTopFeature for feature selection by number of top features.
Only applicable when selectorType = "numTopFeatures".
"""
self.numTopFeatures = int(numTopFeatures)
return self
@since('2.1.0')
def setPercentile(self, percentile):
"""
set percentile [0.0, 1.0] for feature selection by percentile.
Only applicable when selectorType = "percentile".
"""
self.percentile = float(percentile)
return self
@since('2.1.0')
def setFpr(self, fpr):
"""
set FPR [0.0, 1.0] for feature selection by FPR.
Only applicable when selectorType = "fpr".
"""
self.fpr = float(fpr)
return self
@since('2.2.0')
def setFdr(self, fdr):
"""
set FDR [0.0, 1.0] for feature selection by FDR.
Only applicable when selectorType = "fdr".
"""
self.fdr = float(fdr)
return self
@since('2.2.0')
def setFwe(self, fwe):
"""
set FWE [0.0, 1.0] for feature selection by FWE.
Only applicable when selectorType = "fwe".
"""
self.fwe = float(fwe)
return self
@since('2.1.0')
def setSelectorType(self, selectorType):
"""
set the selector type of the ChisqSelector.
Supported options: "numTopFeatures" (default), "percentile", "fpr", "fdr", "fwe".
"""
self.selectorType = str(selectorType)
return self
def fit(self, data):
"""
Returns a ChiSquared feature selector.
.. versionadded:: 1.4.0
Parameters
----------
data : :py:class:`pyspark.RDD` of :py:class:`pyspark.mllib.regression.LabeledPoint`
containing the labeled dataset with categorical features.
Real-valued features will be treated as categorical for each
distinct value. Apply feature discretizer before using this function.
"""
jmodel = callMLlibFunc("fitChiSqSelector", self.selectorType, self.numTopFeatures,
self.percentile, self.fpr, self.fdr, self.fwe, data)
return ChiSqSelectorModel(jmodel)
class PCAModel(JavaVectorTransformer):
"""
Model fitted by [[PCA]] that can project vectors to a low-dimensional space using PCA.
.. versionadded:: 1.5.0
"""
class PCA(object):
"""
A feature transformer that projects vectors to a low-dimensional space using PCA.
.. versionadded:: 1.5.0
Examples
--------
>>> data = [Vectors.sparse(5, [(1, 1.0), (3, 7.0)]),
... Vectors.dense([2.0, 0.0, 3.0, 4.0, 5.0]),
... Vectors.dense([4.0, 0.0, 0.0, 6.0, 7.0])]
>>> model = PCA(2).fit(sc.parallelize(data))
>>> pcArray = model.transform(Vectors.sparse(5, [(1, 1.0), (3, 7.0)])).toArray()
>>> pcArray[0]
1.648...
>>> pcArray[1]
-4.013...
"""
def __init__(self, k):
"""
Parameters
----------
k : int
number of principal components.
"""
self.k = int(k)
def fit(self, data):
"""
Computes a [[PCAModel]] that contains the principal components of the input vectors.
.. versionadded:: 1.5.0
Parameters
----------
data : :py:class:`pyspark.RDD`
source vectors
"""
jmodel = callMLlibFunc("fitPCA", self.k, data)
return PCAModel(jmodel)
class HashingTF(object):
"""
Maps a sequence of terms to their term frequencies using the hashing
trick.
.. versionadded:: 1.2.0
Parameters
----------
numFeatures : int, optional
number of features (default: 2^20)
Notes
-----
The terms must be hashable (can not be dict/set/list...).
Examples
--------
>>> htf = HashingTF(100)
>>> doc = "a a b b c d".split(" ")
>>> htf.transform(doc)
SparseVector(100, {...})
"""
def __init__(self, numFeatures=1 << 20):
self.numFeatures = numFeatures
self.binary = False
@since("2.0.0")
def setBinary(self, value):
"""
If True, term frequency vector will be binary such that non-zero
term counts will be set to 1
(default: False)
"""
self.binary = value
return self
@since('1.2.0')
def indexOf(self, term):
""" Returns the index of the input term. """
return hash(term) % self.numFeatures
@since('1.2.0')
def transform(self, document):
"""
Transforms the input document (list of terms) to term frequency
vectors, or transform the RDD of document to RDD of term
frequency vectors.
"""
if isinstance(document, RDD):
return document.map(self.transform)
freq = {}
for term in document:
i = self.indexOf(term)
freq[i] = 1.0 if self.binary else freq.get(i, 0) + 1.0
return Vectors.sparse(self.numFeatures, freq.items())
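    # Working through the class doctest: "a a b b c d" hashes four distinct
    # terms into [0, 100) and counts occurrences, so the resulting
    # SparseVector has at most four non-zero entries (fewer only if two
    # terms collide).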
class IDFModel(JavaVectorTransformer):
"""
Represents an IDF model that can transform term frequency vectors.
.. versionadded:: 1.2.0
"""
def transform(self, x):
"""
Transforms term frequency (TF) vectors to TF-IDF vectors.
If `minDocFreq` was set for the IDF calculation,
the terms which occur in fewer than `minDocFreq`
documents will have an entry of 0.
.. versionadded:: 1.2.0
Parameters
----------
x : :py:class:`pyspark.mllib.linalg.Vector` or :py:class:`pyspark.RDD`
an RDD of term frequency vectors or a term frequency
vector
Returns
-------
:py:class:`pyspark.mllib.linalg.Vector` or :py:class:`pyspark.RDD`
an RDD of TF-IDF vectors or a TF-IDF vector
Notes
-----
In Python, transform cannot currently be used within
an RDD transformation or action.
Call transform directly on the RDD instead.
"""
return JavaVectorTransformer.transform(self, x)
@since('1.4.0')
def idf(self):
"""
Returns the current IDF vector.
"""
return self.call('idf')
@since('3.0.0')
def docFreq(self):
"""
Returns the document frequency.
"""
return self.call('docFreq')
@since('3.0.0')
def numDocs(self):
"""
Returns number of documents evaluated to compute idf
"""
return self.call('numDocs')
class IDF(object):
"""
Inverse document frequency (IDF).
The standard formulation is used: `idf = log((m + 1) / (d(t) + 1))`,
where `m` is the total number of documents and `d(t)` is the number
of documents that contain term `t`.
This implementation supports filtering out terms which do not appear
in a minimum number of documents (controlled by the variable
`minDocFreq`). For terms that are not in at least `minDocFreq`
documents, the IDF is found as 0, resulting in TF-IDFs of 0.
.. versionadded:: 1.2.0
Parameters
----------
minDocFreq : int
minimum of documents in which a term should appear for filtering
Examples
--------
>>> n = 4
>>> freqs = [Vectors.sparse(n, (1, 3), (1.0, 2.0)),
... Vectors.dense([0.0, 1.0, 2.0, 3.0]),
... Vectors.sparse(n, [1], [1.0])]
>>> data = sc.parallelize(freqs)
>>> idf = IDF()
>>> model = idf.fit(data)
>>> tfidf = model.transform(data)
>>> for r in tfidf.collect(): r
SparseVector(4, {1: 0.0, 3: 0.5754})
DenseVector([0.0, 0.0, 1.3863, 0.863])
SparseVector(4, {1: 0.0})
>>> model.transform(Vectors.dense([0.0, 1.0, 2.0, 3.0]))
DenseVector([0.0, 0.0, 1.3863, 0.863])
>>> model.transform([0.0, 1.0, 2.0, 3.0])
DenseVector([0.0, 0.0, 1.3863, 0.863])
>>> model.transform(Vectors.sparse(n, (1, 3), (1.0, 2.0)))
SparseVector(4, {1: 0.0, 3: 0.5754})
"""
def __init__(self, minDocFreq=0):
self.minDocFreq = minDocFreq
def fit(self, dataset):
"""
Computes the inverse document frequency.
.. versionadded:: 1.2.0
Parameters
----------
dataset : :py:class:`pyspark.RDD`
an RDD of term frequency vectors
"""
if not isinstance(dataset, RDD):
raise TypeError("dataset should be an RDD of term frequency vectors")
jmodel = callMLlibFunc("fitIDF", self.minDocFreq, dataset.map(_convert_to_vector))
return IDFModel(jmodel)
class Word2VecModel(JavaVectorTransformer, JavaSaveable, JavaLoader):
"""
class for Word2Vec model
"""
def transform(self, word):
"""
Transforms a word to its vector representation
.. versionadded:: 1.2.0
Parameters
----------
word : str
a word
Returns
-------
:py:class:`pyspark.mllib.linalg.Vector`
vector representation of word(s)
Notes
-----
Local use only
"""
try:
return self.call("transform", word)
except Py4JJavaError:
raise ValueError("%s not found" % word)
def findSynonyms(self, word, num):
"""
Find synonyms of a word
.. versionadded:: 1.2.0
Parameters
----------
word : str or :py:class:`pyspark.mllib.linalg.Vector`
a word or a vector representation of word
num : int
number of synonyms to find
Returns
-------
:py:class:`collections.abc.Iterable`
array of (word, cosineSimilarity)
Notes
-----
Local use only
"""
if not isinstance(word, str):
word = _convert_to_vector(word)
words, similarity = self.call("findSynonyms", word, num)
return zip(words, similarity)
@since('1.4.0')
def getVectors(self):
"""
Returns a map of words to their vector representations.
"""
return self.call("getVectors")
@classmethod
@since('1.5.0')
def load(cls, sc, path):
"""
Load a model from the given path.
"""
jmodel = sc._jvm.org.apache.spark.mllib.feature \
.Word2VecModel.load(sc._jsc.sc(), path)
model = sc._jvm.org.apache.spark.mllib.api.python.Word2VecModelWrapper(jmodel)
return Word2VecModel(model)
class Word2Vec(object):
"""Word2Vec creates vector representation of words in a text corpus.
The algorithm first constructs a vocabulary from the corpus
and then learns vector representation of words in the vocabulary.
The vector representation can be used as features in
natural language processing and machine learning algorithms.
We used skip-gram model in our implementation and hierarchical
softmax method to train the model. The variable names in the
implementation matches the original C implementation.
For original C implementation,
see https://code.google.com/p/word2vec/
For research papers, see
Efficient Estimation of Word Representations in Vector Space
and Distributed Representations of Words and Phrases and their
Compositionality.
.. versionadded:: 1.2.0
Examples
--------
>>> sentence = "a b " * 100 + "a c " * 10
>>> localDoc = [sentence, sentence]
>>> doc = sc.parallelize(localDoc).map(lambda line: line.split(" "))
>>> model = Word2Vec().setVectorSize(10).setSeed(42).fit(doc)
Querying for synonyms of a word will not return that word:
>>> syms = model.findSynonyms("a", 2)
>>> [s[0] for s in syms]
['b', 'c']
But querying for synonyms of a vector may return the word whose
representation is that vector:
>>> vec = model.transform("a")
>>> syms = model.findSynonyms(vec, 2)
>>> [s[0] for s in syms]
['a', 'b']
>>> import os, tempfile
>>> path = tempfile.mkdtemp()
>>> model.save(sc, path)
>>> sameModel = Word2VecModel.load(sc, path)
>>> model.transform("a") == sameModel.transform("a")
True
>>> syms = sameModel.findSynonyms("a", 2)
>>> [s[0] for s in syms]
['b', 'c']
>>> from shutil import rmtree
>>> try:
... rmtree(path)
... except OSError:
... pass
"""
def __init__(self):
"""
Construct Word2Vec instance
"""
self.vectorSize = 100
self.learningRate = 0.025
self.numPartitions = 1
self.numIterations = 1
self.seed = None
self.minCount = 5
self.windowSize = 5
@since('1.2.0')
def setVectorSize(self, vectorSize):
"""
Sets vector size (default: 100).
"""
self.vectorSize = vectorSize
return self
@since('1.2.0')
def setLearningRate(self, learningRate):
"""
Sets initial learning rate (default: 0.025).
"""
self.learningRate = learningRate
return self
@since('1.2.0')
def setNumPartitions(self, numPartitions):
"""
Sets number of partitions (default: 1). Use a small number for
accuracy.
"""
self.numPartitions = numPartitions
return self
@since('1.2.0')
def setNumIterations(self, numIterations):
"""
Sets number of iterations (default: 1), which should be smaller
than or equal to number of partitions.
"""
self.numIterations = numIterations
return self
@since('1.2.0')
def setSeed(self, seed):
"""
Sets random seed.<|fim▁hole|> self.seed = seed
return self
@since('1.4.0')
def setMinCount(self, minCount):
"""
Sets minCount, the minimum number of times a token must appear
to be included in the word2vec model's vocabulary (default: 5).
"""
self.minCount = minCount
return self
@since('2.0.0')
def setWindowSize(self, windowSize):
"""
Sets window size (default: 5).
"""
self.windowSize = windowSize
return self
def fit(self, data):
"""
Computes the vector representation of each word in vocabulary.
.. versionadded:: 1.2.0
Parameters
----------
data : :py:class:`pyspark.RDD`
training data. RDD of list of string
Returns
-------
:py:class:`Word2VecModel`
"""
if not isinstance(data, RDD):
raise TypeError("data should be an RDD of list of string")
jmodel = callMLlibFunc("trainWord2VecModel", data, int(self.vectorSize),
float(self.learningRate), int(self.numPartitions),
int(self.numIterations), self.seed,
int(self.minCount), int(self.windowSize))
return Word2VecModel(jmodel)
class ElementwiseProduct(VectorTransformer):
"""
Scales each column of the vector, with the supplied weight vector.
i.e., the elementwise product.
.. versionadded:: 1.5.0
Examples
--------
>>> weight = Vectors.dense([1.0, 2.0, 3.0])
>>> eprod = ElementwiseProduct(weight)
>>> a = Vectors.dense([2.0, 1.0, 3.0])
>>> eprod.transform(a)
DenseVector([2.0, 2.0, 9.0])
>>> b = Vectors.dense([9.0, 3.0, 4.0])
>>> rdd = sc.parallelize([a, b])
>>> eprod.transform(rdd).collect()
[DenseVector([2.0, 2.0, 9.0]), DenseVector([9.0, 6.0, 12.0])]
"""
def __init__(self, scalingVector):
self.scalingVector = _convert_to_vector(scalingVector)
@since('1.5.0')
def transform(self, vector):
"""
Computes the Hadamard product of the vector.
"""
if isinstance(vector, RDD):
vector = vector.map(_convert_to_vector)
else:
vector = _convert_to_vector(vector)
return callMLlibFunc("elementwiseProductVector", self.scalingVector, vector)
def _test():
import doctest
from pyspark.sql import SparkSession
globs = globals().copy()
spark = SparkSession.builder\
.master("local[4]")\
.appName("mllib.feature tests")\
.getOrCreate()
globs['sc'] = spark.sparkContext
(failure_count, test_count) = doctest.testmod(globs=globs, optionflags=doctest.ELLIPSIS)
spark.stop()
if failure_count:
sys.exit(-1)
if __name__ == "__main__":
sys.path.pop(0)
_test()<|fim▁end|> | """ |
<|file_name|>writeback.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Type resolution: the phase that finds all the types in the AST with
// unresolved type variables and replaces "ty_var" types with their
// substitutions.
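// Illustrative note (not from the original source): for `let mut v = Vec::new();
// v.push(1u8);`, type check only knows `v: Vec<?T>` through an inference
// variable; this pass later records the resolved `Vec<u8>` in the tcx tables.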
use self::ResolveReason::*;
use astconv::AstConv;
use check::FnCtxt;
use middle::def;
use middle::pat_util;
use middle::ty::{mod, Ty, MethodCall, MethodCallee};
use middle::ty_fold::{TypeFolder,TypeFoldable};
use middle::infer::{force_all, resolve_all, resolve_region};
use middle::infer::resolve_type;
use middle::infer;
use write_substs_to_tcx;
use write_ty_to_tcx;
use util::ppaux::Repr;
use std::cell::Cell;
use syntax::ast;
use syntax::codemap::{DUMMY_SP, Span};
use syntax::print::pprust::pat_to_string;
use syntax::visit;
use syntax::visit::Visitor;
///////////////////////////////////////////////////////////////////////////
// Entry point functions
pub fn resolve_type_vars_in_expr(fcx: &FnCtxt, e: &ast::Expr) {
assert_eq!(fcx.writeback_errors.get(), false);
let mut wbcx = WritebackCx::new(fcx);
wbcx.visit_expr(e);
wbcx.visit_upvar_borrow_map();
wbcx.visit_unboxed_closures();
wbcx.visit_object_cast_map();
}
pub fn resolve_type_vars_in_fn(fcx: &FnCtxt,
decl: &ast::FnDecl,
blk: &ast::Block) {
assert_eq!(fcx.writeback_errors.get(), false);
let mut wbcx = WritebackCx::new(fcx);
wbcx.visit_block(blk);
for arg in decl.inputs.iter() {
wbcx.visit_node_id(ResolvingPattern(arg.pat.span), arg.id);
wbcx.visit_pat(&*arg.pat);
// Privacy needs the type for the whole pattern, not just each binding
if !pat_util::pat_is_binding(&fcx.tcx().def_map, &*arg.pat) {
wbcx.visit_node_id(ResolvingPattern(arg.pat.span),
arg.pat.id);
}
}
wbcx.visit_upvar_borrow_map();
wbcx.visit_unboxed_closures();
wbcx.visit_object_cast_map();
}
///////////////////////////////////////////////////////////////////////////
// The writeback context. This visitor walks the AST, checking the
// fn-specific tables to find references to types or regions. It
// resolves those regions to remove inference variables and writes the
// final result back into the master tables in the tcx. Here and
// there, it applies a few ad-hoc checks that were not convenient to
// do elsewhere.
struct WritebackCx<'cx, 'tcx: 'cx> {
fcx: &'cx FnCtxt<'cx, 'tcx>,
}
impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
fn new(fcx: &'cx FnCtxt<'cx, 'tcx>) -> WritebackCx<'cx, 'tcx> {
WritebackCx { fcx: fcx }
}
fn tcx(&self) -> &'cx ty::ctxt<'tcx> {
self.fcx.tcx()
}
}
///////////////////////////////////////////////////////////////////////////
// Impl of Visitor for Resolver
//
// This is the master code which walks the AST. It delegates most of
// the heavy lifting to the generic visit and resolve functions
// below. In general, a function is made into a `visitor` if it must
// traffic in node-ids or update tables in the type context etc.
impl<'cx, 'tcx, 'v> Visitor<'v> for WritebackCx<'cx, 'tcx> {
fn visit_item(&mut self, _: &ast::Item) {
// Ignore items
}
fn visit_stmt(&mut self, s: &ast::Stmt) {
if self.fcx.writeback_errors.get() {
return;
}
self.visit_node_id(ResolvingExpr(s.span), ty::stmt_node_id(s));
visit::walk_stmt(self, s);
}
fn visit_expr(&mut self, e: &ast::Expr) {
if self.fcx.writeback_errors.get() {
return;
}
self.visit_node_id(ResolvingExpr(e.span), e.id);
self.visit_method_map_entry(ResolvingExpr(e.span),<|fim▁hole|> ast::ExprProc(ref decl, _) => {
for input in decl.inputs.iter() {
let _ = self.visit_node_id(ResolvingExpr(e.span),
input.id);
}
}
_ => {}
}
visit::walk_expr(self, e);
}
fn visit_block(&mut self, b: &ast::Block) {
if self.fcx.writeback_errors.get() {
return;
}
self.visit_node_id(ResolvingExpr(b.span), b.id);
visit::walk_block(self, b);
}
fn visit_pat(&mut self, p: &ast::Pat) {
if self.fcx.writeback_errors.get() {
return;
}
self.visit_node_id(ResolvingPattern(p.span), p.id);
debug!("Type for pattern binding {} (id {}) resolved to {}",
pat_to_string(p),
p.id,
ty::node_id_to_type(self.tcx(), p.id).repr(self.tcx()));
visit::walk_pat(self, p);
}
fn visit_local(&mut self, l: &ast::Local) {
if self.fcx.writeback_errors.get() {
return;
}
let var_ty = self.fcx.local_ty(l.span, l.id);
let var_ty = self.resolve(&var_ty, ResolvingLocal(l.span));
write_ty_to_tcx(self.tcx(), l.id, var_ty);
visit::walk_local(self, l);
}
fn visit_ty(&mut self, t: &ast::Ty) {
match t.node {
ast::TyFixedLengthVec(ref ty, ref count_expr) => {
self.visit_ty(&**ty);
write_ty_to_tcx(self.tcx(), count_expr.id, ty::mk_uint());
}
_ => visit::walk_ty(self, t)
}
}
}
impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
fn visit_upvar_borrow_map(&self) {
if self.fcx.writeback_errors.get() {
return;
}
for (upvar_id, upvar_borrow) in self.fcx.inh.upvar_borrow_map.borrow().iter() {
let r = upvar_borrow.region;
let r = self.resolve(&r, ResolvingUpvar(*upvar_id));
let new_upvar_borrow = ty::UpvarBorrow { kind: upvar_borrow.kind,
region: r };
debug!("Upvar borrow for {} resolved to {}",
upvar_id.repr(self.tcx()),
new_upvar_borrow.repr(self.tcx()));
self.fcx.tcx().upvar_borrow_map.borrow_mut().insert(
*upvar_id, new_upvar_borrow);
}
}
fn visit_unboxed_closures(&self) {
if self.fcx.writeback_errors.get() {
return
}
for (def_id, unboxed_closure) in self.fcx
.inh
.unboxed_closures
.borrow()
.iter() {
let closure_ty = self.resolve(&unboxed_closure.closure_type,
ResolvingUnboxedClosure(*def_id));
let unboxed_closure = ty::UnboxedClosure {
closure_type: closure_ty,
kind: unboxed_closure.kind,
};
self.fcx
.tcx()
.unboxed_closures
.borrow_mut()
.insert(*def_id, unboxed_closure);
}
}
fn visit_object_cast_map(&self) {
if self.fcx.writeback_errors.get() {
return
}
for (&node_id, trait_ref) in self.fcx
.inh
.object_cast_map
.borrow()
.iter()
{
let span = ty::expr_span(self.tcx(), node_id);
let reason = ResolvingExpr(span);
let closure_ty = self.resolve(trait_ref, reason);
self.tcx()
.object_cast_map
.borrow_mut()
.insert(node_id, closure_ty);
}
}
fn visit_node_id(&self, reason: ResolveReason, id: ast::NodeId) {
// Resolve any borrowings for the node with id `id`
self.visit_adjustments(reason, id);
// Resolve the type of the node with id `id`
let n_ty = self.fcx.node_ty(id);
let n_ty = self.resolve(&n_ty, reason);
write_ty_to_tcx(self.tcx(), id, n_ty);
debug!("Node {} has type {}", id, n_ty.repr(self.tcx()));
// Resolve any substitutions
self.fcx.opt_node_ty_substs(id, |item_substs| {
write_substs_to_tcx(self.tcx(), id,
self.resolve(item_substs, reason));
});
}
fn visit_adjustments(&self, reason: ResolveReason, id: ast::NodeId) {
match self.fcx.inh.adjustments.borrow_mut().remove(&id) {
None => {
debug!("No adjustments for node {}", id);
}
Some(adjustment) => {
let adj_object = ty::adjust_is_object(&adjustment);
let resolved_adjustment = match adjustment {
ty::AdjustAddEnv(store) => {
// FIXME(eddyb) #2190 Allow only statically resolved
// bare functions to coerce to a closure to avoid
// constructing (slower) indirect call wrappers.
match self.tcx().def_map.borrow().get(&id) {
Some(&def::DefFn(..)) |
Some(&def::DefStaticMethod(..)) |
Some(&def::DefVariant(..)) |
Some(&def::DefStruct(_)) => {
}
_ => {
span_err!(self.tcx().sess, reason.span(self.tcx()), E0100,
"cannot coerce non-statically resolved bare fn to closure");
span_help!(self.tcx().sess, reason.span(self.tcx()),
"consider embedding the function in a closure");
}
}
ty::AdjustAddEnv(self.resolve(&store, reason))
}
ty::AdjustDerefRef(adj) => {
for autoderef in range(0, adj.autoderefs) {
let method_call = MethodCall::autoderef(id, autoderef);
self.visit_method_map_entry(reason, method_call);
}
if adj_object {
let method_call = MethodCall::autoobject(id);
self.visit_method_map_entry(reason, method_call);
}
ty::AdjustDerefRef(ty::AutoDerefRef {
autoderefs: adj.autoderefs,
autoref: self.resolve(&adj.autoref, reason),
})
}
};
debug!("Adjustments for node {}: {}", id, resolved_adjustment);
self.tcx().adjustments.borrow_mut().insert(
id, resolved_adjustment);
}
}
}
fn visit_method_map_entry(&self,
reason: ResolveReason,
method_call: MethodCall) {
// Resolve any method map entry
match self.fcx.inh.method_map.borrow_mut().remove(&method_call) {
Some(method) => {
debug!("writeback::resolve_method_map_entry(call={}, entry={})",
method_call,
method.repr(self.tcx()));
let new_method = MethodCallee {
origin: self.resolve(&method.origin, reason),
ty: self.resolve(&method.ty, reason),
substs: self.resolve(&method.substs, reason),
};
self.tcx().method_map.borrow_mut().insert(
method_call,
new_method);
}
None => {}
}
}
fn resolve<T:ResolveIn<'tcx>>(&self, t: &T, reason: ResolveReason) -> T {
t.resolve_in(&mut Resolver::new(self.fcx, reason))
}
}
///////////////////////////////////////////////////////////////////////////
// Resolution reason.
enum ResolveReason {
ResolvingExpr(Span),
ResolvingLocal(Span),
ResolvingPattern(Span),
ResolvingUpvar(ty::UpvarId),
ResolvingUnboxedClosure(ast::DefId),
}
impl Copy for ResolveReason {}
impl ResolveReason {
fn span(&self, tcx: &ty::ctxt) -> Span {
match *self {
ResolvingExpr(s) => s,
ResolvingLocal(s) => s,
ResolvingPattern(s) => s,
ResolvingUpvar(upvar_id) => {
ty::expr_span(tcx, upvar_id.closure_expr_id)
}
ResolvingUnboxedClosure(did) => {
if did.krate == ast::LOCAL_CRATE {
ty::expr_span(tcx, did.node)
} else {
DUMMY_SP
}
}
}
}
}
///////////////////////////////////////////////////////////////////////////
// Convenience methods for resolving different kinds of things.
trait ResolveIn<'tcx> {
fn resolve_in<'a>(&self, resolver: &mut Resolver<'a, 'tcx>) -> Self;
}
impl<'tcx, T: TypeFoldable<'tcx>> ResolveIn<'tcx> for T {
fn resolve_in<'a>(&self, resolver: &mut Resolver<'a, 'tcx>) -> T {
self.fold_with(resolver)
}
}
///////////////////////////////////////////////////////////////////////////
// The Resolver. This is the type folding engine that detects
// unresolved types and so forth.
struct Resolver<'cx, 'tcx: 'cx> {
tcx: &'cx ty::ctxt<'tcx>,
infcx: &'cx infer::InferCtxt<'cx, 'tcx>,
writeback_errors: &'cx Cell<bool>,
reason: ResolveReason,
}
impl<'cx, 'tcx> Resolver<'cx, 'tcx> {
fn new(fcx: &'cx FnCtxt<'cx, 'tcx>,
reason: ResolveReason)
-> Resolver<'cx, 'tcx>
{
Resolver::from_infcx(fcx.infcx(), &fcx.writeback_errors, reason)
}
fn from_infcx(infcx: &'cx infer::InferCtxt<'cx, 'tcx>,
writeback_errors: &'cx Cell<bool>,
reason: ResolveReason)
-> Resolver<'cx, 'tcx>
{
Resolver { infcx: infcx,
tcx: infcx.tcx,
writeback_errors: writeback_errors,
reason: reason }
}
fn report_error(&self, e: infer::fixup_err) {
self.writeback_errors.set(true);
if !self.tcx.sess.has_errors() {
match self.reason {
ResolvingExpr(span) => {
span_err!(self.tcx.sess, span, E0101,
"cannot determine a type for this expression: {}",
infer::fixup_err_to_string(e));
}
ResolvingLocal(span) => {
span_err!(self.tcx.sess, span, E0102,
"cannot determine a type for this local variable: {}",
infer::fixup_err_to_string(e));
}
ResolvingPattern(span) => {
span_err!(self.tcx.sess, span, E0103,
"cannot determine a type for this pattern binding: {}",
infer::fixup_err_to_string(e));
}
ResolvingUpvar(upvar_id) => {
let span = self.reason.span(self.tcx);
span_err!(self.tcx.sess, span, E0104,
"cannot resolve lifetime for captured variable `{}`: {}",
ty::local_var_name_str(self.tcx, upvar_id.var_id).get().to_string(),
infer::fixup_err_to_string(e));
}
ResolvingUnboxedClosure(_) => {
let span = self.reason.span(self.tcx);
self.tcx.sess.span_err(span,
"cannot determine a type for this \
unboxed closure")
}
}
}
}
}
impl<'cx, 'tcx> TypeFolder<'tcx> for Resolver<'cx, 'tcx> {
fn tcx<'a>(&'a self) -> &'a ty::ctxt<'tcx> {
self.tcx
}
fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
if !ty::type_needs_infer(t) {
return t;
}
match resolve_type(self.infcx, None, t, resolve_all | force_all) {
Ok(t) => t,
Err(e) => {
self.report_error(e);
ty::mk_err()
}
}
}
fn fold_region(&mut self, r: ty::Region) -> ty::Region {
match resolve_region(self.infcx, r, resolve_all | force_all) {
Ok(r) => r,
Err(e) => {
self.report_error(e);
ty::ReStatic
}
}
}
}
///////////////////////////////////////////////////////////////////////////
// During type check, we store promises with the result of trait
// lookup rather than the actual results (because the results are not
// necessarily available immediately). These routines unwind the
// promises. It is expected that we will have already reported any
// errors that may be encountered, so if the promises store an error,
// a dummy result is returned.<|fim▁end|> | MethodCall::expr(e.id));
match e.node {
ast::ExprClosure(_, _, ref decl, _) |
<|file_name|>nevergrad.py<|end_file_name|><|fim▁begin|>import logging
import pickle
from typing import Dict, Optional, Union, List, Sequence
from ray.tune.result import DEFAULT_METRIC
from ray.tune.sample import Categorical, Domain, Float, Integer, LogUniform, \
Quantized
from ray.tune.suggest.suggestion import UNRESOLVED_SEARCH_SPACE, \
UNDEFINED_METRIC_MODE, UNDEFINED_SEARCH_SPACE
from ray.tune.suggest.variant_generator import parse_spec_vars
from ray.tune.utils import flatten_dict
from ray.tune.utils.util import unflatten_dict
try:
import nevergrad as ng
from nevergrad.optimization import Optimizer
from nevergrad.optimization.base import ConfiguredOptimizer
Parameter = ng.p.Parameter
except ImportError:
ng = None
Optimizer = None
ConfiguredOptimizer = None
Parameter = None
from ray.tune.suggest import Searcher
logger = logging.getLogger(__name__)
class NevergradSearch(Searcher):
"""Uses Nevergrad to optimize hyperparameters.
Nevergrad is an open source tool from Facebook for derivative free
optimization. More info can be found at:
https://github.com/facebookresearch/nevergrad.
You will need to install Nevergrad via the following command:
.. code-block:: bash
$ pip install nevergrad
Parameters:
optimizer (nevergrad.optimization.Optimizer|class): Optimizer provided
from Nevergrad. Alternatively, a ConfiguredOptimizer instance can be passed.
space (list|nevergrad.parameter.Parameter): Nevergrad parametrization
to be passed to optimizer on instantiation, or list of parameter
names if you passed an optimizer object.
metric (str): The training result objective value attribute. If None
but a mode was passed, the anonymous metric `_metric` will be used
per default.
mode (str): One of {min, max}. Determines whether objective is
minimizing or maximizing the metric attribute.
points_to_evaluate (list): Initial parameter suggestions to be run
first. This is for when you already have some good parameters
you want Nevergrad to run first to help the optimizer
make better suggestions for future parameters. Needs to be
a list of dicts containing the configurations.
use_early_stopped_trials: Deprecated.
max_concurrent: Deprecated.
Tune automatically converts search spaces to Nevergrad's format:
.. code-block:: python
import nevergrad as ng
config = {
"width": tune.uniform(0, 20),
"height": tune.uniform(-100, 100),
"activation": tune.choice(["relu", "tanh"])
}
current_best_params = [{
"width": 10,
"height": 0,
"activation": relu",
}]
ng_search = NevergradSearch(
optimizer=ng.optimizers.OnePlusOne,
metric="mean_loss",
mode="min",
points_to_evaluate=current_best_params)
run(my_trainable, config=config, search_alg=ng_search)
If you would like to pass the search space manually, the code would
look like this:
.. code-block:: python
import nevergrad as ng
space = ng.p.Dict(
width=ng.p.Scalar(lower=0, upper=20),
height=ng.p.Scalar(lower=-100, upper=100),
activation=ng.p.Choice(choices=["relu", "tanh"])
)
ng_search = NevergradSearch(
optimizer=ng.optimizers.OnePlusOne,
space=space,
metric="mean_loss",
mode="min")
run(my_trainable, search_alg=ng_search)
"""
def __init__(self,
optimizer: Union[None, Optimizer, ConfiguredOptimizer] = None,
space: Optional[Union[Dict, Parameter]] = None,
metric: Optional[str] = None,
mode: Optional[str] = None,
max_concurrent: Optional[int] = None,
points_to_evaluate: Optional[List[Dict]] = None,
**kwargs):
assert ng is not None, """Nevergrad must be installed!
You can install Nevergrad with the command:
`pip install nevergrad`."""
if mode:
assert mode in ["min", "max"], "`mode` must be 'min' or 'max'."
super(NevergradSearch, self).__init__(
metric=metric, mode=mode, max_concurrent=max_concurrent, **kwargs)
self._space = None
self._opt_factory = None
self._nevergrad_opt = None
<|fim▁hole|> elif not isinstance(points_to_evaluate, Sequence):
raise ValueError(
f"Invalid object type passed for `points_to_evaluate`: "
"{type(points_to_evaluate)}. "
f"Please pass a list of points (dictionaries) instead.")
else:
self._points_to_evaluate = list(points_to_evaluate)
if isinstance(space, dict) and space:
resolved_vars, domain_vars, grid_vars = parse_spec_vars(space)
if domain_vars or grid_vars:
logger.warning(
UNRESOLVED_SEARCH_SPACE.format(
par="space", cls=type(self)))
space = self.convert_search_space(space)
if isinstance(optimizer, Optimizer):
if space is not None and not isinstance(space, list):
raise ValueError(
"If you pass a configured optimizer to Nevergrad, either "
"pass a list of parameter names or None as the `space` "
"parameter.")
self._parameters = space
self._nevergrad_opt = optimizer
elif isinstance(optimizer, ConfiguredOptimizer):
self._opt_factory = optimizer
self._parameters = None
self._space = space
else:
raise ValueError(
"The `optimizer` argument passed to NevergradSearch must be "
"either an `Optimizer` or a `ConfiguredOptimizer`.")
self._live_trial_mapping = {}
self.max_concurrent = max_concurrent
if self._nevergrad_opt or self._space:
self._setup_nevergrad()
def _setup_nevergrad(self):
if self._opt_factory:
self._nevergrad_opt = self._opt_factory(self._space)
# nevergrad.tell internally minimizes, so "max" => -1
if self._mode == "max":
self._metric_op = -1.
elif self._mode == "min":
self._metric_op = 1.
if self._metric is None and self._mode:
# If only a mode was passed, use anonymous metric
self._metric = DEFAULT_METRIC
if hasattr(self._nevergrad_opt, "instrumentation"): # added in v0.2.0
if self._nevergrad_opt.instrumentation.kwargs:
if self._nevergrad_opt.instrumentation.args:
raise ValueError(
"Instrumented optimizers should use kwargs only")
if self._parameters is not None:
raise ValueError("Instrumented optimizers should provide "
"None as parameter_names")
else:
if self._parameters is None:
raise ValueError("Non-instrumented optimizers should have "
"a list of parameter_names")
if len(self._nevergrad_opt.instrumentation.args) != 1:
raise ValueError(
"Instrumented optimizers should use kwargs only")
if self._parameters is not None and \
self._nevergrad_opt.dimension != len(self._parameters):
raise ValueError("len(parameters_names) must match optimizer "
"dimension for non-instrumented optimizers")
def set_search_properties(self, metric: Optional[str], mode: Optional[str],
config: Dict) -> bool:
if self._nevergrad_opt or self._space:
return False
space = self.convert_search_space(config)
self._space = space
if metric:
self._metric = metric
if mode:
self._mode = mode
self._setup_nevergrad()
return True
def suggest(self, trial_id: str) -> Optional[Dict]:
if not self._nevergrad_opt:
raise RuntimeError(
UNDEFINED_SEARCH_SPACE.format(
cls=self.__class__.__name__, space="space"))
if not self._metric or not self._mode:
raise RuntimeError(
UNDEFINED_METRIC_MODE.format(
cls=self.__class__.__name__,
metric=self._metric,
mode=self._mode))
if self.max_concurrent:
if len(self._live_trial_mapping) >= self.max_concurrent:
return None
if self._points_to_evaluate is not None:
if len(self._points_to_evaluate) > 0:
point_to_evaluate = self._points_to_evaluate.pop(0)
self._nevergrad_opt.suggest(point_to_evaluate)
suggested_config = self._nevergrad_opt.ask()
self._live_trial_mapping[trial_id] = suggested_config
# in v0.2.0+, output of ask() is a Candidate,
# with fields args and kwargs
if not suggested_config.kwargs:
if self._parameters:
return unflatten_dict(
dict(zip(self._parameters, suggested_config.args[0])))
return unflatten_dict(suggested_config.value)
else:
return unflatten_dict(suggested_config.kwargs)
def on_trial_complete(self,
trial_id: str,
result: Optional[Dict] = None,
error: bool = False):
"""Notification for the completion of trial.
The result is internally negated when interacting with Nevergrad
so that Nevergrad Optimizers can "maximize" this value,
as it minimizes by default.
"""
if result:
self._process_result(trial_id, result)
self._live_trial_mapping.pop(trial_id)
def _process_result(self, trial_id: str, result: Dict):
ng_trial_info = self._live_trial_mapping[trial_id]
self._nevergrad_opt.tell(ng_trial_info,
self._metric_op * result[self._metric])
def save(self, checkpoint_path: str):
trials_object = (self._nevergrad_opt, self._parameters)
with open(checkpoint_path, "wb") as outputFile:
pickle.dump(trials_object, outputFile)
def restore(self, checkpoint_path: str):
with open(checkpoint_path, "rb") as inputFile:
trials_object = pickle.load(inputFile)
self._nevergrad_opt = trials_object[0]
self._parameters = trials_object[1]
@staticmethod
def convert_search_space(spec: Dict) -> Parameter:
spec = flatten_dict(spec, prevent_delimiter=True)
resolved_vars, domain_vars, grid_vars = parse_spec_vars(spec)
if grid_vars:
raise ValueError(
"Grid search parameters cannot be automatically converted "
"to a Nevergrad search space.")
def resolve_value(domain: Domain) -> Parameter:
sampler = domain.get_sampler()
if isinstance(sampler, Quantized):
logger.warning("Nevergrad does not support quantization. "
"Dropped quantization.")
sampler = sampler.get_sampler()
if isinstance(domain, Float):
if isinstance(sampler, LogUniform):
return ng.p.Log(
lower=domain.lower,
upper=domain.upper,
exponent=sampler.base)
return ng.p.Scalar(lower=domain.lower, upper=domain.upper)
if isinstance(domain, Integer):
return ng.p.Scalar(
lower=domain.lower,
upper=domain.upper).set_integer_casting()
if isinstance(domain, Categorical):
return ng.p.Choice(choices=domain.categories)
raise ValueError("SkOpt does not support parameters of type "
"`{}`".format(type(domain).__name__))
# Parameter name is e.g. "a/b/c" for nested dicts
space = {
"/".join(path): resolve_value(domain)
for path, domain in domain_vars
}
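# Illustrative sketch (assumption, not from the original source): a Tune spec
# such as
#   {"lr": tune.loguniform(1e-4, 1e-1), "act": tune.choice(["relu", "tanh"])}
# resolves here to roughly
#   ng.p.Dict(lr=ng.p.Log(lower=0.0001, upper=0.1, exponent=10),
#             act=ng.p.Choice(choices=["relu", "tanh"]))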
return ng.p.Dict(**space)<|fim▁end|> | if points_to_evaluate is None:
self._points_to_evaluate = None |
<|file_name|>moderator.py<|end_file_name|><|fim▁begin|># Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Controllers for the moderator page."""
from core.controllers import base
from core.domain import acl_decorators
from core.domain import activity_domain
from core.domain import activity_services
from core.domain import email_manager
from core.domain import summary_services
import feconf
class ModeratorPage(base.BaseHandler):
"""The moderator page."""
@acl_decorators.can_access_moderator_page
def get(self):
"""Handles GET requests."""
self.render_template('pages/moderator/moderator.html')
class FeaturedActivitiesHandler(base.BaseHandler):
"""The moderator page handler for featured activities."""
GET_HANDLER_ERROR_RETURN_TYPE = feconf.HANDLER_TYPE_JSON
@acl_decorators.can_access_moderator_page
def get(self):
"""Handles GET requests."""
self.render_json({
'featured_activity_references': [
activity_reference.to_dict() for activity_reference in
activity_services.get_featured_activity_references()
],
})
@acl_decorators.can_access_moderator_page
def post(self):
"""Handles POST requests."""
featured_activity_reference_dicts = self.payload.get(
'featured_activity_reference_dicts')
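# Illustrative payload shape (inferred from the keys consumed below; the
# concrete activity type value is an assumption):
#   {"featured_activity_reference_dicts":
#       [{"type": "exploration", "id": "<activity_id>"}]}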
featured_activity_references = [
activity_domain.ActivityReference(
reference_dict['type'], reference_dict['id'])
for reference_dict in featured_activity_reference_dicts]
try:
summary_services.require_activities_to_be_public(
featured_activity_references)
except Exception as e:
raise self.InvalidInputException(e)
activity_services.update_featured_activity_references(
featured_activity_references)
self.render_json({})
<|fim▁hole|>
GET_HANDLER_ERROR_RETURN_TYPE = feconf.HANDLER_TYPE_JSON
@acl_decorators.can_send_moderator_emails
def get(self, action):
"""Handles GET requests."""
self.render_json({
'draft_email_body': (
email_manager.get_draft_moderator_action_email(action)),
})<|fim▁end|> | class EmailDraftHandler(base.BaseHandler):
"""Provide default email templates for moderator emails.""" |
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>import { NlpjsNlu, NlpjsNluConfig } from './NlpjsNlu';
declare module '@jovotech/framework/dist/types/Extensible' {<|fim▁hole|> interface ExtensiblePluginConfig {
NlpjsNlu?: NlpjsNluConfig;
}
interface ExtensiblePlugins {
NlpjsNlu?: NlpjsNlu;
}
}
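// Illustrative usage sketch (assumption, not part of the original source):
// registering the plugin is what makes the augmented config/plugin types
// above apply, e.g.
//   app.use(new NlpjsNlu({ /* NlpjsNluConfig options */ }));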
export * from './NlpjsNlu';<|fim▁end|> | |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! # The Standard Library
//!
//! This module defines the standard library used when evaluating Robin expressions
//! Most of this module contains code for converting Robin functions to special-case JS constructs
mod stdlib_names;
use itertools::{join, Itertools};
use ast::Expression;
use error::Error;
use stdlib::stdlib_names::*;
use table::Table;
use to_javascript::ToJavaScript;
/// This type is a callback function which receives a function name, its arguments and the standard library
type Callback = fn(String, &mut [Box<Expression>], &mut Stdlib) -> Result<String, Error>;
/// This struct defines the contents of the standard library.
/// function_table is a table of in-built functions
/// variable_table is a table of the variables defined by the user
/// alias_map is a table of function alises
#[derive(Clone)]
pub struct Stdlib<'a> {
pub function_table: Table<'a, Callback>,
pub variable_table: Table<'a, String>,
pub alias_map: Table<'a, String>,
}
impl<'a> Stdlib<'a> {
pub fn new(
function_table: Table<'a, Callback>,
variable_table: Table<'a, String>,
alias_map: Table<'a, String>,
) -> Stdlib<'a> {
let mut stdlib = Stdlib {
function_table,
variable_table,
alias_map,
};
// The standard library must be manually populated
stdlib.populate();
stdlib
}
/// This function manually populates the pre-defined contents of the standard library
fn populate(&mut self) {
self.function_table.insert(String::from("if"), builtin_if);
self.function_table
.insert(String::from("return"), builtin_return);
self.function_table
.insert(String::from("const"), builtin_binding);
self.function_table
.insert(String::from("var"), builtin_binding);
self.function_table
.insert(String::from("let"), builtin_binding);
self.function_table
.insert(String::from("function"), builtin_function_definition);
self.function_table
.insert(String::from("quote"), builtin_quote);
self.function_table
.insert(String::from("lambda"), builtin_lambda);
self.function_table
.insert(String::from("js"), builtin_raw_js);
self.function_table.insert(String::from("nth"), builtin_nth);
self.function_table
.insert(String::from("defalias"), builtin_def_alias);
self.alias_map.insert(
String::from("map"),
String::from("Array.prototype.map.call"),
);
self.alias_map.insert(
String::from("forEach"),
String::from("Array.prototype.forEach.call"),
);
self.alias_map.insert(
String::from("filter"),
String::from("Array.prototype.filter.call"),
);
self.alias_map
.insert(String::from("define"), String::from("const"));
self.alias_map
.insert(String::from("defun"), String::from("function"));
self.alias_map
.insert(String::from("not"), String::from("!"));
self.alias_map
.insert(String::from("and"), String::from("&&"));
self.alias_map
.insert(String::from("or"), String::from("||"));
self.alias_map
.insert(String::from("="), String::from("==="));
// Insert each of the generic functions into the function table
for generic in GENERIC_FUNCTION {
self.function_table
.insert(generic.to_string(), builtin_generic_function);
}
// Insert each of the alias function name into the function table
for (builtin, _) in self.alias_map.container.iter() {
self.function_table
.insert(builtin.to_string(), builtin_alias);
}
// Insert each of the binary ops into the function table
for binop in MATHS_BINOPS {
self.function_table.insert(binop.to_string(), builtin_binop);
}
// Plus and minus are both binary and unary
// But I have deemed binary to have a higher precedence, so binary goes first
for logic in LOGIC_BINOPS {
self.function_table.insert(logic.to_string(), builtin_binop);
}
// Insert each of the unary ops into the function table
for unary in UNARY_OPS {
self.function_table.insert(unary.to_string(), builtin_unary);
}
}
}
/// # Robin to JS transformation (If Statement)
/// Note: '=>' is read as 'translated to'
///
/// (if) => Error: Too few arguments
/// (if true) => Error: Too few arguments
/// (if true (return 1)) => if (true) { return 1 }
/// (if true (return 1) (return 2)) => if (true) { return 1 } else { return 2 }
/// (if true (return 1) (return 3) (return 4)) => Error: too many arguments
///
fn builtin_if(
_name: String,
args: &mut [Box<Expression>],
stdlib: &mut Stdlib,
) -> Result<String, Error> {
match args.len() {
0 => Err(Error::too_few_arguments(String::from("if statement"))),
1 => Err(Error::too_few_arguments(String::from(
"if statement condition",
))),
2 => Ok(format!(
"if ({}) {{ {} }}",
args[0].eval(stdlib)?,
args[1].eval(stdlib)?
)),
3 => Ok(format!(
"if ({}) {{ {} }} else {{ {} }}",
args[0].eval(stdlib)?,
args[1].eval(stdlib)?,
args[2].eval(stdlib)?
)),
// TODO: Add error message here
_ => Err(Error::too_many_arguments(String::from(
"unknown number of arguments supplied to if statement",
))),
}
}
/// # Robin to JS transformation (Return statement)
/// Note: '=>' is read as 'translated to'
///
/// (return) => Error: Too few arguments
/// (return 100) => return 100
/// (return 100 200) => Error: too many arguments
///
fn builtin_return(
_name: String,
args: &mut [Box<Expression>],
stdlib: &mut Stdlib,
) -> Result<String, Error> {
match args.len() {
0 => Err(Error::too_few_arguments(String::from("return"))),
1 => Ok(format!("return {}", args[0].eval(stdlib)?)),
_ => Err(Error::too_many_arguments(String::from("return"))),
}
}
/// # Robin to JS transformation (Const / Var / Let) statement
/// Note: '=>' is read as 'translated to'
///
/// (const) => Error: Too few arguments
/// (const x) => Error: Too few arguments
/// (const x 100) => const x = 100
/// (const x 100 200) => Error: Too many arguments
///
fn builtin_binding(
name: String,
args: &mut [Box<Expression>],
stdlib: &mut Stdlib,
) -> Result<String, Error> {
// TODO: Add name to the error messages
match args.len() {
0 | 1 => Err(Error::too_few_arguments(String::from("binding"))),
2 => {
let (ident, value) = args.split_first_mut().unwrap();
// Check if the binding name is an identifier or not.
// If an identifier, evaluate correctly.
// If not an identifier than an Invalid Expression error is returned
match ident {
box Expression::Identifier(ref mut ident) => {
// TODO: Remove clones
stdlib
.variable_table
.insert(ident.value.clone(), ident.value.clone());
Ok(format!(
"{} {} = {}",
name,
ident.value.clone(),
value[0].eval(stdlib)?
))
}
_ => Err(Error::invalid_expression(String::from(
"non-identifier given to binding",
))),
}
}
_ => Err(Error::too_many_arguments(String::from("binding"))),
}
}
/// # Robin to JS transformation (Generic Function)
/// Note: '=>' is read as 'translated to'
///
/// (map (1 2 3 4) (lambda (n) (return n))) => Array.prototype.map.call([1, 2, 3, 4], (n) => { return n })
///
fn builtin_generic_function(
name: String,
args: &mut [Box<Expression>],
stdlib: &mut Stdlib,
) -> Result<String, Error> {
// Evaluate each of the results and join them with a comma
let args_fmt = join(
args.into_iter()
.map(|expr| expr.eval(stdlib))
.fold_results(vec![], |mut i, expr| {
i.push(expr);
i
})?,
",",
);
Ok(format!("{}({})", name, args_fmt))
}
/// # Robin to JS transformation (Function Alias)
/// A function which converts an alias to the built-in function
///
fn builtin_alias(
name: String,
args: &mut [Box<Expression>],
stdlib: &mut Stdlib,
) -> Result<String, Error> {
// TODO: Remove clone
// Get the function from the alias map
match stdlib.alias_map.clone().get_mut(&name.clone()) {
// If the alias is present, then call the actual function
Some(name) => {
stdlib.clone().function_table.get(name).unwrap()(name.to_string(), args, stdlib)
}
// (This should be rare)... but return an error
_ => Err(Error::undefined_func(name)),
}
}
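// Illustrative sketch (assumption, not from the original source): a call such
// as `(map (1 2 3 4) f)` is dispatched to `builtin_alias`, which resolves
// "map" to "Array.prototype.map.call" via the alias map and then delegates to
// the real builtin, yielding `Array.prototype.map.call([1,2,3,4],f)`.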
/// # Robin to JS transformation (Function Definition)
/// Note: '=>' is read as 'translated to'
///
/// (defun) => Error: Too few arguments
/// (defun example) => Error: Too few arguments
/// (defun example ()) => Error: Too few arguments
/// (defun example (n) (console.log n)) => Error: Too few arguments
/// (defun example (n) (console.log n) (console.log n)) => Error: Too many arguments
///
fn builtin_function_definition(
_name: String,
args: &mut [Box<Expression>],
stdlib: &mut Stdlib,
) -> Result<String, Error> {
match args.len() {
0 | 1 | 2 => Err(Error::too_few_arguments(String::from(
"function definition",
))),
3 => {
// First get the name of the function then the rest of the arguments
let (name, rest) = args.split_first_mut().unwrap();
// Then, get the functions argument then the body
let (args, body) = rest.split_first_mut().unwrap();
// TODO: Switch the matching conditions around for they are backwards!
// Match the function's arguments and the function name
match (args, name) {
// If a list and identifier are found
(box Expression::List(args_expr), box Expression::Identifier(func_name)) => {
// TODO: Remove clone
// Add the funciton to the parent variable table
stdlib
.variable_table
.insert(func_name.value.clone(), func_name.value.clone());
// Create a new child stdlib
// TODO: Remove clone
let mut stdlib = Stdlib::new(
Table::new(Some(Box::new(&stdlib.function_table))),
Table::new(Some(Box::new(&stdlib.variable_table))),
stdlib.alias_map.clone(),
);
// Convert the argument expressions to strings and join them with a comma
let args_fmt = join(
args_expr
.value
// TODO: Remove .clone
.clone()
.into_iter()
// TODO: Remove unwrap
.map(|mut expr| {
// TODO: Remove unwrap
let expr_name = expr.clone().to_string();
// TODO: Remove clones
stdlib.variable_table.insert(expr_name.clone(),
expr_name.clone());
expr.eval(&mut stdlib)
})
.fold_results(vec![], |mut i, expr| {
i.push(expr);
i
})?,
",",
);
Ok(format!(
"function {}({}){{ {}; }}",
func_name.value,
args_fmt,
body[0].eval(&mut stdlib)?
))
}
// If incorrect expressions are given, we raise an error
(_, _) => Err(Error::invalid_expression(String::from(
"non list given to function binding",
))),
}
}
_ => Err(Error::too_many_arguments(String::from(
"function definition",
))),
}
}
/// # Robin to JS transformation (Quote)
/// Note: '=>' is read as 'translated to'
/// (quote 100) => "[100]"
/// (quote (100)) => "[[100]]"
/// (quote (+ 100 1)) => "[101]"
///
fn builtin_quote(
_name: String,
args: &mut [Box<Expression>],
stdlib: &mut Stdlib,
) -> Result<String, Error> {
// Evaluate each of the arguments and join them with a comma
let args_fmt = join(
args
.into_iter()
// TODO: Remove clone
.map(|expr| expr.eval(stdlib))
.fold_results(vec![], |mut i, expr| {
i.push(expr);
i
})?,
",",
);
Ok(format!("\"[{}]\"", args_fmt))
}
/// # Robin to JS transformation (Lambda)
/// Note: '=>' is read as 'translated to'
/// (lambda) => Error: Too few arguments
/// (lambda ()) => Error: Too few arguments
/// (lambda (n) (+ n n)) => (n) => { n + n }
///
fn builtin_lambda(
_name: String,
args: &mut [Box<Expression>],
stdlib: &mut Stdlib,
) -> Result<String, Error> {
match args.len() {
0 | 1 => Err(Error::too_few_arguments(String::from("lambda"))),
_ => {
// TODO: Remove unwrap
let (args, exprs) = args.split_first_mut().unwrap();
// Create a new child stdlib
// TODO: Remove clone
let mut stdlib = Stdlib::new(
Table::new(Some(Box::new(&stdlib.function_table))),
Table::new(Some(Box::new(&stdlib.variable_table))),
stdlib.alias_map.clone(),
);
match args {
// The first argument must be a list
box Expression::List(list) => {
// Insert each argument within the list into the variable table
list.value.clone().into_iter().for_each(|expr| {
let expr_name = expr.to_string();
stdlib
.variable_table
.insert(expr_name.clone(), expr_name.clone());
});
// Convert each argument to a string a join them with a comma
let args_fmt = list.value
.clone()
.into_iter()
.map(|arg| arg.to_string())
.collect::<Vec<String>>()
.join(",");
// Evaluate each expression after the argument and join them with a comma
let exprs_fmt = join(
exprs
.into_iter()
.map(|expr| expr.eval(&mut stdlib))
.fold_results(vec![], |mut i, expr| {
i.push(expr);
i
})?,
",",
);
Ok(format!("({}) => {{ {} }}", args_fmt, exprs_fmt))
}
// If a non-list has been given
_ => Err(Error::invalid_expression(String::from(<|fim▁hole|> }
}
}
}
/// # Robin to JS transformation (Raw JS)
/// Note: '=>' is read as 'translated to'
/// (js) => Error: Too few arguments
/// (js "100") => eval("100")
/// (js "100" "200") => Error: Too many arguments
///
fn builtin_raw_js(
_name: String,
args: &mut [Box<Expression>],
stdlib: &mut Stdlib,
) -> Result<String, Error> {
match args.len() {
0 => Err(Error::too_few_arguments(String::from("raw javascript"))),
1 => Ok(format!("eval({})", args[0].eval(stdlib)?)),
_ => Err(Error::too_many_arguments(String::from("raw javascript"))),
}
}
/// # Robin to JS transformation (Nth)
/// Note: '=>' is read as 'translated to'
/// (nth) => Error: Too few arguments
/// (nth (1 2 3 4)) => Error: Too few arguments
/// (nth (1 2 3 4) 2) => [1, 2, 3, 4][2]
/// (nth (1 2 3 4) 2 4) => Error: Too many arguments
///
fn builtin_nth(
_name: String,
args: &mut [Box<Expression>],
stdlib: &mut Stdlib,
) -> Result<String, Error> {
match args.len() {
0 | 1 => Err(Error::too_few_arguments(String::from("nth"))),
2 => {
let (list, nth) = args.split_first_mut().unwrap();
Ok(format!("{}[{}]", list.eval(stdlib)?, nth[0].eval(stdlib)?))
}
_ => Err(Error::too_many_arguments(String::from("nth"))),
}
}
/// # Robin to JS transformation (Defalias)
/// Note: '=>' is read as 'translated to'
/// (defalias) => Error: Too few arguments
/// (defalias "Array.prototype.find.call") => Error: Too few arguments
/// (defalias "Array.prototype.find.call" "find") => "find" alises to "Array.prototype.find.call", no output
/// (defalias "Array.prototype.find.call" "find" "something") => Error: Too many arguments
///
fn builtin_def_alias(
_name: String,
args: &mut [Box<Expression>],
stdlib: &mut Stdlib,
) -> Result<String, Error> {
match args.len() {
0 | 1 => Err(Error::too_few_arguments(String::from("alias"))),
2 => {
// Get the function name being alised
let mut function_name = args[0].eval(stdlib)?.clone();
// Strip the quotes from the string
function_name.retain(|c| c != '"');
// Get the name of the alias
let mut alias = args[1].eval(stdlib)?.clone();
// Strip the quotes from the string
alias.retain(|c| c != '"');
// We insert the alias into the map
stdlib
.alias_map
.insert(alias.clone(), function_name.clone());
// We add the alias into the function
stdlib.function_table.insert(alias.clone(), builtin_alias);
// The function being alised needs to be added as a generic function
stdlib
.function_table
.insert(function_name.clone(), builtin_generic_function);
// The function doesn't actually produce any output!
Ok(String::from(""))
}
_ => Err(Error::too_many_arguments(String::from("alias"))),
}
}
/// # Robin to JS transformation (Binop)
/// Note: '=>' is read as 'translated to'
/// (+) => Error: Too few arguments
/// (+ 1) => +1, Note: builtin_unary is called
/// (+ 1 1) => 1+1
/// (+ 1 1 1) => 1+1+1
///
fn builtin_binop(
op: String,
args: &mut [Box<Expression>],
stdlib: &mut Stdlib,
) -> Result<String, Error> {
match args.len() {
0 => Err(Error::too_few_arguments(String::from("binary operation"))),
// Handle unary functions
1 => builtin_unary(op, args, stdlib),
2 => {
// This is messy _but_ it should make the match easier to understand
match (args[0].clone(), args[1].clone()) {
// Pre-calcuate if op an maths operation
(box Expression::Number(ref l), box Expression::Number(ref r))
// If the operator is in the MATHS_BINOPS array
if MATHS_BINOPS.contains(&&*op) =>
{
precalculate_numbers(op, l.value, r.value)
}
// If left and right aren't number literals, then manually format them
(_, _) => Ok(format!(
"{}{}{}",
args[0].eval(stdlib)?,
op,
args[1].eval(stdlib)?
)),
}
}
// TODO: Consider precalculating numbers for more than one binary operation
_ => {
// Evaluate each expression and join them with the operator used
let joined = join(
args.into_iter()
.map(|expr| expr.eval(stdlib))
.fold_results(vec![], |mut i, expr| {
i.push(expr);
i
})?,
&op,
);
Ok(joined)
}
}
}
/// # Robin to JS transformation (Unary)
/// Note: '=>' is read as 'translated to'
/// (+ 1) => +1
/// (typeof 100) => typeof 100
/// (. 100) => Error: invalid expression
///
fn builtin_unary(
op: String,
args: &mut [Box<Expression>],
stdlib: &mut Stdlib,
) -> Result<String, Error> {
match op.as_ref() {
"+" | "-" | "!" | "++" | "--" | "~" => Ok(format!("{}{}", op, args[0].eval(stdlib)?)),
// Unary operators which are words next an extra space.
"typeof" | "delete" => Ok(format!("{} {}", op, args[0].eval(stdlib)?)),
_ => Err(Error::invalid_expression(String::from("unary operator"))),
}
}
/// Pre-calculates a binary operation on two numeric literals during translation, folding it to a constant
fn precalculate_numbers(op: String, left: f64, right: f64) -> Result<String, Error> {
match op.as_ref() {
// Perform the calculations during translation
"+" => Ok(format!("{}", left + right)),
"-" => Ok(format!("{}", left - right)),
"*" => Ok(format!("{}", left * right)),
"/" if right != 0.0 => Ok(format!("{}", left / right)),
"%" => Ok(format!("{}", left % right)),
"/" => Err(Error::invalid_expression(String::from(
"Divide by zero encountered on numeric literal binary operation",
))),
// Assume divide by 0 here
_ => Err(Error::invalid_expression(String::from(
"Divide by zero encountered on numeric literal binary operation",
))),
}
}<|fim▁end|> | "non-list given to lambda expression",
))), |
<|file_name|>clean002.py<|end_file_name|><|fim▁begin|>self.description = "CleanMethod = KeepCurrent"
sp = pmpkg("dummy", "2.0-1")
self.addpkg2db("sync", sp)
sp = pmpkg("bar", "2.0-1")
self.addpkg2db("sync", sp)
sp = pmpkg("baz", "2.0-1")
self.addpkg2db("sync", sp)<|fim▁hole|>lp = pmpkg("bar", "2.0-1")
self.addpkg2db("local", lp)
self.args = "-Sc"
self.option['CleanMethod'] = ['KeepCurrent']
self.createlocalpkgs = True
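# Illustrative summary (assumption, not part of the original test): with
# CleanMethod = KeepCurrent, `-Sc` should keep cached packages whose version
# still matches a sync database entry (dummy-2.0-1, bar-2.0-1, baz-2.0-1) and
# drop stale ones (dummy-1.0-1), as asserted by the rules below.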
self.addrule("PACMAN_RETCODE=0")
self.addrule("CACHE_EXISTS=dummy|2.0-1")
self.addrule("!CACHE_EXISTS=dummy|1.0-1")
self.addrule("CACHE_EXISTS=bar|2.0-1")
self.addrule("CACHE_EXISTS=baz|2.0-1")<|fim▁end|> |
lp = pmpkg("dummy", "1.0-1")
self.addpkg2db("local", lp)
|
<|file_name|>normalize.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
def zero_mean_normalize_image_data(data, axis=(0, 1, 2)):
return np.divide(data - data.mean(axis=axis), data.std(axis=axis))
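# Illustrative sketch (assumption, not from the original source): with the
# default axis=(0, 1, 2) a channels-last volume is normalized per channel:
#   out = zero_mean_normalize_image_data(np.random.rand(8, 8, 8, 2))
#   # out[..., c] now has ~zero mean and ~unit std for each channel c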
def foreground_zero_mean_normalize_image_data(data, channel_dim=4, background_value=0, tolerance=1e-5):
data = np.copy(data)
if data.ndim == channel_dim or data.shape[channel_dim] == 1:
# only 1 channel, so the std and mean calculations are straightforward
foreground_mask = np.abs(data) > (background_value + tolerance)
foreground = data[foreground_mask]
mean = foreground.mean()
std = foreground.std()
data[foreground_mask] = np.divide(foreground - mean, std)
return data
else:
# std and mean need to be calculated for each channel in the 4th dimension
for channel in range(data.shape[channel_dim]):
channel_data = data[..., channel]
channel_mask = np.abs(channel_data) > (background_value + tolerance)
channel_foreground = channel_data[channel_mask]
channel_mean = channel_foreground.mean()
channel_std = channel_foreground.std()
channel_data[channel_mask] = np.divide(channel_foreground - channel_mean, channel_std)
data[..., channel] = channel_data
return data
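# Illustrative sketch (assumption, not from the original source): with the
# default channel_dim=4 this expects a 5-D channels-last array, e.g.
#   vol = np.random.rand(8, 8, 8, 1, 3)
#   out = foreground_zero_mean_normalize_image_data(vol)
# where each channel is standardized over its own nonzero foreground.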
def zero_floor_normalize_image_data(data, axis=(0, 1, 2), floor_percentile=1, floor=0):
floor_threshold = np.percentile(data, floor_percentile, axis=axis)
if data.ndim != len(axis):
floor_threshold_shape = np.asarray(floor_threshold.shape * data.ndim)
floor_threshold_shape[np.asarray(axis)] = 1
floor_threshold = floor_threshold.reshape(floor_threshold_shape)
background = data <= floor_threshold
data = np.ma.masked_array(data - floor_threshold, mask=background)
std = data.std(axis=axis)
if data.ndim != len(axis):
std = std.reshape(floor_threshold_shape)
return np.divide(data, std).filled(floor)
def zero_one_window(data, axis=(0, 1, 2), ceiling_percentile=99, floor_percentile=1, floor=0, ceiling=1,
channels_axis=None):
"""
:param data: Numpy ndarray.
:param axis: axes over which the percentiles are computed; the remaining axis is treated as the channel axis.
:param ceiling_percentile: Percentile value of the foreground to set to the ceiling.
:param floor_percentile: Percentile value of the image to set to the floor.
:param floor: New minimum value.
:param ceiling: New maximum value.
:param channels_axis: index of the channel axis; inferred from the non-reduced axis when None.
:return: copy of the data windowed to the range [floor, ceiling].
"""
data = np.copy(data)
if len(axis) != data.ndim:
floor_threshold = np.percentile(data, floor_percentile, axis=axis)
if channels_axis is None:
channels_axis = find_channel_axis(data.ndim, axis=axis)
data = np.moveaxis(data, channels_axis, 0)
for channel in range(data.shape[0]):
channel_data = data[channel]
# find the background
bg_mask = channel_data <= floor_threshold[channel]
# use background to find foreground
fg = channel_data[bg_mask == False]
# find threshold based on foreground percentile
ceiling_threshold = np.percentile(fg, ceiling_percentile)
# normalize the data for this channel
data[channel] = window_data(channel_data, floor_threshold=floor_threshold[channel],
ceiling_threshold=ceiling_threshold, floor=floor, ceiling=ceiling)
data = np.moveaxis(data, 0, channels_axis)
else:
floor_threshold = np.percentile(data, floor_percentile)
fg_mask = data > floor_threshold
fg = data[fg_mask]
ceiling_threshold = np.percentile(fg, ceiling_percentile)
data = window_data(data, floor_threshold=floor_threshold, ceiling_threshold=ceiling_threshold, floor=floor,
ceiling=ceiling)
return data
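# Illustrative sketch (assumption, not from the original source): windowing a
# channels-last volume so each channel lands in [0, 1]:
#   out = zero_one_window(np.random.rand(32, 32, 32, 2))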
def find_channel_axis(ndim, axis):
for i in range(ndim):
if i not in axis and (i - ndim) not in axis:
# I don't understand the second part of this if statement
# answer: it is checking ot make sure that the axis is not indexed in reverse (i.e. axis 3 might be
# indexed as -1)
channels_axis = i
return channels_axis
def static_windows(data, windows, floor=0, ceiling=1):
"""
Normalizes the data according to a set of predefined windows. This is helpful for CT normalization where the
units are static and radiologists often have a set of windowing parameters that the use that allow them to look at
different features in the image.
:param data: 3D numpy array.
:param windows:
:param floor: defaults to 0.
:param ceiling: defaults to 1.
:return: Array with data windows listed in the final dimension
"""
data = np.squeeze(data)
normalized_data = np.ones(data.shape + (len(windows),)) * floor
for i, (l, w) in enumerate(windows):
normalized_data[..., i] = radiology_style_windowing(data, l, w, floor=floor, ceiling=ceiling)
return normalized_data
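# Illustrative sketch (assumption, not from the original source): two common
# CT windows, brain (level 40, width 80) and lung (level -600, width 1500):
#   ct = np.random.uniform(-1024, 3000, (64, 64, 64))
#   out = static_windows(ct, windows=[(40, 80), (-600, 1500)])
#   out.shape   # -> (64, 64, 64, 2)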
def radiology_style_windowing(data, l, w, floor=0, ceiling=1):
upper = l + w/2
lower = l - w/2
return window_data(data, floor_threshold=lower, ceiling_threshold=upper, floor=floor, ceiling=ceiling)
def window_data(data, floor_threshold, ceiling_threshold, floor, ceiling):
data = (data - floor_threshold) / (ceiling_threshold - floor_threshold)
# set the data below the floor to equal the floor
data[data < floor] = floor
# set the data above the ceiling to equal the ceiling
data[data > ceiling] = ceiling
return data
def hist_match(source, template):
"""
Source: https://stackoverflow.com/a/33047048
Adjust the pixel values of a grayscale image such that its histogram
matches that of a target image
Arguments:
-----------
source: np.ndarray
Image to transform; the histogram is computed over the flattened
array
template: np.ndarray
Template image; can have different dimensions to source
Returns:
-----------
matched: np.ndarray
The transformed output image
"""
oldshape = source.shape
source = source.ravel()
template = template.ravel()
# get the set of unique pixel values and their corresponding indices and
# counts
s_values, bin_idx, s_counts = np.unique(source, return_inverse=True,
return_counts=True)
t_values, t_counts = np.unique(template, return_counts=True)
# take the cumsum of the counts and normalize by the number of pixels to
# get the empirical cumulative distribution functions for the source and
# template images (maps pixel value --> quantile)
s_quantiles = np.cumsum(s_counts).astype(np.float64)
s_quantiles /= s_quantiles[-1]
t_quantiles = np.cumsum(t_counts).astype(np.float64)
t_quantiles /= t_quantiles[-1]
# interpolate linearly to find the pixel values in the template image
# that correspond most closely to the quantiles in the source image
interp_t_values = np.interp(s_quantiles, t_quantiles, t_values)
return interp_t_values[bin_idx].reshape(oldshape)<|fim▁end|> | import numpy as np
|
<|file_name|>course_structure_spec.js<|end_file_name|><|fim▁begin|>define([
'jquery', 'underscore', 'common/js/spec_helpers/template_helpers', 'js/spec/edxnotes/helpers',
'js/edxnotes/collections/notes', 'js/edxnotes/collections/tabs',
'js/edxnotes/views/tabs/course_structure', 'js/spec/edxnotes/custom_matchers',
'jasmine-jquery'
], function(
$, _, TemplateHelpers, Helpers, NotesCollection, TabsCollection, CourseStructureView,
customMatchers
) {
'use strict';
describe('EdxNotes CourseStructureView', function() {
var notes = Helpers.getDefaultNotes(),
getView, getText;
getText = function (selector) {
return $(selector).map(function () {
return _.trim($(this).text());
}).toArray();
};
getView = function (collection, tabsCollection, options) {
var view;
options = _.defaults(options || {}, {
el: $('.wrapper-student-notes'),
collection: collection,
tabsCollection: tabsCollection,
});
view = new CourseStructureView(options);
tabsCollection.at(0).activate();
return view;
};
beforeEach(function () {
customMatchers(this);
loadFixtures('js/fixtures/edxnotes/edxnotes.html');
TemplateHelpers.installTemplates([
'templates/edxnotes/note-item', 'templates/edxnotes/tab-item'
]);
this.collection = new NotesCollection(notes);
this.tabsCollection = new TabsCollection();
});
it('displays a tab and content with proper data and order', function () {
var view = getView(this.collection, this.tabsCollection),
chapters = getText('.course-title'),
sections = getText('.course-subtitle'),
notes = getText('.note-excerpt-p');
expect(this.tabsCollection).toHaveLength(1);
expect(this.tabsCollection.at(0).toJSON()).toEqual({
name: 'Location in Course',<|fim▁hole|> view: 'Location in Course'
});
expect(view.$('#structure-panel')).toExist();
expect(chapters).toEqual(['First Chapter', 'Second Chapter']);
expect(sections).toEqual(['First Section', 'Second Section', 'Third Section']);
expect(notes).toEqual(['Note 1', 'Note 2', 'Note 3', 'Note 4', 'Note 5']);
});
});
});<|fim▁end|> | identifier: 'view-course-structure',
icon: 'fa fa-list-ul',
is_active: true,
is_closable: false, |
<|file_name|>orbitalWidget.py<|end_file_name|><|fim▁begin|>class orbitalWidget(QGroupBox):
def __init__(self):
super(QGroupBox, self).__init__()
self.initUI()
def initUI(self):
table = orbitalTable(0, 3)
table.horizontalHeader().setResizeMode(QHeaderView.Stretch)
<|fim▁hole|> btn_active = QPushButton('Active', self)
btn_active.setStyleSheet("background-color: red")
btn_active.clicked.connect(table.setActif)
btn_active.setStatusTip(dic_orbital_space["btn_active"])
btn_frozen = QPushButton('Frozen', self)
btn_frozen.setStyleSheet("background-color: rgb(51,153,255)")
btn_frozen.clicked.connect(table.setFrozen)
btn_frozen.setStatusTip(dic_orbital_space["btn_frozen"])
btn_inactive = QPushButton('Inactive', self)
btn_inactive.setStyleSheet("background-color: white")
btn_inactive.clicked.connect(table.setInactive)
btn_inactive.setStatusTip(dic_orbital_space["btn_inactive"])
vbox = QVBoxLayout()
vbox.addWidget(table)
vbox.addWidget(btn_active)
vbox.addWidget(btn_frozen)
vbox.addWidget(btn_inactive)
w=QWidget()
w.setLayout(vbox)
self.setTitle("Orbital class")
vbox = QVBoxLayout()
vbox.addWidget(w)
self.setLayout(vbox)<|fim▁end|> | |
<|file_name|>test_contexts.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#
# Copyright 2018-2021 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from polyaxon.connections.kinds import V1ConnectionKind
from polyaxon.connections.schemas import V1ClaimConnection
from polyaxon.containers import contexts as container_contexts
from polyaxon.polyaxonfile.specs import kinds
from polyaxon.polyflow import V1CloningKind, V1CompiledOperation, V1RunKind
from polyaxon.polypod.compiler.contexts import resolve_contexts
from polyaxon.schemas.types import V1ConnectionType
from polyaxon.utils.test_utils import BaseTestCase
from polyaxon.utils.tz_utils import now
@pytest.mark.polypod_mark
class TestResolveContexts(BaseTestCase):
def test_resolver_default_contexts(self):
context_root = container_contexts.CONTEXT_ROOT
compiled_operation = V1CompiledOperation.read(
{
"version": 1.1,
"kind": kinds.COMPILED_OPERATION,
"plugins": {
"auth": False,
"shm": False,<|fim▁hole|> "collectResources": False,
},
"run": {"kind": V1RunKind.JOB, "container": {"image": "test"}},
}
)
spec = resolve_contexts(
namespace="test",
owner_name="user",
project_name="project",
project_uuid="uuid",
run_uuid="uuid",
run_name="run",
run_path="test",
compiled_operation=compiled_operation,
artifacts_store=None,
connection_by_names={},
iteration=None,
created_at=None,
compiled_at=None,
)
assert spec == {
"globals": {
"owner_name": "user",
"project_unique_name": "user.project",
"project_name": "project",
"project_uuid": "uuid",
"run_info": "user.project.runs.uuid",
"context_path": context_root,
"artifacts_path": "{}/artifacts".format(context_root),
"name": "run",
"uuid": "uuid",
"namespace": "test",
"iteration": None,
"created_at": None,
"compiled_at": None,
"schedule_at": None,
"started_at": None,
"finished_at": None,
"duration": None,
"cloning_kind": None,
"original_uuid": None,
"is_independent": True,
"store_path": "",
},
"init": {},
"connections": {},
}
def test_resolver_init_and_connections_contexts(self):
context_root = container_contexts.CONTEXT_ROOT
store = V1ConnectionType(
name="test_claim",
kind=V1ConnectionKind.VOLUME_CLAIM,
schema=V1ClaimConnection(
mount_path="/claim/path", volume_claim="claim", read_only=True
),
)
compiled_operation = V1CompiledOperation.read(
{
"version": 1.1,
"kind": kinds.COMPILED_OPERATION,
"plugins": {
"auth": False,
"shm": False,
"mountArtifactsStore": True,
"collectLogs": False,
"collectArtifacts": False,
"collectResources": False,
},
"run": {
"kind": V1RunKind.JOB,
"container": {"image": "test"},
"connections": [store.name],
"init": [{"connection": store.name}],
},
}
)
date_value = now()
spec = resolve_contexts(
namespace="test",
owner_name="user",
project_name="project",
project_uuid="uuid",
run_uuid="uuid",
run_name="run",
run_path="test",
compiled_operation=compiled_operation,
artifacts_store=store,
connection_by_names={store.name: store},
iteration=12,
created_at=date_value,
compiled_at=date_value,
cloning_kind=V1CloningKind.COPY,
original_uuid="uuid-copy",
is_independent=False,
)
assert spec == {
"globals": {
"owner_name": "user",
"project_unique_name": "user.project",
"project_name": "project",
"project_uuid": "uuid",
"name": "run",
"uuid": "uuid",
"context_path": context_root,
"artifacts_path": "{}/artifacts".format(context_root),
"run_artifacts_path": "/claim/path/test",
"run_outputs_path": "/claim/path/test/outputs",
"namespace": "test",
"iteration": 12,
"run_info": "user.project.runs.uuid",
"created_at": date_value,
"compiled_at": date_value,
"schedule_at": None,
"started_at": None,
"finished_at": None,
"duration": None,
"is_independent": False,
"cloning_kind": V1CloningKind.COPY,
"original_uuid": "uuid-copy",
"store_path": "/claim/path",
},
"init": {"test_claim": store.schema.to_dict()},
"connections": {"test_claim": store.schema.to_dict()},
}
def test_resolver_outputs_collections(self):
context_root = container_contexts.CONTEXT_ROOT
store = V1ConnectionType(
name="test_claim",
kind=V1ConnectionKind.VOLUME_CLAIM,
schema=V1ClaimConnection(
mount_path="/claim/path", volume_claim="claim", read_only=True
),
)
compiled_operation = V1CompiledOperation.read(
{
"version": 1.1,
"kind": kinds.COMPILED_OPERATION,
"plugins": {
"auth": False,
"shm": False,
"mountArtifactsStore": False,
"collectLogs": False,
"collectArtifacts": True,
"collectResources": True,
},
"run": {
"kind": V1RunKind.JOB,
"container": {"image": "test"},
"connections": [store.name],
"init": [{"connection": store.name}],
},
}
)
spec = resolve_contexts(
namespace="test",
owner_name="user",
project_name="project",
project_uuid="uuid",
run_uuid="uuid",
run_name="run",
run_path="test",
compiled_operation=compiled_operation,
artifacts_store=store,
connection_by_names={store.name: store},
iteration=12,
created_at=None,
compiled_at=None,
is_independent=True,
)
assert spec == {
"globals": {
"owner_name": "user",
"project_name": "project",
"project_unique_name": "user.project",
"project_uuid": "uuid",
"name": "run",
"uuid": "uuid",
"run_info": "user.project.runs.uuid",
"context_path": context_root,
"artifacts_path": "{}/artifacts".format(context_root),
"run_artifacts_path": "{}/artifacts/test".format(context_root),
"run_outputs_path": "{}/artifacts/test/outputs".format(context_root),
"namespace": "test",
"iteration": 12,
"created_at": None,
"compiled_at": None,
"schedule_at": None,
"started_at": None,
"finished_at": None,
"duration": None,
"cloning_kind": None,
"original_uuid": None,
"is_independent": True,
"store_path": "",
},
"init": {"test_claim": store.schema.to_dict()},
"connections": {"test_claim": store.schema.to_dict()},
}
def test_resolver_mount_artifacts_store(self):
context_root = container_contexts.CONTEXT_ROOT
store = V1ConnectionType(
name="test_claim",
kind=V1ConnectionKind.VOLUME_CLAIM,
schema=V1ClaimConnection(
mount_path="/claim/path", volume_claim="claim", read_only=True
),
)
compiled_operation = V1CompiledOperation.read(
{
"version": 1.1,
"kind": kinds.COMPILED_OPERATION,
"plugins": {
"auth": False,
"shm": False,
"mountArtifactsStore": True,
"collectLogs": False,
"collectArtifacts": True,
"collectResources": True,
},
"run": {
"kind": V1RunKind.JOB,
"container": {"image": "test"},
"connections": [store.name],
"init": [{"connection": store.name}],
},
}
)
spec = resolve_contexts(
namespace="test",
owner_name="user",
project_name="project",
project_uuid="uuid",
run_uuid="uuid",
run_name="run",
run_path="test",
compiled_operation=compiled_operation,
artifacts_store=store,
connection_by_names={store.name: store},
iteration=12,
created_at=None,
compiled_at=None,
)
assert spec == {
"globals": {
"owner_name": "user",
"project_name": "project",
"project_unique_name": "user.project",
"project_uuid": "uuid",
"name": "run",
"uuid": "uuid",
"run_info": "user.project.runs.uuid",
"context_path": context_root,
"artifacts_path": "{}/artifacts".format(context_root),
"run_artifacts_path": "{}/artifacts/test".format(context_root),
"run_outputs_path": "{}/artifacts/test/outputs".format(context_root),
"namespace": "test",
"iteration": 12,
"created_at": None,
"compiled_at": None,
"schedule_at": None,
"started_at": None,
"finished_at": None,
"duration": None,
"cloning_kind": None,
"original_uuid": None,
"is_independent": True,
"store_path": "/claim/path",
},
"init": {"test_claim": store.schema.to_dict()},
"connections": {"test_claim": store.schema.to_dict()},
}
def test_resolver_default_service_ports(self):
context_root = container_contexts.CONTEXT_ROOT
compiled_operation = V1CompiledOperation.read(
{
"version": 1.1,
"kind": kinds.COMPILED_OPERATION,
"plugins": {
"auth": False,
"shm": False,
"collectLogs": False,
"collectArtifacts": True,
"collectResources": True,
},
"run": {
"kind": V1RunKind.SERVICE,
"ports": [1212, 1234],
"container": {"image": "test", "command": "{{ ports[0] }}"},
},
}
)
spec = resolve_contexts(
namespace="test",
owner_name="user",
project_name="project",
project_uuid="uuid",
run_uuid="uuid",
run_name="run",
run_path="test",
compiled_operation=compiled_operation,
artifacts_store=None,
connection_by_names={},
iteration=12,
created_at=None,
compiled_at=None,
)
assert spec == {
"globals": {
"owner_name": "user",
"project_name": "project",
"project_unique_name": "user.project",
"project_uuid": "uuid",
"run_info": "user.project.runs.uuid",
"name": "run",
"uuid": "uuid",
"context_path": context_root,
"artifacts_path": "{}/artifacts".format(context_root),
"run_artifacts_path": "{}/artifacts/test".format(context_root),
"run_outputs_path": "{}/artifacts/test/outputs".format(context_root),
"namespace": "test",
"iteration": 12,
"ports": [1212, 1234],
"base_url": "/services/v1/test/user/project/runs/uuid",
"created_at": None,
"compiled_at": None,
"schedule_at": None,
"started_at": None,
"finished_at": None,
"duration": None,
"cloning_kind": None,
"original_uuid": None,
"is_independent": True,
"store_path": "",
},
"init": {},
"connections": {},
}<|fim▁end|> | "collectLogs": False,
"collectArtifacts": False, |
<|file_name|>check_print.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import osv
from openerp.report import report_sxw
class report_print_check(report_sxw.rml_parse):
def __init__(self, cr, uid, name, context):
super(report_print_check, self).__init__(cr, uid, name, context)
self.number_lines = 0
self.number_add = 0<|fim▁hole|> 'fill_stars' : self.fill_stars,
})
def fill_stars(self, amount):
if len(amount) < 100:
stars = 100 - len(amount)
return ' '.join([amount,'*'*stars])
        else:
            return amount
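    # Illustrative example: fill_stars('25.00') returns '25.00 ' followed by
    # 95 '*' characters (total length 101), padding the amount line as is
    # conventional on printed checks.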
def get_lines(self, voucher_lines):
result = []
self.number_lines = len(voucher_lines)
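        # NOTE: because the loop below iterates over range(0, min(10, number_lines)),
        # the "i < self.number_lines" test always holds, so the padding branch in
        # the else clause is unreachable as written.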
for i in range(0, min(10,self.number_lines)):
if i < self.number_lines:
res = {
'date_due' : voucher_lines[i].date_due,
'name' : voucher_lines[i].name,
'amount_original' : voucher_lines[i].amount_original and voucher_lines[i].amount_original or False,
'amount_unreconciled' : voucher_lines[i].amount_unreconciled and voucher_lines[i].amount_unreconciled or False,
'amount' : voucher_lines[i].amount and voucher_lines[i].amount or False,
}
else :
res = {
'date_due' : False,
'name' : False,
'amount_original' : False,
'amount_due' : False,
'amount' : False,
}
result.append(res)
return result
class report_check(osv.AbstractModel):
_name = 'report.account_check_writing.report_check'
_inherit = 'report.abstract_report'
_template = 'account_check_writing.report_check'
_wrapped_report_class = report_print_check<|fim▁end|> | self.localcontext.update({
'time': time,
'get_lines': self.get_lines, |
<|file_name|>embed.py<|end_file_name|><|fim▁begin|>''' Provide functions to embed Bokeh models (e.g., plots, widget, layouts)
in various different ways.
There are a number of different combinations of options when embedding
Bokeh plots. The data for the plot can be contained in the document,
or on a Bokeh server, or in a sidecar JavaScript file. Likewise, BokehJS
may be inlined in the document, or loaded from CDN or a Bokeh server.
The functions in ``bokeh.embed`` provide functionality to embed in all
these different cases.
'''
from __future__ import absolute_import
import re
import uuid
from warnings import warn
from .templates import (
AUTOLOAD_JS, AUTOLOAD_TAG, FILE,
NOTEBOOK_DIV, PLOT_DIV, DOC_JS, SCRIPT_TAG
)
from .util.string import encode_utf8
from .plot_object import PlotObject, _ModelInDocument
from ._json_encoder import serialize_json
from .resources import DEFAULT_SERVER_HTTP_URL
from .client import DEFAULT_SESSION_ID
from .document import Document
from collections import Sequence
from six import string_types
def _wrap_in_function(code):
# indent and wrap Bokeh function def around
code = "\n".join([" " + line for line in code.split("\n")])
return 'Bokeh.$(function() {\n%s\n});' % code
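# Illustrative behavior (not part of the public API):
#   _wrap_in_function("console.log('hi');")
#   # => "Bokeh.$(function() {\n    console.log('hi');\n});"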
def components(plot_objects, resources=None, wrap_script=True, wrap_plot_info=True):
'''
Return HTML components to embed a Bokeh plot. The data for the plot is
stored directly in the returned HTML.
An example can be found in examples/embed/embed_multiple.py
.. note::
The returned components assume that BokehJS resources are
**already loaded**.
Args:
plot_objects (PlotObject|list|dict|tuple) :
A single PlotObject, a list/tuple of PlotObjects, or a dictionary of keys and PlotObjects.
resources :
Deprecated argument
wrap_script (boolean, optional) :
If True, the returned javascript is wrapped in a script tag.
(default: True)
wrap_plot_info (boolean, optional) : If True, returns ``<div>`` strings.
Otherwise, return dicts that can be used to build your own divs.
(default: True)
If False, the returned dictionary contains the following information:
.. code-block:: python
{
'modelid': 'The model ID, used with Document.get_model_by_id',
'elementid': 'The css identifier the BokehJS will look for to target the plot',
'docid': 'Used by Bokeh to find the doc embedded in the returned script',
}
Returns:
UTF-8 encoded *(script, div[s])* or *(raw_script, plot_info[s])*
Examples:
With default wrapping parameter values:
.. code-block:: python
components(plot)
# => (script, plot_div)
components((plot1, plot2))
# => (script, (plot1_div, plot2_div))
components({"Plot 1": plot1, "Plot 2": plot2})
# => (script, {"Plot 1": plot1_div, "Plot 2": plot2_div})
Examples:
With wrapping parameters set to ``False``:
.. code-block:: python
components(plot, wrap_script=False, wrap_plot_info=False)
# => (javascript, plot_dict)
components((plot1, plot2), wrap_script=False, wrap_plot_info=False)
# => (javascript, (plot1_dict, plot2_dict))
components({"Plot 1": plot1, "Plot 2": plot2}, wrap_script=False, wrap_plot_info=False)
# => (javascript, {"Plot 1": plot1_dict, "Plot 2": plot2_dict})
'''
if resources is not None:
warn('Because the ``resources`` argument is no longer needed, '
'it is deprecated and no longer has any effect',
DeprecationWarning, stacklevel=2)
# 1) Convert single items and dicts into list
was_single_object = isinstance(plot_objects, PlotObject) or isinstance(plot_objects, Document)
# converts single to list
plot_objects = _check_plot_objects(plot_objects, allow_dict=True)
# now convert dict to list, saving keys in the same order
plot_object_keys = None
if isinstance(plot_objects, dict):
plot_object_keys = plot_objects.keys()
values = []
# don't just use .values() to ensure we are in the same order as key list
for k in plot_object_keys:
values.append(plot_objects[k])
plot_objects = values
# 2) Do our rendering
with _ModelInDocument(plot_objects):
(docs_json, render_items) = _standalone_docs_json_and_render_items(plot_objects)
custom_models = _extract_custom_models(plot_objects)
script = _script_for_render_items(docs_json, render_items, custom_models=custom_models,
websocket_url=None, wrap_script=wrap_script)
script = encode_utf8(script)
if wrap_plot_info:
results = list(_div_for_render_item(item) for item in render_items)
else:
results = render_items
# 3) convert back to the input shape
if was_single_object:
return script, results[0]
elif plot_object_keys is not None:
result = {}
for (key, value) in zip(plot_object_keys, results):
result[key] = value
return script, result
else:
return script, tuple(results)
def _escape_code(code):
""" Escape JS/CS source code, so that it can be embbeded in a JS string.
This is based on https://github.com/joliss/js-string-escape.
"""
def escape(match):
ch = match.group(0)
if ch == '"' or ch == "'" or ch == '\\':
return '\\' + ch
elif ch == '\n':
return '\\n'
elif ch == '\r':
return '\\r'
elif ch == '\u2028':
return '\\u2028'
elif ch == '\u2029':
return '\\u2029'
return re.sub(u"""['"\\\n\r\u2028\u2029]""", escape, code)
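# Illustrative behavior: _escape_code('say "hi"\n') returns the literal
# characters say \"hi\"\n, i.e. quotes and newlines survive being embedded
# in a single- or double-quoted JS string.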
def _extract_custom_models(plot_objects):
custom_models = {}
def extract_from_model(model):
for r in model.references():
impl = getattr(r.__class__, "__implementation__", None)
if impl is not None:
name = r.__class__.__name__
impl = "['%s', {}]" % _escape_code(impl)
custom_models[name] = impl
for o in plot_objects:
if isinstance(o, Document):
for r in o.roots:
extract_from_model(r)
else:
extract_from_model(o)
return custom_models
def notebook_div(plot_object):
''' Return HTML for a div that will display a Bokeh plot in an
IPython Notebook
The data for the plot is stored directly in the returned HTML.
Args:
plot_object (PlotObject) : Bokeh object to render
Returns:
UTF-8 encoded HTML text for a ``<div>``
.. note::
Assumes :func:`~bokeh.util.notebook.load_notebook` or the equivalent
has already been executed.
'''
plot_object = _check_one_plot_object(plot_object)
with _ModelInDocument(plot_object):
(docs_json, render_items) = _standalone_docs_json_and_render_items([plot_object])
custom_models = _extract_custom_models([plot_object])
script = _script_for_render_items(docs_json, render_items,
custom_models=custom_models,
websocket_url=None)
item = render_items[0]
div = _div_for_render_item(item)
html = NOTEBOOK_DIV.render(
plot_script = script,
plot_div = div,
)
return encode_utf8(html)
def _use_widgets(plot_objects):
from .models.widgets import Widget
for o in plot_objects:
if isinstance(o, Document):
if _use_widgets(o.roots):
return True
else:
if any(isinstance(model, Widget) for model in o.references()):
return True
return False
def file_html(plot_objects,
resources,
title,
js_resources=None,
css_resources=None,
template=FILE,
template_variables={}):
'''Return an HTML document that embeds Bokeh PlotObject or Document objects.
The data for the plot is stored directly in the returned HTML.
This is an alias for standalone_html_page_for_models() which
supports customizing the JS/CSS resources independently and
customizing the jinja2 template.
Args:
plot_objects (PlotObject or Document or list) : Bokeh object or objects to render
typically a PlotObject or Document
resources (Resources) : a resource configuration for BokehJS assets
title (str) : a title for the HTML document ``<title>`` tags<|fim▁hole|> template. If used, the following variable names will be overwritten:
title, js_resources, css_resources, plot_script, plot_div
Returns:
UTF-8 encoded HTML
'''
plot_objects = _check_plot_objects(plot_objects)
with _ModelInDocument(plot_objects):
(docs_json, render_items) = _standalone_docs_json_and_render_items(plot_objects)
custom_models = _extract_custom_models(plot_objects)
return _html_page_for_render_items(resources, docs_json, render_items, title,
custom_models=custom_models, websocket_url=None,
js_resources=js_resources, css_resources=css_resources,
template=template, template_variables=template_variables,
use_widgets=_use_widgets(plot_objects))
# TODO rename this "standalone"?
def autoload_static(plot_object, resources, script_path):
''' Return JavaScript code and a script tag that can be used to embed
Bokeh Plots.
The data for the plot is stored directly in the returned JavaScript code.
Args:
plot_object (PlotObject or Document) :
resources (Resources) :
script_path (str) :
Returns:
(js, tag) :
JavaScript code to be saved at ``script_path`` and a ``<script>``
tag to load it
Raises:
ValueError
'''
if resources.mode == 'inline':
raise ValueError("autoload_static() requires non-inline resources")
# TODO why is this?
if resources.dev:
raise ValueError("autoload_static() only works with non-dev resources")
plot_object = _check_one_plot_object(plot_object)
with _ModelInDocument(plot_object):
(docs_json, render_items) = _standalone_docs_json_and_render_items([plot_object])
item = render_items[0]
model_id = ""
if 'modelid' in item:
model_id = item['modelid']
doc_id = ""
if 'docid' in item:
doc_id = item['docid']
js = AUTOLOAD_JS.render(
docs_json = serialize_json(docs_json),
# TODO we should load all the JS files, but the code
# in AUTOLOAD_JS isn't smart enough to deal with it.
js_url = resources.js_files[0],
css_files = resources.css_files,
elementid = item['elementid'],
websocket_url = None
)
tag = AUTOLOAD_TAG.render(
src_path = script_path,
elementid = item['elementid'],
modelid = model_id,
docid = doc_id,
loglevel = resources.log_level
)
return encode_utf8(js), encode_utf8(tag)
def autoload_server(plot_object, app_path="/", session_id=DEFAULT_SESSION_ID, url="default", loglevel="info"):
''' Return a script tag that can be used to embed Bokeh Plots from
a Bokeh Server.
The data for the plot is stored on the Bokeh Server.
Args:
plot_object (PlotObject) : the object to render from the session, or None for entire document
app_path (str, optional) : the server path to the app we want to load
session_id (str, optional) : server session ID
url (str, optional) : server root URL (where static resources live, not where a specific app lives)
loglevel (str, optional) : "trace", "debug", "info", "warn", "error", "fatal"
Returns:
tag :
a ``<script>`` tag that will execute an autoload script
loaded from the Bokeh Server
'''
if url == "default":
url = DEFAULT_SERVER_HTTP_URL
elementid = str(uuid.uuid4())
# empty model_id means render the entire doc from session_id
model_id = ""
if plot_object is not None:
model_id = plot_object._id
if not url.endswith("/"):
url = url + "/"
if not app_path.endswith("/"):
app_path = app_path + "/"
if app_path.startswith("/"):
app_path = app_path[1:]
src_path = url + app_path + "autoload.js" + "?bokeh-autoload-element=" + elementid
tag = AUTOLOAD_TAG.render(
src_path = src_path,
elementid = elementid,
modelid = model_id,
sessionid = session_id,
loglevel = loglevel
)
return encode_utf8(tag)
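# A minimal usage sketch (the app path and server URL below are assumptions,
# not part of this module):
#
#   tag = autoload_server(None, app_path="/myapp", url="http://localhost:5006")
#   # embed `tag` in an HTML template; the rendered <script> pulls the whole
#   # session document from the running Bokeh server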
def _script_for_render_items(docs_json, render_items, websocket_url,
custom_models, wrap_script=True):
# this avoids emitting the "register custom models" code at all
# just to register an empty set
if (custom_models is not None) and len(custom_models) == 0:
custom_models = None
plot_js = _wrap_in_function(
DOC_JS.render(
custom_models=custom_models,
websocket_url=websocket_url,
docs_json=serialize_json(docs_json),
render_items=serialize_json(render_items)
)
)
if wrap_script:
return SCRIPT_TAG.render(js_code=plot_js)
else:
return plot_js
def _html_page_for_render_items(resources, docs_json, render_items, title, websocket_url,
custom_models, js_resources=None, css_resources=None,
template=FILE, template_variables={}, use_widgets=True):
if resources:
if js_resources:
warn('Both resources and js_resources provided. resources will override js_resources.')
if css_resources:
warn('Both resources and css_resources provided. resources will override css_resources.')
js_resources = resources
css_resources = resources
bokeh_js = ''
if js_resources:
if not css_resources:
warn('No Bokeh CSS Resources provided to template. If required you will need to provide them manually.')
js_resources = js_resources.use_widgets(use_widgets)
bokeh_js = js_resources.render_js()
bokeh_css = ''
if css_resources:
if not js_resources:
warn('No Bokeh JS Resources provided to template. If required you will need to provide them manually.')
css_resources = css_resources.use_widgets(use_widgets)
bokeh_css = css_resources.render_css()
script = _script_for_render_items(docs_json, render_items, websocket_url, custom_models)
template_variables_full = template_variables.copy()
template_variables_full.update(dict(
title = title,
bokeh_js = bokeh_js,
bokeh_css = bokeh_css,
plot_script = script,
plot_div = "\n".join(_div_for_render_item(item) for item in render_items)
))
html = template.render(template_variables_full)
return encode_utf8(html)
def _check_plot_objects(plot_objects, allow_dict=False):
input_type_valid = False
# Check for single item
if isinstance(plot_objects, (PlotObject, Document)):
plot_objects = [plot_objects]
# Check for sequence
if isinstance(plot_objects, Sequence) and all(isinstance(x, (PlotObject, Document)) for x in plot_objects):
input_type_valid = True
if allow_dict:
if isinstance(plot_objects, dict) and \
all(isinstance(x, string_types) for x in plot_objects.keys()) and \
all(isinstance(x, (PlotObject, Document)) for x in plot_objects.values()):
input_type_valid = True
if not input_type_valid:
if allow_dict:
raise ValueError(
'Input must be a PlotObject, a Document, a Sequence of PlotObjects and Document, or a dictionary from string to PlotObject and Document'
)
else:
raise ValueError('Input must be a PlotObject, a Document, or a Sequence of PlotObjects and Document')
return plot_objects
def _check_one_plot_object(plot_object):
plot_objects = _check_plot_objects(plot_object)
if len(plot_objects) != 1:
raise ValueError("Input must be exactly one PlotObject or Document")
return plot_objects[0]
def _div_for_render_item(item):
return PLOT_DIV.render(elementid=item['elementid'])
def _standalone_docs_json_and_render_items(plot_objects):
plot_objects = _check_plot_objects(plot_objects)
render_items = []
docs_by_id = {}
for p in plot_objects:
modelid = None
if isinstance(p, Document):
doc = p
else:
if p.document is None:
raise ValueError("To render a PlotObject as HTML it must be part of a Document")
doc = p.document
modelid = p._id
docid = None
for key in docs_by_id:
if docs_by_id[key] == doc:
docid = key
if docid is None:
docid = str(uuid.uuid4())
docs_by_id[docid] = doc
elementid = str(uuid.uuid4())
render_items.append({
'docid' : docid,
'elementid' : elementid,
# if modelid is None, that means the entire document
'modelid' : modelid
})
docs_json = {}
for k, v in docs_by_id.items():
docs_json[k] = v.to_json()
return (docs_json, render_items)
# TODO this is a theory about what file_html() "should" be,
# with a more explicit name similar to the server names below,
# and without the jinja2 entanglement. Thus this encapsulates that
# we use jinja2 and encapsulates the exact template variables we require.
# Anyway, we should deprecate file_html or else drop this version,
# most likely.
def standalone_html_page_for_models(plot_objects, resources, title):
''' Return an HTML document that renders zero or more Bokeh documents or models.
The document for each model will be embedded directly in the HTML, so the
resulting HTML file is standalone (does not require a server). Depending
on the provided resources, the HTML file may be completely self-contained
or may have to load JS and CSS from different files.
Args:
plot_objects (PlotObject or Document) : Bokeh object to render
typically a PlotObject or a Document
resources (Resources) : a resource configuration for BokehJS assets
title (str) : a title for the HTML document ``<title>`` tags
Returns:
UTF-8 encoded HTML
'''
return file_html(plot_objects, resources, title)
def server_html_page_for_models(session_id, model_ids, resources, title, websocket_url):
render_items = []
for modelid in model_ids:
if modelid is None:
raise ValueError("None found in list of model_ids")
elementid = str(uuid.uuid4())
render_items.append({
'sessionid' : session_id,
'elementid' : elementid,
'modelid' : modelid
})
return _html_page_for_render_items(resources, {}, render_items, title,
websocket_url=websocket_url, custom_models=None)
def server_html_page_for_session(session_id, resources, title, websocket_url):
elementid = str(uuid.uuid4())
render_items = [{
'sessionid' : session_id,
'elementid' : elementid
# no 'modelid' implies the entire session document
}]
return _html_page_for_render_items(resources, {}, render_items, title,
websocket_url=websocket_url, custom_models=None)<|fim▁end|> | template (Template, optional) : HTML document template (default: FILE)
A Jinja2 Template, see bokeh.templates.FILE for the required
template parameters
template_variables (dict, optional) : variables to be used in the Jinja2 |
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
import datetime
import re
import sys
from unittest import skipIf
import warnings
from xml.dom.minidom import parseString
try:
import pytz
except ImportError:
pytz = None
from django.core import serializers
from django.core.urlresolvers import reverse
from django.db.models import Min, Max
from django.http import HttpRequest
from django.template import Context, RequestContext, Template, TemplateSyntaxError
from django.test import TestCase, override_settings, skipIfDBFeature, skipUnlessDBFeature
from django.test.utils import requires_tz_support
from django.utils import six
from django.utils import timezone
from .forms import EventForm, EventSplitForm, EventLocalizedForm, EventModelForm, EventLocalizedModelForm
from .models import Event, MaybeEvent, Session, SessionEvent, Timestamp, AllDayEvent
# These tests use the EAT (Eastern Africa Time) and ICT (Indochina Time)
# which don't have Daylight Saving Time, so we can represent them easily
# with FixedOffset, and use them directly as tzinfo in the constructors.
# settings.TIME_ZONE is forced to EAT. Most tests use a variant of
# datetime.datetime(2011, 9, 1, 13, 20, 30), which translates to
# 10:20:30 in UTC and 17:20:30 in ICT.
UTC = timezone.utc
EAT = timezone.get_fixed_timezone(180) # Africa/Nairobi
ICT = timezone.get_fixed_timezone(420) # Asia/Bangkok
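# For example, these two aware datetimes denote the same instant:
#   datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
#   datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC)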
@override_settings(TIME_ZONE='Africa/Nairobi', USE_TZ=False)
class LegacyDatabaseTests(TestCase):
def test_naive_datetime(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30)
Event.objects.create(dt=dt)
event = Event.objects.get()
self.assertEqual(event.dt, dt)
@skipUnlessDBFeature('supports_microsecond_precision')
def test_naive_datetime_with_microsecond(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060)
Event.objects.create(dt=dt)
event = Event.objects.get()
self.assertEqual(event.dt, dt)
@skipIfDBFeature('supports_microsecond_precision')
def test_naive_datetime_with_microsecond_unsupported(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060)
Event.objects.create(dt=dt)
event = Event.objects.get()
# microseconds are lost during a round-trip in the database
self.assertEqual(event.dt, dt.replace(microsecond=0))
@skipUnlessDBFeature('supports_timezones')
def test_aware_datetime_in_local_timezone(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
Event.objects.create(dt=dt)
event = Event.objects.get()
self.assertIsNone(event.dt.tzinfo)
# interpret the naive datetime in local time to get the correct value
self.assertEqual(event.dt.replace(tzinfo=EAT), dt)
@skipUnlessDBFeature('supports_timezones')
@skipUnlessDBFeature('supports_microsecond_precision')
def test_aware_datetime_in_local_timezone_with_microsecond(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060, tzinfo=EAT)
Event.objects.create(dt=dt)
event = Event.objects.get()
self.assertIsNone(event.dt.tzinfo)
# interpret the naive datetime in local time to get the correct value
self.assertEqual(event.dt.replace(tzinfo=EAT), dt)
# This combination actually never happens.
@skipUnlessDBFeature('supports_timezones')
@skipIfDBFeature('supports_microsecond_precision')
def test_aware_datetime_in_local_timezone_with_microsecond_unsupported(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060, tzinfo=EAT)
Event.objects.create(dt=dt)
event = Event.objects.get()
self.assertIsNone(event.dt.tzinfo)
# interpret the naive datetime in local time to get the correct value
# microseconds are lost during a round-trip in the database
self.assertEqual(event.dt.replace(tzinfo=EAT), dt.replace(microsecond=0))
@skipUnlessDBFeature('supports_timezones')
@skipIfDBFeature('needs_datetime_string_cast')
def test_aware_datetime_in_utc(self):
dt = datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC)
Event.objects.create(dt=dt)
event = Event.objects.get()
self.assertIsNone(event.dt.tzinfo)
# interpret the naive datetime in local time to get the correct value
self.assertEqual(event.dt.replace(tzinfo=EAT), dt)
# This combination is no longer possible since timezone support
# was removed from the SQLite backend -- it didn't work.
@skipUnlessDBFeature('supports_timezones')
@skipUnlessDBFeature('needs_datetime_string_cast')
def test_aware_datetime_in_utc_unsupported(self):
dt = datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC)
Event.objects.create(dt=dt)
event = Event.objects.get()
self.assertIsNone(event.dt.tzinfo)
        # django.db.backends.utils.typecast_timestamp will just drop the
# timezone, so a round-trip in the database alters the data (!)
# interpret the naive datetime in local time and you get a wrong value
self.assertNotEqual(event.dt.replace(tzinfo=EAT), dt)
# interpret the naive datetime in original time to get the correct value
self.assertEqual(event.dt.replace(tzinfo=UTC), dt)
@skipUnlessDBFeature('supports_timezones')
@skipIfDBFeature('needs_datetime_string_cast')
def test_aware_datetime_in_other_timezone(self):
dt = datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=ICT)
Event.objects.create(dt=dt)
event = Event.objects.get()
self.assertIsNone(event.dt.tzinfo)
# interpret the naive datetime in local time to get the correct value
self.assertEqual(event.dt.replace(tzinfo=EAT), dt)
# This combination is no longer possible since timezone support
# was removed from the SQLite backend -- it didn't work.
@skipUnlessDBFeature('supports_timezones')
@skipUnlessDBFeature('needs_datetime_string_cast')
def test_aware_datetime_in_other_timezone_unsupported(self):
dt = datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=ICT)
Event.objects.create(dt=dt)
event = Event.objects.get()
self.assertIsNone(event.dt.tzinfo)
        # django.db.backends.utils.typecast_timestamp will just drop the
# timezone, so a round-trip in the database alters the data (!)
# interpret the naive datetime in local time and you get a wrong value
self.assertNotEqual(event.dt.replace(tzinfo=EAT), dt)
# interpret the naive datetime in original time to get the correct value
self.assertEqual(event.dt.replace(tzinfo=ICT), dt)
@skipIfDBFeature('supports_timezones')
    def test_aware_datetime_unsupported(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
with self.assertRaises(ValueError):
Event.objects.create(dt=dt)
def test_auto_now_and_auto_now_add(self):
now = datetime.datetime.now()
past = now - datetime.timedelta(seconds=2)
future = now + datetime.timedelta(seconds=2)
Timestamp.objects.create()
ts = Timestamp.objects.get()
self.assertLess(past, ts.created)
self.assertLess(past, ts.updated)
        self.assertGreater(future, ts.created)
self.assertGreater(future, ts.updated)
def test_query_filter(self):
dt1 = datetime.datetime(2011, 9, 1, 12, 20, 30)
dt2 = datetime.datetime(2011, 9, 1, 14, 20, 30)
Event.objects.create(dt=dt1)
Event.objects.create(dt=dt2)
self.assertEqual(Event.objects.filter(dt__gte=dt1).count(), 2)
self.assertEqual(Event.objects.filter(dt__gt=dt1).count(), 1)
self.assertEqual(Event.objects.filter(dt__gte=dt2).count(), 1)
self.assertEqual(Event.objects.filter(dt__gt=dt2).count(), 0)
def test_query_datetime_lookups(self):
Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0))
Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0))
self.assertEqual(Event.objects.filter(dt__year=2011).count(), 2)
self.assertEqual(Event.objects.filter(dt__month=1).count(), 2)
self.assertEqual(Event.objects.filter(dt__day=1).count(), 2)
self.assertEqual(Event.objects.filter(dt__week_day=7).count(), 2)
self.assertEqual(Event.objects.filter(dt__hour=1).count(), 1)
self.assertEqual(Event.objects.filter(dt__minute=30).count(), 2)
self.assertEqual(Event.objects.filter(dt__second=0).count(), 2)
def test_query_aggregation(self):
# Only min and max make sense for datetimes.
Event.objects.create(dt=datetime.datetime(2011, 9, 1, 23, 20, 20))
Event.objects.create(dt=datetime.datetime(2011, 9, 1, 13, 20, 30))
Event.objects.create(dt=datetime.datetime(2011, 9, 1, 3, 20, 40))
result = Event.objects.all().aggregate(Min('dt'), Max('dt'))
self.assertEqual(result, {
'dt__min': datetime.datetime(2011, 9, 1, 3, 20, 40),
'dt__max': datetime.datetime(2011, 9, 1, 23, 20, 20),
})
def test_query_annotation(self):
# Only min and max make sense for datetimes.
morning = Session.objects.create(name='morning')
afternoon = Session.objects.create(name='afternoon')
SessionEvent.objects.create(dt=datetime.datetime(2011, 9, 1, 23, 20, 20), session=afternoon)
SessionEvent.objects.create(dt=datetime.datetime(2011, 9, 1, 13, 20, 30), session=afternoon)
SessionEvent.objects.create(dt=datetime.datetime(2011, 9, 1, 3, 20, 40), session=morning)
morning_min_dt = datetime.datetime(2011, 9, 1, 3, 20, 40)
afternoon_min_dt = datetime.datetime(2011, 9, 1, 13, 20, 30)
self.assertQuerysetEqual(
Session.objects.annotate(dt=Min('events__dt')).order_by('dt'),
[morning_min_dt, afternoon_min_dt],
transform=lambda d: d.dt)
self.assertQuerysetEqual(
Session.objects.annotate(dt=Min('events__dt')).filter(dt__lt=afternoon_min_dt),
[morning_min_dt],
transform=lambda d: d.dt)
self.assertQuerysetEqual(
Session.objects.annotate(dt=Min('events__dt')).filter(dt__gte=afternoon_min_dt),
[afternoon_min_dt],
transform=lambda d: d.dt)
def test_query_datetimes(self):
Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0))
Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0))
self.assertQuerysetEqual(
Event.objects.datetimes('dt', 'year'),
[datetime.datetime(2011, 1, 1, 0, 0, 0)],
transform=lambda d: d)
self.assertQuerysetEqual(
Event.objects.datetimes('dt', 'month'),
[datetime.datetime(2011, 1, 1, 0, 0, 0)],
transform=lambda d: d)
self.assertQuerysetEqual(
Event.objects.datetimes('dt', 'day'),
[datetime.datetime(2011, 1, 1, 0, 0, 0)],
transform=lambda d: d)
self.assertQuerysetEqual(
Event.objects.datetimes('dt', 'hour'),
[datetime.datetime(2011, 1, 1, 1, 0, 0),
datetime.datetime(2011, 1, 1, 4, 0, 0)],
transform=lambda d: d)
self.assertQuerysetEqual(
Event.objects.datetimes('dt', 'minute'),
[datetime.datetime(2011, 1, 1, 1, 30, 0),
datetime.datetime(2011, 1, 1, 4, 30, 0)],
transform=lambda d: d)
self.assertQuerysetEqual(
Event.objects.datetimes('dt', 'second'),
[datetime.datetime(2011, 1, 1, 1, 30, 0),
datetime.datetime(2011, 1, 1, 4, 30, 0)],
transform=lambda d: d)
def test_raw_sql(self):
# Regression test for #17755
dt = datetime.datetime(2011, 9, 1, 13, 20, 30)
event = Event.objects.create(dt=dt)
self.assertQuerysetEqual(
Event.objects.raw('SELECT * FROM timezones_event WHERE dt = %s', [dt]),
[event],
transform=lambda d: d)
def test_filter_date_field_with_aware_datetime(self):
# Regression test for #17742
day = datetime.date(2011, 9, 1)
AllDayEvent.objects.create(day=day)
# This is 2011-09-02T01:30:00+03:00 in EAT
dt = datetime.datetime(2011, 9, 1, 22, 30, 0, tzinfo=UTC)
self.assertTrue(AllDayEvent.objects.filter(day__gte=dt).exists())
@override_settings(TIME_ZONE='Africa/Nairobi', USE_TZ=True)
class NewDatabaseTests(TestCase):
@requires_tz_support
def test_naive_datetime(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30)
with warnings.catch_warnings(record=True) as recorded:
warnings.simplefilter('always')
Event.objects.create(dt=dt)
self.assertEqual(len(recorded), 1)
msg = str(recorded[0].message)
self.assertTrue(msg.startswith("DateTimeField Event.dt received "
"a naive datetime"))
event = Event.objects.get()
# naive datetimes are interpreted in local time
self.assertEqual(event.dt, dt.replace(tzinfo=EAT))
@requires_tz_support
def test_datetime_from_date(self):
dt = datetime.date(2011, 9, 1)
with warnings.catch_warnings(record=True) as recorded:
warnings.simplefilter('always')
Event.objects.create(dt=dt)
self.assertEqual(len(recorded), 1)
msg = str(recorded[0].message)
self.assertTrue(msg.startswith("DateTimeField Event.dt received "
"a naive datetime"))
event = Event.objects.get()
self.assertEqual(event.dt, datetime.datetime(2011, 9, 1, tzinfo=EAT))
@requires_tz_support
@skipUnlessDBFeature('supports_microsecond_precision')
def test_naive_datetime_with_microsecond(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060)
with warnings.catch_warnings(record=True) as recorded:
warnings.simplefilter('always')
Event.objects.create(dt=dt)
self.assertEqual(len(recorded), 1)
msg = str(recorded[0].message)
self.assertTrue(msg.startswith("DateTimeField Event.dt received "
"a naive datetime"))
event = Event.objects.get()
# naive datetimes are interpreted in local time
self.assertEqual(event.dt, dt.replace(tzinfo=EAT))
@requires_tz_support
@skipIfDBFeature('supports_microsecond_precision')
def test_naive_datetime_with_microsecond_unsupported(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060)
with warnings.catch_warnings(record=True) as recorded:
warnings.simplefilter('always')
Event.objects.create(dt=dt)
self.assertEqual(len(recorded), 1)
msg = str(recorded[0].message)
self.assertTrue(msg.startswith("DateTimeField Event.dt received "
"a naive datetime"))
event = Event.objects.get()
# microseconds are lost during a round-trip in the database
# naive datetimes are interpreted in local time
self.assertEqual(event.dt, dt.replace(microsecond=0, tzinfo=EAT))
def test_aware_datetime_in_local_timezone(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
Event.objects.create(dt=dt)
event = Event.objects.get()
self.assertEqual(event.dt, dt)
@skipUnlessDBFeature('supports_microsecond_precision')
def test_aware_datetime_in_local_timezone_with_microsecond(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060, tzinfo=EAT)
Event.objects.create(dt=dt)
event = Event.objects.get()
self.assertEqual(event.dt, dt)
@skipIfDBFeature('supports_microsecond_precision')
def test_aware_datetime_in_local_timezone_with_microsecond_unsupported(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060, tzinfo=EAT)
Event.objects.create(dt=dt)
event = Event.objects.get()
# microseconds are lost during a round-trip in the database
self.assertEqual(event.dt, dt.replace(microsecond=0))
def test_aware_datetime_in_utc(self):
dt = datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC)
Event.objects.create(dt=dt)
event = Event.objects.get()
self.assertEqual(event.dt, dt)
def test_aware_datetime_in_other_timezone(self):
dt = datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=ICT)
Event.objects.create(dt=dt)
event = Event.objects.get()
self.assertEqual(event.dt, dt)
def test_auto_now_and_auto_now_add(self):
now = timezone.now()
past = now - datetime.timedelta(seconds=2)
future = now + datetime.timedelta(seconds=2)
Timestamp.objects.create()
ts = Timestamp.objects.get()
self.assertLess(past, ts.created)
self.assertLess(past, ts.updated)
        self.assertGreater(future, ts.created)
self.assertGreater(future, ts.updated)
def test_query_filter(self):
dt1 = datetime.datetime(2011, 9, 1, 12, 20, 30, tzinfo=EAT)
dt2 = datetime.datetime(2011, 9, 1, 14, 20, 30, tzinfo=EAT)
Event.objects.create(dt=dt1)
Event.objects.create(dt=dt2)
self.assertEqual(Event.objects.filter(dt__gte=dt1).count(), 2)
self.assertEqual(Event.objects.filter(dt__gt=dt1).count(), 1)
self.assertEqual(Event.objects.filter(dt__gte=dt2).count(), 1)
self.assertEqual(Event.objects.filter(dt__gt=dt2).count(), 0)
@skipIf(pytz is None, "this test requires pytz")
def test_query_filter_with_pytz_timezones(self):
tz = pytz.timezone('Europe/Paris')
dt = datetime.datetime(2011, 9, 1, 12, 20, 30, tzinfo=tz)
Event.objects.create(dt=dt)
next = dt + datetime.timedelta(seconds=3)
prev = dt - datetime.timedelta(seconds=3)
self.assertEqual(Event.objects.filter(dt__exact=dt).count(), 1)
self.assertEqual(Event.objects.filter(dt__exact=next).count(), 0)
self.assertEqual(Event.objects.filter(dt__in=(prev, next)).count(), 0)
self.assertEqual(Event.objects.filter(dt__in=(prev, dt, next)).count(), 1)
self.assertEqual(Event.objects.filter(dt__range=(prev, next)).count(), 1)
@requires_tz_support
def test_query_filter_with_naive_datetime(self):
dt = datetime.datetime(2011, 9, 1, 12, 20, 30, tzinfo=EAT)
Event.objects.create(dt=dt)
dt = dt.replace(tzinfo=None)
with warnings.catch_warnings(record=True) as recorded:
warnings.simplefilter('always')
# naive datetimes are interpreted in local time
self.assertEqual(Event.objects.filter(dt__exact=dt).count(), 1)
self.assertEqual(Event.objects.filter(dt__lte=dt).count(), 1)
self.assertEqual(Event.objects.filter(dt__gt=dt).count(), 0)
self.assertEqual(len(recorded), 3)
for warning in recorded:
msg = str(warning.message)
self.assertTrue(msg.startswith("DateTimeField Event.dt "
"received a naive datetime"))
@skipUnlessDBFeature('has_zoneinfo_database')
def test_query_datetime_lookups(self):
Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT))
Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT))
self.assertEqual(Event.objects.filter(dt__year=2011).count(), 2)
self.assertEqual(Event.objects.filter(dt__month=1).count(), 2)
self.assertEqual(Event.objects.filter(dt__day=1).count(), 2)
self.assertEqual(Event.objects.filter(dt__week_day=7).count(), 2)
self.assertEqual(Event.objects.filter(dt__hour=1).count(), 1)
self.assertEqual(Event.objects.filter(dt__minute=30).count(), 2)
self.assertEqual(Event.objects.filter(dt__second=0).count(), 2)
@skipUnlessDBFeature('has_zoneinfo_database')
def test_query_datetime_lookups_in_other_timezone(self):
Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT))
Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT))
with timezone.override(UTC):
# These two dates fall in the same day in EAT, but in different days,
# years and months in UTC.
self.assertEqual(Event.objects.filter(dt__year=2011).count(), 1)
self.assertEqual(Event.objects.filter(dt__month=1).count(), 1)
self.assertEqual(Event.objects.filter(dt__day=1).count(), 1)
self.assertEqual(Event.objects.filter(dt__week_day=7).count(), 1)
self.assertEqual(Event.objects.filter(dt__hour=22).count(), 1)
self.assertEqual(Event.objects.filter(dt__minute=30).count(), 2)
self.assertEqual(Event.objects.filter(dt__second=0).count(), 2)
def test_query_aggregation(self):
# Only min and max make sense for datetimes.
Event.objects.create(dt=datetime.datetime(2011, 9, 1, 23, 20, 20, tzinfo=EAT))
Event.objects.create(dt=datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT))
Event.objects.create(dt=datetime.datetime(2011, 9, 1, 3, 20, 40, tzinfo=EAT))
result = Event.objects.all().aggregate(Min('dt'), Max('dt'))
self.assertEqual(result, {
'dt__min': datetime.datetime(2011, 9, 1, 3, 20, 40, tzinfo=EAT),
'dt__max': datetime.datetime(2011, 9, 1, 23, 20, 20, tzinfo=EAT),
})
def test_query_annotation(self):
# Only min and max make sense for datetimes.
morning = Session.objects.create(name='morning')
afternoon = Session.objects.create(name='afternoon')
SessionEvent.objects.create(dt=datetime.datetime(2011, 9, 1, 23, 20, 20, tzinfo=EAT), session=afternoon)
SessionEvent.objects.create(dt=datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT), session=afternoon)
SessionEvent.objects.create(dt=datetime.datetime(2011, 9, 1, 3, 20, 40, tzinfo=EAT), session=morning)
morning_min_dt = datetime.datetime(2011, 9, 1, 3, 20, 40, tzinfo=EAT)
afternoon_min_dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
self.assertQuerysetEqual(
Session.objects.annotate(dt=Min('events__dt')).order_by('dt'),
[morning_min_dt, afternoon_min_dt],
transform=lambda d: d.dt)
self.assertQuerysetEqual(
Session.objects.annotate(dt=Min('events__dt')).filter(dt__lt=afternoon_min_dt),
[morning_min_dt],
transform=lambda d: d.dt)
self.assertQuerysetEqual(
Session.objects.annotate(dt=Min('events__dt')).filter(dt__gte=afternoon_min_dt),
[afternoon_min_dt],
transform=lambda d: d.dt)
@skipUnlessDBFeature('has_zoneinfo_database')
def test_query_datetimes(self):
Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT))
Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT))
self.assertQuerysetEqual(
Event.objects.datetimes('dt', 'year'),
[datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=EAT)],
transform=lambda d: d)
self.assertQuerysetEqual(
Event.objects.datetimes('dt', 'month'),
[datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=EAT)],
transform=lambda d: d)<|fim▁hole|> self.assertQuerysetEqual(
Event.objects.datetimes('dt', 'hour'),
[datetime.datetime(2011, 1, 1, 1, 0, 0, tzinfo=EAT),
datetime.datetime(2011, 1, 1, 4, 0, 0, tzinfo=EAT)],
transform=lambda d: d)
self.assertQuerysetEqual(
Event.objects.datetimes('dt', 'minute'),
[datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT),
datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT)],
transform=lambda d: d)
self.assertQuerysetEqual(
Event.objects.datetimes('dt', 'second'),
[datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT),
datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT)],
transform=lambda d: d)
@skipUnlessDBFeature('has_zoneinfo_database')
def test_query_datetimes_in_other_timezone(self):
Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT))
Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT))
with timezone.override(UTC):
self.assertQuerysetEqual(
Event.objects.datetimes('dt', 'year'),
[datetime.datetime(2010, 1, 1, 0, 0, 0, tzinfo=UTC),
datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=UTC)],
transform=lambda d: d)
self.assertQuerysetEqual(
Event.objects.datetimes('dt', 'month'),
[datetime.datetime(2010, 12, 1, 0, 0, 0, tzinfo=UTC),
datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=UTC)],
transform=lambda d: d)
self.assertQuerysetEqual(
Event.objects.datetimes('dt', 'day'),
[datetime.datetime(2010, 12, 31, 0, 0, 0, tzinfo=UTC),
datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=UTC)],
transform=lambda d: d)
self.assertQuerysetEqual(
Event.objects.datetimes('dt', 'hour'),
[datetime.datetime(2010, 12, 31, 22, 0, 0, tzinfo=UTC),
datetime.datetime(2011, 1, 1, 1, 0, 0, tzinfo=UTC)],
transform=lambda d: d)
self.assertQuerysetEqual(
Event.objects.datetimes('dt', 'minute'),
[datetime.datetime(2010, 12, 31, 22, 30, 0, tzinfo=UTC),
datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=UTC)],
transform=lambda d: d)
self.assertQuerysetEqual(
Event.objects.datetimes('dt', 'second'),
[datetime.datetime(2010, 12, 31, 22, 30, 0, tzinfo=UTC),
datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=UTC)],
transform=lambda d: d)
def test_raw_sql(self):
# Regression test for #17755
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
event = Event.objects.create(dt=dt)
self.assertQuerysetEqual(
Event.objects.raw('SELECT * FROM timezones_event WHERE dt = %s', [dt]),
[event],
transform=lambda d: d)
@requires_tz_support
def test_filter_date_field_with_aware_datetime(self):
# Regression test for #17742
day = datetime.date(2011, 9, 1)
AllDayEvent.objects.create(day=day)
# This is 2011-09-02T01:30:00+03:00 in EAT
dt = datetime.datetime(2011, 9, 1, 22, 30, 0, tzinfo=UTC)
self.assertFalse(AllDayEvent.objects.filter(day__gte=dt).exists())
def test_null_datetime(self):
# Regression test for #17294
e = MaybeEvent.objects.create()
self.assertEqual(e.dt, None)
@override_settings(TIME_ZONE='Africa/Nairobi')
class SerializationTests(TestCase):
# Backend-specific notes:
    # - JSON supports only milliseconds; microseconds will be truncated.
    # - PyYAML dumps the UTC offset correctly for timezone-aware datetimes,
    #   but when it loads this representation, it subtracts the offset and
# returns a naive datetime object in UTC (http://pyyaml.org/ticket/202).
# Tests are adapted to take these quirks into account.
def assert_python_contains_datetime(self, objects, dt):
self.assertEqual(objects[0]['fields']['dt'], dt)
def assert_json_contains_datetime(self, json, dt):
self.assertIn('"fields": {"dt": "%s"}' % dt, json)
def assert_xml_contains_datetime(self, xml, dt):
field = parseString(xml).getElementsByTagName('field')[0]
self.assertXMLEqual(field.childNodes[0].wholeText, dt)
def assert_yaml_contains_datetime(self, yaml, dt):
# Depending on the yaml dumper, '!timestamp' might be absent
six.assertRegex(self, yaml,
r"- fields: {dt: !(!timestamp)? '%s'}" % re.escape(dt))
def test_naive_datetime(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30)
data = serializers.serialize('python', [Event(dt=dt)])
self.assert_python_contains_datetime(data, dt)
obj = next(serializers.deserialize('python', data)).object
self.assertEqual(obj.dt, dt)
data = serializers.serialize('json', [Event(dt=dt)])
self.assert_json_contains_datetime(data, "2011-09-01T13:20:30")
obj = next(serializers.deserialize('json', data)).object
self.assertEqual(obj.dt, dt)
data = serializers.serialize('xml', [Event(dt=dt)])
self.assert_xml_contains_datetime(data, "2011-09-01T13:20:30")
obj = next(serializers.deserialize('xml', data)).object
self.assertEqual(obj.dt, dt)
if not isinstance(serializers.get_serializer('yaml'), serializers.BadSerializer):
data = serializers.serialize('yaml', [Event(dt=dt)])
self.assert_yaml_contains_datetime(data, "2011-09-01 13:20:30")
obj = next(serializers.deserialize('yaml', data)).object
self.assertEqual(obj.dt, dt)
def test_naive_datetime_with_microsecond(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060)
data = serializers.serialize('python', [Event(dt=dt)])
self.assert_python_contains_datetime(data, dt)
obj = next(serializers.deserialize('python', data)).object
self.assertEqual(obj.dt, dt)
data = serializers.serialize('json', [Event(dt=dt)])
self.assert_json_contains_datetime(data, "2011-09-01T13:20:30.405")
obj = next(serializers.deserialize('json', data)).object
self.assertEqual(obj.dt, dt.replace(microsecond=405000))
data = serializers.serialize('xml', [Event(dt=dt)])
self.assert_xml_contains_datetime(data, "2011-09-01T13:20:30.405060")
obj = next(serializers.deserialize('xml', data)).object
self.assertEqual(obj.dt, dt)
if not isinstance(serializers.get_serializer('yaml'), serializers.BadSerializer):
data = serializers.serialize('yaml', [Event(dt=dt)])
self.assert_yaml_contains_datetime(data, "2011-09-01 13:20:30.405060")
obj = next(serializers.deserialize('yaml', data)).object
self.assertEqual(obj.dt, dt)
def test_aware_datetime_with_microsecond(self):
dt = datetime.datetime(2011, 9, 1, 17, 20, 30, 405060, tzinfo=ICT)
data = serializers.serialize('python', [Event(dt=dt)])
self.assert_python_contains_datetime(data, dt)
obj = next(serializers.deserialize('python', data)).object
self.assertEqual(obj.dt, dt)
data = serializers.serialize('json', [Event(dt=dt)])
self.assert_json_contains_datetime(data, "2011-09-01T17:20:30.405+07:00")
obj = next(serializers.deserialize('json', data)).object
self.assertEqual(obj.dt, dt.replace(microsecond=405000))
data = serializers.serialize('xml', [Event(dt=dt)])
self.assert_xml_contains_datetime(data, "2011-09-01T17:20:30.405060+07:00")
obj = next(serializers.deserialize('xml', data)).object
self.assertEqual(obj.dt, dt)
if not isinstance(serializers.get_serializer('yaml'), serializers.BadSerializer):
data = serializers.serialize('yaml', [Event(dt=dt)])
self.assert_yaml_contains_datetime(data, "2011-09-01 17:20:30.405060+07:00")
obj = next(serializers.deserialize('yaml', data)).object
self.assertEqual(obj.dt.replace(tzinfo=UTC), dt)
def test_aware_datetime_in_utc(self):
dt = datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC)
data = serializers.serialize('python', [Event(dt=dt)])
self.assert_python_contains_datetime(data, dt)
obj = next(serializers.deserialize('python', data)).object
self.assertEqual(obj.dt, dt)
data = serializers.serialize('json', [Event(dt=dt)])
self.assert_json_contains_datetime(data, "2011-09-01T10:20:30Z")
obj = next(serializers.deserialize('json', data)).object
self.assertEqual(obj.dt, dt)
data = serializers.serialize('xml', [Event(dt=dt)])
self.assert_xml_contains_datetime(data, "2011-09-01T10:20:30+00:00")
obj = next(serializers.deserialize('xml', data)).object
self.assertEqual(obj.dt, dt)
if not isinstance(serializers.get_serializer('yaml'), serializers.BadSerializer):
data = serializers.serialize('yaml', [Event(dt=dt)])
self.assert_yaml_contains_datetime(data, "2011-09-01 10:20:30+00:00")
obj = next(serializers.deserialize('yaml', data)).object
self.assertEqual(obj.dt.replace(tzinfo=UTC), dt)
def test_aware_datetime_in_local_timezone(self):
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
data = serializers.serialize('python', [Event(dt=dt)])
self.assert_python_contains_datetime(data, dt)
obj = next(serializers.deserialize('python', data)).object
self.assertEqual(obj.dt, dt)
data = serializers.serialize('json', [Event(dt=dt)])
self.assert_json_contains_datetime(data, "2011-09-01T13:20:30+03:00")
obj = next(serializers.deserialize('json', data)).object
self.assertEqual(obj.dt, dt)
data = serializers.serialize('xml', [Event(dt=dt)])
self.assert_xml_contains_datetime(data, "2011-09-01T13:20:30+03:00")
obj = next(serializers.deserialize('xml', data)).object
self.assertEqual(obj.dt, dt)
if not isinstance(serializers.get_serializer('yaml'), serializers.BadSerializer):
data = serializers.serialize('yaml', [Event(dt=dt)])
self.assert_yaml_contains_datetime(data, "2011-09-01 13:20:30+03:00")
obj = next(serializers.deserialize('yaml', data)).object
self.assertEqual(obj.dt.replace(tzinfo=UTC), dt)
def test_aware_datetime_in_other_timezone(self):
dt = datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=ICT)
data = serializers.serialize('python', [Event(dt=dt)])
self.assert_python_contains_datetime(data, dt)
obj = next(serializers.deserialize('python', data)).object
self.assertEqual(obj.dt, dt)
data = serializers.serialize('json', [Event(dt=dt)])
self.assert_json_contains_datetime(data, "2011-09-01T17:20:30+07:00")
obj = next(serializers.deserialize('json', data)).object
self.assertEqual(obj.dt, dt)
data = serializers.serialize('xml', [Event(dt=dt)])
self.assert_xml_contains_datetime(data, "2011-09-01T17:20:30+07:00")
obj = next(serializers.deserialize('xml', data)).object
self.assertEqual(obj.dt, dt)
if not isinstance(serializers.get_serializer('yaml'), serializers.BadSerializer):
data = serializers.serialize('yaml', [Event(dt=dt)])
self.assert_yaml_contains_datetime(data, "2011-09-01 17:20:30+07:00")
obj = next(serializers.deserialize('yaml', data)).object
self.assertEqual(obj.dt.replace(tzinfo=UTC), dt)
@override_settings(DATETIME_FORMAT='c', TIME_ZONE='Africa/Nairobi', USE_L10N=False, USE_TZ=True)
class TemplateTests(TestCase):
@requires_tz_support
def test_localtime_templatetag_and_filters(self):
"""
Test the {% localtime %} templatetag and related filters.
"""
datetimes = {
'utc': datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC),
'eat': datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT),
'ict': datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=ICT),
'naive': datetime.datetime(2011, 9, 1, 13, 20, 30),
}
templates = {
'notag': Template("{% load tz %}{{ dt }}|{{ dt|localtime }}|{{ dt|utc }}|{{ dt|timezone:ICT }}"),
'noarg': Template("{% load tz %}{% localtime %}{{ dt }}|{{ dt|localtime }}|{{ dt|utc }}|{{ dt|timezone:ICT }}{% endlocaltime %}"),
'on': Template("{% load tz %}{% localtime on %}{{ dt }}|{{ dt|localtime }}|{{ dt|utc }}|{{ dt|timezone:ICT }}{% endlocaltime %}"),
'off': Template("{% load tz %}{% localtime off %}{{ dt }}|{{ dt|localtime }}|{{ dt|utc }}|{{ dt|timezone:ICT }}{% endlocaltime %}"),
}
# Transform a list of keys in 'datetimes' to the expected template
# output. This makes the definition of 'results' more readable.
def t(*result):
return '|'.join(datetimes[key].isoformat() for key in result)
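# Illustrative: t('utc', 'ict') evaluates to
# "2011-09-01T10:20:30+00:00|2011-09-01T17:20:30+07:00" (the isoformat of
# the two datetimes above, joined with '|').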
# Results for USE_TZ = True
results = {
'utc': {
'notag': t('eat', 'eat', 'utc', 'ict'),
'noarg': t('eat', 'eat', 'utc', 'ict'),
'on': t('eat', 'eat', 'utc', 'ict'),
'off': t('utc', 'eat', 'utc', 'ict'),
},
'eat': {
'notag': t('eat', 'eat', 'utc', 'ict'),
'noarg': t('eat', 'eat', 'utc', 'ict'),
'on': t('eat', 'eat', 'utc', 'ict'),
'off': t('eat', 'eat', 'utc', 'ict'),
},
'ict': {
'notag': t('eat', 'eat', 'utc', 'ict'),
'noarg': t('eat', 'eat', 'utc', 'ict'),
'on': t('eat', 'eat', 'utc', 'ict'),
'off': t('ict', 'eat', 'utc', 'ict'),
},
'naive': {
'notag': t('naive', 'eat', 'utc', 'ict'),
'noarg': t('naive', 'eat', 'utc', 'ict'),
'on': t('naive', 'eat', 'utc', 'ict'),
'off': t('naive', 'eat', 'utc', 'ict'),
}
}
for k1, dt in six.iteritems(datetimes):
for k2, tpl in six.iteritems(templates):
ctx = Context({'dt': dt, 'ICT': ICT})
actual = tpl.render(ctx)
expected = results[k1][k2]
self.assertEqual(actual, expected, '%s / %s: %r != %r' % (k1, k2, actual, expected))
# Changes for USE_TZ = False
results['utc']['notag'] = t('utc', 'eat', 'utc', 'ict')
results['ict']['notag'] = t('ict', 'eat', 'utc', 'ict')
with self.settings(USE_TZ=False):
for k1, dt in six.iteritems(datetimes):
for k2, tpl in six.iteritems(templates):
ctx = Context({'dt': dt, 'ICT': ICT})
actual = tpl.render(ctx)
expected = results[k1][k2]
self.assertEqual(actual, expected, '%s / %s: %r != %r' % (k1, k2, actual, expected))
@skipIf(pytz is None, "this test requires pytz")
def test_localtime_filters_with_pytz(self):
"""
Test the |localtime, |utc, and |timezone filters with pytz.
"""
# Use a pytz timezone as local time
tpl = Template("{% load tz %}{{ dt|localtime }}|{{ dt|utc }}")
ctx = Context({'dt': datetime.datetime(2011, 9, 1, 12, 20, 30)})
with self.settings(TIME_ZONE='Europe/Paris'):
self.assertEqual(tpl.render(ctx), "2011-09-01T12:20:30+02:00|2011-09-01T10:20:30+00:00")
# Use a pytz timezone as argument
tpl = Template("{% load tz %}{{ dt|timezone:tz }}")
ctx = Context({'dt': datetime.datetime(2011, 9, 1, 13, 20, 30),
'tz': pytz.timezone('Europe/Paris')})
self.assertEqual(tpl.render(ctx), "2011-09-01T12:20:30+02:00")
# Use a pytz timezone name as argument
tpl = Template("{% load tz %}{{ dt|timezone:'Europe/Paris' }}")
ctx = Context({'dt': datetime.datetime(2011, 9, 1, 13, 20, 30),
'tz': pytz.timezone('Europe/Paris')})
self.assertEqual(tpl.render(ctx), "2011-09-01T12:20:30+02:00")
def test_localtime_templatetag_invalid_argument(self):
with self.assertRaises(TemplateSyntaxError):
Template("{% load tz %}{% localtime foo %}{% endlocaltime %}").render()
def test_localtime_filters_do_not_raise_exceptions(self):
"""
Test the |localtime, |utc, and |timezone filters on bad inputs.
"""
tpl = Template("{% load tz %}{{ dt }}|{{ dt|localtime }}|{{ dt|utc }}|{{ dt|timezone:tz }}")
with self.settings(USE_TZ=True):
# bad datetime value
ctx = Context({'dt': None, 'tz': ICT})
self.assertEqual(tpl.render(ctx), "None|||")
ctx = Context({'dt': 'not a date', 'tz': ICT})
self.assertEqual(tpl.render(ctx), "not a date|||")
# bad timezone value
tpl = Template("{% load tz %}{{ dt|timezone:tz }}")
ctx = Context({'dt': datetime.datetime(2011, 9, 1, 13, 20, 30), 'tz': None})
self.assertEqual(tpl.render(ctx), "")
ctx = Context({'dt': datetime.datetime(2011, 9, 1, 13, 20, 30), 'tz': 'not a tz'})
self.assertEqual(tpl.render(ctx), "")
@requires_tz_support
def test_timezone_templatetag(self):
"""
Test the {% timezone %} templatetag.
"""
tpl = Template(
"{% load tz %}"
"{{ dt }}|"
"{% timezone tz1 %}"
"{{ dt }}|"
"{% timezone tz2 %}"
"{{ dt }}"
"{% endtimezone %}"
"{% endtimezone %}"
)
ctx = Context({'dt': datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC),
'tz1': ICT, 'tz2': None})
self.assertEqual(tpl.render(ctx), "2011-09-01T13:20:30+03:00|2011-09-01T17:20:30+07:00|2011-09-01T13:20:30+03:00")
@skipIf(pytz is None, "this test requires pytz")
def test_timezone_templatetag_with_pytz(self):
"""
Test the {% timezone %} templatetag with pytz.
"""
tpl = Template("{% load tz %}{% timezone tz %}{{ dt }}{% endtimezone %}")
# Use a pytz timezone as argument
ctx = Context({'dt': datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT),
'tz': pytz.timezone('Europe/Paris')})
self.assertEqual(tpl.render(ctx), "2011-09-01T12:20:30+02:00")
# Use a pytz timezone name as argument
ctx = Context({'dt': datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT),
'tz': 'Europe/Paris'})
self.assertEqual(tpl.render(ctx), "2011-09-01T12:20:30+02:00")
def test_timezone_templatetag_invalid_argument(self):
with self.assertRaises(TemplateSyntaxError):
Template("{% load tz %}{% timezone %}{% endtimezone %}").render()
with self.assertRaises(ValueError if pytz is None else pytz.UnknownTimeZoneError):
Template("{% load tz %}{% timezone tz %}{% endtimezone %}").render(Context({'tz': 'foobar'}))
@skipIf(sys.platform.startswith('win'), "Windows uses non-standard time zone names")
def test_get_current_timezone_templatetag(self):
"""
Test the {% get_current_timezone %} templatetag.
"""
tpl = Template("{% load tz %}{% get_current_timezone as time_zone %}{{ time_zone }}")
self.assertEqual(tpl.render(Context()), "Africa/Nairobi" if pytz else "EAT")
with timezone.override(UTC):
self.assertEqual(tpl.render(Context()), "UTC")
tpl = Template("{% load tz %}{% timezone tz %}{% get_current_timezone as time_zone %}{% endtimezone %}{{ time_zone }}")
self.assertEqual(tpl.render(Context({'tz': ICT})), "+0700")
with timezone.override(UTC):
self.assertEqual(tpl.render(Context({'tz': ICT})), "+0700")
@skipIf(pytz is None, "this test requires pytz")
def test_get_current_timezone_templatetag_with_pytz(self):
"""
Test the {% get_current_timezone %} templatetag with pytz.
"""
tpl = Template("{% load tz %}{% get_current_timezone as time_zone %}{{ time_zone }}")
with timezone.override(pytz.timezone('Europe/Paris')):
self.assertEqual(tpl.render(Context()), "Europe/Paris")
tpl = Template("{% load tz %}{% timezone 'Europe/Paris' %}{% get_current_timezone as time_zone %}{% endtimezone %}{{ time_zone }}")
self.assertEqual(tpl.render(Context()), "Europe/Paris")
def test_get_current_timezone_templatetag_invalid_argument(self):
with self.assertRaises(TemplateSyntaxError):
Template("{% load tz %}{% get_current_timezone %}").render()
@skipIf(sys.platform.startswith('win'), "Windows uses non-standard time zone names")
def test_tz_template_context_processor(self):
"""
Test the django.core.context_processors.tz template context processor.
"""
tpl = Template("{{ TIME_ZONE }}")
self.assertEqual(tpl.render(Context()), "")
self.assertEqual(tpl.render(RequestContext(HttpRequest())), "Africa/Nairobi" if pytz else "EAT")
@requires_tz_support
def test_date_and_time_template_filters(self):
tpl = Template("{{ dt|date:'Y-m-d' }} at {{ dt|time:'H:i:s' }}")
ctx = Context({'dt': datetime.datetime(2011, 9, 1, 20, 20, 20, tzinfo=UTC)})
self.assertEqual(tpl.render(ctx), "2011-09-01 at 23:20:20")
with timezone.override(ICT):
self.assertEqual(tpl.render(ctx), "2011-09-02 at 03:20:20")
def test_date_and_time_template_filters_honor_localtime(self):
tpl = Template("{% load tz %}{% localtime off %}{{ dt|date:'Y-m-d' }} at {{ dt|time:'H:i:s' }}{% endlocaltime %}")
ctx = Context({'dt': datetime.datetime(2011, 9, 1, 20, 20, 20, tzinfo=UTC)})
self.assertEqual(tpl.render(ctx), "2011-09-01 at 20:20:20")
with timezone.override(ICT):
self.assertEqual(tpl.render(ctx), "2011-09-01 at 20:20:20")
def test_localtime_with_time_zone_setting_set_to_none(self):
# Regression for #17274
tpl = Template("{% load tz %}{{ dt }}")
ctx = Context({'dt': datetime.datetime(2011, 9, 1, 12, 20, 30, tzinfo=EAT)})
with self.settings(TIME_ZONE=None):
# the actual value depends on the system time zone of the host
self.assertTrue(tpl.render(ctx).startswith("2011"))
@requires_tz_support
def test_now_template_tag_uses_current_time_zone(self):
# Regression for #17343
tpl = Template("{% now \"O\" %}")
self.assertEqual(tpl.render(Context({})), "+0300")
with timezone.override(ICT):
self.assertEqual(tpl.render(Context({})), "+0700")
@override_settings(DATETIME_FORMAT='c', TIME_ZONE='Africa/Nairobi', USE_L10N=False, USE_TZ=False)
class LegacyFormsTests(TestCase):
def test_form(self):
form = EventForm({'dt': '2011-09-01 13:20:30'})
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['dt'], datetime.datetime(2011, 9, 1, 13, 20, 30))
@skipIf(pytz is None, "this test requires pytz")
def test_form_with_non_existent_time(self):
form = EventForm({'dt': '2011-03-27 02:30:00'})
with timezone.override(pytz.timezone('Europe/Paris')):
# this is obviously a bug
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['dt'], datetime.datetime(2011, 3, 27, 2, 30, 0))
@skipIf(pytz is None, "this test requires pytz")
def test_form_with_ambiguous_time(self):
form = EventForm({'dt': '2011-10-30 02:30:00'})
with timezone.override(pytz.timezone('Europe/Paris')):
# this is obviously a bug
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['dt'], datetime.datetime(2011, 10, 30, 2, 30, 0))
def test_split_form(self):
form = EventSplitForm({'dt_0': '2011-09-01', 'dt_1': '13:20:30'})
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['dt'], datetime.datetime(2011, 9, 1, 13, 20, 30))
def test_model_form(self):
EventModelForm({'dt': '2011-09-01 13:20:30'}).save()
e = Event.objects.get()
self.assertEqual(e.dt, datetime.datetime(2011, 9, 1, 13, 20, 30))
@override_settings(DATETIME_FORMAT='c', TIME_ZONE='Africa/Nairobi', USE_L10N=False, USE_TZ=True)
class NewFormsTests(TestCase):
@requires_tz_support
def test_form(self):
form = EventForm({'dt': '2011-09-01 13:20:30'})
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['dt'], datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC))
def test_form_with_other_timezone(self):
form = EventForm({'dt': '2011-09-01 17:20:30'})
with timezone.override(ICT):
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['dt'], datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC))
def test_form_with_explicit_timezone(self):
form = EventForm({'dt': '2011-09-01 17:20:30+07:00'})
# Datetime input formats don't allow providing a time zone.
self.assertFalse(form.is_valid())
@skipIf(pytz is None, "this test requires pytz")
def test_form_with_non_existent_time(self):
with timezone.override(pytz.timezone('Europe/Paris')):
form = EventForm({'dt': '2011-03-27 02:30:00'})
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['dt'],
["2011-03-27 02:30:00 couldn't be interpreted in time zone "
"Europe/Paris; it may be ambiguous or it may not exist."])
@skipIf(pytz is None, "this test requires pytz")
def test_form_with_ambiguous_time(self):
with timezone.override(pytz.timezone('Europe/Paris')):
form = EventForm({'dt': '2011-10-30 02:30:00'})
self.assertFalse(form.is_valid())
self.assertEqual(form.errors['dt'],
["2011-10-30 02:30:00 couldn't be interpreted in time zone "
"Europe/Paris; it may be ambiguous or it may not exist."])
@requires_tz_support
def test_split_form(self):
form = EventSplitForm({'dt_0': '2011-09-01', 'dt_1': '13:20:30'})
self.assertTrue(form.is_valid())
self.assertEqual(form.cleaned_data['dt'], datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC))
@requires_tz_support
def test_localized_form(self):
form = EventLocalizedForm(initial={'dt': datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)})
with timezone.override(ICT):
self.assertIn("2011-09-01 17:20:30", str(form))
@requires_tz_support
def test_model_form(self):
EventModelForm({'dt': '2011-09-01 13:20:30'}).save()
e = Event.objects.get()
self.assertEqual(e.dt, datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC))
@requires_tz_support
def test_localized_model_form(self):
form = EventLocalizedModelForm(instance=Event(dt=datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)))
with timezone.override(ICT):
self.assertIn("2011-09-01 17:20:30", str(form))
@override_settings(DATETIME_FORMAT='c', TIME_ZONE='Africa/Nairobi', USE_L10N=False, USE_TZ=True,
PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class AdminTests(TestCase):
urls = 'timezones.urls'
fixtures = ['tz_users.xml']
def setUp(self):
self.client.login(username='super', password='secret')
@requires_tz_support
def test_changelist(self):
e = Event.objects.create(dt=datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC))
response = self.client.get(reverse('admin:timezones_event_changelist'))
self.assertContains(response, e.dt.astimezone(EAT).isoformat())
def test_changelist_in_other_timezone(self):
e = Event.objects.create(dt=datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC))
with timezone.override(ICT):
response = self.client.get(reverse('admin:timezones_event_changelist'))
self.assertContains(response, e.dt.astimezone(ICT).isoformat())
@requires_tz_support
def test_change_editable(self):
e = Event.objects.create(dt=datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC))
response = self.client.get(reverse('admin:timezones_event_change', args=(e.pk,)))
self.assertContains(response, e.dt.astimezone(EAT).date().isoformat())
self.assertContains(response, e.dt.astimezone(EAT).time().isoformat())
def test_change_editable_in_other_timezone(self):
e = Event.objects.create(dt=datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC))
with timezone.override(ICT):
response = self.client.get(reverse('admin:timezones_event_change', args=(e.pk,)))
self.assertContains(response, e.dt.astimezone(ICT).date().isoformat())
self.assertContains(response, e.dt.astimezone(ICT).time().isoformat())
@requires_tz_support
def test_change_readonly(self):
Timestamp.objects.create()
# re-fetch the object for backends that lose microseconds (MySQL)
t = Timestamp.objects.get()
response = self.client.get(reverse('admin:timezones_timestamp_change', args=(t.pk,)))
self.assertContains(response, t.created.astimezone(EAT).isoformat())
def test_change_readonly_in_other_timezone(self):
Timestamp.objects.create()
# re-fetch the object for backends that lose microseconds (MySQL)
t = Timestamp.objects.get()
with timezone.override(ICT):
response = self.client.get(reverse('admin:timezones_timestamp_change', args=(t.pk,)))
self.assertContains(response, t.created.astimezone(ICT).isoformat())
@override_settings(TIME_ZONE='Africa/Nairobi')
class UtilitiesTests(TestCase):
def test_make_aware(self):
self.assertEqual(
timezone.make_aware(datetime.datetime(2011, 9, 1, 13, 20, 30), EAT),
datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
)
self.assertEqual(
timezone.make_aware(datetime.datetime(2011, 9, 1, 10, 20, 30), UTC),
datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC)
)
def test_make_naive(self):
self.assertEqual(
timezone.make_naive(datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT), EAT),
datetime.datetime(2011, 9, 1, 13, 20, 30)
)
self.assertEqual(
timezone.make_naive(datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT), UTC),
datetime.datetime(2011, 9, 1, 10, 20, 30)
)
self.assertEqual(
timezone.make_naive(datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC), UTC),
datetime.datetime(2011, 9, 1, 10, 20, 30)
)<|fim▁end|> | self.assertQuerysetEqual(
Event.objects.datetimes('dt', 'day'),
[datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=EAT)],
transform=lambda d: d)
<|file_name|>BlobObject.java<|end_file_name|><|fim▁begin|>package com.elderbyte.josc.api;
import com.elderbyte.josc.core.BlobObjectUtils;
import java.time.Instant;
import java.util.Map;
import java.util.Optional;
/**
* Represents a blob object
*/
public interface BlobObject {
/**
* Gets the bucket name where this object is stored
*/
String getBucket();
/**
* Gets the object key.
*
* The object key is unique inside a bucket.
* It may contain slashes '/', which are considered as virtual directory notations.
*/
String getObjectKey();
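// Illustrative (key value assumed, not part of the original interface): an
// object stored under key "reports/2020/q1.pdf" sits in the virtual
// directory "reports/2020/" and has the virtual file name "q1.pdf".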
/**
* @deprecated Please switch to getObjectKey()
*/
default String getObjectName() {
return getObjectKey();
}
/**
* The blob object size in bytes
*/
long getLength();
/**
* Gets the content type (mime-type) of this object.
*/
Optional<String> getContentType();
/**
* Gets the object's server-side calculated hash.
* Might not be available.
*/
Optional<String> getObjectHash();
/**
* @deprecated Please switch to getObjectHash()
*/
default String hash() {
return getObjectHash().orElse(null);
}
/**
* Last modified / creation date of this object
*/
Optional<Instant> getLastModified();
/**
* Other metadata entries
*/
Map<String,String> getMetaData();
/**
* Returns true if this object is actually a directory.
*/
boolean isDirectory();
/**
* Returns the filename of this object.<|fim▁hole|> default String getVirtualFileName(){
return BlobObjectUtils.extractVirtualFileName(getObjectKey());
}
/**
* Extracts the extension from this object.
* Only the file name part is considered for extension scanning.
*
* @return Returns the extension with the dot, such as '.png'
*/
default String getVirtualExtension(){
return BlobObjectUtils.extractVirtualExtensionWithDot(getObjectKey());
}
}<|fim▁end|> | * Slashes are interpreted as virtual directory indicators.
*
* @return Returns the last part after the last '/', if no '/' is found returns the input string.
*/ |
<|file_name|>twistedtools.py<|end_file_name|><|fim▁begin|>"""
Twisted integration
-------------------
This module provides a very simple way to integrate your tests with the
Twisted_ event loop.
You must import this module *before* importing anything from Twisted itself!
Example::
from nose.twistedtools import reactor, deferred
@deferred()
def test_resolve():
return reactor.resolve("www.python.org")
Or, more realistically::
@deferred(timeout=5.0)
def test_resolve():
d = reactor.resolve("www.python.org")
def check_ip(ip):
assert ip == "67.15.36.43"
d.addCallback(check_ip)
return d
.. _Twisted: http://twistedmatrix.com/trac/
"""
import sys
from Queue import Queue, Empty
from nose.tools import make_decorator, TimeExpired
__all__ = [
'threaded_reactor', 'reactor', 'deferred', 'TimeExpired',
'stop_reactor'
]
_twisted_thread = None
def threaded_reactor():
"""
Start the Twisted reactor in a separate thread, if not already done.
Returns the reactor.
The thread will automatically be destroyed when all the tests are done.
"""
global _twisted_thread
try:
from twisted.internet import reactor
except ImportError:
return None, None
if not _twisted_thread:
from twisted.python import threadable
from threading import Thread
_twisted_thread = Thread(target=lambda: reactor.run( \
installSignalHandlers=False))
_twisted_thread.setDaemon(True)
_twisted_thread.start()
return reactor, _twisted_thread
# Export global reactor variable, as Twisted does
reactor, reactor_thread = threaded_reactor()
def stop_reactor():
"""Stop the reactor and join the reactor thread until it stops.
Call this function in teardown at the module or package level to
reset the twisted system after your tests. You *must* do this if
you mix tests using these tools and tests using twisted.trial.
"""
global _twisted_thread
reactor.stop()
reactor_thread.join()
for p in reactor.getDelayedCalls():
if p.active():
p.cancel()
_twisted_thread = None
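# Illustrative usage sketch (test layout assumed, not from nose itself):
# call stop_reactor() from a module-level teardown so the reactor thread is
# fully reset between test modules, e.g.:
#
#     def teardown_module():
#         stop_reactor()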
def deferred(timeout=None):
"""
By wrapping a test function with this decorator, you can return a
twisted Deferred and the test will wait for the deferred to be triggered.
The whole test function will run inside the Twisted event loop.
The optional timeout parameter specifies the maximum duration of the test.
The difference with timed() is that timed() will still wait for the test
to end, while deferred() will stop the test when its timeout has expired.
The latter is more desirable when dealing with network tests, because
the result may actually never arrive.
If the callback is triggered, the test has passed.
If the errback is triggered or the timeout expires, the test has failed.
Example::
@deferred(timeout=5.0)
def test_resolve():
return reactor.resolve("www.python.org")
Attention! If you combine this decorator with other decorators (like
"raises"), deferred() must be called *first*!
In other words, this is good::
@raises(DNSLookupError)
@deferred()
def test_error():
return reactor.resolve("xxxjhjhj.biz")
and this is bad::
@deferred()
@raises(DNSLookupError)
def test_error():
return reactor.resolve("xxxjhjhj.biz")
"""
reactor, reactor_thread = threaded_reactor()
if reactor is None:
raise ImportError("twisted is not available or could not be imported")
# Check for common syntax mistake
# (otherwise, tests can be silently ignored
# if one writes "@deferred" instead of "@deferred()")
try:
timeout is None or timeout + 0
except TypeError:
raise TypeError("'timeout' argument must be a number or None")
def decorate(func):
def wrapper(*args, **kargs):
q = Queue()
def callback(value):
q.put(None)<|fim▁hole|> except:
q.put(sys.exc_info())
def g():
try:
d = func(*args, **kargs)
try:
d.addCallbacks(callback, errback)
# Check for a common mistake and display a nice error
# message
except AttributeError:
raise TypeError("you must return a twisted Deferred "
"from your test case!")
# Catch exceptions raised in the test body (from the
# Twisted thread)
except:
q.put(sys.exc_info())
reactor.callFromThread(g)
try:
error = q.get(timeout=timeout)
except Empty:
raise TimeExpired("timeout expired before end of test (%f s.)"
% timeout)
# Re-raise all exceptions
if error is not None:
exc_type, exc_value, tb = error
raise exc_type, exc_value, tb
wrapper = make_decorator(func)(wrapper)
return wrapper
return decorate<|fim▁end|> | def errback(failure):
# Retrieve and save full exception info
try:
failure.raiseException() |
<|file_name|>device.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
def device_from_request(request):
"""
Determines the device name from the request by first looking for an
overriding cookie and, if not found, by matching the user agent.
Used at both the template level for choosing the template to load and
also at the cache level as a cache key prefix.
"""
from mezzanine.conf import settings
try:
# If a device was set via cookie, match available devices.
for (device, _) in settings.DEVICE_USER_AGENTS:
if device == request.COOKIES["mezzanine-device"]:
return device
except KeyError:
# If a device wasn't set via cookie, match user agent.
try:
user_agent = request.META["HTTP_USER_AGENT"].lower()
except KeyError:
pass
else:<|fim▁hole|> for (device, ua_strings) in settings.DEVICE_USER_AGENTS:
for ua_string in ua_strings:
if ua_string.lower() in user_agent:
return device
return ""
def templates_for_device(request, templates):
"""
Given a template name (or list of them), returns the template names
as a list, with each name prefixed with the device directory
inserted before its associated default in the list.
"""
from mezzanine.conf import settings
if not isinstance(templates, (list, tuple)):
templates = [templates]
device = device_from_request(request)
device_templates = []
for template in templates:
if device:
device_templates.append("%s/%s" % (device, template))
if settings.DEVICE_DEFAULT and settings.DEVICE_DEFAULT != device:
default = "%s/%s" % (settings.DEVICE_DEFAULT, template)
device_templates.append(default)
device_templates.append(template)
return device_templates<|fim▁end|> | try:
user_agent = user_agent.decode("utf-8")
except AttributeError:
pass |
<|file_name|>index.py<|end_file_name|><|fim▁begin|>from flask import Flask
from flask import render_template
from .. import app
@app.route('/')
def index():
user = {'first_name': 'Lance', 'last_name': 'Anderson'}
return render_template('index.html', user=user)
@app.route('/user/<user_id>/board/<board_id>')
@app.route('/new_board')
def board(user_id=None, board_id=None):<|fim▁hole|><|fim▁end|> | user = {'first_name': 'Lance', 'last_name': 'Anderson'}
return render_template('board.html', user=user) |
<|file_name|>plot_feature_distributions.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
'''
Plot distribution of each feature,
conditioned on its bfeature type
'''
import argparse
import matplotlib.pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages
from common import *
from information import utils
from scipy.stats import itemfreq
nbins = 100
def opts():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('features', type=load_npz,
help='Training data features (npz)')
parser.add_argument('output',
help='Output file with plots (pdf)')
return parser
if __name__ == "__main__":
args = opts().parse_args()
pdf = PdfPages(args.output)
dfs = args.features['ifeatures']
cfs = args.features['ffeatures']
print "Plotting float features"
bfs = args.features['bfeatures']
u = utils.unique_rows(bfs)
indices = [np.all(bfs==ui, axis=-1) for ui in u]
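# Illustrative (assumes utils.unique_rows returns the distinct rows): for
# bfs rows [[0,1],[0,1],[1,0]], u is [[0,1],[1,0]] and indices holds one
# boolean mask per distinct row: [True,True,False] and [False,False,True].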
for j, f in enumerate(cfs.T):
print "...ffeature %d" % j
fig = plt.figure()
h = np.zeros(nbins)
not_nan = f[np.logical_not(np.isnan(f))]
f_min = not_nan.min()
f_max = not_nan.max()
x = np.linspace(f_min, f_max, nbins)
dx = (f_max - f_min) / nbins
for idx in indices:
h_new, bins = np.histogram(f[idx], range=(f_min, f_max), bins=nbins)
plt.bar(x, h_new, bottom=h, width=dx)
h += h_new
plt.xlim(f_min, f_max)
plt.xlabel('f')
plt.ylabel('P(f)')
plt.title('FFeature %d. # NaN = %d' % (j, np.sum(np.isnan(f))))
pdf.savefig(fig)
plt.close()
print "Plotting integer features"
for j, x in enumerate(dfs.T):
print "...dfeature %d" % j
freq = itemfreq(x)
fig = plt.figure()
xu = np.sort(np.unique(x))
h = np.zeros_like(xu)
for idx in indices:
f = itemfreq(x[idx])
h_new = np.zeros_like(h)
h_new[f[:,0]] = f[:,1]
plt.bar(xu, h_new, bottom=h)
h += h_new
plt.xlabel('f')
plt.ylabel('P(f)')<|fim▁hole|> plt.title('DFeature %d' % j)
pdf.savefig(fig)
plt.close()
pdf.close()<|fim▁end|> | |
<|file_name|>DoubleRangeParameter.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2006-2020 The MZmine Development Team
*
* This file is part of MZmine.
*
* MZmine is free software; you can redistribute it and/or modify it under the terms of the GNU
* General Public License as published by the Free Software Foundation; either version 2 of the
* License, or (at your option) any later version.
*
* MZmine is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
* the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
* Public License for more details.
*
* You should have received a copy of the GNU General Public License along with MZmine; if not,
* write to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301
* USA
*/
package io.github.mzmine.parameters.parametertypes.ranges;
import java.text.NumberFormat;
import java.util.Collection;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import com.google.common.collect.Range;
import io.github.mzmine.parameters.UserParameter;
public class DoubleRangeParameter implements UserParameter<Range<Double>, DoubleRangeComponent> {
private final String name, description;
protected final boolean valueRequired;
private final boolean nonEmptyRequired;
private NumberFormat format;
private Range<Double> value;
private Range<Double> maxAllowedRange;
public DoubleRangeParameter(String name, String description, NumberFormat format) {
this(name, description, format, true, false, null);
}
public DoubleRangeParameter(String name, String description, NumberFormat format,
Range<Double> defaultValue) {
this(name, description, format, true, false, defaultValue);
}
public DoubleRangeParameter(String name, String description, NumberFormat format,
boolean valueRequired, Range<Double> defaultValue) {
this(name, description, format, valueRequired, false, defaultValue);
}
public DoubleRangeParameter(String name, String description, NumberFormat format,
boolean valueRequired, boolean nonEmptyRequired, Range<Double> defaultValue) {
this(name, description, format, valueRequired, nonEmptyRequired, defaultValue, null);
}
public DoubleRangeParameter(String name, String description, NumberFormat format,
boolean valueRequired, boolean nonEmptyRequired, Range<Double> defaultValue, Range<Double> maxAllowedRange) {
this.name = name;
this.description = description;
this.format = format;
this.valueRequired = valueRequired;
this.nonEmptyRequired = nonEmptyRequired;
this.value = defaultValue;
this.maxAllowedRange = maxAllowedRange;
}
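// Illustrative construction (parameter values hypothetical):
// new DoubleRangeParameter("m/z range", "Range of m/z values to include",
//     NumberFormat.getNumberInstance(), Range.closed(100.0, 500.0));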
/**
* @see io.github.mzmine.data.Parameter#getName()
*/
@Override
public String getName() {
return name;
}
/**
* @see io.github.mzmine.data.Parameter#getDescription()
*/
@Override
public String getDescription() {
return description;
}
public boolean isValueRequired() {
return valueRequired;
}
@Override
public DoubleRangeComponent createEditingComponent() {
return new DoubleRangeComponent(format);
}
public Range<Double> getValue() {
return value;
}
@Override
public void setValue(Range<Double> value) {
this.value = value;
}
@Override
public DoubleRangeParameter cloneParameter() {
DoubleRangeParameter copy = new DoubleRangeParameter(name, description, format);
copy.setValue(this.getValue());
return copy;
}
@Override
public void setValueFromComponent(DoubleRangeComponent component) {
value = component.getValue();
}
@Override
public void setValueToComponent(DoubleRangeComponent component, Range<Double> newValue) {
component.setValue(newValue);
}
@Override
public void loadValueFromXML(Element xmlElement) {
NodeList minNodes = xmlElement.getElementsByTagName("min");
if (minNodes.getLength() != 1)
return;
NodeList maxNodes = xmlElement.getElementsByTagName("max");
if (maxNodes.getLength() != 1)
return;
String minText = minNodes.item(0).getTextContent();
String maxText = maxNodes.item(0).getTextContent();
double min = Double.valueOf(minText);
double max = Double.valueOf(maxText);
value = Range.closed(min, max);
}
@Override
public void saveValueToXML(Element xmlElement) {
if (value == null)
return;
Document parentDocument = xmlElement.getOwnerDocument();
Element newElement = parentDocument.createElement("min");
newElement.setTextContent(String.valueOf(value.lowerEndpoint()));
xmlElement.appendChild(newElement);
newElement = parentDocument.createElement("max");
newElement.setTextContent(String.valueOf(value.upperEndpoint()));
xmlElement.appendChild(newElement);
}
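// Illustrative XML shape written by saveValueToXML for Range.closed(1.5, 3.0),
// assuming an enclosing <parameter> element (the enclosing name is not set here):
// <parameter><min>1.5</min><max>3.0</max></parameter>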
@Override
public boolean checkValue(Collection<String> errorMessages) {
if (valueRequired && (value == null)) {
errorMessages.add(name + " is not set properly");
return false;
}
if (value != null) {
if (!nonEmptyRequired && value.lowerEndpoint() > value.upperEndpoint()) {
errorMessages.add(name + " range maximum must be higher than or equal to the minimum");
return false;<|fim▁hole|> }
if (nonEmptyRequired && value.lowerEndpoint() >= value.upperEndpoint()) {
errorMessages.add(name + " range maximum must be higher than minimum");
return false;
}
}
if (value != null && maxAllowedRange != null) {
if (maxAllowedRange.intersection(value) != value) {
errorMessages.add(name + " must be within " + maxAllowedRange.toString());
return false;
}
}
return true;
}
}<|fim▁end|> | |
<|file_name|>setup_test.go<|end_file_name|><|fim▁begin|>package sign
import (
"testing"
"github.com/coredns/caddy"
)
func TestParse(t *testing.T) {
tests := []struct {
input string
shouldErr bool
exp *Signer
}{
{`sign testdata/db.miek.nl miek.nl {
key file testdata/Kmiek.nl.+013+59725
}`,
false,
&Signer{
keys: []Pair{},
origin: "miek.nl.",
dbfile: "testdata/db.miek.nl",
directory: "/var/lib/coredns",
signedfile: "db.miek.nl.signed",
},
},
{`sign testdata/db.miek.nl example.org {
key file testdata/Kmiek.nl.+013+59725
directory testdata
}`,
false,
&Signer{
keys: []Pair{},
origin: "example.org.",
dbfile: "testdata/db.miek.nl",
directory: "testdata",
signedfile: "db.example.org.signed",
},
},
// errors
{`sign db.example.org {
key file /etc/coredns/keys/Kexample.org
}`,
true,
nil,
},
}
for i, tc := range tests {
c := caddy.NewTestController("dns", tc.input)
sign, err := parse(c)
if err == nil && tc.shouldErr {
t.Fatalf("Test %d expected errors, but got no error", i)
}
if err != nil && !tc.shouldErr {
t.Fatalf("Test %d expected no errors, but got '%v'", i, err)
}
if tc.shouldErr {
continue
}
signer := sign.signers[0]
if x := signer.origin; x != tc.exp.origin {
t.Errorf("Test %d expected %s as origin, got %s", i, tc.exp.origin, x)
}
if x := signer.dbfile; x != tc.exp.dbfile {
t.Errorf("Test %d expected %s as dbfile, got %s", i, tc.exp.dbfile, x)<|fim▁hole|> if x := signer.directory; x != tc.exp.directory {
t.Errorf("Test %d expected %s as directory, got %s", i, tc.exp.directory, x)
}
if x := signer.signedfile; x != tc.exp.signedfile {
t.Errorf("Test %d expected %s as signedfile, got %s", i, tc.exp.signedfile, x)
}
}
}<|fim▁end|> | } |