prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>test_result.rs<|end_file_name|><|fim▁begin|>mod checked {
#[derive(Debug)]
pub enum MathError {<|fim▁hole|> pub type MathResult = Result<f64, MathError>;
pub fn div(x: f64, y: f64) -> MathResult {
if y == 0.0 {
Err(MathError::DivisionByZero)
} else {
Ok(x / y)
}
}
pub fn sqrt(x: f64) -> MathResult {
if x < 0.0 {
Err(MathError::NegativeSquareRoot)
} else {
Ok(x.sqrt())
}
}
pub fn ln(x: f64) -> MathResult {
if x <= 0.0 {
Err(MathError::NonPositiveLogarithm)
} else {
Ok(x.ln())
}
}
}
fn op(x: f64, y: f64) -> f64 {
match checked::div(x, y) {
Err(why) => panic!("{:?}", why),
Ok(ratio) => match checked::ln(ratio) {
Err(why) => panic!("{:?}", why),
Ok(ln) => match checked::sqrt(ln) {
Err(why) => panic!("{:?}", why),
Ok(sqrt) => sqrt,
},
},
}
}
fn main() {
println!("{}", op(1.0, 10.0));
}<|fim▁end|> | DivisionByZero,
NonPositiveLogarithm,
NegativeSquareRoot,
} |
<|file_name|>ExampleUnitTest.java<|end_file_name|><|fim▁begin|>package org.galaxy.myhttp;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* To work on unit tests, switch the Test Artifact in the Build Variants view.
*/<|fim▁hole|> @Test
public void addition_isCorrect() throws Exception {
assertEquals(4, 2 + 2);
}
}<|fim▁end|> | public class ExampleUnitTest { |
<|file_name|>cmake.py<|end_file_name|><|fim▁begin|>#!env python
# Copyright 2008 Simon Edwards <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import re
import os.path
import glob
import kbindinggenerator.cmakeparser as cmakeparser
def ExtractInstallFiles(filename=None,input=None,variables=None):
if variables is None:
variables = {}
else:
variables = variables.copy()
install_list = []
if filename is not None:
variables['cmake_current_source_dir'] = [os.path.dirname(filename)]
ExtractInstallFilesWithContext(variables, install_list, filename,input)
# print(repr(variables))
# print(repr(install_list))
return install_list
def ExtractInstallFilesWithContext(variables, install_list, filename=None, input=None, fileprefix=""):
inputstring = ""
currentdir = ""
if input:
inputstring = input
elif filename:
currentdir = os.path.dirname(filename)
fhandle = open(filename)
inputstring= fhandle.read()
fhandle.close()
parser = cmakeparser.CMakeParser()
command_list = parser.parse(inputstring, filename)
include_dirs = []
for commandobject in command_list:
command = commandobject.command().lower()
args = [arg.value() for arg in commandobject.arguments()]
if command=="set":
variables[args[0].lower()] = ExpandArgs(variables, args[1:], filename)
elif command=="install":
install_args = ExpandArgs(variables, args, filename)
for arg in install_args:
if arg.endswith('.h'):
for basepath in [currentdir, fileprefix] + include_dirs:
fullpath = os.path.join(basepath, arg)
# print(fullpath)
if os.path.exists(fullpath):
install_list.append(fullpath)
break
else:
fullpath = os.path.join(currentdir, basepath, arg)
if os.path.exists(fullpath):
install_list.append(fullpath)
break
else:
print("Unable to find header file " + arg)
elif command=="include":
if filename is not None:
command_args = ExpandArgs(variables, args, filename)
this_dir = os.path.dirname(filename)
for arg in command_args:
if len(arg.strip())!=0:
include_filename = os.path.join(this_dir,arg)
if os.path.exists(include_filename):
ExtractInstallFilesWithContext(variables, install_list, include_filename)
elif command=="add_subdirectory":
if filename is not None:
command_args = ExpandArgs(variables, args, filename)
this_dir = os.path.dirname(filename)
for arg in command_args:
if len(arg.strip())!=0:
include_filename = os.path.join(this_dir,arg,"CMakeLists.txt")
if os.path.exists(include_filename):
ExtractInstallFilesWithContext(variables, install_list, include_filename, fileprefix=os.path.join(fileprefix,arg))
elif command=="file":
# This is just a basic cmake FILE() implementation. It just does GLOB.
command_args = ExpandArgs(variables, args, filename)
varname = None
result = None
try:
it = iter(command_args)
arg = it.__next__()
if arg.lower()=='glob' and filename is not None:
arg = it.next()
varname = arg
arg = it.next()
relative_dir = os.path.dirname(filename)
if arg.lower()=='relative':
arg = it.next()
relative_dir = arg
arg = it.next()
if not relative_dir.endswith('/'):
relative_dir += '/'
result = []
current_dir = variables['cmake_current_source_dir'][0]
while True:
for x in glob.iglob(os.path.join(current_dir, arg)):
if x.startswith(relative_dir):
x = x[len(relative_dir):]
result.append(x)
arg = it.next()
except StopIteration:
if varname is not None and result is not None:
variables[varname.lower()] = result
elif command=="ecm_generate_headers":
header_args = ExpandArgs(variables, args, filename)
# print("ecm_generate_headers:"+repr(header_args))
prefix=""
if "RELATIVE" in header_args:
prefix = header_args[header_args.index("RELATIVE")+1]
for item in header_args:
if item == "REQUIRED_HEADERS" or item == "RELATIVE":
break
headername = os.path.join(currentdir, prefix, item.lower() + ".h")
if os.path.exists(headername):
install_list.append(headername)
elif command == "target_include_directories":
include_args = ExpandArgs(variables, args, filename)
if "PUBLIC" in include_args:
for item in include_args[include_args.index("PUBLIC")+1:]:
include_dirs.append(item)
#print("include dirs:",repr(include_dirs))
def ExpandArgs(variables, args, filename=None):
rex = re.compile(r'(\$\{[^\}]+\})')
fixed_args = []
for arg in args:
fixed_parts = []
if arg.startswith("$<BUILD_INTERFACE:"):
arg = arg[len("$<BUILD_INTERFACE:"): -1]
parts = rex.split(arg)
for part in parts:
if part.startswith("${"):
name = part[2:-1].lower()
if name in variables:
value = variables[name]
if len(value)==1:
fixed_parts.append(variables[name][0])
else:
fixed_args.extend(value)
else:
print("Undefined cmake variable '" + name + "' in " + filename)
else:
fixed_parts.append(part)
fixed_args.append(''.join(fixed_parts))
return fixed_args
def __FetchCommands(lexer):
topmode = True
command_list = []
command = None
args = []
tok = lexer.token()
while 1:
if not tok:
if command:
command_list.append( (command,args) )
break # No more input
if topmode:
if tok.type=="COMMAND":
command = tok.value
topmode = False
else:
print("Fail")
# Fail
tok = lexer.token()
else:
# Grab arguments
if tok.type=="COMMAND":
if command:
command_list.append( (command,args) )
command = None
args = []
topmode = True
continue
args.append(tok.value)
tok = lexer.token()
return command_list
if __name__=="__main__":
#print("Testing")
#lexer = cmakelexer.CMakeLexer()
print(ExtractInstallFiles(filename="/home/sbe/devel/svn/kde/trunk/KDE/kdeedu/marble/src/lib/CMakeLists.txt"))
def foo():
ExtractInstallFiles(input="""
find_package(KDE4 REQUIRED)
include (KDE4Defaults)
include_directories(${CMAKE_CURRENT_SOURCE_DIR} ${KDEBASE_WORKSPACE_SOURCE_DIR}/libs ${CMAKE_CURRENT_SOURCE_DIR}/.. ${KDE4_INCLUDES} ${OPENGL_INCLUDE_DIR})
add_subdirectory(tests)
add_definitions(-DKDE_DEFAULT_DEBUG_AREA=1209)
########### next target ###############
set(plasmagik_SRCS
packagemetadata.cpp
packagestructure.cpp
package.cpp
)
set(plasma_LIB_SRCS
${plasmagik_SRCS}
abstractrunner.cpp
animationdriver.cpp
animator.cpp
applet.cpp
appletbrowser.cpp
appletbrowser/customdragtreeview.cpp
appletbrowser/kcategorizeditemsview.cpp
appletbrowser/kcategorizeditemsviewdelegate.cpp
appletbrowser/kcategorizeditemsviewmodels.cpp
appletbrowser/openwidgetassistant.cpp
appletbrowser/plasmaappletitemmodel.cpp
configxml.cpp
containment.cpp
corona.cpp
datacontainer.cpp
dataengine.cpp
dataenginemanager.cpp
delegate.cpp
dialog.cpp
extender.cpp
extenderitem.cpp
paintutils.cpp
panelsvg.cpp
plasma.cpp
popupapplet.cpp
private/applethandle.cpp
private/datacontainer_p.cpp
private/desktoptoolbox.cpp
private/nativetabbar.cpp
private/packages.cpp
private/paneltoolbox.cpp
private/toolbox.cpp
private/tooltip.cpp
querymatch.cpp
runnercontext.cpp
runnermanager.cpp
scripting/appletscript.cpp
scripting/dataenginescript.cpp
scripting/runnerscript.cpp
scripting/scriptengine.cpp
service.cpp
servicejob.cpp
svg.cpp
theme.cpp
tooltipmanager.cpp
uiloader.cpp
version.cpp
view.cpp
wallpaper.cpp
widgets/checkbox.cpp
widgets/combobox.cpp
widgets/flash.cpp
widgets/frame.cpp
widgets/groupbox.cpp
widgets/icon.cpp
widgets/label.cpp
widgets/lineedit.cpp
widgets/meter.cpp
widgets/pushbutton.cpp
widgets/radiobutton.cpp
widgets/signalplotter.cpp
widgets/slider.cpp
widgets/tabbar.cpp
widgets/textedit.cpp
widgets/webcontent.cpp
)
kde4_add_ui_files (
plasma_LIB_SRCS
appletbrowser/kcategorizeditemsviewbase.ui
)
if(QT_QTOPENGL_FOUND AND OPENGL_FOUND)
MESSAGE(STATUS "Adding support for OpenGL applets to libplasma")
set(plasma_LIB_SRCS
${plasma_LIB_SRCS}
glapplet.cpp)
endif(QT_QTOPENGL_FOUND AND OPENGL_FOUND)
kde4_add_library(plasma SHARED ${plasma_LIB_SRCS})
target_link_libraries(plasma ${KDE4_KIO_LIBS} ${KDE4_KFILE_LIBS} ${KDE4_KNEWSTUFF2_LIBS}
${QT_QTUITOOLS_LIBRARY} ${QT_QTWEBKIT_LIBRARY}
${KDE4_THREADWEAVER_LIBRARIES} ${KDE4_SOLID_LIBS} ${X11_LIBRARIES})
if(QT_QTOPENGL_FOUND AND OPENGL_FOUND)
target_link_libraries(plasma ${QT_QTOPENGL_LIBRARY} ${OPENGL_gl_LIBRARY})
endif(QT_QTOPENGL_FOUND AND OPENGL_FOUND)
set_target_properties(plasma PROPERTIES
VERSION 3.0.0
SOVERSION 3
${KDE4_DISABLE_PROPERTY_}LINK_INTERFACE_LIBRARIES "${KDE4_KDEUI_LIBS}"
)
install(TARGETS plasma ${INSTALL_TARGETS_DEFAULT_ARGS})
########### install files ###############
set(plasmagik_HEADERS
packagemetadata.h
packagestructure.h
package.h
)
install(FILES ${plasmagik_HEADERS} DESTINATION ${INCLUDE_INSTALL_DIR}/plasma/ COMPONENT Devel)
set(plasma_LIB_INCLUDES
abstractrunner.h
animationdriver.h
animator.h
applet.h
appletbrowser.h
configxml.h
containment.h
corona.h
datacontainer.h
dataengine.h
dataenginemanager.h
delegate.h
dialog.h
extender.h
extenderitem.h
paintutils.h
panelsvg.h
plasma.h
plasma_export.h
popupapplet.h
querymatch.h
runnercontext.h
runnermanager.h
service.h
servicejob.h
svg.h
theme.h
tooltipmanager.h
uiloader.h
tooltipmanager.h
version.h
view.h
wallpaper.h)
if(QT_QTOPENGL_FOUND AND OPENGL_FOUND)
set(plasma_LIB_INCLUDES
${plasma_LIB_INCLUDES}
glapplet.h)
endif(QT_QTOPENGL_FOUND AND OPENGL_FOUND)
install(FILES
${plasma_LIB_INCLUDES}
DESTINATION ${INCLUDE_INSTALL_DIR}/plasma COMPONENT Devel)
install(FILES
widgets/checkbox.h
widgets/combobox.h
widgets/flash.h
widgets/frame.h
widgets/groupbox.h
widgets/icon.h
widgets/label.h
widgets/lineedit.h
widgets/meter.h
widgets/pushbutton.h
widgets/radiobutton.h
widgets/signalplotter.h
widgets/slider.h
widgets/tabbar.h
widgets/textedit.h
widgets/webcontent.h
DESTINATION ${INCLUDE_INSTALL_DIR}/plasma/widgets COMPONENT Devel)
install(FILES
scripting/appletscript.h
scripting/dataenginescript.h
scripting/runnerscript.h
scripting/scriptengine.h
DESTINATION ${INCLUDE_INSTALL_DIR}/plasma/scripting COMPONENT Devel)
install(FILES
includes/AbstractRunner
includes/AnimationDriver
includes/Animator
includes/Applet
includes/AppletBrowser
includes/AppletScript
includes/CheckBox
includes/ComboBox
includes/ConfigXml
includes/Containment
includes/Corona
includes/DataContainer
includes/DataEngine
includes/DataEngineManager
includes/DataEngineScript
includes/Delegate
includes/Dialog<|fim▁hole|>includes/GroupBox
includes/Icon
includes/Label
includes/LineEdit
includes/Meter
includes/Package
includes/PackageMetadata
includes/PackageStructure
includes/PaintUtils
includes/PanelSvg
includes/Plasma
includes/PopupApplet
includes/PushButton
includes/QueryMatch
includes/RadioButton
includes/RunnerContext
includes/RunnerManager
includes/RunnerScript
includes/ScriptEngine
includes/Service
includes/ServiceJob
includes/SignalPlotter
includes/Slider
includes/Svg
includes/TabBar
includes/TextEdit
includes/ToolTipManager
includes/Theme
includes/UiLoader
includes/View
includes/Version
includes/Wallpaper
includes/WebContent
DESTINATION ${INCLUDE_INSTALL_DIR}/KDE/Plasma COMPONENT Devel)
if(QT_QTOPENGL_FOUND AND OPENGL_FOUND)
install(FILES
includes/GLApplet
DESTINATION ${INCLUDE_INSTALL_DIR}/KDE/Plasma COMPONENT Devel)
endif(QT_QTOPENGL_FOUND AND OPENGL_FOUND)
install(FILES
servicetypes/plasma-animator.desktop
servicetypes/plasma-applet.desktop
servicetypes/plasma-containment.desktop
servicetypes/plasma-dataengine.desktop
servicetypes/plasma-packagestructure.desktop
servicetypes/plasma-runner.desktop
servicetypes/plasma-scriptengine.desktop
servicetypes/plasma-wallpaper.desktop
DESTINATION ${SERVICETYPES_INSTALL_DIR})
install(FILES scripting/plasmoids.knsrc DESTINATION ${CONFIG_INSTALL_DIR})
""")
# Tokenize
#while 1:
# tok = lexer.token()
# if not tok: break # No more input
# print tok
#while 1:
# tok = cmakelexer.lex.token()
# if not tok: break # No more input
# print tok<|fim▁end|> | includes/Extender
includes/ExtenderItem
includes/Flash |
<|file_name|>autocomplete-trigger.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {Directionality} from '@angular/cdk/bidi';
import {BooleanInput, coerceBooleanProperty} from '@angular/cdk/coercion';
import {DOWN_ARROW, ENTER, ESCAPE, TAB, UP_ARROW, hasModifierKey} from '@angular/cdk/keycodes';
import {
FlexibleConnectedPositionStrategy,
Overlay,
OverlayConfig,
OverlayRef,
PositionStrategy,
ScrollStrategy,
ConnectedPosition,
} from '@angular/cdk/overlay';
import {_getShadowRoot} from '@angular/cdk/platform';
import {TemplatePortal} from '@angular/cdk/portal';
import {ViewportRuler} from '@angular/cdk/scrolling';
import {DOCUMENT} from '@angular/common';
import {
AfterViewInit,
ChangeDetectorRef,
Directive,
ElementRef,
forwardRef,
Host,
Inject,
InjectionToken,
Input,
NgZone,
OnDestroy,
Optional,
ViewContainerRef,
OnChanges,
SimpleChanges,
} from '@angular/core';
import {ControlValueAccessor, NG_VALUE_ACCESSOR} from '@angular/forms';
import {
_countGroupLabelsBeforeOption,
_getOptionScrollPosition,
MatOption,
MatOptionSelectionChange,
} from '@angular/material/core';
import {MAT_FORM_FIELD, MatFormField} from '@angular/material/form-field';
import {defer, fromEvent, merge, Observable, of as observableOf, Subject, Subscription} from 'rxjs';
import {delay, filter, map, switchMap, take, tap} from 'rxjs/operators';
import {
_MatAutocompleteBase,
MAT_AUTOCOMPLETE_DEFAULT_OPTIONS,
MatAutocompleteDefaultOptions
} from './autocomplete';
import {_MatAutocompleteOriginBase} from './autocomplete-origin';
/** Injection token that determines the scroll handling while the autocomplete panel is open. */
export const MAT_AUTOCOMPLETE_SCROLL_STRATEGY =
new InjectionToken<() => ScrollStrategy>('mat-autocomplete-scroll-strategy');
/** @docs-private */
export function MAT_AUTOCOMPLETE_SCROLL_STRATEGY_FACTORY(overlay: Overlay): () => ScrollStrategy {
return () => overlay.scrollStrategies.reposition();
}
/** @docs-private */
export const MAT_AUTOCOMPLETE_SCROLL_STRATEGY_FACTORY_PROVIDER = {
provide: MAT_AUTOCOMPLETE_SCROLL_STRATEGY,
deps: [Overlay],
useFactory: MAT_AUTOCOMPLETE_SCROLL_STRATEGY_FACTORY,
};
/**
* Provider that allows the autocomplete to register as a ControlValueAccessor.
* @docs-private
*/
export const MAT_AUTOCOMPLETE_VALUE_ACCESSOR: any = {
provide: NG_VALUE_ACCESSOR,
useExisting: forwardRef(() => MatAutocompleteTrigger),
multi: true
};
/**
* Creates an error to be thrown when attempting to use an autocomplete trigger without a panel.
* @docs-private
*/
export function getMatAutocompleteMissingPanelError(): Error {
return Error('Attempting to open an undefined instance of `mat-autocomplete`. ' +
'Make sure that the id passed to the `matAutocomplete` is correct and that ' +
'you\'re attempting to open it after the ngAfterContentInit hook.');
}
/** Base class with all of the `MatAutocompleteTrigger` functionality. */
@Directive()
export abstract class _MatAutocompleteTriggerBase implements ControlValueAccessor, AfterViewInit,
OnChanges, OnDestroy {
private _overlayRef: OverlayRef | null;
private _portal: TemplatePortal;
private _componentDestroyed = false;
private _autocompleteDisabled = false;
private _scrollStrategy: () => ScrollStrategy;
/** Old value of the native input. Used to work around issues with the `input` event on IE. */
private _previousValue: string | number | null;
/** Strategy that is used to position the panel. */
private _positionStrategy: FlexibleConnectedPositionStrategy;
/** Whether or not the label state is being overridden. */
private _manuallyFloatingLabel = false;
/** The subscription for closing actions (some are bound to document). */
private _closingActionsSubscription: Subscription;
/** Subscription to viewport size changes. */
private _viewportSubscription = Subscription.EMPTY;
/**
* Whether the autocomplete can open the next time it is focused. Used to prevent a focused,
* closed autocomplete from being reopened if the user switches to another browser tab and then
* comes back.
*/
private _canOpenOnNextFocus = true;
/** Whether the element is inside of a ShadowRoot component. */
private _isInsideShadowRoot: boolean;
/** Stream of keyboard events that can close the panel. */
private readonly _closeKeyEventStream = new Subject<void>();
/**
* Event handler for when the window is blurred. Needs to be an
* arrow function in order to preserve the context.
*/
private _windowBlurHandler = () => {
// If the user blurred the window while the autocomplete is focused, it means that it'll be
// refocused when they come back. In this case we want to skip the first focus event, if the
// pane was closed, in order to avoid reopening it unintentionally.
this._canOpenOnNextFocus =
this._document.activeElement !== this._element.nativeElement || this.panelOpen;
}
/** `View -> model callback called when value changes` */
_onChange: (value: any) => void = () => {};
/** `View -> model callback called when autocomplete has been touched` */
_onTouched = () => {};
/** The autocomplete panel to be attached to this trigger. */
@Input('matAutocomplete') autocomplete: _MatAutocompleteBase;
/**
* Position of the autocomplete panel relative to the trigger element. A position of `auto`
* will render the panel underneath the trigger if there is enough space for it to fit in
* the viewport, otherwise the panel will be shown above it. If the position is set to
* `above` or `below`, the panel will always be shown above or below the trigger. no matter
* whether it fits completely in the viewport.
*/
@Input('matAutocompletePosition') position: 'auto' | 'above' | 'below' = 'auto';
/**
* Reference relative to which to position the autocomplete panel.
* Defaults to the autocomplete trigger element.
*/
@Input('matAutocompleteConnectedTo') connectedTo: _MatAutocompleteOriginBase;
/**
* `autocomplete` attribute to be set on the input element.
* @docs-private
*/
@Input('autocomplete') autocompleteAttribute: string = 'off';
/**
* Whether the autocomplete is disabled. When disabled, the element will
* act as a regular input and the user won't be able to open the panel.
*/
@Input('matAutocompleteDisabled')
get autocompleteDisabled(): boolean { return this._autocompleteDisabled; }
set autocompleteDisabled(value: boolean) {
this._autocompleteDisabled = coerceBooleanProperty(value);
}
constructor(private _element: ElementRef<HTMLInputElement>, private _overlay: Overlay,
private _viewContainerRef: ViewContainerRef,
private _zone: NgZone,
private _changeDetectorRef: ChangeDetectorRef,
@Inject(MAT_AUTOCOMPLETE_SCROLL_STRATEGY) scrollStrategy: any,
@Optional() private _dir: Directionality,
@Optional() @Inject(MAT_FORM_FIELD) @Host() private _formField: MatFormField,
@Optional() @Inject(DOCUMENT) private _document: any,
private _viewportRuler: ViewportRuler,
@Optional() @Inject(MAT_AUTOCOMPLETE_DEFAULT_OPTIONS)
private _defaults?: MatAutocompleteDefaultOptions) {
this._scrollStrategy = scrollStrategy;
}
/** Class to apply to the panel when it's above the input. */
protected abstract _aboveClass: string;
ngAfterViewInit() {
const window = this._getWindow();
if (typeof window !== 'undefined') {
this._zone.runOutsideAngular(() => window.addEventListener('blur', this._windowBlurHandler));
}
}
ngOnChanges(changes: SimpleChanges) {
if (changes['position'] && this._positionStrategy) {
this._setStrategyPositions(this._positionStrategy);
if (this.panelOpen) {
this._overlayRef!.updatePosition();
}
}
}
ngOnDestroy() {
const window = this._getWindow();
if (typeof window !== 'undefined') {
window.removeEventListener('blur', this._windowBlurHandler);
}
this._viewportSubscription.unsubscribe();
this._componentDestroyed = true;
this._destroyPanel();
this._closeKeyEventStream.complete();
}
/** Whether or not the autocomplete panel is open. */
get panelOpen(): boolean {
return this._overlayAttached && this.autocomplete.showPanel;
}
private _overlayAttached: boolean = false;
/** Opens the autocomplete suggestion panel. */
openPanel(): void {
this._attachOverlay();
this._floatLabel();
}
/** Closes the autocomplete suggestion panel. */
closePanel(): void {
this._resetLabel();
if (!this._overlayAttached) {
return;
}
if (this.panelOpen) {
// Only emit if the panel was visible.
this.autocomplete.closed.emit();
}
this.autocomplete._isOpen = this._overlayAttached = false;
if (this._overlayRef && this._overlayRef.hasAttached()) {
this._overlayRef.detach();
this._closingActionsSubscription.unsubscribe();
}
// Note that in some cases this can end up being called after the component is destroyed.
// Add a check to ensure that we don't try to run change detection on a destroyed view.
if (!this._componentDestroyed) {
// We need to trigger change detection manually, because
// `fromEvent` doesn't seem to do it at the proper time.
// This ensures that the label is reset when the
// user clicks outside.
this._changeDetectorRef.detectChanges();
}
}
/**
* Updates the position of the autocomplete suggestion panel to ensure that it fits all options
* within the viewport.
*/
updatePosition(): void {
if (this._overlayAttached) {
this._overlayRef!.updatePosition();
}
}
/**
* A stream of actions that should close the autocomplete panel, including
* when an option is selected, on blur, and when TAB is pressed.
*/
get panelClosingActions(): Observable<MatOptionSelectionChange|null> {
return merge(
this.optionSelections,
this.autocomplete._keyManager.tabOut.pipe(filter(() => this._overlayAttached)),
this._closeKeyEventStream,
this._getOutsideClickStream(),
this._overlayRef ?
this._overlayRef.detachments().pipe(filter(() => this._overlayAttached)) :
observableOf()
).pipe(
// Normalize the output so we return a consistent type.
map(event => event instanceof MatOptionSelectionChange ? event : null)
);
}
/** Stream of autocomplete option selections. */
readonly optionSelections: Observable<MatOptionSelectionChange> = defer(() => {
if (this.autocomplete && this.autocomplete.options) {
return merge(...this.autocomplete.options.map(option => option.onSelectionChange));
}
// If there are any subscribers before `ngAfterViewInit`, the `autocomplete` will be undefined.
// Return a stream that we'll replace with the real one once everything is in place.
return this._zone.onStable
.pipe(take(1), switchMap(() => this.optionSelections));
}) as Observable<MatOptionSelectionChange>;
/** The currently active option, coerced to MatOption type. */
get activeOption(): MatOption | null {
if (this.autocomplete && this.autocomplete._keyManager) {
return this.autocomplete._keyManager.activeItem;
}
return null;
}
/** Stream of clicks outside of the autocomplete panel. */
private _getOutsideClickStream(): Observable<any> {
return merge(
fromEvent(this._document, 'click') as Observable<MouseEvent>,
fromEvent(this._document, 'auxclick') as Observable<MouseEvent>,
fromEvent(this._document, 'touchend') as Observable<TouchEvent>)
.pipe(filter(event => {
// If we're in the Shadow DOM, the event target will be the shadow root, so we have to
// fall back to check the first element in the path of the click event.
const clickTarget =
(this._isInsideShadowRoot && event.composedPath ? event.composedPath()[0] :
event.target) as HTMLElement;
const formField = this._formField ? this._formField._elementRef.nativeElement : null;
const customOrigin = this.connectedTo ? this.connectedTo.elementRef.nativeElement : null;
return this._overlayAttached && clickTarget !== this._element.nativeElement &&
(!formField || !formField.contains(clickTarget)) &&
(!customOrigin || !customOrigin.contains(clickTarget)) &&
(!!this._overlayRef && !this._overlayRef.overlayElement.contains(clickTarget));
}));
}
// Implemented as part of ControlValueAccessor.
writeValue(value: any): void {
Promise.resolve(null).then(() => this._setTriggerValue(value));
}
// Implemented as part of ControlValueAccessor.
registerOnChange(fn: (value: any) => {}): void {
this._onChange = fn;
}
// Implemented as part of ControlValueAccessor.
registerOnTouched(fn: () => {}) {
this._onTouched = fn;
}
// Implemented as part of ControlValueAccessor.
setDisabledState(isDisabled: boolean) {
this._element.nativeElement.disabled = isDisabled;
}
_handleKeydown(event: KeyboardEvent): void {
const keyCode = event.keyCode;
// Prevent the default action on all escape key presses. This is here primarily to bring IE
// in line with other browsers. By default, pressing escape on IE will cause it to revert
// the input value to the one that it had on focus, however it won't dispatch any events
// which means that the model value will be out of sync with the view.
if (keyCode === ESCAPE && !hasModifierKey(event)) {
event.preventDefault();
}
if (this.activeOption && keyCode === ENTER && this.panelOpen) {
this.activeOption._selectViaInteraction();
this._resetActiveItem();
event.preventDefault();
} else if (this.autocomplete) {
const prevActiveItem = this.autocomplete._keyManager.activeItem;
const isArrowKey = keyCode === UP_ARROW || keyCode === DOWN_ARROW;
if (this.panelOpen || keyCode === TAB) {
this.autocomplete._keyManager.onKeydown(event);
} else if (isArrowKey && this._canOpen()) {
this.openPanel();
}
if (isArrowKey || this.autocomplete._keyManager.activeItem !== prevActiveItem) {
this._scrollToOption(this.autocomplete._keyManager.activeItemIndex || 0);
}
}
}
_handleInput(event: KeyboardEvent): void {
let target = event.target as HTMLInputElement;
let value: number | string | null = target.value;
// Based on `NumberValueAccessor` from forms.
if (target.type === 'number') {
value = value == '' ? null : parseFloat(value);
}
// If the input has a placeholder, IE will fire the `input` event on page load,
// focus and blur, in addition to when the user actually changed the value. To
// filter out all of the extra events, we save the value on focus and between
// `input` events, and we check whether it changed.
// See: https://connect.microsoft.com/IE/feedback/details/885747/
if (this._previousValue !== value) {
this._previousValue = value;
this._onChange(value);
if (this._canOpen() && this._document.activeElement === event.target) {
this.openPanel();
}
}
}
_handleFocus(): void {
if (!this._canOpenOnNextFocus) {
this._canOpenOnNextFocus = true;
} else if (this._canOpen()) {
this._previousValue = this._element.nativeElement.value;
this._attachOverlay();
this._floatLabel(true);
}
}
/**
* In "auto" mode, the label will animate down as soon as focus is lost.
* This causes the value to jump when selecting an option with the mouse.
* This method manually floats the label until the panel can be closed.
* @param shouldAnimate Whether the label should be animated when it is floated.
*/
private _floatLabel(shouldAnimate = false): void {
if (this._formField && this._formField.floatLabel === 'auto') {
if (shouldAnimate) {
this._formField._animateAndLockLabel();
} else {
this._formField.floatLabel = 'always';
}
this._manuallyFloatingLabel = true;
}
}
/** If the label has been manually elevated, return it to its normal state. */
private _resetLabel(): void {
if (this._manuallyFloatingLabel) {
this._formField.floatLabel = 'auto';
this._manuallyFloatingLabel = false;
}
}
/**
* This method listens to a stream of panel closing actions and resets the
* stream every time the option list changes.
*/
private _subscribeToClosingActions(): Subscription {
const firstStable = this._zone.onStable.pipe(take(1));
const optionChanges = this.autocomplete.options.changes.pipe(
tap(() => this._positionStrategy.reapplyLastPosition()),
// Defer emitting to the stream until the next tick, because changing
// bindings in here will cause "changed after checked" errors.
delay(0)
);
// When the zone is stable initially, and when the option list changes...
return merge(firstStable, optionChanges)
.pipe(
// create a new stream of panelClosingActions, replacing any previous streams
// that were created, and flatten it so our stream only emits closing events...
switchMap(() => {
const wasOpen = this.panelOpen;
this._resetActiveItem();
this.autocomplete._setVisibility();
if (this.panelOpen) {
this._overlayRef!.updatePosition();
// If the `panelOpen` state changed, we need to make sure to emit the `opened`
// event, because we may not have emitted it when the panel was attached. This
// can happen if the users opens the panel and there are no options, but the
// options come in slightly later or as a result of the value changing.
if (wasOpen !== this.panelOpen) {
this.autocomplete.opened.emit();
}
}
return this.panelClosingActions;
}),
// when the first closing event occurs...
take(1))
// set the value, close the panel, and complete.
.subscribe(event => this._setValueAndClose(event));
}
/** Destroys the autocomplete suggestion panel. */
private _destroyPanel(): void {
if (this._overlayRef) {
this.closePanel();
this._overlayRef.dispose();
this._overlayRef = null;
}
}
private _setTriggerValue(value: any): void {
const toDisplay = this.autocomplete && this.autocomplete.displayWith ?
this.autocomplete.displayWith(value) :
value;
// Simply falling back to an empty string if the display value is falsy does not work properly.
// The display value can also be the number zero and shouldn't fall back to an empty string.
const inputValue = toDisplay != null ? toDisplay : '';
// If it's used within a `MatFormField`, we should set it through the property so it can go
// through change detection.
if (this._formField) {
this._formField._control.value = inputValue;
} else {
this._element.nativeElement.value = inputValue;
}
this._previousValue = inputValue;
}
/**
* This method closes the panel, and if a value is specified, also sets the associated
* control to that value. It will also mark the control as dirty if this interaction
* stemmed from the user.
*/
private _setValueAndClose(event: MatOptionSelectionChange | null): void {
if (event && event.source) {
this._clearPreviousSelectedOption(event.source);
this._setTriggerValue(event.source.value);
this._onChange(event.source.value);
this._element.nativeElement.focus();
this.autocomplete._emitSelectEvent(event.source);
}
this.closePanel();
}
/**
* Clear any previous selected option and emit a selection change event for this option
*/
private _clearPreviousSelectedOption(skip: MatOption) {
this.autocomplete.options.forEach(option => {
if (option !== skip && option.selected) {
option.deselect();
}
});
}
/**
 * Creates the overlay on first open, attaches the panel portal and wires up
 * the close-key and viewport-resize streams; on later opens it only refreshes
 * the origin/width. Emits `opened` once the panel actually becomes visible.
 */
private _attachOverlay(): void {
  if (!this.autocomplete && (typeof ngDevMode === 'undefined' || ngDevMode)) {
    throw getMatAutocompleteMissingPanelError();
  }
  // We want to resolve this once, as late as possible so that we can be
  // sure that the element has been moved into its final place in the DOM.
  if (this._isInsideShadowRoot == null) {
    this._isInsideShadowRoot = !!_getShadowRoot(this._element.nativeElement);
  }
  let overlayRef = this._overlayRef;
  if (!overlayRef) {
    // First open: build the portal + overlay and cache them for reuse.
    this._portal = new TemplatePortal(this.autocomplete.template,
                                      this._viewContainerRef,
                                      {id: this._formField?.getLabelId()});
    overlayRef = this._overlay.create(this._getOverlayConfig());
    this._overlayRef = overlayRef;
    // Use the `keydownEvents` in order to take advantage of
    // the overlay event targeting provided by the CDK overlay.
    overlayRef.keydownEvents().subscribe(event => {
      // Close when pressing ESCAPE or ALT + UP_ARROW, based on the a11y guidelines.
      // See: https://www.w3.org/TR/wai-aria-practices-1.1/#textbox-keyboard-interaction
      if ((event.keyCode === ESCAPE && !hasModifierKey(event)) ||
          (event.keyCode === UP_ARROW && hasModifierKey(event, 'altKey'))) {
        this._resetActiveItem();
        this._closeKeyEventStream.next();
        // We need to stop propagation, otherwise the event will eventually
        // reach the input itself and cause the overlay to be reopened.
        event.stopPropagation();
        event.preventDefault();
      }
    });
    // Keep the panel width in sync with the trigger while the viewport resizes.
    this._viewportSubscription = this._viewportRuler.change().subscribe(() => {
      if (this.panelOpen && overlayRef) {
        overlayRef.updateSize({width: this._getPanelWidth()});
      }
    });
  } else {
    // Update the trigger, panel width and direction, in case anything has changed.
    this._positionStrategy.setOrigin(this._getConnectedElement());
    overlayRef.updateSize({width: this._getPanelWidth()});
  }
  if (overlayRef && !overlayRef.hasAttached()) {
    overlayRef.attach(this._portal);
    this._closingActionsSubscription = this._subscribeToClosingActions();
  }
  const wasOpen = this.panelOpen;
  this.autocomplete._setVisibility();
  this.autocomplete._isOpen = this._overlayAttached = true;
  // We need to do an extra `panelOpen` check in here, because the
  // autocomplete won't be shown if there are no options.
  if (this.panelOpen && wasOpen !== this.panelOpen) {
    this.autocomplete.opened.emit();
  }
}
/** Builds the overlay configuration used to create the autocomplete panel. */
private _getOverlayConfig(): OverlayConfig {
  const config = new OverlayConfig({
    positionStrategy: this._getOverlayPosition(),
    scrollStrategy: this._scrollStrategy(),
    width: this._getPanelWidth(),
    direction: this._dir,
    panelClass: this._defaults?.overlayPanelClass,
  });
  return config;
}
/** Creates (and caches) the flexible connected position strategy for the panel. */
private _getOverlayPosition(): PositionStrategy {
  const strategy = this._overlay
      .position()
      .flexibleConnectedTo(this._getConnectedElement())
      .withFlexibleDimensions(false)
      .withPush(false);
  this._setStrategyPositions(strategy);
  this._positionStrategy = strategy;
  return strategy;
}
/** Sets the positions on a position strategy based on the directive's input state. */
private _setStrategyPositions(positionStrategy: FlexibleConnectedPositionStrategy) {
  // Note that we provide horizontal fallback positions, even though by default the dropdown
  // width matches the input, because consumers can override the width. See #18854.
  const belowPositions: ConnectedPosition[] = [
    {originX: 'start', originY: 'bottom', overlayX: 'start', overlayY: 'top'},
    {originX: 'end', originY: 'bottom', overlayX: 'end', overlayY: 'top'},
  ];
  // The overlay edge connected to the trigger should have squared corners, while
  // the opposite end has rounded corners. We apply a CSS class to swap the
  // border-radius based on the overlay position.
  const panelClass = this._aboveClass;
  const abovePositions: ConnectedPosition[] = [
    {originX: 'start', originY: 'top', overlayX: 'start', overlayY: 'bottom', panelClass},
    {originX: 'end', originY: 'top', overlayX: 'end', overlayY: 'bottom', panelClass},
  ];
  let positions: ConnectedPosition[];
  switch (this.position) {
    case 'above':
      positions = abovePositions;
      break;
    case 'below':
      positions = belowPositions;
      break;
    default:
      // 'auto': prefer below, fall back to above.
      positions = belowPositions.concat(abovePositions);
  }
  positionStrategy.withPositions(positions);
}
/** Element the overlay is anchored to: explicit origin, form field, or the input itself. */
private _getConnectedElement(): ElementRef<HTMLElement> {
  if (this.connectedTo) {
    return this.connectedTo.elementRef;
  }
  if (this._formField) {
    return this._formField.getConnectedOverlayOrigin();
  }
  return this._element;
}
/** Width for the panel: the consumer-provided width, or the host element's width. */
private _getPanelWidth(): number | string {
  const explicitWidth = this.autocomplete.panelWidth;
  return explicitWidth ? explicitWidth : this._getHostWidth();
}
/** Returns the width of the input element, so the panel width can match it. */
private _getHostWidth(): number {
  const host = this._getConnectedElement().nativeElement;
  return host.getBoundingClientRect().width;
}
/**
 * Resets the active item to -1 so arrow events will activate the
 * correct options, or to 0 if the consumer opted into it.
 */
private _resetActiveItem(): void {
  const keyManager = this.autocomplete._keyManager;
  if (this.autocomplete.autoActiveFirstOption) {
    // Note that we go through `setFirstItemActive`, rather than `setActiveItem(0)`, because
    // the former will find the next enabled option, if the first one is disabled.
    keyManager.setFirstItemActive();
  } else {
    keyManager.setActiveItem(-1);
  }
}
/** Determines whether the panel can be opened. */
private _canOpen(): boolean {
  const {readOnly, disabled} = this._element.nativeElement;
  return !(readOnly || disabled || this._autocompleteDisabled);
}
/** Use defaultView of injected document if available or fallback to global window reference */
private _getWindow(): Window {
  const doc = this._document;
  return (doc && doc.defaultView) || window;
}
/**
 * Scrolls to a particular option in the list.
 * @param index Index (including group labels) of the option to reveal.
 */
private _scrollToOption(index: number): void {
  // Given that we are not actually focusing active options, we must manually adjust scroll
  // to reveal options below the fold. First, we find the offset of the option from the top
  // of the panel. If that offset is below the fold, the new scrollTop will be the offset -
  // the panel height + the option height, so the active option will be just visible at the
  // bottom of the panel. If that offset is above the top of the visible panel, the new scrollTop
  // will become the offset. If that offset is visible within the panel already, the scrollTop is
  // not adjusted.
  const autocomplete = this.autocomplete;
  const labelCount = _countGroupLabelsBeforeOption(index,
      autocomplete.options, autocomplete.optionGroups);
  if (index === 0 && labelCount === 1) {
    // If we've got one group label before the option and we're at the top option,
    // scroll the list to the top. This is better UX than scrolling the list to the
    // top of the option, because it allows the user to read the top group's label.
    autocomplete._setScrollTop(0);
  } else {
    const option = autocomplete.options.toArray()[index];
    if (option) {
      const element = option._getHostElement();
      const newScrollPosition = _getOptionScrollPosition(
        element.offsetTop,
        element.offsetHeight,
        autocomplete._getScrollTop(),
        autocomplete.panel.nativeElement.offsetHeight
      );
      autocomplete._setScrollTop(newScrollPosition);
    }
  }
}
static ngAcceptInputType_autocompleteDisabled: BooleanInput;
}
@Directive({
selector: `input[matAutocomplete], textarea[matAutocomplete]`,
host: {
'class': 'mat-autocomplete-trigger',
'[attr.autocomplete]': 'autocompleteAttribute',
'[attr.role]': 'autocompleteDisabled ? null : "combobox"',
'[attr.aria-autocomplete]': 'autocompleteDisabled ? null : "list"',
'[attr.aria-activedescendant]': '(panelOpen && activeOption) ? activeOption.id : null',
'[attr.aria-expanded]': 'autocompleteDisabled ? null : panelOpen.toString()',
'[attr.aria-owns]': '(autocompleteDisabled || !panelOpen) ? null : autocomplete?.id',
'[attr.aria-haspopup]': '!autocompleteDisabled',
// Note: we use `focusin`, as opposed to `focus`, in order to open the panel
// a little earlier. This avoids issues where IE delays the focusing of the input.
'(focusin)': '_handleFocus()',
'(blur)': '_onTouched()',
'(input)': '_handleInput($event)',
'(keydown)': '_handleKeydown($event)',
},
exportAs: 'matAutocompleteTrigger',
providers: [MAT_AUTOCOMPLETE_VALUE_ACCESSOR]
})
export class MatAutocompleteTrigger extends _MatAutocompleteTriggerBase {<|fim▁hole|><|fim▁end|> | protected _aboveClass = 'mat-autocomplete-panel-above';
} |
<|file_name|>016_resource_providers.py<|end_file_name|><|fim▁begin|># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Database migrations for resource-providers."""
from migrate import UniqueConstraint
from sqlalchemy import Column
from sqlalchemy import DateTime
from sqlalchemy import Float
from sqlalchemy import Index
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import String
from sqlalchemy import Table
from sqlalchemy import Unicode
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
if migrate_engine.name == 'mysql':
nameargs = {'collation': 'utf8_bin'}
else:
nameargs = {}
resource_providers = Table(
'resource_providers', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('uuid', String(36), nullable=False),
Column('name', Unicode(200, **nameargs), nullable=True),
Column('generation', Integer, default=0),
Column('can_host', Integer, default=0),
UniqueConstraint('uuid', name='uniq_resource_providers0uuid'),
UniqueConstraint('name', name='uniq_resource_providers0name'),
Index('resource_providers_name_idx', 'name'),
Index('resource_providers_uuid_idx', 'uuid'),
mysql_engine='InnoDB',
mysql_charset='latin1'
)
inventories = Table(
'inventories', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('resource_provider_id', Integer, nullable=False),
Column('resource_class_id', Integer, nullable=False),
Column('total', Integer, nullable=False),
Column('reserved', Integer, nullable=False),
Column('min_unit', Integer, nullable=False),
Column('max_unit', Integer, nullable=False),
Column('step_size', Integer, nullable=False),
Column('allocation_ratio', Float, nullable=False),
Index('inventories_resource_provider_id_idx',
'resource_provider_id'),
Index('inventories_resource_provider_resource_class_idx',
'resource_provider_id', 'resource_class_id'),
Index('inventories_resource_class_id_idx',
'resource_class_id'),
UniqueConstraint('resource_provider_id', 'resource_class_id',
name='uniq_inventories0resource_provider_resource_class'),
mysql_engine='InnoDB',
mysql_charset='latin1'
)
allocations = Table(
'allocations', meta,
Column('created_at', DateTime),
Column('updated_at', DateTime),
Column('id', Integer, primary_key=True, nullable=False),
Column('resource_provider_id', Integer, nullable=False),
Column('consumer_id', String(36), nullable=False),
Column('resource_class_id', Integer, nullable=False),
Column('used', Integer, nullable=False),
Index('allocations_resource_provider_class_used_idx',
'resource_provider_id', 'resource_class_id',
'used'),
Index('allocations_resource_class_id_idx',
'resource_class_id'),
Index('allocations_consumer_id_idx', 'consumer_id'),
mysql_engine='InnoDB',
mysql_charset='latin1'
)<|fim▁hole|> Column('updated_at', DateTime),
Column('resource_provider_id', Integer, primary_key=True,
nullable=False),
Column('aggregate_id', Integer, primary_key=True, nullable=False),
Index('resource_provider_aggregates_aggregate_id_idx',
'aggregate_id'),
mysql_engine='InnoDB',
mysql_charset='latin1'
)
for table in [resource_providers, inventories, allocations,
resource_provider_aggregates]:
table.create(checkfirst=True)<|fim▁end|> |
resource_provider_aggregates = Table(
'resource_provider_aggregates', meta,
Column('created_at', DateTime), |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright 2011 Gilt Groupe, INC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
The idrac6 module contains all the functionality that mothership will need
to interact with the Dell DRACs using pexpect and some IPMI commands
"""
import os
import sys
import re
import pexpect
import subprocess
def check_known_hosts(host, user='~'):
# check for ssh host key
hasKey = False
userknownhosts = os.path.expanduser('%s/.ssh/known_hosts' % user)
for file in [ '/etc/ssh/ssh_known_hosts', userknownhosts ]:
for line in open(file):
if host in line:
hasKey = True
break
if not hasKey:
print '+=== Adding %s to known_hosts' % host
key = subprocess.Popen(['ssh-keyscan', host],
stdout=subprocess.PIPE, stderr=subprocess.PIPE).\
communicate()[0]
f = open(userknownhosts, 'a')
f.write(key)
def query_idrac(cfg, host):
    """Log into the DRAC at *host* over ssh and return the raw output of
    ``racadm getsysinfo``.

    Uses the power-user credentials from *cfg* (``puser``/``ppass``).
    The returned text is suitable for parsing with sysinfo().
    """
    # Make sure ssh won't stall on an unknown-host prompt.
    check_known_hosts(host)
    child = pexpect.spawn('ssh %s@%s' % (cfg.puser, host))
    child.expect('assword:')
    child.sendline(cfg.ppass)
    child.expect('/admin1->')
    child.sendline('racadm getsysinfo')
    child.expect('/admin1->')
    # Everything printed before the next prompt is the command output.
    data = child.before
    child.sendline('exit')
    child.expect('CLP Session terminated')
    return data
def sysinfo(info):
    """Parse the output of the DRAC ``racadm getsysinfo`` command.

    Returns a dict with keys such as ``firmware``, ``bios``, ``model``,
    ``hw_tag``, ``drac`` (the DRAC MAC address) and ``eth<N>`` entries
    for each NIC.  ``manufacturer`` is set to ``Dell`` when the model
    is a PowerEdge.
    """
    patterns = [
        r'(Firmware Version\s+=\s(?P<firmware>[.\d]+))',
        r'(System BIOS Version\s+=\s(?P<bios>[.\d]+))',
        r'(System Model\s+=\s(?P<model>[ \w]+))',
        r'(Service Tag\s+=\s(?P<hw_tag>\w+))',
        r'(MAC Address\s+=\s(?P<drac>[a-f:0-9]+))',
        r'(?P<hwname>NIC\d+)\sEthernet\s+=\s(?P<hwaddr>[a-f:0-9]+)',
    ]
    compiled = [re.compile(p) for p in patterns]
    result = {}
    for line in info.split('\n'):
        for pattern in compiled:
            match = pattern.search(line)
            if match is None:
                continue
            groups = match.groupdict()
            if 'hwname' in groups:
                # The DRAC numbers NICs from 1; map NIC1 -> eth0 etc.
                nic_index = int(match.group('hwname').replace('NIC', ''))
                result['eth%d' % (nic_index - 1)] = match.group('hwaddr')
            else:
                for key in groups:
                    result[key] = match.group(key)
                    if key == 'model' and re.match('PowerEdge', match.group(key)):
                        result['manufacturer'] = 'Dell'
    return result
def prep_idrac(cfg, host, debug=False, basics=True, ipmi=False, serial=False, telnet=None, gold=False):
check_known_hosts(host)
try:
check_known_hosts(host, '~root')
except IOError, e:
print '%s\nDRAC prep must be run as root/sudo' % e
sys.exit(1)
configured = False
pubkeys = []
for p in cfg.dkeys:
f = open('%s/.ssh/id_dsa.pub' % os.path.expanduser('~%s' % p))
pubkeys.append(f.read().rstrip())
f.close()
adm_pre = 'racadm config -g cfgUserAdmin -i'
adm_obj = [
'-o cfgUserAdminUserName %s' % cfg.puser,
'-o cfgUserAdminPassword %s' % cfg.ppass,
'-o cfgUserAdminPrivilege 0x000001ff',
'-o cfgUserAdminIpmiLanPrivilege 4',
'-o cfgUserAdminIpmiSerialPrivilege 4',
'-o cfgUserAdminSolEnable 1',
'-o cfgUserAdminEnable 1' ]
# login as default root
print '+=== Prepping DRAC for %s' % host
child = pexpect.spawn('ssh %s@%s' % (cfg.duser,host))
child.expect('assword:')
child.sendline(cfg.ddell)
ans = child.expect([ 'Permission denied', '/admin1->', pexpect.TIMEOUT ])
if ans == 0:
# login as power user
print '+- Default root denied, attempting %s alternate' % cfg.puser
child = pexpect.spawn('ssh %s@%s' % (cfg.puser,host))
child.expect('assword:')
child.sendline(cfg.ppass)
newans = child.expect([ 'Permission denied', '/admin1->'])
if newans == 0:
print '+- Alternate %s failed, exiting' % cfg.puser
sys.exit(2)
userdata = 'default root disabled'
configured = True
elif ans == 1:
# configure new admin user
print '+- Adding DRAC user: %s' % cfg.puser
for c in adm_obj:
child.sendline('%s 3 %s' % (adm_pre,c))
child.expect('/admin1->')
child.sendline('racadm getconfig -u %s' % cfg.puser)
child.expect('/admin1->')
userdata = '\n'.join(child.before.split('\n')[1:])
if debug: print userdata
elif ans == 2:
# timeout
print '+- Default root login timed out, unknown error'
sys.exit(2)
if basics or ipmi: # enable IPMI
print '+- Enabling IPMI'
child.sendline('racadm config -g cfgIpmiLan -o cfgIpmiLanEnable 1')
child.expect('/admin1->')
child.sendline('racadm getconfig -g cfgIpmiLan')
child.expect('/admin1->')
if debug: print '\n'.join(child.before.split('\n')[1:])
if basics or serial: # enable SerialConsole
print '+- Enabling SerialConsole'
child.sendline('racadm config -g cfgSerial -o cfgSerialConsoleEnable 1')
child.expect('/admin1->')
child.sendline('racadm getconfig -g cfgSerial')
child.expect('/admin1->')
if debug: print '\n'.join(child.before.split('\n')[1:])
if telnet is not None: # enable Telnet
print '+- Enabling/Disabling Telnet'
child.sendline('racadm config -g cfgSerial -o cfgSerialTelnetEnable %d' % telnet)
child.expect('/admin1->')
child.sendline('racadm getconfig -g cfgSerial')
child.expect('/admin1->')
if debug: print '\n'.join(child.before.split('\n')[1:])
if basics or gold: # gold = trusted user
# configure new trusted user
adm_obj[0] = '-o cfgUserAdminUserName %s' % cfg.dgold
adm_obj[1] = '-o cfgUserAdminPassword %s' % cfg.dpass
print '+- Adding trusted DRAC user: %s' % cfg.dgold
for c in adm_obj:
child.sendline('%s 4 %s' % (adm_pre,c))
child.expect('/admin1->')
child.sendline('racadm getconfig -u %s' % cfg.dgold)
child.expect('/admin1->')
if debug: print '\n'.join(child.before.split('\n')[1:])
# add keys to trusted user
print '+- Adding keys for trusted user'
for k in pubkeys:
child.sendline('racadm sshpkauth -i 4 -k %d -t "%s"' % (pubkeys.index(k)+1, k))
child.expect('/admin1->')
child.sendline('racadm sshpkauth -v -i 4 -k all')
child.expect('/admin1->')
if debug: print '\n'.join(child.before.split('\n')[1:])
# alter password for root user
if cfg.puser in userdata:
print '+- Changing password for: %s' % cfg.duser
child.sendline('%s 2 -o cfgUserAdminPassword %s' % (adm_pre,cfg.dpass))
child.expect('/admin1->')
if debug: print '\n'.join(child.before.split('\n')[1:])
else:
print '+- Skipping password change for: %s' % cfg.duser
if not configured: print ' because %s was not successfully created' % cfg.puser<|fim▁hole|> child.expect('CLP Session terminated')
if basics or ipmi: # enable IPMI, continued
# settings new admin user privileges with IPMI (apparently racadm was not enough)
print '+- Updating IPMI privileges for non-root users'
os.system('/usr/bin/ipmitool -H %s -U root -P %s user priv 3 4' % (host, cfg.dpass))
os.system('/usr/bin/ipmitool -H %s -U root -P %s user priv 4 4' % (host, cfg.dpass))
if debug: os.system('/usr/bin/ipmitool -H %s -U root -P %s user list' % (host, cfg.dpass))<|fim▁end|> |
# leaving drac
print '+- Exiting DRAC'
child.sendline('exit') |
<|file_name|>2_5_fluid_resistance.rs<|end_file_name|><|fim▁begin|>// The Nature of Code
// Daniel Shiffman
// http://natureofcode.com
//
// Example 2-5: Forces (Gravity and Fluid Resistance) with Vectors
//
// Demonstration of multiple forces acting on bodies (Mover type)
// Bodies experience gravity continuously
// Bodies experience fluid resistance when in *water*
use nannou::prelude::*;
fn main() {
nannou::app(model).update(update).run();
}
/// Top-level app state for the sketch.
struct Model {
    /// The bodies being simulated.
    movers: Vec<Mover>,
    /// The drag-inducing region at the bottom of the window.
    liquid: Liquid,
}
/// A body affected by forces (gravity, drag).
struct Mover {
    position: Point2,
    velocity: Vector2,
    /// Per-frame force accumulator; cleared after each update.
    acceleration: Vector2,
    /// Mass also determines the drawn size.
    mass: f32,
}
// Liquid type
struct Liquid {
    // Liquid is a rectangle (the region where drag applies)
    rect: Rect,
    // Coefficient of drag
    c: f32,
}
impl Liquid {
    /// Creates a liquid occupying `rect` with drag coefficient `c`.
    fn new(rect: Rect, c: f32) -> Self {
        Liquid { rect, c }
    }

    /// Is the Mover currently inside the liquid?
    fn contains(&self, m: &Mover) -> bool {
        self.rect.contains(m.position)
    }

    /// Computes the drag force on `m`: direction opposite to its velocity,
    /// magnitude `c * speed^2`.
    fn drag(&self, m: &Mover) -> Vector2 {
        let speed = m.velocity.magnitude();
        let magnitude = self.c * speed * speed;
        let mut force = m.velocity.normalize();
        force *= -magnitude;
        force
    }

    /// Draws the liquid as a dark rectangle.
    fn display(&self, draw: &Draw) {
        draw.rect().xy(self.rect.xy()).wh(self.rect.wh()).gray(0.1);
    }
}
impl Mover {
fn new(m: f32, x: f32, y: f32) -> Self {
// Mass is tied to size<|fim▁hole|> let acceleration = vec2(0.0, 0.0);
Mover {
position,
velocity,
acceleration,
mass,
}
}
/// Spawns a mover with a random mass at a random x along the top edge of `rect`.
fn new_random(rect: &Rect) -> Self {
    // Keep RNG call order identical: mass first, then x.
    let mass = random_range(0.5f32, 4.0);
    let x = random_range(rect.left(), rect.right());
    Mover::new(mass, x, rect.top())
}
// Newton's 2nd law: F = M * A
// or A = F / M
fn apply_force(&mut self, force: Vector2) {
    // Divide out the mass and accumulate into this frame's acceleration.
    self.acceleration += force / self.mass;
}
/// Integrates one simulation step: velocity += acceleration,
/// position += velocity, then clears the force accumulator.
fn update(&mut self) {
    self.velocity += self.acceleration;
    self.position += self.velocity;
    // Acceleration must be zeroed so forces don't accumulate across frames.
    self.acceleration *= 0.0;
}
// Draw Mover as a translucent circle whose diameter scales with mass.
fn display(&self, draw: &Draw) {
    draw.ellipse()
        .xy(self.position)
        .w_h(self.mass * 16.0, self.mass * 16.0)
        .rgba(0.0, 0.0, 0.0, 0.5)
        .stroke(BLACK)
        .stroke_weight(2.0);
}
// Bounce off bottom of window
fn check_edges(&mut self, rect: Rect) {
    let floor = rect.bottom();
    if self.position.y < floor {
        // Invert vertical velocity with a little dampening on impact.
        self.velocity.y *= -0.9;
        self.position.y = floor;
    }
}
}
/// Builds the app state: a 640x360 window, nine random movers dropped
/// from the top edge, and a liquid filling the bottom half of the window.
fn model(app: &App) -> Model {
    let rect = Rect::from_w_h(640.0, 360.0);
    app.new_window()
        .size(rect.w() as u32, rect.h() as u32)
        .mouse_pressed(mouse_pressed)
        .view(view)
        .build()
        .unwrap();
    // Nine moving bodies
    let movers = (0..9)
        .map(|_| Mover::new_random(&app.window_rect()))
        .collect();
    // Create an instance of our Liquid type
    let rect = Rect::from_w_h(rect.w(), rect.h() * 0.5).align_bottom_of(rect);
    let liquid = Liquid::new(rect, 0.1);
    Model { movers, liquid }
}
/// Restarts every mover at a fresh random spot when the mouse is pressed.
fn mouse_pressed(app: &App, m: &mut Model, _button: MouseButton) {
    let rect = app.window_rect();
    for mover in m.movers.iter_mut() {
        *mover = Mover::new_random(&rect);
    }
}
fn update(app: &App, m: &mut Model, _update: Update) {
for i in 0..m.movers.len() {
// Is the Mover in the liquid?
if m.liquid.contains(&m.movers[i]) {
let drag_force = m.liquid.drag(&m.movers[i]);
// Apply drag force to Mover
m.movers[i].apply_force(drag_force);
}
// Gravity is scaled by mass here!
let gravity = vec2(0.0, -0.1 * m.movers[i].mass);
// Apply gravity
m.movers[i].apply_force(gravity);
m.movers[i].update();
m.movers[i].check_edges(app.window_rect());
}
}
/// Renders one frame: clears to white, draws the liquid, then every mover.
fn view(app: &App, m: &Model, frame: Frame) {
    // Begin drawing
    let draw = app.draw();
    draw.background().color(WHITE);
    // Draw water
    m.liquid.display(&draw);
    // Draw movers
    for mover in &m.movers {
        mover.display(&draw);
    }
    // Write the result of our drawing to the window's frame.
    draw.to_frame(app, &frame).unwrap();
}<|fim▁end|> | let mass = m;
let position = pt2(x, y);
let velocity = vec2(0.0, 0.0); |
<|file_name|>EventAwareResponseHandler.java<|end_file_name|><|fim▁begin|>package ems.server.protocol;
import ems.server.domain.Device;
import ems.server.domain.EventSeverity;
import ems.server.domain.EventType;
import ems.server.utils.EventHelper;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;<|fim▁hole|>
/**
* EventAwareResponseHandler
* Created by thebaz on 9/15/14.
*/
public class EventAwareResponseHandler implements ResponseHandler {
private final DateFormat format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm'Z'");
private final Device device;
/**
 * Creates a handler that reports protocol/network problems for the given
 * device.  Timestamps in generated descriptions are formatted in UTC.
 * NOTE(review): the shared SimpleDateFormat field is not thread-safe --
 * confirm this handler is only used from a single thread.
 */
public EventAwareResponseHandler(Device device) {
    this.device = device;
    format.setTimeZone(TimeZone.getTimeZone("UTC"));
}
@Override
public void onTimeout(String variable) {
    // A timeout talking to the device is recorded as a network warning.
    EventHelper.getInstance().addEvent(device, EventType.EVENT_NETWORK, EventSeverity.EVENT_WARN);
}
@Override
public void onSuccess(String variable) {
    // do nothing: successful responses do not generate events
}
@Override
public void onError(String variable, int errorCode, String errorDescription) {
    // Protocol errors are always reported with ERROR severity.
    EventSeverity severity = EventSeverity.EVENT_ERROR;
    EventType type = EventType.EVENT_PROTOCOL;
    StringBuilder description = new StringBuilder();
    description.append("Event of type: '").append(type)
            .append("' at: ").append(format.format(new Date(System.currentTimeMillis())))
            .append(" with severity: '").append(severity)
            .append("' for device: ").append(device.getName())
            .append(". Error code:").append(errorCode)
            .append(", Error description: ").append(errorDescription);
    EventHelper.getInstance().addEvent(device, type, severity, description.toString());
}
}<|fim▁end|> | import java.util.TimeZone; |
<|file_name|>mouseevent.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::MouseEventBinding;
use dom::bindings::codegen::Bindings::MouseEventBinding::MouseEventMethods;
use dom::bindings::codegen::Bindings::UIEventBinding::UIEventMethods;
use dom::bindings::codegen::InheritTypes::{EventCast, MouseEventDerived, UIEventCast};
use dom::bindings::error::Fallible;
use dom::bindings::global::GlobalRef;
use dom::bindings::js::{JS, MutNullableHeap, Root, RootedReference};
use dom::bindings::utils::reflect_dom_object;
use dom::event::{Event, EventBubbles, EventCancelable, EventTypeId};
use dom::eventtarget::EventTarget;
use dom::uievent::{UIEvent, UIEventTypeId};
use dom::window::Window;
use std::cell::Cell;
use std::default::Default;
use util::prefs;
use util::str::DOMString;
#[dom_struct]
pub struct MouseEvent {
    /// The inherited UIEvent state (view, detail, ...).
    uievent: UIEvent,
    /// Pointer coordinates relative to the screen.
    screen_x: Cell<i32>,
    screen_y: Cell<i32>,
    /// Pointer coordinates relative to the viewport.
    client_x: Cell<i32>,
    client_y: Cell<i32>,
    /// Modifier-key state at the time of the event.
    ctrl_key: Cell<bool>,
    shift_key: Cell<bool>,
    alt_key: Cell<bool>,
    meta_key: Cell<bool>,
    /// Which mouse button changed state (0 = primary).
    button: Cell<i16>,
    /// Secondary event target (e.g. the element being left on mouseout).
    related_target: MutNullableHeap<JS<EventTarget>>,
}
impl MouseEventDerived for Event {
    /// Whether this Event's concrete type is MouseEvent (used for downcasting).
    fn is_mouseevent(&self) -> bool {
        *self.type_id() == EventTypeId::UIEvent(UIEventTypeId::MouseEvent)
    }
}
impl MouseEvent {
    /// Creates the plain Rust state for a MouseEvent with all fields at
    /// their default (zero/false/unset) values.
    fn new_inherited() -> MouseEvent {
        MouseEvent {
            uievent: UIEvent::new_inherited(UIEventTypeId::MouseEvent),
            screen_x: Cell::new(0),
            screen_y: Cell::new(0),
            client_x: Cell::new(0),
            client_y: Cell::new(0),
            ctrl_key: Cell::new(false),
            shift_key: Cell::new(false),
            alt_key: Cell::new(false),
            meta_key: Cell::new(false),
            button: Cell::new(0),
            related_target: Default::default(),
        }
    }
    /// Allocates an uninitialized MouseEvent and reflects it into
    /// `window`'s JS global.
    pub fn new_uninitialized(window: &Window) -> Root<MouseEvent> {
        reflect_dom_object(box MouseEvent::new_inherited(),
                           GlobalRef::Window(window),
                           MouseEventBinding::Wrap)
    }
    /// Creates a MouseEvent and fully initializes it via `InitMouseEvent`.
    pub fn new(window: &Window,
               type_: DOMString,
               canBubble: EventBubbles,
               cancelable: EventCancelable,
               view: Option<&Window>,
               detail: i32,
               screenX: i32,
               screenY: i32,
               clientX: i32,
               clientY: i32,
               ctrlKey: bool,
               altKey: bool,
               shiftKey: bool,
               metaKey: bool,
               button: i16,
               relatedTarget: Option<&EventTarget>) -> Root<MouseEvent> {
        let ev = MouseEvent::new_uninitialized(window);
        ev.r().InitMouseEvent(type_, canBubble == EventBubbles::Bubbles, cancelable == EventCancelable::Cancelable,
                              view, detail,
                              screenX, screenY, clientX, clientY,
                              ctrlKey, altKey, shiftKey, metaKey,
                              button, relatedTarget);
        ev
    }
    /// The `new MouseEvent(type, init)` constructor exposed to script;
    /// translates the init dictionary's booleans into the internal enums.
    pub fn Constructor(global: GlobalRef,
                       type_: DOMString,
                       init: &MouseEventBinding::MouseEventInit) -> Fallible<Root<MouseEvent>> {
        let bubbles = if init.parent.parent.parent.bubbles {
            EventBubbles::Bubbles
        } else {
            EventBubbles::DoesNotBubble
        };
        let cancelable = if init.parent.parent.parent.cancelable {
            EventCancelable::Cancelable
        } else {
            EventCancelable::NotCancelable
        };
        let event = MouseEvent::new(global.as_window(), type_,
                                    bubbles,
                                    cancelable,
                                    init.parent.parent.view.r(),
                                    init.parent.parent.detail,
                                    init.screenX, init.screenY,
                                    init.clientX, init.clientY, init.parent.ctrlKey,
                                    init.parent.altKey, init.parent.shiftKey, init.parent.metaKey,
                                    init.button, init.relatedTarget.r());
        Ok(event)
    }
}
impl MouseEventMethods for MouseEvent {
// https://w3c.github.io/uievents/#widl-MouseEvent-screenX
// Horizontal pointer coordinate relative to the screen.
fn ScreenX(&self) -> i32 {
    self.screen_x.get()
}
// https://w3c.github.io/uievents/#widl-MouseEvent-screenY
// Vertical pointer coordinate relative to the screen.
fn ScreenY(&self) -> i32 {
    self.screen_y.get()
}
// https://w3c.github.io/uievents/#widl-MouseEvent-clientX
// Horizontal pointer coordinate relative to the viewport.
fn ClientX(&self) -> i32 {
    self.client_x.get()
}
// https://w3c.github.io/uievents/#widl-MouseEvent-clientY
// Vertical pointer coordinate relative to the viewport.
fn ClientY(&self) -> i32 {
    self.client_y.get()
}
<|fim▁hole|> // https://w3c.github.io/uievents/#widl-MouseEvent-ctrlKey
// Whether Control was held when the event fired.
fn CtrlKey(&self) -> bool {
    self.ctrl_key.get()
}
// https://w3c.github.io/uievents/#widl-MouseEvent-shiftKey
fn ShiftKey(&self) -> bool {
    self.shift_key.get()
}
// https://w3c.github.io/uievents/#widl-MouseEvent-altKey
fn AltKey(&self) -> bool {
    self.alt_key.get()
}
// https://w3c.github.io/uievents/#widl-MouseEvent-metaKey
fn MetaKey(&self) -> bool {
    self.meta_key.get()
}
// https://w3c.github.io/uievents/#widl-MouseEvent-button
// 0 = primary, 1 = auxiliary, 2 = secondary button.
fn Button(&self) -> i16 {
    self.button.get()
}
// https://w3c.github.io/uievents/#widl-MouseEvent-relatedTarget
// Returns the secondary target, if any (e.g. for mouseover/mouseout pairs).
fn GetRelatedTarget(&self) -> Option<Root<EventTarget>> {
    self.related_target.get().map(Root::from_rooted)
}
// See discussion at:
// - https://github.com/servo/servo/issues/6643
// - https://bugzilla.mozilla.org/show_bug.cgi?id=1186125
// This returns the same result as current gecko.
// https://developer.mozilla.org/en-US/docs/Web/API/MouseEvent/which
// Gated behind a preference; returns 0 when the pref is disabled or unset.
fn Which(&self) -> i32 {
    if prefs::get_pref("dom.mouseevent.which.enabled").as_boolean().unwrap_or(false) {
        (self.button.get() + 1) as i32
    } else {
        0
    }
}
// https://w3c.github.io/uievents/#widl-MouseEvent-initMouseEvent
// (Re)initializes every MouseEvent field from the given arguments.
fn InitMouseEvent(&self,
                  typeArg: DOMString,
                  canBubbleArg: bool,
                  cancelableArg: bool,
                  viewArg: Option<&Window>,
                  detailArg: i32,
                  screenXArg: i32,
                  screenYArg: i32,
                  clientXArg: i32,
                  clientYArg: i32,
                  ctrlKeyArg: bool,
                  altKeyArg: bool,
                  shiftKeyArg: bool,
                  metaKeyArg: bool,
                  buttonArg: i16,
                  relatedTargetArg: Option<&EventTarget>) {
    let event: &Event = EventCast::from_ref(self);
    // Re-initialization is a no-op while the event is being dispatched.
    if event.dispatching() {
        return;
    }
    // Initialize the inherited UIEvent portion first, then our own fields.
    let uievent: &UIEvent = UIEventCast::from_ref(self);
    uievent.InitUIEvent(typeArg, canBubbleArg, cancelableArg, viewArg, detailArg);
    self.screen_x.set(screenXArg);
    self.screen_y.set(screenYArg);
    self.client_x.set(clientXArg);
    self.client_y.set(clientYArg);
    self.ctrl_key.set(ctrlKeyArg);
    self.alt_key.set(altKeyArg);
    self.shift_key.set(shiftKeyArg);
    self.meta_key.set(metaKeyArg);
    self.button.set(buttonArg);
    self.related_target.set(relatedTargetArg.map(JS::from_ref));
}
}<|fim▁end|> | |
<|file_name|>cli.rs<|end_file_name|><|fim▁begin|>use clap::Clap;
#[derive(Clap, Debug)]
#[clap(
name = "Zombenum",
about = "A game about zombies and stuff",
author,
version
)]
pub struct CliArguments {<|fim▁hole|>}<|fim▁end|> | /// Verbose mode (-v, -vv, -vvv)
#[clap(short, long, parse(from_occurrences))]
pub verbose: u8, |
<|file_name|>whale.go<|end_file_name|><|fim▁begin|>package whale
import (
"fmt"
"strconv"
"strings"
"time"
"log"
)
import (
"open-tritium/dependencies/go-cache"
"open-tritium/dependencies/go-cache/arc"
"github.com/moovweb/gokogiri/xpath"
"github.com/moovweb/rubex"
"open-tritium/dependencies/steno"
"open-tritium"
"open-tritium/constants"
"open-tritium/protoface"
)
// Whale is the Tritium execution engine.  It owns shared ARC caches for
// compiled regexes and XPath expressions so repeated transforms reuse them.
type Whale struct {
	Debugger    steno.Debugger
	RegexpCache cache.Cache
	XPathCache  cache.Cache
}
// EngineContext carries all per-request state for one execution of a
// Tritium transform: loaded functions/types, collected output (exports,
// logs), the match/yield stacks used by the interpreter, and debugging
// bookkeeping (deadline, warnings, layer selection, etc.).
type EngineContext struct {
	Functions                []*Function
	Types                    []string
	Exports                  [][]string
	Logs                     []string
	ImportedFiles            []string
	Env                      map[string]string
	MatchStack               []string
	MatchShouldContinueStack []bool
	Yields                   []*YieldBlock
	*Whale
	protoface.Transform
	Rrules            []protoface.RewriteRule
	InnerReplacer     *rubex.Regexp
	HeaderContentType *rubex.Regexp
	// Debug info
	Filename           string
	HadError           bool
	Deadline           time.Time
	Mobjects           []MemoryObject
	MessagePath        string
	Customer           string
	Project            string
	InDebug            bool
	CurrentDoc         interface{}
	Warnings           int
	Prod               bool
	HtmlParsed         bool
	ActiveLayers       map[string]bool
	ActiveLayersString string
}
// OutputBufferSize is the initial output-buffer allocation.
const OutputBufferSize = 500 * 1024 //500KB
// defaultMobjects is the initial capacity of a context's memory-object list.
const defaultMobjects = 4
// TimeoutError is the error name used when the engine deadline is exceeded.
const TimeoutError = "EngineTimeout"
// TritiumLogRewritersAsImports, when set, logs the top-level rewriter
// object as if it were an import (see Run).
var TritiumLogRewritersAsImports = false
// NewEngine builds a Whale engine wired to the given debugger, with
// ARC caches (1000 entries each) for compiled regexes and XPath
// expressions, including their cleanup callbacks.
func NewEngine(debugger steno.Debugger) *Whale {
	engine := &Whale{
		Debugger:    debugger,
		RegexpCache: arc.NewARCache(1000),
		XPathCache:  arc.NewARCache(1000),
	}
	engine.RegexpCache.SetCleanFunc(CleanRegexpObject)
	engine.XPathCache.SetCleanFunc(CleanXpathExpObject)
	return engine
}
func NewEngineCtx(eng *Whale, vars map[string]string, transform protoface.Transform, rrules []protoface.RewriteRule, deadline time.Time, messagePath, customer, project string, activeLayers []string, inDebug bool) (ctx *EngineContext) {
ctx = &EngineContext{
Whale: eng,
Exports: make([][]string, 0),
Logs: make([]string, 0),
Env: vars,
Transform: transform,
Rrules: rrules,
MatchStack: make([]string, 0),
MatchShouldContinueStack: make([]bool, 0),
Yields: make([]*YieldBlock, 0),
HadError: false,
Deadline: deadline,
Mobjects: make([]MemoryObject, 0, defaultMobjects),
MessagePath: messagePath,
Customer: customer,
Project: project,
InDebug: inDebug,
}
ctx.ActiveLayers = make(map[string]bool)
for _, name := range activeLayers {
ctx.ActiveLayers[name] = true
}
ctx.ActiveLayersString = strings.Join(activeLayers, ",")
return
}
func (eng *Whale) Free() {
eng.RegexpCache.Reset()
eng.XPathCache.Reset()
}
func (eng *Whale) Run(transform protoface.Transform, rrules []protoface.RewriteRule, input interface{}, vars map[string]string, deadline time.Time, customer, project, messagePath string, activeLayers []string, inDebug bool) (exhaust *tritium.Exhaust) {
ctx := NewEngineCtx(eng, vars, transform, rrules, deadline, messagePath, customer, project, activeLayers, inDebug)
exhaust = &tritium.Exhaust{}
defer ctx.Free()
ctx.Yields = append(ctx.Yields, &YieldBlock{Vars: make(map[string]interface{})})
ctx.UsePackage(transform.IGetPkg())
scope := &Scope{Value: input.(string)}
obj := transform.IGetNthObject(0)
ctx.Filename = obj.IGetName()
if TritiumLogRewritersAsImports {
ctx.Whale.Debugger.LogImport(ctx.MessagePath, ctx.Filename, ctx.Filename, int(obj.IGetRoot().IGetLineNumber()))
}
ctx.RunInstruction(scope, obj.IGetRoot())
exhaust.Output = scope.Value.(string)
exhaust.Exports = ctx.Exports
exhaust.Logs = ctx.Logs
exhaust.HtmlParsed = ctx.HtmlParsed
return
}
func (eng *Whale) GetCacheStats() (int, int, int, int) {
return eng.RegexpCache.GetHitRate(), eng.RegexpCache.GetUsageRate(), eng.XPathCache.GetHitRate(), eng.XPathCache.GetUsageRate()
}
func (ctx *EngineContext) Free() {
for _, o := range ctx.Mobjects {
if o != nil {
o.Free()
}
}
}
func (ctx *EngineContext) RunInstruction(scope *Scope, ins protoface.Instruction) (returnValue interface{}) {
thisFile := ctx.Filename
defer func() {
//TODO Stack traces seem to get truncated on syslog...
if x := recover(); x != nil {
err, ok := x.(error)
errString := ""
if ok {
errString = err.Error()
} else {
errString = x.(string)
}
if errString != TimeoutError {
if ctx.HadError == false {
ctx.HadError = true
errString = errString + "\n" + constants.Instruction_InstructionType_name[ins.IGetType()] + " " + ins.IGetValue() + "\n\n\nTritium Stack\n=========\n\n"
}
// errString = errString + ctx.FileAndLine(ins) + "\n"
if len(thisFile) > 0 && thisFile != "__rewriter__" {
switch ins.IGetType() {
case constants.Instruction_IMPORT:
errString = errString + fmt.Sprintf("%s:%d", thisFile, ins.IGetLineNumber())
errString = errString + fmt.Sprintf(":\t@import %s\n", ctx.Transform.IGetNthObject(int(ins.IGetObjectId())).IGetName())
case constants.Instruction_FUNCTION_CALL:
errString = errString + fmt.Sprintf("%s:%d", thisFile, ins.IGetLineNumber())
if callee := ins.IGetValue(); len(callee) > 0 {
errString = errString + fmt.Sprintf(":\t%s\n", callee)
}
default:
// do nothing
}
}
}
log.Fatal(errString)
// panic(errString)
}
}()
ctx.Whale.Debugger.TrapInstruction(ctx.MessagePath, ctx.Filename, ctx.Env, ins, scope.Value, scope.Index, ctx.CurrentDoc)
if time.Now().After(ctx.Deadline) && !ctx.InDebug {
log.Fatal(TimeoutError)
// panic(TimeoutError)
}
indent := ""
for i := 0; i < len(ctx.Yields); i++ {
indent += "\t"
}
returnValue = ""
switch ins.IGetType() {
case constants.Instruction_BLOCK:
for i := 0; i < ins.INumChildren(); i++ {
child := ins.IGetNthChild(i)
returnValue = ctx.RunInstruction(scope, child)
}
case constants.Instruction_TEXT:
returnValue = ins.IGetValue()
case constants.Instruction_LOCAL_VAR:
name := ins.IGetValue()
vars := ctx.Vars()
if ins.INumArgs() > 0 {
vars[name] = ctx.RunInstruction(scope, ins.IGetNthArgument(0))
}
if ins.INumChildren() > 0 {
ts := &Scope{Value: ctx.Vars()[name]}
for i := 0; i < ins.INumChildren(); i++ {
child := ins.IGetNthChild(i)
ctx.RunInstruction(ts, child)
}
vars[name] = ts.Value<|fim▁hole|> returnValue = vars[name]
case constants.Instruction_IMPORT:
obj := ctx.IGetNthObject(int(ins.IGetObjectId()))
curFile := ctx.Filename
ctx.Filename = obj.IGetName()
ctx.Whale.Debugger.LogImport(ctx.MessagePath, ctx.Filename, curFile, int(ins.IGetLineNumber()))
root := obj.IGetRoot()
for i := 0; i < root.INumChildren(); i++ {
child := root.IGetNthChild(i)
ctx.RunInstruction(scope, child)
}
ctx.Whale.Debugger.LogImportDone(ctx.MessagePath, ctx.Filename, curFile, int(ins.IGetLineNumber()))
ctx.Filename = curFile
case constants.Instruction_FUNCTION_CALL:
fun := ctx.Functions[int(ins.IGetFunctionId())]
args := make([]interface{}, ins.INumArgs())
for i := 0; i < len(args); i++ {
argIns := ins.IGetNthArgument(i)
args[i] = ctx.RunInstruction(scope, argIns)
}
if fun.IGetBuiltIn() {
curFile := ctx.Filename
if f := builtInFunctions[fun.Name]; f != nil {
returnValue = f(ctx, scope, ins, args)
if returnValue == nil {
returnValue = ""
}
} else {
log.Fatal("missing function: " + fun.IGetName())
// panic("missing function: " + fun.IGetName())
}
ctx.Filename = curFile
} else {
// We are using a user-defined function
//println("Resetting localvar")
// Setup the new local var
vars := make(map[string]interface{}, len(args))
for i := 0; i < fun.INumArgs(); i++ {
arg := fun.IGetNthArg(i)
vars[arg.IGetName()] = args[i]
}
yieldBlock := &YieldBlock{
Ins: ins,
Vars: vars,
Filename: ctx.Filename,
}
// PUSH!
ctx.PushYieldBlock(yieldBlock)
curFile := ctx.Filename
ctx.Filename = fun.IGetFilename()
// if it's a user-called function, save the curfile:linenumber
// Are we going to need a stack here? --A.L.
if !ctx.Debugger.IsProd() && ins.IGetIsUserCalled() == true {
ctx.Env[isUserCalledEnvKey] = fmt.Sprintf("%s:%d", curFile, ins.IGetLineNumber())
}
for i := 0; i < fun.IGetInstruction().INumChildren(); i++ {
child := fun.IGetInstruction().IGetNthChild(i)
returnValue = ctx.RunInstruction(scope, child)
}
if ins.IGetIsUserCalled() == true {
delete(ctx.Env, isUserCalledEnvKey)
}
ctx.Filename = curFile
// POP!
ctx.PopYieldBlock()
}
}
return
}
func (ctx *EngineContext) ShouldContinue() (result bool) {
if len(ctx.MatchShouldContinueStack) > 0 {
result = ctx.MatchShouldContinueStack[len(ctx.MatchShouldContinueStack)-1]
} else {
result = false
}
return
}
func (ctx *EngineContext) MatchTarget() string {
return ctx.MatchStack[len(ctx.MatchStack)-1]
}
func (ctx *EngineContext) PushYieldBlock(b *YieldBlock) {
ctx.Yields = append(ctx.Yields, b)
}
func (ctx *EngineContext) PopYieldBlock() (b *YieldBlock) {
num := len(ctx.Yields)
if num > 0 {
b = ctx.Yields[num-1]
ctx.Yields = ctx.Yields[:num-1]
}
return
}
func (ctx *EngineContext) HasYieldBlock() bool {
return len(ctx.Yields) > 0
}
func (ctx *EngineContext) TopYieldBlock() (b *YieldBlock) {
num := len(ctx.Yields)
if num > 0 {
b = ctx.Yields[num-1]
}
return
}
func (ctx *EngineContext) Vars() map[string]interface{} {
b := ctx.TopYieldBlock()
if b != nil {
return b.Vars
}
return nil
}
func (ctx *EngineContext) FileAndLine(ins protoface.Instruction) string {
lineNum := strconv.Itoa(int(ins.IGetLineNumber()))
return (ctx.Filename + ":" + lineNum)
}
func (ctx *EngineContext) UsePackage(pkg protoface.Package) {
pkgTypes := pkg.IGetTypes()
ctx.Types = make([]string, len(pkgTypes))
for i, t := range pkgTypes {
ctx.Types[i] = t.IGetName()
}
pkgFunctions := pkg.IGetFunctions()
ctx.Functions = make([]*Function, len(pkgFunctions))
for i, f := range pkgFunctions {
ns := f.IGetNamespace()
if len(ns) == 0 {
ns = "tritium"
}
name := ns + "." + f.IGetName()
// for _, a := range f.Args {
// typeString := ctx.Types[int(null.GetInt32(a.TypeId))]
// name = name + "." + typeString
// }
for i := 0; i < f.INumArgs(); i++ {
a := f.IGetNthArg(i)
typeString := ctx.Types[int(a.IGetTypeId())]
name = name + "." + typeString
}
fun := &Function{
Name: name,
Function: f,
}
ctx.Functions[i] = fun
}
}
func (ctx *EngineContext) GetRegexp(pattern, options string) (r *rubex.Regexp) {
sig := pattern + "/" + options
object, err := ctx.RegexpCache.Get(sig)
if err != nil {
mode := rubex.ONIG_OPTION_DEFAULT
if strings.Index(options, "i") >= 0 {
mode = rubex.ONIG_OPTION_IGNORECASE
}
if strings.Index(options, "m") >= 0 {
mode = rubex.ONIG_OPTION_MULTILINE
}
var err error
r, err = rubex.NewRegexp(pattern, mode)
if err == nil {
//ctx.AddMemoryObject(r)
ctx.RegexpCache.Set(sig, &RegexpObject{Regexp: r})
} else {
log.Fatal(fmt.Sprintf("%s: /%s/%s", err.Error(), pattern, options))
// panic(fmt.Sprintf("%s: /%s/%s", err.Error(), pattern, options))
}
return r
}
return object.(*RegexpObject).Regexp
}
func (ctx *EngineContext) GetXpathExpr(p string) (e *xpath.Expression) {
object, err := ctx.XPathCache.Get(p)
if err != nil {
e = xpath.Compile(p)
if e != nil {
//ctx.AddMemoryObject(e)
ctx.XPathCache.Set(p, &XpathExpObject{Expression: e})
} else {
ctx.Debugger.LogTritiumErrorMessage(ctx.Customer, ctx.Project, ctx.Env, ctx.MessagePath, "Invalid XPath used: "+p)
}
return e
}
return object.(*XpathExpObject).Expression
}
func (ctx *EngineContext) AddExport(exports []string) {
ctx.Exports = append(ctx.Exports, exports)
}
func (ctx *EngineContext) AddLog(log string) int {
//ctx.Log.Info("TRITIUM: " + log)
index := len(ctx.Logs)
ctx.Logs = append(ctx.Logs, log)
return index
}
func (ctx *EngineContext) UpdateLog(index int, log string) {
//ctx.Log.Info("TRITIUM: " + log)
if index >= 0 && index < len(ctx.Logs) {
ctx.Logs[index] = log
}
}
func (ctx *EngineContext) SetEnv(key, val string) {
ctx.Env[key] = val
}
func (ctx *EngineContext) GetEnv(key string) (val string) {
val = ctx.Env[key]
return
}
func (ctx *EngineContext) SetVar(key string, val interface{}) {
b := ctx.TopYieldBlock()
if b != nil {
b.Vars[key] = val
}
}
func (ctx *EngineContext) GetVar(key string) (val interface{}) {
b := ctx.TopYieldBlock()
if b != nil {
val = b.Vars[key]
}
return
}
func (ctx *EngineContext) GetInnerReplacer() (r *rubex.Regexp) {
return ctx.GetRegexp(`[\\$](\d)`, "i")
}
func (ctx *EngineContext) GetHeaderContentTypeRegex() (r *rubex.Regexp) {
return ctx.GetRegexp(`<meta\s+http-equiv="content-type"\s+content="(.*?)"`, "i")
}
func (ctx *EngineContext) GetOutputBuffer() (b []byte) {
//b = ctx.OutputBuffer
return
}
func (ctx *EngineContext) PushMatchStack(match string) {
ctx.MatchStack = append(ctx.MatchStack, match)
}
func (ctx *EngineContext) PopMatchStack() (match string) {
if num := len(ctx.MatchStack); num > 0 {
match = ctx.MatchStack[num-1]
ctx.MatchStack = ctx.MatchStack[:num-1]
}
return
}
func (ctx *EngineContext) PushShouldContinueStack(cont bool) {
ctx.MatchShouldContinueStack = append(ctx.MatchShouldContinueStack, cont)
}
func (ctx *EngineContext) PopShouldContinueStack() (cont bool) {
if num := len(ctx.MatchShouldContinueStack); num > 0 {
cont = ctx.MatchShouldContinueStack[num-1]
ctx.MatchShouldContinueStack = ctx.MatchShouldContinueStack[:num-1]
}
return
}
func (ctx *EngineContext) SetShouldContinue(cont bool) {
if num := len(ctx.MatchShouldContinueStack); num > 0 {
ctx.MatchShouldContinueStack[num-1] = cont
}
}
func (ctx *EngineContext) AddMemoryObject(o MemoryObject) {
ctx.Mobjects = append(ctx.Mobjects, o)
}<|fim▁end|> | } |
<|file_name|>config.rs<|end_file_name|><|fim▁begin|>use errors::*;
use modules::{centerdevice, pocket, slack};
use std::fs::File;
use std::io::Read;
use std::path::Path;
use toml;
#[derive(Debug, Deserialize)]
#[serde(tag = "format")]
#[derive(PartialOrd, PartialEq, Eq)]
#[derive(Clone, Copy)]<|fim▁hole|> JSON,
HUMAN,
}
impl<'a> From<&'a str> for OutputFormat {
fn from(format: &'a str) -> Self {
let format_sane: &str = &format.to_string().to_uppercase();
match format_sane {
"JSON" => OutputFormat::JSON,
_ => OutputFormat::HUMAN
}
}
}
#[derive(Debug, Deserialize)]
#[serde(tag = "verbosity")]
#[derive(PartialOrd, PartialEq, Eq)]
#[derive(Clone, Copy)]
pub enum Verbosity {
VERBOSE = 1,
NORMAL = 2,
QUIET = 3,
}
#[derive(Debug, Deserialize)]
pub struct GeneralConfig {
pub cache_dir: String,
pub output_format: OutputFormat,
pub verbosity: Verbosity,
}
#[derive(Debug, Deserialize)]
pub struct Config {
pub general: GeneralConfig,
pub centerdevice: centerdevice::CenterDeviceConfig,
pub pocket: pocket::PocketConfig,
pub slack: slack::SlackConfig,
}
impl Config {
pub fn from_file(file_path: &Path) -> Result<Config> {
let mut config_file = File::open(file_path).chain_err(|| "Could not open config file.")?;
let mut config_content = String::new();
config_file.read_to_string(&mut config_content).chain_err(|| "Could not read config file.")?;
let config: Config = toml::from_str(&config_content).chain_err(|| "Could not parse config file.")?;
Ok(config)
}
}<|fim▁end|> | pub enum OutputFormat { |
<|file_name|>linktree-section.tsx<|end_file_name|><|fim▁begin|>import * as React from 'react';
import clsx from 'clsx';
import { LinktreeCard } from '.';
import { LinktreeItem } from '~/lib/data/linktree';
export interface LinktreeSectionProps {
className?: string;
style?: React.CSSProperties;
category: string;
items: LinktreeItem[];
}
export function LinktreeSection({ className, style, category, items }: LinktreeSectionProps) {
return (
<div className={clsx('space-y-9', className)} style={style}>
<h2 className="text-2xl sm:text-3xl lg:text-4xl font-semibold">{category}</h2>
<ul className="space-y-4 list-none">
{items.map(item => (
<LinktreeCard key={item.url} item={item} />
))}
</ul><|fim▁hole|><|fim▁end|> | </div>
);
} |
<|file_name|>app_session.py<|end_file_name|><|fim▁begin|>import re
from bs4 import BeautifulSoup
from nose.tools import assert_equal, assert_in, assert_true
import pages
import requests
APP_URL = 'https://www.github.com'
ADMIN_CREDENTIALS = {'username': '[email protected]', 'password': 'pk$321'}
ROOT_CREDENTIALS = {'username': 'root', 'password': '123456'}
API_URLS_MAP = {
'login': '/api/login',
'logout': '/api/logout'
}
_admin_session = None
def get_requests_app_cookies(credentials):
s = _get_logged_session(credentials)
return s.cookies
def get_url(url_path, app_url=APP_URL):
return ''.join([app_url, url_path])
def _get_logged_session(credentials):
url = get_url(API_URLS_MAP['login'])<|fim▁hole|> }
r = s.post(url, data=payload, verify=False)
assert_equal(r.status_code, 200)
assert_true(r.json()['data']['isAuthenticated'])
return s
def get_csrf_token(response, on_form=False):
response_content = response.text
csrf_pattern = re.compile('<meta name="csrf-token" content="(.*?)">')
if on_form:
csrf_pattern = re.compile("<input type='hidden' name='csrfmiddlewaretoken' value='(.*?)'")
return csrf_pattern.findall(response_content)[0]
def _get_data_key(source_name, payload, response):
name_key_source_map = {
'page': '[name]',
'partner': '[name]',
'product': '[title]',
}
key_part = name_key_source_map[source_name]
name = payload[[k for k in payload.keys() if key_part in k][0]]
soup = BeautifulSoup(response.text)
trs = soup.findAll(lambda tag: tag.name == 'tr' and 'data-key' in tag.attrs)
tr = [tr for tr in trs if name in tr.text][0]
return tr['data-key']<|fim▁end|> | s = requests.Session()
payload = {
'email': credentials['username'],
'password': credentials['password'] |
<|file_name|>demo.html.0.js<|end_file_name|><|fim▁begin|>Polymer('selection-example', {
itemTapAction: function(e, detail, sender) {
this.$.selection.select(e.target);
},
selectAction: function(e, detail, sender) {
detail.item.classList.toggle('selected', detail.isSelected);<|fim▁hole|><|fim▁end|> | }
}); |
<|file_name|>base_module.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
""" MultiQC modules base class, contains helper functions """
from __future__ import print_function
from collections import OrderedDict
import io
import json
import mimetypes
import os
import random
import logging
from multiqc import config
logger = logging.getLogger(__name__)
letters = 'abcdefghijklmnopqrstuvwxyz'
class BaseMultiqcModule(object):
def __init__(self, name='base', anchor='base', target='',href='', info='', extra=''):
self.name = name
self.anchor = anchor
if not target:
target = self.name
self.intro = '<p><a href="{0}" target="_blank">{1}</a> {2}</p>{3}'.format(
href, target, info, extra
)
def find_log_files(self, fn_match=None, contents_match=None, filehandles=False):
"""
Search the analysis directory for log files of interest. Can take either a filename
suffix or a search string to return only log files that contain relevant info.
:param fn_match: Optional string or list of strings. Filename suffixes to search for.
:param contents_match: Optional string or list of strings to look for in file.
NB: Both searches return file if *any* of the supplied strings are matched.
:param filehandles: Set to true to return a file handle instead of slurped file contents
:return: Yields a set with two items - a sample name generated from the filename
and either the file contents or file handle for the current matched file.
As yield is used, the function can be iterated over without
"""
for root, dirnames, filenames in os.walk(config.analysis_dir, followlinks=True):
for fn in filenames:
# Make a sample name from the filename
s_name = self.clean_s_name(fn, root)
# Make search strings into lists if a string is given
if type(fn_match) is str:
fn_match = [fn_match]
if type(contents_match) is str:
contents_match = [contents_match]
# Search for file names ending in a certain string
readfile = False
if fn_match is not None:
for m in fn_match:
if m in fn:
readfile = True
break
else:
readfile = True
# Limit search to files under 1MB to avoid 30GB FastQ files etc.
try:
filesize = os.path.getsize(os.path.join(root,fn))
except (IOError, OSError, ValueError, UnicodeDecodeError):
log.debug("Couldn't read file when looking for output: {}".format(fn))
readfile = False
else:
if filesize > 1000000:
readfile = False
# Use mimetypes to exclude binary files where possible
(ftype, encoding) = mimetypes.guess_type(os.path.join(root, fn))
if encoding is not None:
readfile = False # eg. gzipped files
if ftype is not None and ftype.startswith('text') is False:
readfile = False # eg. images - 'image/jpeg'
if readfile:
try:
with io.open (os.path.join(root,fn), "r", encoding='utf-8') as f:
# Search this file for our string of interest
returnfile = False
if contents_match is not None:
for line in f:
for m in contents_match:
if m in line:
returnfile = True
break
f.seek(0)
else:
returnfile = True
# Give back what was asked for. Yield instead of return
# so that this function can be used as an interator
# without loading all files at once.
if returnfile:
if filehandles:
yield {'s_name': s_name, 'f': f, 'root': root, 'fn': fn}
else:
yield {'s_name': s_name, 'f': f.read(), 'root': root, 'fn': fn}
except (IOError, OSError, ValueError, UnicodeDecodeError):
logger.debug("Couldn't read file when looking for output: {}".format(fn))<|fim▁hole|> back to a clean sample name. Somewhat arbitrary.
:param s_name: The sample name to clean
:param root: The directory path that this file is within
:param prepend_dirs: boolean, whether to prepend dir name to s_name
:param trimmed: boolean, remove common trimming suffixes from name?
:return: The cleaned sample name, ready to be used
"""
# Split then take first section to remove everything after these matches
for ext in config.fn_clean_exts:
s_name = s_name.split(ext ,1)[0]
if config.prepend_dirs:
s_name = "{} | {}".format(root.replace(os.sep, ' | '), s_name).lstrip('. | ')
return s_name
def plot_xy_data(self, data, config={}, original_plots=[]):
""" Plot a line graph with X,Y data. See CONTRIBUTING.md for
further instructions on use.
:param data: 2D dict, first keys as sample names, then x:y data pairs
:param original_plots: optional list of dicts with keys 's_name' and 'img_path'
:param config: optional dict with config key:value pairs. See CONTRIBUTING.md
:param original_plots: optional list specifying original plot images. Each dict
should have a key 's_name' and 'img_path'
:return: HTML and JS, ready to be inserted into the page
"""
# Given one dataset - turn it into a list
if type(data) is not list:
data = [data]
# Generate the data dict structure expected by HighCharts series
plotdata = list()
for d in data:
thisplotdata = list()
for s in sorted(d.keys()):
pairs = list()
maxval = 0
for k in sorted(d[s].keys()):
pairs.append([k, d[s][k]])
maxval = max(maxval, d[s][k])
if maxval > 0 or config.get('hide_empty') is not True:
this_series = { 'name': s, 'data': pairs }
try:
this_series['color'] = config['colors'][s]
except: pass
thisplotdata.append(this_series)
plotdata.append(thisplotdata)
# Build the HTML for the page
if config.get('id') is None:
config['id'] = 'mqc_hcplot_'+''.join(random.sample(letters, 10))
html = ''
# Buttons to cycle through different datasets
if len(plotdata) > 1:
html += '<div class="btn-group switch_group">\n'
for k, p in enumerate(plotdata):
active = 'active' if k == 0 else ''
try: name = config['data_labels'][k]['name']
except: name = k+1
try: ylab = 'data-ylab="{}"'.format(config['data_labels'][k]['ylab'])
except: ylab = 'data-ylab="{}"'.format(name) if name != k+1 else ''
html += '<button class="btn btn-default btn-sm {a}" data-action="set_data" {y} data-newdata="{id}_datasets[{k}]" data-target="#{id}">{n}</button>\n'.format(a=active, id=config['id'], n=name, y=ylab, k=k)
html += '</div>\n\n'
# Markup needed if we have the option of clicking through to original plot images
if len(original_plots) > 0:
config['tt_label'] = 'Click to show original plot.<br>{}'.format(config.get('tt_label', '{point.x}: {point.y:.2f}'))
if len(original_plots) > 1:
next_prev_buttons = '<div class="clearfix"><div class="btn-group btn-group-sm"> \n\
<a href="#{prev}" class="btn btn-default original_plot_prev_btn" data-target="#{id}">« Previous</a> \n\
<a href="#{next}" class="btn btn-default original_plot_nxt_btn" data-target="#{id}">Next »</a> \n\
</div></div>'.format(id=config['id'], prev=original_plots[-1]['s_name'], next=original_plots[1]['s_name'])
else:
next_prev_buttons = ''
html += '<p class="text-muted instr">Click to show original FastQC plot.</p>\n\
<div id="{id}_wrapper" class="hc-plot-wrapper"> \n\
<div class="showhide_orig" style="display:none;"> \n\
<h4><span class="s_name">{n}</span></h4> \n\
{b} <img data-toggle="tooltip" title="Click to return to overlay plot" class="original-plot" src="{f}"> \n\
</div>\n\
<div id="{id}" class="hc-plot"></div> \n\
</div>'.format(id=config['id'], b=next_prev_buttons, n=original_plots[0]['s_name'], f=original_plots[0]['img_path'])
orig_plots = 'var {id}_orig_plots = {d}; \n'.format(id=config['id'], d=json.dumps(original_plots))
config['orig_click_func'] = True # Javascript prints the click function
# Regular plots (no original images)
else:
html += '<div id="{id}" class="hc-plot hc-line-plot"></div> \n'.format(id=config['id'])
orig_plots = ''
# Javascript with data dump
html += '<script type="text/javascript"> \n\
var {id}_datasets = {d}; \n\
{o} \
$(function () {{ plot_xy_line_graph("#{id}", {id}_datasets[0], {c}); }}); \n\
</script>'.format(id=config['id'], d=json.dumps(plotdata), c=json.dumps(config), o=orig_plots);
return html
def plot_bargraph (self, data, cats=None, config={}):
""" Plot a horizontal bar graph. Expects a 2D dict of sample
data. Also can take info about categories. There are quite a
few variants of how to use this function, see CONTRIBUTING.md
for documentation and examples.
:param data: 2D dict, first keys as sample names, then x:y data pairs
Can supply a list of dicts and will have buttons to switch
:param cats: optional list, dict or OrderedDict with plot categories
:param config: optional dict with config key:value pairs
:return: HTML and JS, ready to be inserted into the page
"""
# Given one dataset - turn it into a list
if type(data) is not list:
data = [data]
# Check we have a list of cats
if type(cats) is not list or type(cats[0]) is str:
cats = [cats]
# Check that we have cats at all - find them from the data
for idx, cat in enumerate(cats):
if cats[idx] is None:
cats[idx] = list(set(k for s in data[idx].keys() for k in data[idx][s].keys() ))
# Given a list of cats - turn it into a dict
for idx, cat in enumerate(cats):
if type(cat) is list:
newcats = OrderedDict()
for c in cat:
newcats[c] = {'name': c}
cats[idx] = newcats
# Parse the data into a HighCharts friendly format
plotsamples = list()
plotdata = list()
for idx, d in enumerate(data):
hc_samples = sorted(list(d.keys()))
hc_data = list()
for c in cats[idx].keys():
thisdata = list()
for s in hc_samples:
thisdata.append(d[s][c])
if max(thisdata) > 0:
thisdict = { 'name': cats[idx][c]['name'], 'data': thisdata }
if 'color' in cats[idx][c]:
thisdict['color'] = cats[idx][c]['color']
hc_data.append(thisdict)
plotsamples.append(hc_samples)
plotdata.append(hc_data)
# Build the HTML
if config.get('id') is None:
config['id'] = 'mqc_hcplot_'+''.join(random.sample(letters, 10))
html = ''
# Counts / Percentages Switch
if config.get('cpswitch') is not False:
if config.get('cpswitch_c_active', True) is True:
c_active = 'active'
p_active = ''
else:
c_active = ''
p_active = 'active'
config['stacking'] = 'percent'
c_label = config.get('cpswitch_counts_label', 'Counts')
p_label = config.get('cpswitch_percent_label', 'Percentages')
html += '<div class="btn-group switch_group"> \n\
<button class="btn btn-default btn-sm {c_a}" data-action="set_numbers" data-target="#{id}">{c_l}</button> \n\
<button class="btn btn-default btn-sm {p_a}" data-action="set_percent" data-target="#{id}">{p_l}</button> \n\
</div> '.format(id=config['id'], c_a=c_active, p_a=p_active, c_l=c_label, p_l=p_label)
if len(plotdata) > 1:
html += ' '
# Buttons to cycle through different datasets
if len(plotdata) > 1:
html += '<div class="btn-group switch_group">\n'
for k, p in enumerate(plotdata):
active = 'active' if k == 0 else ''
try: name = config['data_labels'][k]
except: name = k+1
try: ylab = 'data-ylab="{}"'.format(config['data_labels'][k]['ylab'])
except: ylab = 'data-ylab="{}"'.format(name) if name != k+1 else ''
html += '<button class="btn btn-default btn-sm {a}" data-action="set_data" {y} data-newdata="{id}_datasets[{k}]" data-target="#{id}">{n}</button>\n'.format(a=active, id=config['id'], n=name, y=ylab, k=k)
html += '</div>\n\n'
# Plot and javascript function
html += '<div id="{id}" class="hc-plot hc-bar-plot"></div> \n\
<script type="text/javascript"> \n\
var {id}_samples = {s}; \n\
var {id}_datasets = {d}; \n\
$(function () {{ plot_stacked_bar_graph("#{id}", {id}_samples[0], {id}_datasets[0], {c}); }}); \
</script>'.format(id=config['id'], s=json.dumps(plotsamples), d=json.dumps(plotdata), c=json.dumps(config));
return html
def write_csv_file(self, data, fn):
with io.open (os.path.join(config.output_dir, 'report_data', fn), "w", encoding='utf-8') as f:
print( self.dict_to_csv( data ), file=f)
def dict_to_csv (self, d, delim="\t"):
""" Converts a dict to a CSV string
:param d: 2D dictionary, first keys sample names and second key
column headers
:param delim: optional delimiter character. Default: \t
:return: Flattened string, suitable to write to a CSV file.
"""
h = None # We make a list of keys to ensure consistent order
l = list()
for sn in sorted(d.keys()):
if h is None:
h = list(d[sn].keys())
l.append(delim.join([''] + h))
thesefields = [sn] + [ str(d[sn].get(k, '')) for k in h ]
l.append( delim.join( thesefields ) )
return ('\n'.join(l)).encode('utf-8', 'ignore').decode('utf-8')<|fim▁end|> |
def clean_s_name(self, s_name, root):
""" Helper function to take a long file name and strip it |
<|file_name|>expr-overflow-delimited.rs<|end_file_name|><|fim▁begin|>// rustfmt-overflow_delimited_expr: true
fn combine_blocklike() {
do_thing(|param| {
action();
foo(param)
});
do_thing(x, |param| {
action();
foo(param)
});
do_thing(
x,
// I'll be discussing the `action` with your para(m)legal counsel
|param| {
action();
foo(param)
},
);
do_thing(Bar {
x: value,
y: value2,
});
do_thing(x, Bar {
x: value,
y: value2,
});
do_thing(
x,
// Let me tell you about that one time at the `Bar`
Bar {
x: value,
y: value2,
},
);
do_thing(&[
value_with_longer_name,
value2_with_longer_name,
value3_with_longer_name,
value4_with_longer_name,
]);
do_thing(x, &[
value_with_longer_name,
value2_with_longer_name,
value3_with_longer_name,
value4_with_longer_name,
]);
do_thing(
x,
// Just admit it; my list is longer than can be folded on to one line
&[
value_with_longer_name,
value2_with_longer_name,
value3_with_longer_name,
value4_with_longer_name,
],<|fim▁hole|> );
do_thing(vec![
value_with_longer_name,
value2_with_longer_name,
value3_with_longer_name,
value4_with_longer_name,
]);
do_thing(x, vec![
value_with_longer_name,
value2_with_longer_name,
value3_with_longer_name,
value4_with_longer_name,
]);
do_thing(
x,
// Just admit it; my list is longer than can be folded on to one line
vec![
value_with_longer_name,
value2_with_longer_name,
value3_with_longer_name,
value4_with_longer_name,
],
);
do_thing(
x,
(1, 2, 3, |param| {
action();
foo(param)
}),
);
}
fn combine_struct_sample() {
let identity = verify(&ctx, VerifyLogin {
type_: LoginType::Username,
username: args.username.clone(),
password: Some(args.password.clone()),
domain: None,
})?;
}
fn combine_macro_sample() {
rocket::ignite()
.mount("/", routes![
http::auth::login,
http::auth::logout,
http::cors::options,
http::action::dance,
http::action::sleep,
])
.launch();
}<|fim▁end|> | |
<|file_name|>elasticsearch.go<|end_file_name|><|fim▁begin|>package analytics
import (
"fmt"
elastic "gopkg.in/olivere/elastic.v3"
)
//Elasticsearch stores configuration related to the AWS elastic cache isntance.
type Elasticsearch struct {
URL string
IndexName string
DocType string
client *elastic.Client
}
//Initialize initializes the analytics engine.
func (e *Elasticsearch) Initialize() error {
client, err := elastic.NewSimpleClient(elastic.SetURL(e.URL))
if err != nil {
return err
}
e.client = client
s := e.client.IndexExists(e.IndexName)
exists, err := s.Do()
if err != nil {
return err
}
if !exists {
s := e.client.CreateIndex(e.IndexName)
_, err := s.Do()
if err != nil {
return err
}
}<|fim▁hole|>
return nil
}
//SendAnalytics is used to send the data to the analytics engine.
func (e *Elasticsearch) SendAnalytics(data string) error {
fmt.Println(data)
_, err := e.client.Index().Index(e.IndexName).Type(e.DocType).BodyJson(data).Do()
if err != nil {
return err
}
return nil
}<|fim▁end|> | |
<|file_name|>RvHookupRounded.js<|end_file_name|><|fim▁begin|>"use strict";
<|fim▁hole|>});
exports.default = void 0;
var _createSvgIcon = _interopRequireDefault(require("./utils/createSvgIcon"));
var _jsxRuntime = require("react/jsx-runtime");
var _default = (0, _createSvgIcon.default)( /*#__PURE__*/(0, _jsxRuntime.jsx)("path", {
d: "M21 17h-1v-6c0-1.1-.9-2-2-2H7v-.74c0-.46-.56-.7-.89-.37L4.37 9.63c-.2.2-.2.53 0 .74l1.74 1.74c.33.33.89.1.89-.37V11h4v3H5c-.55 0-1 .45-1 1v2c0 1.1.9 2 2 2h2c0 1.66 1.34 3 3 3s3-1.34 3-3h7c.55 0 1-.45 1-1s-.45-1-1-1zm-10 3c-.55 0-1-.45-1-1s.45-1 1-1 1 .45 1 1-.45 1-1 1zm7-6h-4v-3h3c.55 0 1 .45 1 1v2zm-8-8h7v.74c0 .46.56.7.89.37l1.74-1.74c.2-.2.2-.53 0-.74l-1.74-1.74c-.33-.33-.89-.1-.89.37V4h-7c-.55 0-1 .45-1 1s.45 1 1 1z"
}), 'RvHookupRounded');
exports.default = _default;<|fim▁end|> | var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
Object.defineProperty(exports, "__esModule", {
value: true |
<|file_name|>mysql.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# Script pour telecharger City
import MySQLdb
file_ = open('city.csv', 'w')
file_.write ('city_id,city,country_id\n')
db = MySQLdb.connect( user='etudiants',
passwd='etudiants_1',
host='192.168.99.100',
db='sakila')
cur = db.cursor()
cur.execute("SELECT * FROM city")
for row in cur.fetchall():
file_.write(str(row[0])+','+ row[1]+','+ str(row[2])+'\n')
db.close()<|fim▁hole|><|fim▁end|> | file_.close() |
<|file_name|>common.rs<|end_file_name|><|fim▁begin|>//! Set of common types used through the app
use async_std::sync::Arc;
use std::fmt;
use std::slice::Iter;
use std::time::Instant;
use unicode_segmentation::UnicodeSegmentation;
pub type Result<T> = std::result::Result<T, Box<dyn std::error::Error + Send + Sync>>;
/// The Prompt represents the current query, the cursor position in that query and when it was
/// updated.
///
/// When the query in the prompt changes the timestamp is updated to reflect that is a fresh query.
/// This is then used to print to the UI only latest changes.
#[derive(Debug, Clone)]
pub struct Prompt {
query: Vec<char>,
cursor: usize,
timestamp: Instant,
}
impl Prompt {
pub fn add(&mut self, ch: char) {
self.query.insert(self.cursor, ch);
self.cursor += 1;
self.refresh();
}
pub fn backspace(&mut self) -> bool {
if self.cursor > 0 {
self.cursor -= 1;
self.query.remove(self.cursor);
self.refresh();
return true;
}
false
}
pub fn clear(&mut self) {
self.query.clear();
self.cursor = 0;
self.refresh();
}
pub fn left(&mut self) {
if self.cursor > 0 {
self.cursor -= 1;
}
}
pub fn right(&mut self) {
if self.cursor < self.len() {
self.cursor += 1;
}
}
pub fn cursor_at_end(&mut self) {
self.cursor = self.len();
}
pub fn cursor_at_start(&mut self) {
self.cursor = 0;
}
pub fn cursor_until_end(&self) -> usize {
if self.len() < self.cursor {
0
} else {
self.len() - self.cursor
}
}
pub fn as_string(&self) -> String {
self.query.iter().collect()
}
pub fn timestamp(&self) -> Instant {
self.timestamp
}<|fim▁hole|> self.query.len()
}
pub fn is_empty(&self) -> bool {
self.query.is_empty()
}
pub fn refresh(&mut self) {
self.timestamp = Instant::now();
}
}
impl From<&String> for Prompt {
fn from(string: &String) -> Self {
let query = string.chars().collect::<Vec<char>>();
let cursor = query.len();
Self {
query,
cursor,
..Default::default()
}
}
}
impl Default for Prompt {
fn default() -> Self {
Self {
timestamp: Instant::now(),
cursor: 0,
query: vec![],
}
}
}
/// The Arc version of Letters
pub type Text = Arc<Letters>;
/// Text type builder
#[derive(Debug, Clone)]
pub struct TextBuilder;
impl TextBuilder {
pub fn build(string: &str) -> Text {
let text: Letters = string.into();
Arc::new(text)
}
}
/// The collection of letters (Graphemes) of a string.
///
/// These letters are the core part of the fuzzy matching algorithm.
///
/// This type is not used directly but through the Text type,
/// which is an Arc wrapper around this type. We use Arc to reduce
/// the String allocations between tasks as much as possible.
#[derive(Debug, Clone)]
pub struct Letters {
string: String,
graphemes: Vec<String>,
graphemes_lw: Vec<String>,
}
impl Letters {
pub fn new(string: String) -> Self {
let graphemes = string.graphemes(true).map(String::from).collect::<Vec<_>>();
let graphemes_lw = graphemes
.iter()
.map(|s| s.to_lowercase())
.collect::<Vec<_>>();
Self {
string,
graphemes,
graphemes_lw,
}
}
pub fn len(&self) -> usize {
self.graphemes.len()
}
pub fn last_index(&self) -> usize {
let len = self.len();
if len == 0 {
0
} else {
len - 1
}
}
pub fn grapheme_at(&self, index: usize) -> &'_ str {
&self.graphemes[index]
}
pub fn lowercase_grapheme_at(&self, index: usize) -> &'_ str {
&self.graphemes_lw[index]
}
pub fn iter(&self) -> Iter<'_, String> {
self.graphemes.iter()
}
pub fn lowercase_iter(&self) -> Iter<'_, String> {
self.graphemes_lw.iter()
}
pub fn is_empty(&self) -> bool {
self.string.is_empty()
}
}
impl From<&str> for Letters {
fn from(string: &str) -> Self {
Self::new(String::from(string))
}
}
impl From<String> for Letters {
fn from(string: String) -> Self {
Self::new(string)
}
}
impl fmt::Display for Letters {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.string)
}
}<|fim▁end|> |
pub fn len(&self) -> usize { |
<|file_name|>naming-conventions.ts<|end_file_name|><|fim▁begin|>import pluralize = require("pluralize");
/**<|fim▁hole|> * conventions that are recommended for the json-api `type` key, and returns
* a singularized, PascalCase version of that name, per the naming conventions
* of Mongoose models.
*
* @param {string} typeName The json-api type string.
* @param {(string) => string} singularizer A function that singularizes its argument.
*/
export function getModelName(
typeName: string,
singularizer: typeof pluralize.singular = pluralize.singular.bind(pluralize)
) {
const words = typeName.split("-");
words[words.length - 1] = singularizer(words[words.length - 1]);
return words.map((it) => it.charAt(0).toUpperCase() + it.slice(1)).join("");
}
/**
* Takes a mongoose-style model name and returns the corresponding name according
* to the dasherized, pluralized conventions used by the json-api `type` key.
*
* @param {string} modelName The name of a model, in Mongoose's PascalCase format.
* @param {(string) => string} pluralizer A function that pluralizes its argument.
*/
export function getTypeName(
modelName: string,
pluralizer: typeof pluralize.plural = pluralize.plural.bind(pluralize)
) {
return pluralizer(
modelName
.replace(/([A-Z])/g, "-$1")
.slice(1)
.toLowerCase()
);
}<|fim▁end|> | * Takes a json-api type name, assumed to be using the dasherized, pluralized |
<|file_name|>makeConfig_spec.js<|end_file_name|><|fim▁begin|>const assert = require('assert');
const makeConfig = require('../../../core/util/makeConfig');
describe('make config', function () {
it('should pass the filter arg correctly', function () {
const actualConfig = makeConfig('init', { filter: true });
assert.strictEqual(actualConfig.args.filter, true);
});
it('should work without an option param', function () {<|fim▁hole|> const actualConfig = makeConfig('init');
assert.deepStrictEqual(actualConfig.args, {});
});
});<|fim▁end|> | |
<|file_name|>test_tokenization_rag.py<|end_file_name|><|fim▁begin|># Copyright 2020 The HuggingFace Team. All rights reserved.<|fim▁hole|>#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
import shutil
import tempfile
from unittest import TestCase
from transformers import BartTokenizer, BartTokenizerFast, DPRQuestionEncoderTokenizer, DPRQuestionEncoderTokenizerFast
from transformers.file_utils import is_datasets_available, is_faiss_available, is_torch_available
from transformers.models.bart.configuration_bart import BartConfig
from transformers.models.bert.tokenization_bert import VOCAB_FILES_NAMES as DPR_VOCAB_FILES_NAMES
from transformers.models.dpr.configuration_dpr import DPRConfig
from transformers.models.roberta.tokenization_roberta import VOCAB_FILES_NAMES as BART_VOCAB_FILES_NAMES
from transformers.testing_utils import require_faiss, require_tokenizers, require_torch, slow
if is_torch_available() and is_datasets_available() and is_faiss_available():
from transformers.models.rag.configuration_rag import RagConfig
from transformers.models.rag.tokenization_rag import RagTokenizer
@require_faiss
@require_torch
class RagTokenizerTest(TestCase):
def setUp(self):
self.tmpdirname = tempfile.mkdtemp()
self.retrieval_vector_size = 8
# DPR tok
vocab_tokens = [
"[UNK]",
"[CLS]",
"[SEP]",
"[PAD]",
"[MASK]",
"want",
"##want",
"##ed",
"wa",
"un",
"runn",
"##ing",
",",
"low",
"lowest",
]
dpr_tokenizer_path = os.path.join(self.tmpdirname, "dpr_tokenizer")
os.makedirs(dpr_tokenizer_path, exist_ok=True)
self.vocab_file = os.path.join(dpr_tokenizer_path, DPR_VOCAB_FILES_NAMES["vocab_file"])
with open(self.vocab_file, "w", encoding="utf-8") as vocab_writer:
vocab_writer.write("".join([x + "\n" for x in vocab_tokens]))
# BART tok
vocab = [
"l",
"o",
"w",
"e",
"r",
"s",
"t",
"i",
"d",
"n",
"\u0120",
"\u0120l",
"\u0120n",
"\u0120lo",
"\u0120low",
"er",
"\u0120lowest",
"\u0120newer",
"\u0120wider",
"<unk>",
]
vocab_tokens = dict(zip(vocab, range(len(vocab))))
merges = ["#version: 0.2", "\u0120 l", "\u0120l o", "\u0120lo w", "e r", ""]
self.special_tokens_map = {"unk_token": "<unk>"}
bart_tokenizer_path = os.path.join(self.tmpdirname, "bart_tokenizer")
os.makedirs(bart_tokenizer_path, exist_ok=True)
self.vocab_file = os.path.join(bart_tokenizer_path, BART_VOCAB_FILES_NAMES["vocab_file"])
self.merges_file = os.path.join(bart_tokenizer_path, BART_VOCAB_FILES_NAMES["merges_file"])
with open(self.vocab_file, "w", encoding="utf-8") as fp:
fp.write(json.dumps(vocab_tokens) + "\n")
with open(self.merges_file, "w", encoding="utf-8") as fp:
fp.write("\n".join(merges))
def get_dpr_tokenizer(self) -> DPRQuestionEncoderTokenizer:
return DPRQuestionEncoderTokenizer.from_pretrained(os.path.join(self.tmpdirname, "dpr_tokenizer"))
def get_bart_tokenizer(self) -> BartTokenizer:
return BartTokenizer.from_pretrained(os.path.join(self.tmpdirname, "bart_tokenizer"))
def tearDown(self):
shutil.rmtree(self.tmpdirname)
@require_tokenizers
def test_save_load_pretrained_with_saved_config(self):
save_dir = os.path.join(self.tmpdirname, "rag_tokenizer")
rag_config = RagConfig(question_encoder=DPRConfig().to_dict(), generator=BartConfig().to_dict())
rag_tokenizer = RagTokenizer(question_encoder=self.get_dpr_tokenizer(), generator=self.get_bart_tokenizer())
rag_config.save_pretrained(save_dir)
rag_tokenizer.save_pretrained(save_dir)
new_rag_tokenizer = RagTokenizer.from_pretrained(save_dir, config=rag_config)
self.assertIsInstance(new_rag_tokenizer.question_encoder, DPRQuestionEncoderTokenizerFast)
self.assertEqual(new_rag_tokenizer.question_encoder.get_vocab(), rag_tokenizer.question_encoder.get_vocab())
self.assertIsInstance(new_rag_tokenizer.generator, BartTokenizerFast)
self.assertEqual(new_rag_tokenizer.generator.get_vocab(), rag_tokenizer.generator.get_vocab())
@slow
def test_pretrained_token_nq_tokenizer(self):
tokenizer = RagTokenizer.from_pretrained("facebook/rag-token-nq")
input_strings = [
"who got the first nobel prize in physics",
"when is the next deadpool movie being released",
"which mode is used for short wave broadcast service",
"who is the owner of reading football club",
"when is the next scandal episode coming out",
"when is the last time the philadelphia won the superbowl",
"what is the most current adobe flash player version",
"how many episodes are there in dragon ball z",
"what is the first step in the evolution of the eye",
"where is gall bladder situated in human body",
"what is the main mineral in lithium batteries",
"who is the president of usa right now",
"where do the greasers live in the outsiders",
"panda is a national animal of which country",
"what is the name of manchester united stadium",
]
input_dict = tokenizer(input_strings)
self.assertIsNotNone(input_dict)
@slow
def test_pretrained_sequence_nq_tokenizer(self):
tokenizer = RagTokenizer.from_pretrained("facebook/rag-sequence-nq")
input_strings = [
"who got the first nobel prize in physics",
"when is the next deadpool movie being released",
"which mode is used for short wave broadcast service",
"who is the owner of reading football club",
"when is the next scandal episode coming out",
"when is the last time the philadelphia won the superbowl",
"what is the most current adobe flash player version",
"how many episodes are there in dragon ball z",
"what is the first step in the evolution of the eye",
"where is gall bladder situated in human body",
"what is the main mineral in lithium batteries",
"who is the president of usa right now",
"where do the greasers live in the outsiders",
"panda is a national animal of which country",
"what is the name of manchester united stadium",
]
input_dict = tokenizer(input_strings)
self.assertIsNotNone(input_dict)<|fim▁end|> | |
<|file_name|>edit.py<|end_file_name|><|fim▁begin|>"""Edit the RWhois data on the account."""
# :license: MIT, see LICENSE for more details.
import SoftLayer
from SoftLayer.CLI import environment
from SoftLayer.CLI import exceptions
import click
@click.command()
@click.option('--abuse', help='Set the abuse email address')
@click.option('--address1', help='Update the address 1 field')
@click.option('--address2', help='Update the address 2 field')
@click.option('--city', help='Set the city name')
@click.option('--company', help='Set the company name')
@click.option('--country', help='Set the two-letter country code')
@click.option('--firstname', help='Update the first name field')
@click.option('--lastname', help='Update the last name field')
@click.option('--postal', help='Set the postal code field')
@click.option('--public/--private',
default=None,
help='Flags the address as a public or private residence.')
@click.option('--state', help='Set the two-letter state code')
@environment.pass_env
def cli(env, abuse, address1, address2, city, company, country, firstname,
lastname, postal, public, state):
"""Edit the RWhois data on the account."""
mgr = SoftLayer.NetworkManager(env.client)
update = {
'abuse_email': abuse,
'address1': address1,
'address2': address2,
'company_name': company,
'city': city,
'country': country,
'first_name': firstname,
'last_name': lastname,
'postal_code': postal,<|fim▁hole|> }
if public is True:
update['private_residence'] = False
elif public is False:
update['private_residence'] = True
check = [x for x in update.values() if x is not None]
if not check:
raise exceptions.CLIAbort(
"You must specify at least one field to update.")
mgr.edit_rwhois(**update)<|fim▁end|> | 'state': state,
'private_residence': public, |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>use std::env;
use std::fs;
use std::process;
// ANCHOR: here
fn main() {
// --snip--
// ANCHOR_END: here
let args: Vec<String> = env::args().collect();
<|fim▁hole|> process::exit(1);
});
// ANCHOR: here
println!("Searching for {}", config.query);
println!("In file {}", config.filename);
run(config);
}
fn run(config: Config) {
let contents = fs::read_to_string(config.filename)
.expect("Something went wrong reading the file");
println!("With text:\n{}", contents);
}
// --snip--
// ANCHOR_END: here
struct Config {
query: String,
filename: String,
}
impl Config {
fn new(args: &[String]) -> Result<Config, &'static str> {
if args.len() < 3 {
return Err("not enough arguments");
}
let query = args[1].clone();
let filename = args[2].clone();
Ok(Config { query, filename })
}
}<|fim▁end|> | let config = Config::new(&args).unwrap_or_else(|err| {
println!("Problem parsing arguments: {}", err); |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>pub mod point;
pub mod rect;
pub use self::point::Point;
pub use self::rect::Rect;
pub type Pointi = Point<i32>;
pub type Pointf = Point<f32>;
pub type Recti = Rect<i32>;
pub type Rectf = Rect<f32>;<|fim▁end|> | mod types; |
<|file_name|>DefinitionService.js<|end_file_name|><|fim▁begin|>"use strict";
var SourceLine_1 = require("../source/SourceLine");
var Prop_1 = require("../entities/Prop");
var Method_1 = require("../entities/Method");
var MethodKind_1 = require("../kind/MethodKind");
var ProgramError_1 = require("../errors/ProgramError");
var ClassDefn_1 = require("../define/ClassDefn");
var StructDefn_1 = require("../define/StructDefn");
var CONST_1 = require("../CONST");
var PropQual_1 = require("../entities/PropQual");
var ParamParser_1 = require("../parser/ParamParser");
/**
* Created by Nidin Vinayakan on 4/7/2016.
*/
var DefinitionService = (function () {
function DefinitionService() {
}
/**
* Collect all definitions from the source code
* */
DefinitionService.prototype.collectDefinitions = function (filename, lines) {
var _this = this;
var defs = [];
var turboLines = [];
var i = 0;
var numLines = lines.length;
var line;
while (i < numLines) {
line = lines[i++];
if (!CONST_1.Matcher.START.test(line)) {
turboLines.push(new SourceLine_1.SourceLine(filename, i, line));
continue;
}
var kind = "";
var name_1 = "";
var inherit = "";
var lineNumber = i;
var m = null;
if (m = CONST_1.Matcher.STRUCT.exec(line)) {
kind = "struct";
name_1 = m[1];
}
else if (m = CONST_1.Matcher.CLASS.exec(line)) {
kind = "class";
name_1 = m[1];
inherit = m[2] ? m[2] : "";
}
else {
throw new ProgramError_1.ProgramError(filename, i, "Syntax error: Malformed definition line");
}
var properties = [];
var methods = [];
var in_method = false;
var mbody = null;
var method_type = MethodKind_1.MethodKind.Virtual;
var method_name = "";
var method_line = 0;
var method_signature = null;
// Do not check for duplicate names here since that needs to
// take into account inheritance.
while (i < numLines) {
line = lines[i++];
if (CONST_1.Matcher.END.test(line)) {
break;
}
if (m = CONST_1.Matcher.METHOD.exec(line.trim())) {
if (kind != "class") {
throw new ProgramError_1.ProgramError(filename, i, "@method is only allowed in classes");
}
if (in_method) {
methods.push(new Method_1.Method(method_line, method_type, method_name, method_signature, mbody));
}
in_method = true;
method_line = i;
method_type = (m[1] == "method" ? MethodKind_1.MethodKind.NonVirtual : MethodKind_1.MethodKind.Virtual);
method_name = m[2];
// Parse the signature. Just use the param parser for now,
// but note that what we get back will need postprocessing.
var pp = new ParamParser_1.ParamParser(filename, i, m[3], /* skip left paren */ 1);
var args = pp.allArgs();
args.shift(); // Discard SELF
// Issue #15: In principle there are two signatures here: there is the
// parameter signature, which we should keep intact in the
// virtual, and there is the set of arguments extracted from that,
// including any splat.
method_signature = args.map(function (x) {
return _this.parameterToArgument(filename, i, x);
});
mbody = [m[3]];
}
else if (m = CONST_1.Matcher.SPECIAL.exec(line.trim())) {
if (kind != "struct")
throw new ProgramError_1.ProgramError(filename, i, "@" + m[1] + " is only allowed in structs");
if (in_method)
methods.push(new Method_1.Method(method_line, method_type, method_name, method_signature, mbody));
method_line = i;
in_method = true;
switch (m[1]) {
case "get":
method_type = MethodKind_1.MethodKind.Get;
break;
case "set":
method_type = MethodKind_1.MethodKind.Set;
break;
}
method_name = "";
method_signature = null;
mbody = [m[2]];
}
else if (in_method) {
// TODO: if we're going to be collecting random cruft
// then blank and comment lines at the end of a method
// really should be placed at the beginning of the
// next method. Also see hack in pasteupTypes() that
// removes blank lines from the end of a method body.
mbody.push(line);
}
else if (m = CONST_1.Matcher.PROP.exec(line)) {
var qual = PropQual_1.PropQual.None;
switch (m[3]) {
case "synchronic":
qual = PropQual_1.PropQual.Synchronic;
break;
case "atomic":
qual = PropQual_1.PropQual.Atomic;
break;
}
properties.push(new Prop_1.Prop(i, m[1], qual, m[4] == "Array", m[2]));
}
else if (CONST_1.blank_re.test(line)) {
}
else
throw new ProgramError_1.ProgramError(filename, i, "Syntax error: Not a property or method: " + line);
}
if (in_method)
methods.push(new Method_1.Method(method_line, method_type, method_name, method_signature, mbody));
if (kind == "class")
defs.push(new ClassDefn_1.ClassDefn(filename, lineNumber, name_1, inherit, properties, methods, turboLines.length));
else
defs.push(new StructDefn_1.StructDefn(filename, lineNumber, name_1, properties, methods, turboLines.length));
}
return [defs, turboLines];
};
// The input is Id, Id:Blah, or ...Id. Strip any :Blah annotations.
DefinitionService.prototype.parameterToArgument = function (file, line, s) {
if (/^\s*(?:\.\.\.)[A-Za-z_$][A-Za-z0-9_$]*\s*$/.test(s))
return s;
var m = /^\s*([A-Za-z_\$][A-Za-z0-9_\$]*)\s*:?/.exec(s);
if (!m)
throw new ProgramError_1.ProgramError(file, line, "Unable to understand argument to virtual function: " + s);
return m[1];
};<|fim▁hole|>exports.DefinitionService = DefinitionService;
//# sourceMappingURL=DefinitionService.js.map<|fim▁end|> | return DefinitionService;
}()); |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//! Module containing functionality to compute array equality.
//! This module uses [ArrayData] and does not
//! depend on dynamic casting of `Array`.
use super::{
Array, ArrayData, BinaryOffsetSizeTrait, DecimalArray, FixedSizeBinaryArray,
GenericBinaryArray, GenericListArray, GenericStringArray, OffsetSizeTrait,
PrimitiveArray, StringOffsetSizeTrait, StructArray,
};
use crate::datatypes::{ArrowPrimitiveType, DataType, IntervalUnit};
mod boolean;
mod decimal;
mod dictionary;
mod fixed_binary;
mod fixed_list;
mod list;
mod null;
mod primitive;
mod structure;
mod utils;
mod variable_size;
// these methods assume the same type, len and null count.
// For this reason, they are not exposed and are instead used
// to build the generic functions below (`equal_range` and `equal`).
use boolean::boolean_equal;
use decimal::decimal_equal;
use dictionary::dictionary_equal;
use fixed_binary::fixed_binary_equal;
use fixed_list::fixed_list_equal;
use list::list_equal;
use null::null_equal;
use primitive::primitive_equal;
use structure::struct_equal;
use variable_size::variable_sized_equal;
impl PartialEq for dyn Array {
fn eq(&self, other: &Self) -> bool {
equal(self.data().as_ref(), other.data().as_ref())
}
}
impl<T: Array> PartialEq<T> for dyn Array {
fn eq(&self, other: &T) -> bool {
equal(self.data().as_ref(), other.data().as_ref())
}
}
impl<T: ArrowPrimitiveType> PartialEq for PrimitiveArray<T> {
fn eq(&self, other: &PrimitiveArray<T>) -> bool {
equal(self.data().as_ref(), other.data().as_ref())
}
}
impl<OffsetSize: StringOffsetSizeTrait> PartialEq for GenericStringArray<OffsetSize> {
fn eq(&self, other: &Self) -> bool {
equal(self.data().as_ref(), other.data().as_ref())
}
}
impl<OffsetSize: BinaryOffsetSizeTrait> PartialEq for GenericBinaryArray<OffsetSize> {
fn eq(&self, other: &Self) -> bool {
equal(self.data().as_ref(), other.data().as_ref())
}
}
impl PartialEq for FixedSizeBinaryArray {
fn eq(&self, other: &Self) -> bool {
equal(self.data().as_ref(), other.data().as_ref())
}
}
impl PartialEq for DecimalArray {
fn eq(&self, other: &Self) -> bool {
equal(self.data().as_ref(), other.data().as_ref())
}
}
impl<OffsetSize: OffsetSizeTrait> PartialEq for GenericListArray<OffsetSize> {
fn eq(&self, other: &Self) -> bool {
equal(self.data().as_ref(), other.data().as_ref())
}
}
impl PartialEq for StructArray {
fn eq(&self, other: &Self) -> bool {
equal(self.data().as_ref(), other.data().as_ref())
}
}
/// Compares the values of two [ArrayData] starting at `lhs_start` and `rhs_start` respectively
/// for `len` slots.
#[inline]
fn equal_values(
lhs: &ArrayData,
rhs: &ArrayData,
lhs_start: usize,
rhs_start: usize,
len: usize,
) -> bool {
match lhs.data_type() {
DataType::Null => null_equal(lhs, rhs, lhs_start, rhs_start, len),
DataType::Boolean => boolean_equal(lhs, rhs, lhs_start, rhs_start, len),
DataType::UInt8 => primitive_equal::<u8>(lhs, rhs, lhs_start, rhs_start, len),
DataType::UInt16 => primitive_equal::<u16>(lhs, rhs, lhs_start, rhs_start, len),
DataType::UInt32 => primitive_equal::<u32>(lhs, rhs, lhs_start, rhs_start, len),
DataType::UInt64 => primitive_equal::<u64>(lhs, rhs, lhs_start, rhs_start, len),
DataType::Int8 => primitive_equal::<i8>(lhs, rhs, lhs_start, rhs_start, len),
DataType::Int16 => primitive_equal::<i16>(lhs, rhs, lhs_start, rhs_start, len),
DataType::Int32 => primitive_equal::<i32>(lhs, rhs, lhs_start, rhs_start, len),
DataType::Int64 => primitive_equal::<i64>(lhs, rhs, lhs_start, rhs_start, len),
DataType::Float32 => primitive_equal::<f32>(lhs, rhs, lhs_start, rhs_start, len),
DataType::Float64 => primitive_equal::<f64>(lhs, rhs, lhs_start, rhs_start, len),
DataType::Date32(_)
| DataType::Time32(_)
| DataType::Interval(IntervalUnit::YearMonth) => {
primitive_equal::<i32>(lhs, rhs, lhs_start, rhs_start, len)
}
DataType::Date64(_)
| DataType::Interval(IntervalUnit::DayTime)
| DataType::Time64(_)
| DataType::Timestamp(_, _)
| DataType::Duration(_) => {
primitive_equal::<i64>(lhs, rhs, lhs_start, rhs_start, len)
}
DataType::Utf8 | DataType::Binary => {
variable_sized_equal::<i32>(lhs, rhs, lhs_start, rhs_start, len)
}
DataType::LargeUtf8 | DataType::LargeBinary => {
variable_sized_equal::<i64>(lhs, rhs, lhs_start, rhs_start, len)
}
DataType::FixedSizeBinary(_) => {
fixed_binary_equal(lhs, rhs, lhs_start, rhs_start, len)
}
DataType::Decimal(_, _) => decimal_equal(lhs, rhs, lhs_start, rhs_start, len),
DataType::List(_) => list_equal::<i32>(lhs, rhs, lhs_start, rhs_start, len),
DataType::LargeList(_) => list_equal::<i64>(lhs, rhs, lhs_start, rhs_start, len),
DataType::FixedSizeList(_, _) => {
fixed_list_equal(lhs, rhs, lhs_start, rhs_start, len)
}
DataType::Struct(_) => struct_equal(lhs, rhs, lhs_start, rhs_start, len),
DataType::Union(_) => unimplemented!("See ARROW-8576"),
DataType::Dictionary(data_type, _) => match data_type.as_ref() {
DataType::Int8 => dictionary_equal::<i8>(lhs, rhs, lhs_start, rhs_start, len),
DataType::Int16 => {
dictionary_equal::<i16>(lhs, rhs, lhs_start, rhs_start, len)
}
DataType::Int32 => {
dictionary_equal::<i32>(lhs, rhs, lhs_start, rhs_start, len)
}
DataType::Int64 => {
dictionary_equal::<i64>(lhs, rhs, lhs_start, rhs_start, len)
}
DataType::UInt8 => {
dictionary_equal::<u8>(lhs, rhs, lhs_start, rhs_start, len)
}
DataType::UInt16 => {
dictionary_equal::<u16>(lhs, rhs, lhs_start, rhs_start, len)
}
DataType::UInt32 => {
dictionary_equal::<u32>(lhs, rhs, lhs_start, rhs_start, len)
}
DataType::UInt64 => {
dictionary_equal::<u64>(lhs, rhs, lhs_start, rhs_start, len)
}
_ => unreachable!(),
},
DataType::Float16 => unreachable!(),
}
}
fn equal_range(
lhs: &ArrayData,
rhs: &ArrayData,
lhs_start: usize,
rhs_start: usize,
len: usize,
) -> bool {
utils::base_equal(lhs, rhs)
&& utils::equal_nulls(lhs, rhs, lhs_start, rhs_start, len)
&& equal_values(lhs, rhs, lhs_start, rhs_start, len)
}
/// Logically compares two [ArrayData].
/// Two arrays are logically equal if and only if:
/// * their data types are equal
/// * their lenghts are equal
/// * their null counts are equal<|fim▁hole|>/// * each of their items are equal
/// two items are equal when their in-memory representation is physically equal (i.e. same bit content).
/// The physical comparison depend on the data type.
/// # Panics
/// This function may panic whenever any of the [ArrayData] does not follow the Arrow specification.
/// (e.g. wrong number of buffers, buffer `len` does not correspond to the declared `len`)
pub fn equal(lhs: &ArrayData, rhs: &ArrayData) -> bool {
utils::base_equal(lhs, rhs)
&& lhs.null_count() == rhs.null_count()
&& utils::equal_nulls(lhs, rhs, 0, 0, lhs.len())
&& equal_values(lhs, rhs, 0, 0, lhs.len())
}
#[cfg(test)]
mod tests {
use std::convert::TryFrom;
use std::sync::Arc;
use crate::array::{
array::Array, ArrayDataRef, ArrayRef, BinaryOffsetSizeTrait, BooleanArray,
DecimalBuilder, FixedSizeBinaryBuilder, FixedSizeListBuilder, GenericBinaryArray,
Int32Builder, ListBuilder, NullArray, PrimitiveBuilder, StringArray,
StringDictionaryBuilder, StringOffsetSizeTrait, StructArray,
};
use crate::array::{GenericStringArray, Int32Array};
use crate::datatypes::Int16Type;
use super::*;
#[test]
fn test_null_equal() {
let a = NullArray::new(12).data();
let b = NullArray::new(12).data();
test_equal(&a, &b, true);
let b = NullArray::new(10).data();
test_equal(&a, &b, false);
// Test the case where offset != 0
let a_slice = a.slice(2, 3);
let b_slice = b.slice(1, 3);
test_equal(&a_slice, &b_slice, true);
let a_slice = a.slice(5, 4);
let b_slice = b.slice(3, 3);
test_equal(&a_slice, &b_slice, false);
}
#[test]
fn test_boolean_equal() {
let a = BooleanArray::from(vec![false, false, true]).data();
let b = BooleanArray::from(vec![false, false, true]).data();
test_equal(a.as_ref(), b.as_ref(), true);
let b = BooleanArray::from(vec![false, false, false]).data();
test_equal(a.as_ref(), b.as_ref(), false);
// Test the case where null_count > 0
let a = BooleanArray::from(vec![Some(false), None, None, Some(true)]).data();
let b = BooleanArray::from(vec![Some(false), None, None, Some(true)]).data();
test_equal(a.as_ref(), b.as_ref(), true);
let b = BooleanArray::from(vec![None, None, None, Some(true)]).data();
test_equal(a.as_ref(), b.as_ref(), false);
let b = BooleanArray::from(vec![Some(true), None, None, Some(true)]).data();
test_equal(a.as_ref(), b.as_ref(), false);
// Test the case where offset != 0
let a =
BooleanArray::from(vec![false, true, false, true, false, false, true]).data();
let b =
BooleanArray::from(vec![false, false, false, true, false, true, true]).data();
assert_eq!(equal(a.as_ref(), b.as_ref()), false);
assert_eq!(equal(b.as_ref(), a.as_ref()), false);
let a_slice = a.slice(2, 3);
let b_slice = b.slice(2, 3);
assert_eq!(equal(&a_slice, &b_slice), true);
assert_eq!(equal(&b_slice, &a_slice), true);
let a_slice = a.slice(3, 4);
let b_slice = b.slice(3, 4);
assert_eq!(equal(&a_slice, &b_slice), false);
assert_eq!(equal(&b_slice, &a_slice), false);
}
#[test]
fn test_primitive() {
let cases = vec![
(
vec![Some(1), Some(2), Some(3)],
vec![Some(1), Some(2), Some(3)],
true,
),
(
vec![Some(1), Some(2), Some(3)],
vec![Some(1), Some(2), Some(4)],
false,
),
(
vec![Some(1), Some(2), None],
vec![Some(1), Some(2), None],
true,
),
(
vec![Some(1), None, Some(3)],
vec![Some(1), Some(2), None],
false,
),
(
vec![Some(1), None, None],
vec![Some(1), Some(2), None],
false,
),
];
for (lhs, rhs, expected) in cases {
let lhs = Int32Array::from(lhs).data();
let rhs = Int32Array::from(rhs).data();
test_equal(&lhs, &rhs, expected);
}
}
#[test]
fn test_primitive_slice() {
let cases = vec![
(
vec![Some(1), Some(2), Some(3)],
(0, 1),
vec![Some(1), Some(2), Some(3)],
(0, 1),
true,
),
(
vec![Some(1), Some(2), Some(3)],
(1, 1),
vec![Some(1), Some(2), Some(3)],
(2, 1),
false,
),
(
vec![Some(1), Some(2), None],
(1, 1),
vec![Some(1), None, Some(2)],
(2, 1),
true,
),
];
for (lhs, slice_lhs, rhs, slice_rhs, expected) in cases {
let lhs = Int32Array::from(lhs).data();
let lhs = lhs.slice(slice_lhs.0, slice_lhs.1);
let rhs = Int32Array::from(rhs).data();
let rhs = rhs.slice(slice_rhs.0, slice_rhs.1);
test_equal(&lhs, &rhs, expected);
}
}
fn test_equal(lhs: &ArrayData, rhs: &ArrayData, expected: bool) {
// equality is symetric
assert_eq!(equal(lhs, lhs), true, "\n{:?}\n{:?}", lhs, lhs);
assert_eq!(equal(rhs, rhs), true, "\n{:?}\n{:?}", rhs, rhs);
assert_eq!(equal(lhs, rhs), expected, "\n{:?}\n{:?}", lhs, rhs);
assert_eq!(equal(rhs, lhs), expected, "\n{:?}\n{:?}", rhs, lhs);
}
fn binary_cases() -> Vec<(Vec<Option<String>>, Vec<Option<String>>, bool)> {
let base = vec![
Some("hello".to_owned()),
None,
None,
Some("world".to_owned()),
None,
None,
];
let not_base = vec![
Some("hello".to_owned()),
Some("foo".to_owned()),
None,
Some("world".to_owned()),
None,
None,
];
vec![
(
vec![Some("hello".to_owned()), Some("world".to_owned())],
vec![Some("hello".to_owned()), Some("world".to_owned())],
true,
),
(
vec![Some("hello".to_owned()), Some("world".to_owned())],
vec![Some("hello".to_owned()), Some("arrow".to_owned())],
false,
),
(base.clone(), base.clone(), true),
(base, not_base, false),
]
}
// Runs the shared `binary_cases` against GenericStringArray, generic over the
// offset width (i32 for StringArray, i64 for LargeStringArray).
fn test_generic_string_equal<OffsetSize: StringOffsetSizeTrait>() {
    let cases = binary_cases();
    for (lhs, rhs, expected) in cases {
        // borrow the owned Strings as Option<&str> for from_opt_vec
        let lhs = lhs.iter().map(|x| x.as_deref()).collect();
        let rhs = rhs.iter().map(|x| x.as_deref()).collect();
        let lhs = GenericStringArray::<OffsetSize>::from_opt_vec(lhs).data();
        let rhs = GenericStringArray::<OffsetSize>::from_opt_vec(rhs).data();
        test_equal(lhs.as_ref(), rhs.as_ref(), expected);
    }
}
// StringArray (i32 offsets)
#[test]
fn test_string_equal() {
    test_generic_string_equal::<i32>()
}
// LargeStringArray (i64 offsets)
#[test]
fn test_large_string_equal() {
    test_generic_string_equal::<i64>()
}
// Runs the shared `binary_cases` against GenericBinaryArray by viewing each
// string as raw bytes; generic over the offset width (i32 or i64).
fn test_generic_binary_equal<OffsetSize: BinaryOffsetSizeTrait>() {
    let cases = binary_cases();
    for (lhs, rhs, expected) in cases {
        // convert Option<String> -> Option<&[u8]> for from_opt_vec
        let lhs = lhs
            .iter()
            .map(|x| x.as_deref().map(|x| x.as_bytes()))
            .collect();
        let rhs = rhs
            .iter()
            .map(|x| x.as_deref().map(|x| x.as_bytes()))
            .collect();
        let lhs = GenericBinaryArray::<OffsetSize>::from_opt_vec(lhs).data();
        let rhs = GenericBinaryArray::<OffsetSize>::from_opt_vec(rhs).data();
        test_equal(lhs.as_ref(), rhs.as_ref(), expected);
    }
}
// BinaryArray (i32 offsets)
#[test]
fn test_binary_equal() {
    test_generic_binary_equal::<i32>()
}
// LargeBinaryArray (i64 offsets)
#[test]
fn test_large_binary_equal() {
    test_generic_binary_equal::<i64>()
}
// NullArrays carry no values, so they compare equal iff their lengths match.
#[test]
fn test_null() {
    let a = NullArray::new(2).data();
    let b = NullArray::new(2).data();
    test_equal(a.as_ref(), b.as_ref(), true);
    let b = NullArray::new(1).data();
    test_equal(a.as_ref(), b.as_ref(), false);
}
/// Builds a ListArray of Int32 from optional slices; a `None` entry appends a
/// null list slot.
fn create_list_array<U: AsRef<[i32]>, T: AsRef<[Option<U>]>>(
    data: T,
) -> ArrayDataRef {
    let mut builder = ListBuilder::new(Int32Builder::new(10));
    for d in data.as_ref() {
        if let Some(v) = d {
            builder.values().append_slice(v.as_ref()).unwrap();
            builder.append(true).unwrap()
        } else {
            builder.append(false).unwrap()
        }
    }
    builder.finish().data()
}
// Basic list equality: identical lists are equal; one differing child value
// makes them unequal.
#[test]
fn test_list_equal() {
    let a = create_list_array(&[Some(&[1, 2, 3]), Some(&[4, 5, 6])]);
    let b = create_list_array(&[Some(&[1, 2, 3]), Some(&[4, 5, 6])]);
    test_equal(a.as_ref(), b.as_ref(), true);
    let b = create_list_array(&[Some(&[1, 2, 3]), Some(&[4, 5, 7])]);
    test_equal(a.as_ref(), b.as_ref(), false);
}
// Test the case where null_count > 0: null slots, differing null patterns,
// and differing child values must all be detected.
#[test]
fn test_list_null() {
    let a =
        create_list_array(&[Some(&[1, 2]), None, None, Some(&[3, 4]), None, None]);
    let b =
        create_list_array(&[Some(&[1, 2]), None, None, Some(&[3, 4]), None, None]);
    test_equal(a.as_ref(), b.as_ref(), true);
    // a null slot replaced by a value -> unequal
    let b = create_list_array(&[
        Some(&[1, 2]),
        None,
        Some(&[5, 6]),
        Some(&[3, 4]),
        None,
        None,
    ]);
    test_equal(a.as_ref(), b.as_ref(), false);
    // same null pattern, differing child value -> unequal
    let b =
        create_list_array(&[Some(&[1, 2]), None, None, Some(&[3, 5]), None, None]);
    test_equal(a.as_ref(), b.as_ref(), false);
}
// Test the case where offset != 0: equality must respect each slice's window,
// not the underlying buffers.
#[test]
fn test_list_offsets() {
    let a =
        create_list_array(&[Some(&[1, 2]), None, None, Some(&[3, 4]), None, None]);
    let b =
        create_list_array(&[Some(&[1, 2]), None, None, Some(&[3, 5]), None, None]);
    // windows excluding the differing element compare equal
    let a_slice = a.slice(0, 3);
    let b_slice = b.slice(0, 3);
    test_equal(&a_slice, &b_slice, true);
    // windows including the differing element compare unequal
    let a_slice = a.slice(0, 5);
    let b_slice = b.slice(0, 5);
    test_equal(&a_slice, &b_slice, false);
    // single-null windows compare equal
    let a_slice = a.slice(4, 1);
    let b_slice = b.slice(4, 1);
    test_equal(&a_slice, &b_slice, true);
}
/// Builds a FixedSizeBinaryArray with byte width 5 from optional byte slices;
/// a `None` entry appends a null slot.
fn create_fixed_size_binary_array<U: AsRef<[u8]>, T: AsRef<[Option<U>]>>(
    data: T,
) -> ArrayDataRef {
    let mut builder = FixedSizeBinaryBuilder::new(15, 5);
    for d in data.as_ref() {
        if let Some(v) = d {
            builder.append_value(v.as_ref()).unwrap();
        } else {
            builder.append_null().unwrap();
        }
    }
    builder.finish().data()
}
// Basic fixed-size-binary equality: identical values equal, one differing
// value unequal.
#[test]
fn test_fixed_size_binary_equal() {
    let a = create_fixed_size_binary_array(&[Some(b"hello"), Some(b"world")]);
    let b = create_fixed_size_binary_array(&[Some(b"hello"), Some(b"world")]);
    test_equal(a.as_ref(), b.as_ref(), true);
    let b = create_fixed_size_binary_array(&[Some(b"hello"), Some(b"arrow")]);
    test_equal(a.as_ref(), b.as_ref(), false);
}
// Test the case where null_count > 0: differing null positions and differing
// values under the same null pattern must both be detected.
#[test]
fn test_fixed_size_binary_null() {
    let a = create_fixed_size_binary_array(&[Some(b"hello"), None, Some(b"world")]);
    let b = create_fixed_size_binary_array(&[Some(b"hello"), None, Some(b"world")]);
    test_equal(a.as_ref(), b.as_ref(), true);
    let b = create_fixed_size_binary_array(&[Some(b"hello"), Some(b"world"), None]);
    test_equal(a.as_ref(), b.as_ref(), false);
    let b = create_fixed_size_binary_array(&[Some(b"hello"), None, Some(b"arrow")]);
    test_equal(a.as_ref(), b.as_ref(), false);
}
#[test]
fn test_fixed_size_binary_offsets() {
    // Test the case where offset != 0: equality is evaluated over each
    // slice's window only.
    let a = create_fixed_size_binary_array(&[
        Some(b"hello"),
        None,
        None,
        Some(b"world"),
        None,
        None,
    ]);
    let b = create_fixed_size_binary_array(&[
        Some(b"hello"),
        None,
        None,
        Some(b"arrow"),
        None,
        None,
    ]);
    // windows excluding the differing element compare equal
    let a_slice = a.slice(0, 3);
    let b_slice = b.slice(0, 3);
    test_equal(&a_slice, &b_slice, true);
    // windows including the differing element compare unequal
    let a_slice = a.slice(0, 5);
    let b_slice = b.slice(0, 5);
    test_equal(&a_slice, &b_slice, false);
    // single-null windows compare equal
    let a_slice = a.slice(4, 1);
    let b_slice = b.slice(4, 1);
    test_equal(&a_slice, &b_slice, true);
    // windows of just the differing element compare unequal
    let a_slice = a.slice(3, 1);
    let b_slice = b.slice(3, 1);
    test_equal(&a_slice, &b_slice, false);
}
/// Builds a DecimalArray (precision 23, scale 6) from optional i128 values;
/// a `None` entry appends a null slot.
fn create_decimal_array(data: &[Option<i128>]) -> ArrayDataRef {
    let mut builder = DecimalBuilder::new(20, 23, 6);
    for d in data {
        if let Some(v) = d {
            builder.append_value(*v).unwrap();
        } else {
            builder.append_null().unwrap();
        }
    }
    builder.finish().data()
}
// Basic decimal equality: identical values equal, one differing value unequal.
#[test]
fn test_decimal_equal() {
    let a = create_decimal_array(&[Some(8_887_000_000), Some(-8_887_000_000)]);
    let b = create_decimal_array(&[Some(8_887_000_000), Some(-8_887_000_000)]);
    test_equal(a.as_ref(), b.as_ref(), true);
    let b = create_decimal_array(&[Some(15_887_000_000), Some(-8_887_000_000)]);
    test_equal(a.as_ref(), b.as_ref(), false);
}
// Test the case where null_count > 0: differing null positions and differing
// values under the same null pattern must both be detected.
#[test]
fn test_decimal_null() {
    let a = create_decimal_array(&[Some(8_887_000_000), None, Some(-8_887_000_000)]);
    let b = create_decimal_array(&[Some(8_887_000_000), None, Some(-8_887_000_000)]);
    test_equal(a.as_ref(), b.as_ref(), true);
    let b = create_decimal_array(&[Some(8_887_000_000), Some(-8_887_000_000), None]);
    test_equal(a.as_ref(), b.as_ref(), false);
    let b = create_decimal_array(&[Some(15_887_000_000), None, Some(-8_887_000_000)]);
    test_equal(a.as_ref(), b.as_ref(), false);
}
#[test]
fn test_decimal_offsets() {
    // Test the case where offset != 0: equality is evaluated over each
    // slice's window only, including null positions inside the window.
    let a = create_decimal_array(&[
        Some(8_887_000_000),
        None,
        None,
        Some(-8_887_000_000),
        None,
        None,
    ]);
    let b = create_decimal_array(&[
        Some(8_887_000_000),
        None,
        None,
        Some(15_887_000_000),
        None,
        None,
    ]);
    // windows excluding the differing element compare equal
    let a_slice = a.slice(0, 3);
    let b_slice = b.slice(0, 3);
    test_equal(&a_slice, &b_slice, true);
    // windows including the differing element compare unequal
    let a_slice = a.slice(0, 5);
    let b_slice = b.slice(0, 5);
    test_equal(&a_slice, &b_slice, false);
    // single-null windows compare equal
    let a_slice = a.slice(4, 1);
    let b_slice = b.slice(4, 1);
    test_equal(&a_slice, &b_slice, true);
    let a_slice = a.slice(3, 3);
    let b_slice = b.slice(3, 3);
    test_equal(&a_slice, &b_slice, false);
    let a_slice = a.slice(1, 3);
    let b_slice = b.slice(1, 3);
    test_equal(&a_slice, &b_slice, false);
    // values outside the compared window (index 0 and 4) may differ freely
    let b = create_decimal_array(&[
        None,
        None,
        None,
        Some(-8_887_000_000),
        Some(-3_000),
        None,
    ]);
    let a_slice = a.slice(1, 3);
    let b_slice = b.slice(1, 3);
    test_equal(&a_slice, &b_slice, true);
}
/// Creates a FixedSizeListArray of Int32 with value length 3; a `None` entry
/// appends a null list slot whose 3 child slots are padded with nulls.
fn create_fixed_size_list_array<U: AsRef<[i32]>, T: AsRef<[Option<U>]>>(
    data: T,
) -> ArrayDataRef {
    let mut builder = FixedSizeListBuilder::new(Int32Builder::new(10), 3);
    for d in data.as_ref() {
        if let Some(v) = d {
            builder.values().append_slice(v.as_ref()).unwrap();
            builder.append(true).unwrap()
        } else {
            // pad the child array so offsets stay aligned for the null slot
            for _ in 0..builder.value_length() {
                builder.values().append_null().unwrap();
            }
            builder.append(false).unwrap()
        }
    }
    builder.finish().data()
}
// Basic fixed-size-list equality: identical lists equal, one differing child
// value unequal.
#[test]
fn test_fixed_size_list_equal() {
    let a = create_fixed_size_list_array(&[Some(&[1, 2, 3]), Some(&[4, 5, 6])]);
    let b = create_fixed_size_list_array(&[Some(&[1, 2, 3]), Some(&[4, 5, 6])]);
    test_equal(a.as_ref(), b.as_ref(), true);
    let b = create_fixed_size_list_array(&[Some(&[1, 2, 3]), Some(&[4, 5, 7])]);
    test_equal(a.as_ref(), b.as_ref(), false);
}
// Test the case where null_count > 0: differing null patterns and differing
// child values must both be detected.
#[test]
fn test_fixed_list_null() {
    let a = create_fixed_size_list_array(&[
        Some(&[1, 2, 3]),
        None,
        None,
        Some(&[4, 5, 6]),
        None,
        None,
    ]);
    let b = create_fixed_size_list_array(&[
        Some(&[1, 2, 3]),
        None,
        None,
        Some(&[4, 5, 6]),
        None,
        None,
    ]);
    test_equal(a.as_ref(), b.as_ref(), true);
    // a null slot replaced by a value -> unequal
    let b = create_fixed_size_list_array(&[
        Some(&[1, 2, 3]),
        None,
        Some(&[7, 8, 9]),
        Some(&[4, 5, 6]),
        None,
        None,
    ]);
    test_equal(a.as_ref(), b.as_ref(), false);
    // same null pattern, differing child values -> unequal
    let b = create_fixed_size_list_array(&[
        Some(&[1, 2, 3]),
        None,
        None,
        Some(&[3, 6, 9]),
        None,
        None,
    ]);
    test_equal(a.as_ref(), b.as_ref(), false);
}
#[test]
fn test_fixed_list_offsets() {
    // Test the case where offset != 0: equality is evaluated over each
    // slice's window only.
    let a = create_fixed_size_list_array(&[
        Some(&[1, 2, 3]),
        None,
        None,
        Some(&[4, 5, 6]),
        None,
        None,
    ]);
    let b = create_fixed_size_list_array(&[
        Some(&[1, 2, 3]),
        None,
        None,
        Some(&[3, 6, 9]),
        None,
        None,
    ]);
    // windows excluding the differing element compare equal
    let a_slice = a.slice(0, 3);
    let b_slice = b.slice(0, 3);
    test_equal(&a_slice, &b_slice, true);
    // windows including the differing element compare unequal
    let a_slice = a.slice(0, 5);
    let b_slice = b.slice(0, 5);
    test_equal(&a_slice, &b_slice, false);
    // single-null windows compare equal
    let a_slice = a.slice(4, 1);
    let b_slice = b.slice(4, 1);
    test_equal(&a_slice, &b_slice, true);
}
// StructArrays built from the same (shared) child arrays must compare equal.
#[test]
fn test_struct_equal() {
    let strings: ArrayRef = Arc::new(StringArray::from(vec![
        Some("joe"),
        None,
        None,
        Some("mark"),
        Some("doe"),
    ]));
    let ints: ArrayRef = Arc::new(Int32Array::from(vec![
        Some(1),
        Some(2),
        None,
        Some(4),
        Some(5),
    ]));
    let a =
        StructArray::try_from(vec![("f1", strings.clone()), ("f2", ints.clone())])
            .unwrap()
            .data();
    let b = StructArray::try_from(vec![("f1", strings), ("f2", ints)])
        .unwrap()
        .data();
    test_equal(a.as_ref(), b.as_ref(), true);
}
/// Builds an Int16-keyed string DictionaryArray with the given dictionary
/// `values`; each entry in `keys` appends the matching value, or null.
fn create_dictionary_array(values: &[&str], keys: &[Option<&str>]) -> ArrayDataRef {
    let values = StringArray::from(values.to_vec());
    let mut builder = StringDictionaryBuilder::new_with_dictionary(
        PrimitiveBuilder::<Int16Type>::new(3),
        &values,
    )
    .unwrap();
    for key in keys {
        if let Some(v) = key {
            builder.append(v).unwrap();
        } else {
            builder.append_null().unwrap()
        }
    }
    builder.finish().data()
}
// Dictionary equality must be logical: arrays with different internal
// (values, keys) layouts but the same decoded sequence compare equal.
#[test]
fn test_dictionary_equal() {
    // (a, b, c), (1, 2, 1, 3) => (a, b, a, c)
    let a = create_dictionary_array(
        &["a", "b", "c"],
        &[Some("a"), Some("b"), Some("a"), Some("c")],
    );
    // different representation (values and keys are swapped), same result
    let b = create_dictionary_array(
        &["a", "c", "b"],
        &[Some("a"), Some("b"), Some("a"), Some("c")],
    );
    test_equal(a.as_ref(), b.as_ref(), true);
    // different len
    let b =
        create_dictionary_array(&["a", "c", "b"], &[Some("a"), Some("b"), Some("a")]);
    test_equal(a.as_ref(), b.as_ref(), false);
    // different key
    let b = create_dictionary_array(
        &["a", "c", "b"],
        &[Some("a"), Some("b"), Some("a"), Some("a")],
    );
    test_equal(a.as_ref(), b.as_ref(), false);
    // different values, same keys
    let b = create_dictionary_array(
        &["a", "b", "d"],
        &[Some("a"), Some("b"), Some("a"), Some("d")],
    );
    test_equal(a.as_ref(), b.as_ref(), false);
}
// Same as test_dictionary_equal, but with null keys in the sequence.
#[test]
fn test_dictionary_equal_null() {
    // (a, b, c), (1, 2, 1, 3) => (a, b, a, c)
    let a = create_dictionary_array(
        &["a", "b", "c"],
        &[Some("a"), None, Some("a"), Some("c")],
    );
    // equal to self
    test_equal(a.as_ref(), a.as_ref(), true);
    // different representation (values and keys are swapped), same result
    let b = create_dictionary_array(
        &["a", "c", "b"],
        &[Some("a"), None, Some("a"), Some("c")],
    );
    test_equal(a.as_ref(), b.as_ref(), true);
    // different null position
    let b = create_dictionary_array(
        &["a", "c", "b"],
        &[Some("a"), Some("b"), Some("a"), None],
    );
    test_equal(a.as_ref(), b.as_ref(), false);
    // different key
    let b = create_dictionary_array(
        &["a", "c", "b"],
        &[Some("a"), None, Some("a"), Some("a")],
    );
    test_equal(a.as_ref(), b.as_ref(), false);
    // different values, same keys
    let b = create_dictionary_array(
        &["a", "b", "d"],
        &[Some("a"), None, Some("a"), Some("d")],
    );
    test_equal(a.as_ref(), b.as_ref(), false);
}
}<|fim▁end|> | /// * their null bitmaps are equal |
<|file_name|>test_util.py<|end_file_name|><|fim▁begin|>import pytest
import pytz
from datetime import datetime as dt
from arctic.date import datetime_to_ms, ms_to_datetime, mktz, to_pandas_closed_closed, DateRange, OPEN_OPEN, CLOSED_CLOSED
from arctic.date._mktz import DEFAULT_TIME_ZONE_NAME
from arctic.date._util import to_dt
# Round trip: converting a tz-aware datetime to epoch-milliseconds and back
# (into the default local zone) must preserve the instant, including across
# the DST-transition dates parametrized below.
@pytest.mark.parametrize('pdt', [
    dt(2007, 3, 25, 1, tzinfo=mktz('Europe/London')),
    dt(2004, 10, 31, 23, 3, tzinfo=mktz('Europe/London')),
    dt(1990, 4, 5, 0, 0, tzinfo=mktz('Europe/London')),
    dt(2007, 3, 25, 1, tzinfo=mktz('EST')),
    dt(2004, 10, 31, 23, 3, tzinfo=mktz('EST')),
    dt(1990, 4, 5, 0, 0, tzinfo=mktz('EST')),
]
)
def test_datetime_to_ms_and_back(pdt):
    i = datetime_to_ms(pdt)
    # ms_to_datetime returns in the default zone, so normalize before comparing
    pdt = pdt.astimezone(mktz())
    pdt2 = ms_to_datetime(i)
    assert pdt == pdt2
def test_datetime_to_ms_and_back_microseconds():
    """The ms round trip preserves everything down to the second, but
    truncates microseconds to millisecond precision."""
    pdt = dt(2012, 8, 1, 12, 34, 56, 999999, tzinfo=mktz(DEFAULT_TIME_ZONE_NAME))
    i = datetime_to_ms(pdt)
    pdt2 = ms_to_datetime(i)
    assert pdt != pdt2
    assert pdt.year == pdt2.year
    assert pdt.month == pdt2.month
    assert pdt.day == pdt2.day
    assert pdt.hour == pdt2.hour
    assert pdt.minute == pdt2.minute
    assert pdt.second == pdt2.second
    # Microsecond precision loss inevitable.
    assert pdt.microsecond // 1000 == pdt2.microsecond // 1000
def test_daterange_closedclosed_None():
    # None passes through unchanged
    assert to_pandas_closed_closed(None) is None
def test_daterange_closedclosed():
    """An OPEN_OPEN range is converted to CLOSED_CLOSED by nudging each bound
    inward by one millisecond."""
    date_range = DateRange(dt(2013, 1, 1, tzinfo=mktz('Europe/London')),
                           dt(2014, 2, 1, tzinfo=mktz('Europe/London')), OPEN_OPEN)
    expected = DateRange(dt(2013, 1, 1, 0, 0, 0, 1000, tzinfo=mktz('Europe/London')),
                         dt(2014, 1, 31, 23, 59, 59, 999000, tzinfo=mktz('Europe/London')),
                         CLOSED_CLOSED)
    act = to_pandas_closed_closed(date_range)
    assert act == expected
def test_daterange_closedclosed_no_tz():
    """Naive bounds are localized to the default zone during conversion."""
    date_range = DateRange(dt(2013, 1, 1),
                           dt(2014, 2, 1), OPEN_OPEN)
    expected = DateRange(dt(2013, 1, 1, 0, 0, 0, 1000, tzinfo=mktz()),
                         dt(2014, 1, 31, 23, 59, 59, 999000, tzinfo=mktz()),
                         CLOSED_CLOSED)
    act = to_pandas_closed_closed(date_range)
    assert act == expected
def test_to_dt_0():
    # epoch-ms 0 is the Unix epoch in UTC
    assert to_dt(0) == dt(1970, 1, 1, tzinfo=mktz('UTC'))
def test_to_dt_0_default():
    # an explicit default timezone gives the same epoch result
    assert to_dt(0, mktz('UTC')) == dt(1970, 1, 1, tzinfo=mktz('UTC'))
def test_to_dt_dt_no_tz():
    # a naive datetime without a default timezone is rejected
    with pytest.raises(ValueError):
        assert to_dt(dt(1970, 1, 1)) == dt(1970, 1, 1, tzinfo=mktz())
def test_to_dt_dt_no_tz_default():<|fim▁hole|>
def test_to_dt_dt_tz():
    # an aware datetime passes through unchanged
    assert to_dt(dt(1970, 1, 1, tzinfo=mktz('UTC'))) == dt(1970, 1, 1, tzinfo=mktz('UTC'))
def test_to_dt_dt_tz_default():
    # an aware datetime keeps its own tz; the default is ignored
    assert to_dt(dt(1970, 1, 1, tzinfo=mktz('UTC')), mktz('Europe/London')) == dt(1970, 1, 1, tzinfo=mktz('UTC'))
def test_daterange_raises():
    """Constructing a DateRange whose start is after its end raises ValueError."""
    # The former `assert(DateRange(...))` wrapper was unreachable dead code:
    # the exception is raised during construction, before the assert runs.
    with pytest.raises(ValueError):
        DateRange(dt(2013, 1, 1), dt(2000, 1, 1))
def test_daterange_eq():
    """DateRange.__eq__ is False against None and reflexive against itself."""
    dr = DateRange(dt(2013, 1, 1))
    # `== None` is deliberate here: it exercises __eq__ rather than identity,
    # so it must not be rewritten as `is None`.
    assert not (dr == None)  # noqa: E711
    assert dr == dr
def test_daterange_lt():
    """DateRange.__lt__ orders by start date; a range with an unbounded (None)
    start never compares greater."""
    dr = DateRange(dt(2013, 1, 1))
    dr2 = DateRange(dt(2001, 1, 1))
    assert dr2 < dr
    dr.start = None
    assert not (dr2 < dr)
<|file_name|>watson.js<|end_file_name|><|fim▁begin|>var NaturalLanguageUnderstandingV1 = require('watson-developer-cloud/natural-language-understanding/v1.js');<|fim▁hole|>var ToneAnalyzerV3 = require('watson-developer-cloud/tone-analyzer/v3');
let fs = require('fs');
let path = require('path');
const directoryName = path.dirname(__filename);
// Service credentials (values redacted). NOTE(review): credentials should be
// loaded from environment variables or a gitignored config file rather than
// hard-coded here.
const creds = {
    tone: {
        "username": "redacted",
        "password": "redacted"
    },
    nlu: {
        "username": "redacted",
        "password": "redacted"
    }
};
// Watson Tone Analyzer client (API version 2017-03-15).
let toneAnalyzer = new ToneAnalyzerV3({
    username: creds.tone.username,
    password: creds.tone.password,
    version: 'v3',
    version_date: '2017-03-15'
});
// Watson Natural Language Understanding client (API version 2017-02-27).
var nlu = new NaturalLanguageUnderstandingV1({
    username: creds.nlu.username,
    password: creds.nlu.password,
    version_date: NaturalLanguageUnderstandingV1.VERSION_DATE_2017_02_27
});
// Runs Watson Tone Analyzer and then NLU (keywords + sentiment) over `poem`,
// merges both responses under `title`, and appends the pretty-printed JSON to
// ./sentiments.json. Errors from either service are logged and abort the
// analysis for this poem only.
// NOTE(review): appending whole JSON documents yields a file of concatenated
// objects, not one valid JSON document — confirm downstream readers expect that.
function generateToneAnalysis(title, poem) {
    let toneParams = {
        'text': poem,
        'isHTML': false,
        'sentences': false
    };
    let nluParams = {
        'text': poem,
        'features': {
            'keywords': {
                'emotion': true,
                'sentiment': true,
                'limit': 10
            },
            'sentiment': {}
        }
    }
    toneAnalyzer.tone(toneParams, function(err, res1){
        if (err) { console.log(err); }
        else {
            nlu.analyze(nluParams, function(err, res2){
                if (err) { console.log(err); }
                else {
                    var result = Object.assign({"title": title}, res1, res2);
                    // NOTE(review): prettyJson is an implicit global (missing var/let)
                    prettyJson = JSON.stringify(result, null, 2);
                    fs.appendFileSync('./sentiments.json', prettyJson, {encoding: 'utf8'});
                    console.log(`Retrieved Watson Analysis for ${title}`);
                }
            });
        }
    });
}
// Asynchronously reads every file in `dirname` (skipping the macOS
// ".DS_Store" entry) and invokes onFileContent(basename, content) per file.
// The basename strips the final 4 characters, i.e. a dot plus a 3-letter
// extension. Any readdir/readFile error is forwarded to onError(err).
function readFiles(dirname, onFileContent, onError) {
    fs.readdir(dirname, (err, filenames) => {
        if (err) {
            onError(err);
            return;
        }
        const dsIndex = filenames.indexOf(".DS_Store");
        if (dsIndex >= 0) {
            filenames.splice(dsIndex, 1);
        }
        for (const filename of filenames) {
            fs.readFile(path.join(dirname, filename), 'utf8', (readErr, content) => {
                if (readErr) {
                    onError(readErr);
                    return;
                }
                onFileContent(filename.substring(0, filename.length - 4), content);
            });
        }
    });
}
// fs.writeFileSync('./s.json', '', 'utf8');
// fs.readdir('./missing', function(err, files) {
// var index = files.indexOf(".DS_Store");
// if (index >= 0) {
// files.splice(index, 1 );
// }
// for (var i = 0; i<files.length; i++) {
// console.log(files[i]);
// file = fs.readFileSync(path.join(directoryName+'/missing', files[i]), {encoding: 'utf8'});
// generateToneAnalysis(files[i], file);
// }
// });
// On load: walk every subfolder of ./poems and submit each poem file for
// Watson analysis (macOS .DS_Store entries are skipped).
fs.readdir('./poems', function(err, folders){
    if (err) {
        console.log(err);
        return;
    }
    var index = folders.indexOf(".DS_Store");
    if (index >= 0) {
        folders.splice(index, 1 );
    }
    for (var i = 0; i < folders.length; i++) {
        let dirname = path.join('./poems', folders[i]);
        readFiles(dirname, generateToneAnalysis, function(err) {
            console.log(err);
        });
    }
});
<|file_name|>tl_dense_vector_impl_eigen.cc<|end_file_name|><|fim▁begin|>#include <iostream>
#include "tl_dense_vector_impl_eigen.h"
#ifdef HAVE_VIENNACL
#define VIENNACL_HAVE_EIGEN
#include <viennacl/vector.hpp>
#include "tl_dense_vector_impl_viennacl.h"
#endif // HAVE_VIENNACL
// ---------------------------------------------------------------------------
// constructor & destructor
// ---------------------------------------------------------------------------
// Constructs a zero-initialized vector of the given dimension.
TlDenseVector_ImplEigen::TlDenseVector_ImplEigen(
    TlDenseVectorObject::index_type dim)
    : vector_(VectorDataType::Zero(dim)) {}
// Copy constructor: deep-copies the underlying Eigen vector.
TlDenseVector_ImplEigen::TlDenseVector_ImplEigen(
    const TlDenseVector_ImplEigen& rhs) {
    this->vector_ = rhs.vector_;
}
// Constructs from a std::vector<double>, copying via an Eigen const map.
TlDenseVector_ImplEigen::TlDenseVector_ImplEigen(const std::vector<double>& rhs)
    : vector_(MapTypeConst(rhs.data(), rhs.size())) {}
// Constructs from a raw buffer of `size` doubles, copying via an Eigen const map.
TlDenseVector_ImplEigen::TlDenseVector_ImplEigen(
    const double* p, const TlDenseVectorObject::size_type size)
    : vector_(MapTypeConst(p, size)) {}
// Constructs directly from an Eigen vector (copied).
TlDenseVector_ImplEigen::TlDenseVector_ImplEigen(const VectorDataType& rhs) {
    this->vector_ = rhs;
}
#ifdef HAVE_VIENNACL
// Copies a (device-side) ViennaCL vector into a freshly allocated Eigen vector.
TlDenseVector_ImplEigen::TlDenseVector_ImplEigen(
    const TlDenseVector_ImplViennaCL& rhs)
    : vector_(VectorDataType::Zero(rhs.getSize())) {
    viennacl::copy(rhs.vector_, this->vector_);
}
#endif  // HAVE_VIENNACL
TlDenseVector_ImplEigen::operator std::vector<double>() const {
const std::size_t size = this->getSize();
std::vector<double> answer(size);<|fim▁hole|> return answer;
}
TlDenseVector_ImplEigen::~TlDenseVector_ImplEigen() {}
// ---------------------------------------------------------------------------
// properties
// ---------------------------------------------------------------------------
// Returns the number of elements in the vector.
TlDenseVectorObject::size_type TlDenseVector_ImplEigen::getSize() const {
    return this->vector_.rows();
}
// Resizes to newSize, preserving existing elements; new slots are zero-filled.
void TlDenseVector_ImplEigen::resize(
    const TlDenseVectorObject::index_type newSize) {
    this->vector_.conservativeResizeLike(VectorDataType::Zero(newSize, 1));
}
// Returns element i (no bounds checking; Eigen coeff access).
double TlDenseVector_ImplEigen::get(
    const TlDenseVectorObject::index_type i) const {
    return this->vector_.coeff(i);
}
// Overwrites element i with value (no bounds checking).
void TlDenseVector_ImplEigen::set(const TlDenseVectorObject::index_type i,
                                  const double value) {
    this->vector_.coeffRef(i) = value;
}
// Adds value to element i in place (no bounds checking).
void TlDenseVector_ImplEigen::add(const TlDenseVectorObject::index_type i,
                                  const double value) {
    this->vector_.coeffRef(i) += value;
}
// Multiplies element i by value in place (no bounds checking).
void TlDenseVector_ImplEigen::mul(const TlDenseVectorObject::index_type i,
                                  const double value) {
    this->vector_.coeffRef(i) *= value;
}
// ---------------------------------------------------------------------------
// operators
// ---------------------------------------------------------------------------
// Copy assignment with a self-assignment guard.
TlDenseVector_ImplEigen& TlDenseVector_ImplEigen::operator=(
    const TlDenseVector_ImplEigen& rhs) {
    if (this != &rhs) {
        this->vector_ = rhs.vector_;
    }
    return (*this);
}
// Element-wise addition in place.
TlDenseVector_ImplEigen& TlDenseVector_ImplEigen::operator+=(
    const TlDenseVector_ImplEigen& rhs) {
    this->vector_ += rhs.vector_;
    return *this;
}
// Element-wise subtraction in place.
TlDenseVector_ImplEigen& TlDenseVector_ImplEigen::operator-=(
    const TlDenseVector_ImplEigen& rhs) {
    this->vector_ -= rhs.vector_;
    return *this;
}
// Scalar multiplication in place.
TlDenseVector_ImplEigen& TlDenseVector_ImplEigen::operator*=(const double rhs) {
    this->vector_ *= rhs;
    return *this;
}
// Scalar division in place, implemented as multiplication by the reciprocal
// (one division total; may differ from per-element division in the last ulp).
TlDenseVector_ImplEigen& TlDenseVector_ImplEigen::operator/=(const double rhs) {
    return this->operator*=(1.0 / rhs);
}
// vector * vector is defined as the inner (dot) product.
double TlDenseVector_ImplEigen::operator*(
    const TlDenseVector_ImplEigen& rhs) const {
    return this->dot(rhs);
}
// ---------------------------------------------------------------------------
// operations
// ---------------------------------------------------------------------------
// Returns the sum of all elements.
double TlDenseVector_ImplEigen::sum() const {
    return this->vector_.array().sum();
}
// Sorts the elements in place in descending order.
void TlDenseVector_ImplEigen::sortByGreater() {
    std::sort(this->vector_.data(), this->vector_.data() + this->getSize(),
              std::greater<double>());
}
// Inner product with rhs; both vectors must have the same size.
double TlDenseVector_ImplEigen::dot(const TlDenseVector_ImplEigen& rhs) const {
    assert(this->getSize() == rhs.getSize());
    const double answer = this->vector_.dot(rhs.vector_);
    return answer;
}
// Element-wise (Hadamard) multiplication in place — despite the name, this is
// NOT the scalar dot product; each element is multiplied by the matching rhs
// element. Sizes must match.
TlDenseVector_ImplEigen& TlDenseVector_ImplEigen::dotInPlace(
    const TlDenseVector_ImplEigen& rhs) {
    assert(this->getSize() == rhs.getSize());
    this->vector_.array() *= rhs.vector_.array();
    return *this;
}
// ---------------------------------------------------------------------------
// others
// ---------------------------------------------------------------------------
// Free operator: element-wise addition (copy-then-+=).
TlDenseVector_ImplEigen operator+(const TlDenseVector_ImplEigen& rhs1,
                                  const TlDenseVector_ImplEigen& rhs2) {
    TlDenseVector_ImplEigen answer = rhs1;
    answer += rhs2;
    return answer;
}
// Free operator: element-wise subtraction (copy-then--=).
TlDenseVector_ImplEigen operator-(const TlDenseVector_ImplEigen& rhs1,
                                  const TlDenseVector_ImplEigen& rhs2) {
    TlDenseVector_ImplEigen answer = rhs1;
    answer -= rhs2;
    return answer;
}
// Free operator: vector * scalar (copy-then-*=).
TlDenseVector_ImplEigen operator*(const TlDenseVector_ImplEigen& rhs1,
                                  const double rhs2) {
    TlDenseVector_ImplEigen answer = rhs1;
    answer *= rhs2;
    return answer;
}
// Free operator: scalar * vector, delegating to vector * scalar (commutative).
TlDenseVector_ImplEigen operator*(const double rhs1,
                                  const TlDenseVector_ImplEigen& rhs2) {
    return (rhs2 * rhs1);
}
|
<|file_name|>dtpicker-jq.js<|end_file_name|><|fim▁begin|>/* dtpicker javascript jQuery */
(function($) {
    // 严格模式
    'use strict';
    // 控件类名
    var pluginName = 'dtpicker';
    var PluginClass = T.UI.Controls.DTPicker;
    var pluginRef = 't-plugin-ref';
    // 胶水代码: $(el).dtpicker(options) 创建控件;
    // $(el).dtpicker('method', args...) 调用 API 方法。
    $.fn[pluginName] = function(options) {
        if (typeof options === 'string') {
            // 2. 调用API — dispatch a named API call on the stored plugin instance
            var plugin = this.data(pluginRef);
            if (!plugin || !plugin[options]) {
                throw '方法 ' + options + ' 不存在';
            }
            var result = plugin[options].apply(plugin, Array.prototype.slice.call(arguments, 1));
            if (options === 'destroy') {
                // BUGFIX: was `jqElement.removeData(pluginRef)`, but `jqElement`
                // is not defined in this branch (ReferenceError); drop the
                // reference from `this` instead.
                this.removeData(pluginRef);
            }
            return result;
        }
        this.each(function () {
            var jqElement = $(this);
            var plugin = jqElement.data(pluginRef);
            if (plugin === undefined) {
                // 1. 创建新对象
                plugin = new PluginClass(this, $.extend(true, {}, options));
                jqElement.data(pluginRef, plugin);
            } else if (plugin.updateOptions) {
                // 3. 更新选项
                // BUGFIX: was `plugin.updateOptions || plugin.updateOptions(options)`,
                // which never invoked updateOptions when it existed and threw a
                // TypeError when it did not; guard-and-call instead.
                plugin.updateOptions(options);
            }
        });
        return this;
    };
})(jQuery);
// (function($) {
// // 'use strict';
// var dateTimePicker = function (element, options) {
// /********************************************************************************
// *
// * Public API functions
// * =====================
// *
// * Important: Do not expose direct references to private objects or the options
// * object to the outer world. Always return a clone when returning values or make
// * a clone when setting a private variable.
// *
// ********************************************************************************/
// picker.toggle = toggle;
// picker.show = show;
// picker.hide = hide;
// picker.ignoreReadonly = function (ignoreReadonly) {
// if (arguments.length === 0) {
// return options.ignoreReadonly;
// }
// if (typeof ignoreReadonly !== 'boolean') {
// throw new TypeError('ignoreReadonly () expects a boolean parameter');
// }
// options.ignoreReadonly = ignoreReadonly;
// return picker;
// };
// picker.options = function (newOptions) {
// if (arguments.length === 0) {
// return $.extend(true, {}, options);
// }
// if (!(newOptions instanceof Object)) {
// throw new TypeError('options() options parameter should be an object');
// }
// $.extend(true, options, newOptions);
// $.each(options, function (key, value) {
// if (picker[key] !== undefined) {
// picker[key](value);
// } else {
// throw new TypeError('option ' + key + ' is not recognized!');
// }
// });
// return picker;
// };
// picker.date = function (newDate) {
// ///<signature helpKeyword="$.fn.datetimepicker.date">
// ///<summary>Returns the component's model current date, a moment object or null if not set.</summary>
// ///<returns type="Moment">date.clone()</returns>
// ///</signature>
// ///<signature>
// ///<summary>Sets the components model current moment to it. Passing a null value unsets the components model current moment. Parsing of the newDate parameter is made using moment library with the options.format and options.useStrict components configuration.</summary>
// ///<param name="newDate" locid="$.fn.datetimepicker.date_p:newDate">Takes string, Date, moment, null parameter.</param>
// ///</signature>
// if (arguments.length === 0) {
// if (unset) {
// return null;
// }
// return date.clone();
// }
// if (newDate !== null && typeof newDate !== 'string' && !moment.isMoment(newDate) && !(newDate instanceof Date)) {
// throw new TypeError('date() parameter must be one of [null, string, moment or Date]');
// }
// setValue(newDate === null ? null : parseInputDate(newDate));
// return picker;
// };
// picker.format = function (newFormat) {
// ///<summary>test su</summary>
// ///<param name="newFormat">info about para</param>
// ///<returns type="string|boolean">returns foo</returns>
// if (arguments.length === 0) {
// return options.format;
// }
// if ((typeof newFormat !== 'string') && ((typeof newFormat !== 'boolean') || (newFormat !== false))) {
// throw new TypeError('format() expects a sting or boolean:false parameter ' + newFormat);
// }
// options.format = newFormat;
// if (actualFormat) {
// initFormatting(); // reinit formatting
// }
// return picker;
// };
// picker.timeZone = function (newZone) {
// if (arguments.length === 0) {
// return options.timeZone;
// }
// options.timeZone = newZone;
// return picker;
// };
// picker.dayViewHeaderFormat = function (newFormat) {
// if (arguments.length === 0) {
// return options.dayViewHeaderFormat;
// }
// if (typeof newFormat !== 'string') {
// throw new TypeError('dayViewHeaderFormat() expects a string parameter');
// }
// options.dayViewHeaderFormat = newFormat;
// return picker;
// };
// picker.extraFormats = function (formats) {
// if (arguments.length === 0) {
// return options.extraFormats;
// }
// if (formats !== false && !(formats instanceof Array)) {
// throw new TypeError('extraFormats() expects an array or false parameter');
// }
// options.extraFormats = formats;
// if (parseFormats) {
// initFormatting(); // reinit formatting
// }
// return picker;
// };
// picker.disabledDates = function (dates) {
// ///<signature helpKeyword="$.fn.datetimepicker.disabledDates">
// ///<summary>Returns an array with the currently set disabled dates on the component.</summary>
// ///<returns type="array">options.disabledDates</returns>
// ///</signature>
// ///<signature>
// ///<summary>Setting this takes precedence over options.minDate, options.maxDate configuration. Also calling this function removes the configuration of
// ///options.enabledDates if such exist.</summary>
// ///<param name="dates" locid="$.fn.datetimepicker.disabledDates_p:dates">Takes an [ string or Date or moment ] of values and allows the user to select only from those days.</param>
// ///</signature>
// if (arguments.length === 0) {
// return (options.disabledDates ? $.extend({}, options.disabledDates) : options.disabledDates);
// }
// if (!dates) {
// options.disabledDates = false;
// update();
// return picker;
// }
// if (!(dates instanceof Array)) {
// throw new TypeError('disabledDates() expects an array parameter');
// }
// options.disabledDates = indexGivenDates(dates);
// options.enabledDates = false;
// update();
// return picker;
// };
// picker.enabledDates = function (dates) {
// ///<signature helpKeyword="$.fn.datetimepicker.enabledDates">
// ///<summary>Returns an array with the currently set enabled dates on the component.</summary>
// ///<returns type="array">options.enabledDates</returns>
// ///</signature>
// ///<signature>
// ///<summary>Setting this takes precedence over options.minDate, options.maxDate configuration. Also calling this function removes the configuration of options.disabledDates if such exist.</summary>
// ///<param name="dates" locid="$.fn.datetimepicker.enabledDates_p:dates">Takes an [ string or Date or moment ] of values and allows the user to select only from those days.</param>
// ///</signature>
// if (arguments.length === 0) {
// return (options.enabledDates ? $.extend({}, options.enabledDates) : options.enabledDates);
// }
// if (!dates) {
// options.enabledDates = false;
// update();
// return picker;
// }
// if (!(dates instanceof Array)) {
// throw new TypeError('enabledDates() expects an array parameter');
// }
// options.enabledDates = indexGivenDates(dates);
// options.disabledDates = false;
// update();
// return picker;
// };
// picker.daysOfWeekDisabled = function (daysOfWeekDisabled) {
// if (arguments.length === 0) {
// return options.daysOfWeekDisabled.splice(0);
// }
// if ((typeof daysOfWeekDisabled === 'boolean') && !daysOfWeekDisabled) {
// options.daysOfWeekDisabled = false;
// update();
// return picker;
// }
// if (!(daysOfWeekDisabled instanceof Array)) {
// throw new TypeError('daysOfWeekDisabled() expects an array parameter');
// }
// options.daysOfWeekDisabled = daysOfWeekDisabled.reduce(function (previousValue, currentValue) {
// currentValue = parseInt(currentValue, 10);
// if (currentValue > 6 || currentValue < 0 || isNaN(currentValue)) {
// return previousValue;
// }
// if (previousValue.indexOf(currentValue) === -1) {
// previousValue.push(currentValue);
// }
// return previousValue;
// }, []).sort();
// if (options.useCurrent && !options.keepInvalid) {
// var tries = 0;
// while (!isValid(date, 'd')) {
// date.add(1, 'd');
// if (tries === 7) {
// throw 'Tried 7 times to find a valid date';
// }
// tries++;
// }
// setValue(date);
// }
// update();
// return picker;
// };
// picker.maxDate = function (maxDate) {
// if (arguments.length === 0) {
// return options.maxDate ? options.maxDate.clone() : options.maxDate;
// }
// if ((typeof maxDate === 'boolean') && maxDate === false) {
// options.maxDate = false;
// update();
// return picker;
// }
// if (typeof maxDate === 'string') {
// if (maxDate === 'now' || maxDate === 'moment') {
// maxDate = getMoment();
// }
// }
// var parsedDate = parseInputDate(maxDate);
// if (!parsedDate.isValid()) {
// throw new TypeError('maxDate() Could not parse date parameter: ' + maxDate);
// }
// if (options.minDate && parsedDate.isBefore(options.minDate)) {
// throw new TypeError('maxDate() date parameter is before options.minDate: ' + parsedDate.format(actualFormat));
// }
// options.maxDate = parsedDate;
// if (options.useCurrent && !options.keepInvalid && date.isAfter(maxDate)) {
// setValue(options.maxDate);
// }
// if (viewDate.isAfter(parsedDate)) {
// viewDate = parsedDate.clone().subtract(options.stepping, 'm');
// }
// update();
// return picker;
// };
// picker.minDate = function (minDate) {
// if (arguments.length === 0) {
// return options.minDate ? options.minDate.clone() : options.minDate;
// }
// if ((typeof minDate === 'boolean') && minDate === false) {
// options.minDate = false;
// update();
// return picker;
// }
// if (typeof minDate === 'string') {
// if (minDate === 'now' || minDate === 'moment') {
// minDate = getMoment();
// }
// }
// var parsedDate = parseInputDate(minDate);
// if (!parsedDate.isValid()) {
// throw new TypeError('minDate() Could not parse date parameter: ' + minDate);
// }
// if (options.maxDate && parsedDate.isAfter(options.maxDate)) {
// throw new TypeError('minDate() date parameter is after options.maxDate: ' + parsedDate.format(actualFormat));
// }
// options.minDate = parsedDate;
// if (options.useCurrent && !options.keepInvalid && date.isBefore(minDate)) {
// setValue(options.minDate);
// }
// if (viewDate.isBefore(parsedDate)) {
// viewDate = parsedDate.clone().add(options.stepping, 'm');
// }
// update();
// return picker;
// };
// picker.defaultDate = function (defaultDate) {
// ///<signature helpKeyword="$.fn.datetimepicker.defaultDate">
// ///<summary>Returns a moment with the options.defaultDate option configuration or false if not set</summary>
// ///<returns type="Moment">date.clone()</returns>
// ///</signature>
// ///<signature>
// ///<summary>Will set the picker's inital date. If a boolean:false value is passed the options.defaultDate parameter is cleared.</summary>
// ///<param name="defaultDate" locid="$.fn.datetimepicker.defaultDate_p:defaultDate">Takes a string, Date, moment, boolean:false</param>
// ///</signature>
// if (arguments.length === 0) {
// return options.defaultDate ? options.defaultDate.clone() : options.defaultDate;
// }
// if (!defaultDate) {
// options.defaultDate = false;
// return picker;
// }
// if (typeof defaultDate === 'string') {
// if (defaultDate === 'now' || defaultDate === 'moment') {
// defaultDate = getMoment();
// }
// }
// var parsedDate = parseInputDate(defaultDate);
// if (!parsedDate.isValid()) {
// throw new TypeError('defaultDate() Could not parse date parameter: ' + defaultDate);
// }
// if (!isValid(parsedDate)) {
// throw new TypeError('defaultDate() date passed is invalid according to component setup validations');
// }
// options.defaultDate = parsedDate;
// if ((options.defaultDate && options.inline) || input.val().trim() === '') {
// setValue(options.defaultDate);
// }
// return picker;
// };
// picker.locale = function (locale) {
// if (arguments.length === 0) {
// return options.locale;
// }
// if (!moment.localeData(locale)) {
// throw new TypeError('locale() locale ' + locale + ' is not loaded from moment locales!');
// }
// options.locale = locale;
// date.locale(options.locale);
// viewDate.locale(options.locale);
// if (actualFormat) {
// initFormatting(); // reinit formatting
// }
// if (widget) {
// hide();
// show();
// }
// return picker;
// };
// picker.stepping = function (stepping) {
// if (arguments.length === 0) {
// return options.stepping;
// }<|fim▁hole|>// if (isNaN(stepping) || stepping < 1) {
// stepping = 1;
// }
// options.stepping = stepping;
// return picker;
// };
// picker.useCurrent = function (useCurrent) {
// var useCurrentOptions = ['year', 'month', 'day', 'hour', 'minute'];
// if (arguments.length === 0) {
// return options.useCurrent;
// }
// if ((typeof useCurrent !== 'boolean') && (typeof useCurrent !== 'string')) {
// throw new TypeError('useCurrent() expects a boolean or string parameter');
// }
// if (typeof useCurrent === 'string' && useCurrentOptions.indexOf(useCurrent.toLowerCase()) === -1) {
// throw new TypeError('useCurrent() expects a string parameter of ' + useCurrentOptions.join(', '));
// }
// options.useCurrent = useCurrent;
// return picker;
// };
// picker.collapse = function (collapse) {
// if (arguments.length === 0) {
// return options.collapse;
// }
// if (typeof collapse !== 'boolean') {
// throw new TypeError('collapse() expects a boolean parameter');
// }
// if (options.collapse === collapse) {
// return picker;
// }
// options.collapse = collapse;
// if (widget) {
// hide();
// show();
// }
// return picker;
// };
// picker.icons = function (icons) {
// if (arguments.length === 0) {
// return $.extend({}, options.icons);
// }
// if (!(icons instanceof Object)) {
// throw new TypeError('icons() expects parameter to be an Object');
// }
// $.extend(options.icons, icons);
// if (widget) {
// hide();
// show();
// }
// return picker;
// };
// picker.tooltips = function (tooltips) {
// if (arguments.length === 0) {
// return $.extend({}, options.tooltips);
// }
// if (!(tooltips instanceof Object)) {
// throw new TypeError('tooltips() expects parameter to be an Object');
// }
// $.extend(options.tooltips, tooltips);
// if (widget) {
// hide();
// show();
// }
// return picker;
// };
// picker.useStrict = function (useStrict) {
// if (arguments.length === 0) {
// return options.useStrict;
// }
// if (typeof useStrict !== 'boolean') {
// throw new TypeError('useStrict() expects a boolean parameter');
// }
// options.useStrict = useStrict;
// return picker;
// };
// picker.sideBySide = function (sideBySide) {
// if (arguments.length === 0) {
// return options.sideBySide;
// }
// if (typeof sideBySide !== 'boolean') {
// throw new TypeError('sideBySide() expects a boolean parameter');
// }
// options.sideBySide = sideBySide;
// if (widget) {
// hide();
// show();
// }
// return picker;
// };
// picker.viewMode = function (viewMode) {
// if (arguments.length === 0) {
// return options.viewMode;
// }
// if (typeof viewMode !== 'string') {
// throw new TypeError('viewMode() expects a string parameter');
// }
// if (viewModes.indexOf(viewMode) === -1) {
// throw new TypeError('viewMode() parameter must be one of (' + viewModes.join(', ') + ') value');
// }
// options.viewMode = viewMode;
// currentViewMode = Math.max(viewModes.indexOf(viewMode), minViewModeNumber);
// showMode();
// return picker;
// };
// picker.toolbarPlacement = function (toolbarPlacement) {
// if (arguments.length === 0) {
// return options.toolbarPlacement;
// }
// if (typeof toolbarPlacement !== 'string') {
// throw new TypeError('toolbarPlacement() expects a string parameter');
// }
// if (toolbarPlacements.indexOf(toolbarPlacement) === -1) {
// throw new TypeError('toolbarPlacement() parameter must be one of (' + toolbarPlacements.join(', ') + ') value');
// }
// options.toolbarPlacement = toolbarPlacement;
// if (widget) {
// hide();
// show();
// }
// return picker;
// };
// picker.widgetPositioning = function (widgetPositioning) {
// if (arguments.length === 0) {
// return $.extend({}, options.widgetPositioning);
// }
// if (({}).toString.call(widgetPositioning) !== '[object Object]') {
// throw new TypeError('widgetPositioning() expects an object variable');
// }
// if (widgetPositioning.horizontal) {
// if (typeof widgetPositioning.horizontal !== 'string') {
// throw new TypeError('widgetPositioning() horizontal variable must be a string');
// }
// widgetPositioning.horizontal = widgetPositioning.horizontal.toLowerCase();
// if (horizontalModes.indexOf(widgetPositioning.horizontal) === -1) {
// throw new TypeError('widgetPositioning() expects horizontal parameter to be one of (' + horizontalModes.join(', ') + ')');
// }
// options.widgetPositioning.horizontal = widgetPositioning.horizontal;
// }
// if (widgetPositioning.vertical) {
// if (typeof widgetPositioning.vertical !== 'string') {
// throw new TypeError('widgetPositioning() vertical variable must be a string');
// }
// widgetPositioning.vertical = widgetPositioning.vertical.toLowerCase();
// if (verticalModes.indexOf(widgetPositioning.vertical) === -1) {
// throw new TypeError('widgetPositioning() expects vertical parameter to be one of (' + verticalModes.join(', ') + ')');
// }
// options.widgetPositioning.vertical = widgetPositioning.vertical;
// }
// update();
// return picker;
// };
// picker.calendarWeeks = function (calendarWeeks) {
// if (arguments.length === 0) {
// return options.calendarWeeks;
// }
// if (typeof calendarWeeks !== 'boolean') {
// throw new TypeError('calendarWeeks() expects parameter to be a boolean value');
// }
// options.calendarWeeks = calendarWeeks;
// update();
// return picker;
// };
// picker.showTodayButton = function (showTodayButton) {
// if (arguments.length === 0) {
// return options.showTodayButton;
// }
// if (typeof showTodayButton !== 'boolean') {
// throw new TypeError('showTodayButton() expects a boolean parameter');
// }
// options.showTodayButton = showTodayButton;
// if (widget) {
// hide();
// show();
// }
// return picker;
// };
// picker.showClear = function (showClear) {
// if (arguments.length === 0) {
// return options.showClear;
// }
// if (typeof showClear !== 'boolean') {
// throw new TypeError('showClear() expects a boolean parameter');
// }
// options.showClear = showClear;
// if (widget) {
// hide();
// show();
// }
// return picker;
// };
// picker.widgetParent = function (widgetParent) {
// if (arguments.length === 0) {
// return options.widgetParent;
// }
// if (typeof widgetParent === 'string') {
// widgetParent = $(widgetParent);
// }
// if (widgetParent !== null && (typeof widgetParent !== 'string' && !(widgetParent instanceof $))) {
// throw new TypeError('widgetParent() expects a string or a jQuery object parameter');
// }
// options.widgetParent = widgetParent;
// if (widget) {
// hide();
// show();
// }
// return picker;
// };
// picker.keepOpen = function (keepOpen) {
// if (arguments.length === 0) {
// return options.keepOpen;
// }
// if (typeof keepOpen !== 'boolean') {
// throw new TypeError('keepOpen() expects a boolean parameter');
// }
// options.keepOpen = keepOpen;
// return picker;
// };
// picker.focusOnShow = function (focusOnShow) {
// if (arguments.length === 0) {
// return options.focusOnShow;
// }
// if (typeof focusOnShow !== 'boolean') {
// throw new TypeError('focusOnShow() expects a boolean parameter');
// }
// options.focusOnShow = focusOnShow;
// return picker;
// };
// picker.inline = function (inline) {
// if (arguments.length === 0) {
// return options.inline;
// }
// if (typeof inline !== 'boolean') {
// throw new TypeError('inline() expects a boolean parameter');
// }
// options.inline = inline;
// return picker;
// };
// picker.clear = function () {
// clear();
// return picker;
// };
// picker.keyBinds = function (keyBinds) {
// options.keyBinds = keyBinds;
// return picker;
// };
// picker.getMoment = function (d) {
// return this.getMoment(d);
// };
// picker.debug = function (debug) {
// if (typeof debug !== 'boolean') {
// throw new TypeError('debug() expects a boolean parameter');
// }
// options.debug = debug;
// return picker;
// };
// picker.allowInputToggle = function (allowInputToggle) {
// if (arguments.length === 0) {
// return options.allowInputToggle;
// }
// if (typeof allowInputToggle !== 'boolean') {
// throw new TypeError('allowInputToggle() expects a boolean parameter');
// }
// options.allowInputToggle = allowInputToggle;
// return picker;
// };
// picker.showClose = function (showClose) {
// if (arguments.length === 0) {
// return options.showClose;
// }
// if (typeof showClose !== 'boolean') {
// throw new TypeError('showClose() expects a boolean parameter');
// }
// options.showClose = showClose;
// return picker;
// };
// picker.keepInvalid = function (keepInvalid) {
// if (arguments.length === 0) {
// return options.keepInvalid;
// }
// if (typeof keepInvalid !== 'boolean') {
// throw new TypeError('keepInvalid() expects a boolean parameter');
// }
// options.keepInvalid = keepInvalid;
// return picker;
// };
// picker.datepickerInput = function (datepickerInput) {
// if (arguments.length === 0) {
// return options.datepickerInput;
// }
// if (typeof datepickerInput !== 'string') {
// throw new TypeError('datepickerInput() expects a string parameter');
// }
// options.datepickerInput = datepickerInput;
// return picker;
// };
// picker.parseInputDate = function (parseInputDate) {
// if (arguments.length === 0) {
// return options.parseInputDate;
// }
// if (typeof parseInputDate !== 'function') {
// throw new TypeError('parseInputDate() sholud be as function');
// }
// options.parseInputDate = parseInputDate;
// return picker;
// };
// picker.disabledTimeIntervals = function (disabledTimeIntervals) {
// ///<signature helpKeyword="$.fn.datetimepicker.disabledTimeIntervals">
// ///<summary>Returns an array with the currently set disabled dates on the component.</summary>
// ///<returns type="array">options.disabledTimeIntervals</returns>
// ///</signature>
// ///<signature>
// ///<summary>Setting this takes precedence over options.minDate, options.maxDate configuration. Also calling this function removes the configuration of
// ///options.enabledDates if such exist.</summary>
// ///<param name="dates" locid="$.fn.datetimepicker.disabledTimeIntervals_p:dates">Takes an [ string or Date or moment ] of values and allows the user to select only from those days.</param>
// ///</signature>
// if (arguments.length === 0) {
// return (options.disabledTimeIntervals ? $.extend({}, options.disabledTimeIntervals) : options.disabledTimeIntervals);
// }
// if (!disabledTimeIntervals) {
// options.disabledTimeIntervals = false;
// update();
// return picker;
// }
// if (!(disabledTimeIntervals instanceof Array)) {
// throw new TypeError('disabledTimeIntervals() expects an array parameter');
// }
// options.disabledTimeIntervals = disabledTimeIntervals;
// update();
// return picker;
// };
// picker.disabledHours = function (hours) {
// ///<signature helpKeyword="$.fn.datetimepicker.disabledHours">
// ///<summary>Returns an array with the currently set disabled hours on the component.</summary>
// ///<returns type="array">options.disabledHours</returns>
// ///</signature>
// ///<signature>
// ///<summary>Setting this takes precedence over options.minDate, options.maxDate configuration. Also calling this function removes the configuration of
// ///options.enabledHours if such exist.</summary>
// ///<param name="hours" locid="$.fn.datetimepicker.disabledHours_p:hours">Takes an [ int ] of values and disallows the user to select only from those hours.</param>
// ///</signature>
// if (arguments.length === 0) {
// return (options.disabledHours ? $.extend({}, options.disabledHours) : options.disabledHours);
// }
// if (!hours) {
// options.disabledHours = false;
// update();
// return picker;
// }
// if (!(hours instanceof Array)) {
// throw new TypeError('disabledHours() expects an array parameter');
// }
// options.disabledHours = indexGivenHours(hours);
// options.enabledHours = false;
// if (options.useCurrent && !options.keepInvalid) {
// var tries = 0;
// while (!isValid(date, 'h')) {
// date.add(1, 'h');
// if (tries === 24) {
// throw 'Tried 24 times to find a valid date';
// }
// tries++;
// }
// setValue(date);
// }
// update();
// return picker;
// };
// picker.enabledHours = function (hours) {
// ///<signature helpKeyword="$.fn.datetimepicker.enabledHours">
// ///<summary>Returns an array with the currently set enabled hours on the component.</summary>
// ///<returns type="array">options.enabledHours</returns>
// ///</signature>
// ///<signature>
// ///<summary>Setting this takes precedence over options.minDate, options.maxDate configuration. Also calling this function removes the configuration of options.disabledHours if such exist.</summary>
// ///<param name="hours" locid="$.fn.datetimepicker.enabledHours_p:hours">Takes an [ int ] of values and allows the user to select only from those hours.</param>
// ///</signature>
// if (arguments.length === 0) {
// return (options.enabledHours ? $.extend({}, options.enabledHours) : options.enabledHours);
// }
// if (!hours) {
// options.enabledHours = false;
// update();
// return picker;
// }
// if (!(hours instanceof Array)) {
// throw new TypeError('enabledHours() expects an array parameter');
// }
// options.enabledHours = indexGivenHours(hours);
// options.disabledHours = false;
// if (options.useCurrent && !options.keepInvalid) {
// var tries = 0;
// while (!isValid(date, 'h')) {
// date.add(1, 'h');
// if (tries === 24) {
// throw 'Tried 24 times to find a valid date';
// }
// tries++;
// }
// setValue(date);
// }
// update();
// return picker;
// };
// picker.viewDate = function (newDate) {
// ///<signature helpKeyword="$.fn.datetimepicker.viewDate">
// ///<summary>Returns the component's model current viewDate, a moment object or null if not set.</summary>
// ///<returns type="Moment">viewDate.clone()</returns>
// ///</signature>
// ///<signature>
// ///<summary>Sets the components model current moment to it. Passing a null value unsets the components model current moment. Parsing of the newDate parameter is made using moment library with the options.format and options.useStrict components configuration.</summary>
// ///<param name="newDate" locid="$.fn.datetimepicker.date_p:newDate">Takes string, viewDate, moment, null parameter.</param>
// ///</signature>
// if (arguments.length === 0) {
// return viewDate.clone();
// }
// if (!newDate) {
// viewDate = date.clone();
// return picker;
// }
// if (typeof newDate !== 'string' && !moment.isMoment(newDate) && !(newDate instanceof Date)) {
// throw new TypeError('viewDate() parameter must be one of [string, moment or Date]');
// }
// viewDate = parseInputDate(newDate);
// viewUpdate();
// return picker;
// };
// };
// })(jQuery);<|fim▁end|> |
// stepping = parseInt(stepping, 10); |
<|file_name|>camera.py<|end_file_name|><|fim▁begin|>"""This component provides basic support for Foscam IP cameras."""
import asyncio
from libpyfoscam import FoscamCamera
import voluptuous as vol
from homeassistant.components.camera import PLATFORM_SCHEMA, SUPPORT_STREAM, Camera
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_USERNAME,
)
from homeassistant.helpers import config_validation as cv, entity_platform
from .const import (
CONF_RTSP_PORT,
CONF_STREAM,
DOMAIN,
LOGGER,
SERVICE_PTZ,
SERVICE_PTZ_PRESET,
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required("ip"): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Optional(CONF_NAME, default="Foscam Camera"): cv.string,
vol.Optional(CONF_PORT, default=88): cv.port,
vol.Optional(CONF_RTSP_PORT): cv.port,
}
)
DIR_UP = "up"
DIR_DOWN = "down"
DIR_LEFT = "left"
DIR_RIGHT = "right"
DIR_TOPLEFT = "top_left"
DIR_TOPRIGHT = "top_right"
DIR_BOTTOMLEFT = "bottom_left"
DIR_BOTTOMRIGHT = "bottom_right"
MOVEMENT_ATTRS = {
DIR_UP: "ptz_move_up",
DIR_DOWN: "ptz_move_down",
DIR_LEFT: "ptz_move_left",
DIR_RIGHT: "ptz_move_right",
DIR_TOPLEFT: "ptz_move_top_left",
DIR_TOPRIGHT: "ptz_move_top_right",
DIR_BOTTOMLEFT: "ptz_move_bottom_left",
DIR_BOTTOMRIGHT: "ptz_move_bottom_right",
}
DEFAULT_TRAVELTIME = 0.125
ATTR_MOVEMENT = "movement"
ATTR_TRAVELTIME = "travel_time"
ATTR_PRESET_NAME = "preset_name"
PTZ_GOTO_PRESET_COMMAND = "ptz_goto_preset"
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up a Foscam IP Camera.

    Legacy YAML setup path: logs a deprecation warning, translates the
    platform configuration into config-entry data and forwards it to the
    import config flow.  No entities are added directly here.
    """
    LOGGER.warning(
        "Loading foscam via platform config is deprecated, it will be automatically imported. Please remove it afterwards."
    )

    # Translate the YAML schema (see PLATFORM_SCHEMA) into the shape the
    # config entry expects.  The stream defaults to "Main" and the RTSP
    # port to 554 when not configured in YAML.
    config_new = {
        CONF_NAME: config[CONF_NAME],
        CONF_HOST: config["ip"],
        CONF_PORT: config[CONF_PORT],
        CONF_USERNAME: config[CONF_USERNAME],
        CONF_PASSWORD: config[CONF_PASSWORD],
        CONF_STREAM: "Main",
        CONF_RTSP_PORT: config.get(CONF_RTSP_PORT, 554),
    }

    # Kick off the config-entry import flow asynchronously.
    hass.async_create_task(
        hass.config_entries.flow.async_init(
            DOMAIN, context={"source": SOURCE_IMPORT}, data=config_new
        )
    )
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Add a Foscam IP camera from a config entry."""
    platform = entity_platform.current_platform.get()

    # Register the PTZ movement service for this platform's entities:
    # one of the eight supported directions plus an optional travel time
    # (seconds) before the movement is stopped again.
    platform.async_register_entity_service(
        SERVICE_PTZ,
        {
            vol.Required(ATTR_MOVEMENT): vol.In(
                [
                    DIR_UP,
                    DIR_DOWN,
                    DIR_LEFT,
                    DIR_RIGHT,
                    DIR_TOPLEFT,
                    DIR_TOPRIGHT,
                    DIR_BOTTOMLEFT,
                    DIR_BOTTOMRIGHT,
                ]
            ),
            vol.Optional(ATTR_TRAVELTIME, default=DEFAULT_TRAVELTIME): cv.small_float,
        },
        "async_perform_ptz",
    )

    # Register the "go to preset" service (preset name as stored on the
    # camera itself).
    platform.async_register_entity_service(
        SERVICE_PTZ_PRESET,
        {
            vol.Required(ATTR_PRESET_NAME): cv.string,
        },
        "async_perform_ptz_preset",
    )

    # The FoscamCamera session is treated as blocking I/O elsewhere in
    # this module (always invoked through async_add_executor_job).
    camera = FoscamCamera(
        config_entry.data[CONF_HOST],
        config_entry.data[CONF_PORT],
        config_entry.data[CONF_USERNAME],
        config_entry.data[CONF_PASSWORD],
        verbose=False,
    )

    async_add_entities([HassFoscamCamera(camera, config_entry)])
class HassFoscamCamera(Camera):
    """An implementation of a Foscam IP camera."""

    def __init__(self, camera, config_entry):
        """Initialize a Foscam camera."""
        super().__init__()
        self._foscam_session = camera  # FoscamCamera API session (blocking calls)
        self._name = config_entry.title
        self._username = config_entry.data[CONF_USERNAME]
        self._password = config_entry.data[CONF_PASSWORD]
        self._stream = config_entry.data[CONF_STREAM]
        # The config entry id doubles as the entity's unique id.
        self._unique_id = config_entry.entry_id
        self._rtsp_port = config_entry.data[CONF_RTSP_PORT]
        self._motion_status = False

    async def async_added_to_hass(self):
        """Handle entity addition to hass."""
        # Get motion detection status
        ret, response = await self.hass.async_add_executor_job(
            self._foscam_session.get_motion_detect_config
        )

        if ret == -3:
            # Return code -3: the configured user lacks admin rights, so the
            # motion status stays at its default (False).
            LOGGER.info(
                "Can't get motion detection status, camera %s configured with non-admin user",
                self._name,
            )

        elif ret != 0:
            LOGGER.error(
                "Error getting motion detection status of %s: %s", self._name, ret
            )

        else:
            # NOTE(review): the whole response is compared against 1 —
            # presumably the API returns a bare "enabled" flag here; confirm
            # against libpyfoscam's get_motion_detect_config return value.
            self._motion_status = response == 1

    @property
    def unique_id(self):
        """Return the entity unique ID."""
        return self._unique_id

    def camera_image(self):
        """Return a still image response from the camera."""
        # Send the request to snap a picture and return raw jpg data
        # Handle exception if host is not reachable or url failed
        result, response = self._foscam_session.snap_picture_2()
        if result != 0:
            return None

        return response

    @property
    def supported_features(self):
        """Return supported features."""
        # Streaming is only advertised when an RTSP port is configured.
        if self._rtsp_port:
            return SUPPORT_STREAM
        return None

    async def stream_source(self):
        """Return the stream source."""
        if self._rtsp_port:
            # Credentials are embedded in the RTSP URL; the stream name
            # suffix comes from the config entry (e.g. "Main").
            return f"rtsp://{self._username}:{self._password}@{self._foscam_session.host}:{self._rtsp_port}/video{self._stream}"

        return None

    @property
    def motion_detection_enabled(self):
        """Camera Motion Detection Status."""
        return self._motion_status

    def enable_motion_detection(self):
        """Enable motion detection in camera."""
        try:
            ret = self._foscam_session.enable_motion_detection()

            if ret != 0:
                if ret == -3:
                    # Non-admin users cannot change motion detection.
                    LOGGER.info(
                        "Can't set motion detection status, camera %s configured with non-admin user",
                        self._name,
                    )
                return

            self._motion_status = True
        except TypeError:
            # The underlying call raises TypeError on devices that do not
            # support this feature.
            LOGGER.debug(
                "Failed enabling motion detection on '%s'. Is it supported by the device?",
                self._name,
            )

    def disable_motion_detection(self):
        """Disable motion detection."""
        try:
            ret = self._foscam_session.disable_motion_detection()

            if ret != 0:
                if ret == -3:
                    # Non-admin users cannot change motion detection.
                    LOGGER.info(
                        "Can't set motion detection status, camera %s configured with non-admin user",
                        self._name,
                    )
                return

            self._motion_status = False
        except TypeError:
            # The underlying call raises TypeError on devices that do not
            # support this feature.
            LOGGER.debug(
                "Failed disabling motion detection on '%s'. Is it supported by the device?",
                self._name,
            )

    async def async_perform_ptz(self, movement, travel_time):
        """Perform a PTZ action on the camera."""
        LOGGER.debug("PTZ action '%s' on %s", movement, self._name)

        # Map the service's direction name onto the session method.
        movement_function = getattr(self._foscam_session, MOVEMENT_ATTRS[movement])

        ret, _ = await self.hass.async_add_executor_job(movement_function)

        if ret != 0:
            LOGGER.error("Error moving %s '%s': %s", movement, self._name, ret)
            return

        # Let the camera move for the requested time, then stop it.
        await asyncio.sleep(travel_time)

        ret, _ = await self.hass.async_add_executor_job(
            self._foscam_session.ptz_stop_run
        )

        if ret != 0:
            LOGGER.error("Error stopping movement on '%s': %s", self._name, ret)
            return

    async def async_perform_ptz_preset(self, preset_name):
        """Perform a PTZ preset action on the camera."""
        LOGGER.debug("PTZ preset '%s' on %s", preset_name, self._name)

        preset_function = getattr(self._foscam_session, PTZ_GOTO_PRESET_COMMAND)

        ret, _ = await self.hass.async_add_executor_job(preset_function, preset_name)

        if ret != 0:
            LOGGER.error(
                "Error moving to preset %s on '%s': %s", preset_name, self._name, ret
            )
            return

    @property
    def name(self):
        """Return the name of this camera."""
        return self._name
<|file_name|>issue-30318.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// compile-flags: -Z parse-only<|fim▁hole|>
// NOTE: deliberate compile-fail test fixture (built with -Z parse-only).
// The inner doc comment (`//!`) below is intentionally misplaced after an
// item; the `//~` annotations encode the diagnostics the compiler is
// expected to emit for the line directly above them — keep them adjacent.
fn foo() { }

//! Misplaced comment...
//~^ ERROR expected outer doc comment
//~| NOTE inner doc comments like this (starting with `//!` or `/*!`) can only appear before items

fn main() { }
<|file_name|>color-panel.cpp<|end_file_name|><|fim▁begin|>// -*- coding: us-ascii-unix -*-
// Copyright 2012 Lukas Kemmer
//
// Licensed under the Apache License, Version 2.0 (the "License"); you
// may not use this file except in compliance with the License. You
// may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
// implied. See the License for the specific language governing
// permissions and limitations under the License.
#include "wx/panel.h"
#include "wx/sizer.h"
#include "geo/int-size.hh"
#include "gui/color-panel.hh"
#include "gui/events.hh"
#include "gui/frame-ctrl.hh"
#include "gui/grid-ctrl.hh"
#include "gui/palette-ctrl.hh"
#include "gui/selected-color-ctrl.hh"
#include "gui/zoom-ctrl.hh"
#include "util/dumb-ptr.hh"
#include "util/either.hh"
#include "util-wx/bind-event.hh"
#include "util-wx/fwd-wx.hh"
namespace faint{

// wxPanel implementation behind ColorPanel. Owns the child controls
// (selected-color, palette, zoom, grid and frame controls) and lays them
// out in a single horizontal row.
class ColorPanelImpl : public wxPanel {
public:
  ColorPanelImpl(wxWindow* parent,
    const PaintMap& palette,
    const pick_paint_f& pickPaint,
    const Getter<Color>& getBg,
    const Accessor<Grid>& gridAccess,
    const std::function<void()>& showGridDialog,
    std::unique_ptr<FrameContext>&& frameContext,
    StatusInterface& status,
    const Art& art)
    : wxPanel(parent)
  {
    // The spacing between controls in this panel
    const int spacing = FromDIP(5);

    auto sizer = make_wx<wxBoxSizer>(wxHORIZONTAL);

    // Shows the currently selected paints, 50x50 DIP.
    m_selectedColor = std::make_unique<SelectedColorCtrl>(this,
      from_DIP(IntSize(50,50), this),
      status,
      pickPaint,
      getBg);
    sizer->Add(m_selectedColor->AsWindow(), 0, wxALL, spacing);

    // The palette is the only control that grows/shrinks with the panel.
    m_palette = std::make_unique<PaletteCtrl>(this,
      palette,
      status,
      pickPaint,
      getBg);
    sizer->Add(m_palette->AsWindow(), 0, wxALL | wxEXPAND | wxSHRINK, spacing);

    m_zoom = std::make_unique<ZoomCtrl>(this, status);
    sizer->Add(m_zoom->AsWindow(), 0, wxALL, spacing);

    m_grid = make_dumb<GridCtrl>(this, art, status, showGridDialog, gridAccess);
    sizer->Add(m_grid.get(), 0, wxALL, spacing);

    m_frameCtrl = make_dumb<FrameCtrl>(this, std::move(frameContext), status, art);
    sizer->Add(m_frameCtrl->AsWindow(), 0, wxALL, spacing);
    SetSizer(sizer);
    Layout();

    // Handle resizing child controls (e.g. color added to palette
    // or grid-control expanded).
    bind(this, EVT_FAINT_ControlResized,
      [&](){
        Layout();
        Refresh();
      });
  }

  std::unique_ptr<ZoomCtrl> m_zoom;
  dumb_ptr<GridCtrl> m_grid;
  std::unique_ptr<SelectedColorCtrl> m_selectedColor;
  std::unique_ptr<PaletteCtrl> m_palette;
  dumb_ptr<FrameCtrl> m_frameCtrl;
};

// Public facade: every member forwards to ColorPanelImpl.
// NOTE(review): m_impl is a dumb_ptr, i.e. apparently a non-owning
// handle — presumably the wxWidgets parent/child hierarchy owns the
// panel; confirm against util/dumb-ptr.hh.
ColorPanel::ColorPanel(wxWindow* parent,
  const PaintMap& palette,
  const pick_paint_f& pickPaint,
  const Getter<Color>& getBg,
  const Accessor<Grid>& gridAccess,
  const std::function<void()>& showGridDialog,
  std::unique_ptr<FrameContext>&& frameContext,
  StatusInterface& status,
  const Art& art)
  : m_impl(make_dumb<ColorPanelImpl>(parent,
      palette,
      pickPaint,
      getBg,
      gridAccess,
      showGridDialog,
      std::move(frameContext),
      status,
      art))
{}

void ColorPanel::AddToPalette(const Paint& paint){
  m_impl->m_palette->Add(paint);
}

wxWindow* ColorPanel::AsWindow(){
  return m_impl.get();
}

void ColorPanel::Freeze(){
  m_impl->Freeze();
}

void ColorPanel::Hide(){
  Show(false);
}

void ColorPanel::SetPalette(const PaintMap& paintMap){
  m_impl->m_palette->SetPalette(paintMap);
}

void ColorPanel::Show(bool show){
  m_impl->Show(show);
}

void ColorPanel::Thaw(){
  m_impl->Thaw();
}

void ColorPanel::UpdateFrames(){
  // Re-layout only when the frame control reports a size change.
  if (m_impl->m_frameCtrl->Update()){
    m_impl->Layout();
  }
}

void ColorPanel::UpdateGrid(){
  m_impl->m_grid->Update();
}

void ColorPanel::UpdateSelectedColors(const ColorChoice& c){
  m_impl->m_selectedColor->UpdateColors(c);
}

void ColorPanel::UpdateZoom(const ZoomLevel& zoom){
  m_impl->m_zoom->UpdateZoom(zoom);
}

bool ColorPanel::Visible() const{
  return m_impl->IsShown();
}

} // namespace
<|file_name|>filesystem.py<|end_file_name|><|fim▁begin|># coding=utf-8
import glob
import os
class File(object):
    """Convenience wrapper around a single filesystem path.

    The path is kept both as supplied by the caller (``original``) and in
    absolute form (``abspath``); all filesystem operations use the
    absolute path.
    """

    def __init__(self, path):
        self.original = path  # path exactly as supplied by the caller
        self.abspath = os.path.abspath(path)

    def __str__(self):
        # Prefix the original path with what it currently points at.
        prefix = ''
        if self.isfile:
            prefix = 'file: '
        elif self.isdir:
            prefix = 'dir: '
        return prefix + self.original

    def dir_required(self):
        """Raise ValueError unless the path is an existing directory."""
        if not self.isdir:
            raise ValueError('Only dir is supported for this operation.')

    def file_required(self):
        """Raise ValueError unless the path is an existing regular file."""
        if not self.isfile:
            raise ValueError('Only file is supported for this operation.')

    @staticmethod
    def join(path, *paths):
        """Join path components (plain ``os.path.join`` passthrough)."""
        return os.path.join(path, *paths)

    @property
    def name(self):
        # Fixed: the getter previously declared a dead ``without_ext``
        # parameter; property getters cannot receive arguments, so it
        # could never be passed (use ``name_without_ext`` instead).
        return os.path.basename(self.abspath)

    @property
    def name_without_ext(self):
        """Final path component with its extension stripped."""
        basename = os.path.basename(self.abspath)
        return os.path.splitext(basename)[0]

    @property
    def ext(self):
        """Extension including the leading dot ('' when there is none)."""
        return os.path.splitext(self.abspath)[1]

    @property
    def isfile(self):
        return os.path.isfile(self.abspath)

    @property
    def isdir(self):
        return os.path.isdir(self.abspath)

    @property
    def exists(self):
        return os.path.exists(self.abspath)

    def find(self, pattern='*'):
        """Return direct children matching a glob ``pattern`` as Files."""
        self.dir_required()
        wd = os.path.realpath(self.abspath)
        return [File(f) for f in glob.glob(os.path.join(wd, pattern))]

    def subdirs(self):
        """Return child directories as File objects."""
        self.dir_required()
        return [f for f in self.find() if f.isdir]

    def files(self, pattern='*'):
        """Return child regular files matching ``pattern``."""
        self.dir_required()
        return [f for f in self.find(pattern) if f.isfile]

    def create_if_not_exists(self):
        """Create this directory (and any missing parents) when absent."""
        if not self.exists:
            os.makedirs(self.abspath)

    def remove(self):
        """Delete the file, or the directory when the path is a dir.

        NOTE(review): ``os.removedirs`` also prunes now-empty parent
        directories, which may be surprising; confirm callers rely on it.
        """
        if self.isdir:
            os.removedirs(self.abspath)
        else:
            os.remove(self.abspath)

    def write(self, s, mode='w', encoding='utf-8'):
        """Write string ``s`` to the file (truncates by default)."""
        with open(self.abspath, mode=mode, encoding=encoding) as f:
            f.write(s)

    def writelines(self, lines, mode='w', encoding='utf-8'):
        """Write an iterable of strings (no newlines added)."""
        with open(self.abspath, mode=mode, encoding=encoding) as f:
            f.writelines(lines)

    def append(self, s):
        """Append string ``s`` to the file."""
        self.write(s, 'a')

    def appendlines(self, lines):
        """Append an iterable of strings to the file."""
        self.writelines(lines, 'a')

    def readlines(self, mode='r', encoding='utf-8'):
        """Yield the file's lines lazily (generator; newlines kept)."""
        with open(self.abspath, mode, encoding=encoding) as f:
            for line in f:
                yield line

    # read json
    # write json
    # pickle?
    # create tmp
    # move to
    # iterable
<|file_name|>randomtrees.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
import random
"""
Generates random trees
"""
import argparse
alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"


def generate_random_item(length=8, chars=alphabet):
    """Return a random string of ``length`` characters drawn from ``chars``.

    Uses ``random.choice`` per character, which draws exactly one value
    from the module RNG per character just like the former
    ``chars[random.randint(0, len(chars) - 1)]``, so seeded runs produce
    the same output as before.
    """
    return ''.join(random.choice(chars) for _ in range(length))
def generate_random_tree_lines(
depth,
items,
length,
chars=alphabet,
current_indentation=''):
lines = []
if depth > 0:
remaining_items_to_add = items
while remaining_items_to_add > 0:
lines.append('{0}{1}'.format(current_indentation, generate_random_item(length, chars)))
remaining_items_to_add -= 1
sub_lines = generate_random_tree_lines(<|fim▁hole|> depth - 1,
items,
length,
chars,
current_indentation + ' ')
for sub_line in sub_lines:
lines.append(sub_line)
return lines
if __name__ == '__main__':
parser = argparse.ArgumentParser('Tree sorting Stress Test')
parser.add_argument('--Depth',
help='The depth of the trees.',
type=int,
default=3)
parser.add_argument('--Items',
help='The number of items for each node of the tree.',
type=int,
default=10)
parser.add_argument('--Length',
help='The length of each item.',
type=int,
default=8)
parser.add_argument('--Alphabet',
help='The alphabet of allowed characters.',
type=str,
default=alphabet)
args = parser.parse_args()
random_tree_lines = generate_random_tree_lines(
args.Depth,
args.Items,
args.Length,
args.Alphabet)
for line in random_tree_lines:
print(line)<|fim▁end|> | |
<|file_name|>main.js<|end_file_name|><|fim▁begin|>var dp = jQuery;
dp.noConflict();
dp(document).ready(function() {
//SMOOTH SCROLL
dp('a[href^="#"]').bind('click.smoothscroll', function(e) {
e.preventDefault();
dp('html,body').animate({
scrollTop: dp(this.hash).offset().top
}, 1200);
});
//SUPER SLIDES
// dp('#home-slide').superslides({
// animation: 'fade', // You can choose either fade or slide
// play: 6000
// });
//ANIMAZE
dp('.animaze').bind('inview', function(event, visible) {
if (visible) {<|fim▁hole|> opacity: 1,
top: '0px'
}, 500);
}
/* REMOVE THIS if you want to repeat the animation after the element not in view
else {
$(this).stop().animate({ opacity: 0 });
$(this).removeAttr('style');
}*/
});
dp('.animaze').stop().animate({
opacity: 0
});
//SERVICES
dp("#dp-service").sudoSlider({
customLink: 'a.servicesLink',
responsive: true,
speed: 350,
prevNext: false,
useCSS: true,
effect: "fadeOutIn",
continuous: true,
updateBefore: true
});
//TEXT ROTATOR
dp(".rotatez").textrotator({
animation: "fade",
separator: ",",
speed: 1700
});
//PORTFOLIO
dp('.portfolioContainer').mixitup({
filterSelector: '.portfolioFilter a',
targetSelector: '.portfolio-item',
effects: ['fade', 'scale']
});
//QUOTE SLIDE
dp("#quote-slider").sudoSlider({
customLink: 'a.quoteLink',
speed: 425,
prevNext: true,
responsive: true,
prevHtml: '<a href="#" class="quote-left-indicator"><i class="icon-arrow-left"></i></a>',
nextHtml: '<a href="#" class="quote-right-indicator"><i class="icon-arrow-right"></i></a>',
useCSS: true,
continuous: true,
effect: "fadeOutIn",
updateBefore: true
});
//MAGNIFIC POPUP
dp('.popup').magnificPopup({
type: 'image'
});
//PARALLAX
dp('.parallaxize').parallax("50%", 0.3);
// CONTACT SLIDER
dp("#contact-slider").sudoSlider({
customLink: 'a.contactLink',
speed: 750,
responsive: true,
prevNext: false,
useCSS: false,
continuous: false,
updateBefore: true,
effect: "fadeOutIn"
});
//Map
dp('#map').gmap3({
map: {
options: {
maxZoom: 15
}
},
marker: {
address: "Haltern am See, Weseler Str. 151", // PUT YOUR ADDRESS HERE
options: {
icon: new google.maps.MarkerImage(
"http://cdn.webiconset.com/map-icons/images/pin6.png",
new google.maps.Size(42, 69, "px", "px")
)
}
}
},
"autofit");
});
dp(window).load(function() {
dp("#lazyload").fadeOut();
});<|fim▁end|> | dp(this).stop().animate({ |
<|file_name|>sync.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals, print_function
"""
Sync's doctype and docfields from txt files to database
perms will get synced only if none exist
"""
import frappe
import os
from frappe.modules.import_file import import_file_by_path
from frappe.modules.patch_handler import block_user
from frappe.utils import update_progress_bar
def sync_all(force=0, verbose=False, reset_permissions=False):
block_user(True)
for app in frappe.get_installed_apps():
sync_for(app, force, verbose=verbose, reset_permissions=reset_permissions)
block_user(False)
frappe.clear_cache()
def sync_for(app_name, force=0, sync_everything = False, verbose=False, reset_permissions=False):
files = []
if app_name == "frappe":
# these need to go first at time of install
for d in (("core", "docfield"), ("core", "docperm"), ("core", "has_role"), ("core", "doctype"),
("core", "user"), ("core", "role"), ("custom", "custom_field"),
("custom", "property_setter"), ("website", "web_form"),
("website", "web_form_field"), ("website", "portal_menu_item")):
files.append(os.path.join(frappe.get_app_path("frappe"), d[0],
"doctype", d[1], d[1] + ".json"))
for module_name in frappe.local.app_modules.get(app_name) or []:
folder = os.path.dirname(frappe.get_module(app_name + "." + module_name).__file__)
get_doc_files(files, folder, force, sync_everything, verbose=verbose)
l = len(files)
if l:
for i, doc_path in enumerate(files):
import_file_by_path(doc_path, force=force, ignore_version=True,
reset_permissions=reset_permissions, for_sync=True)
#print module_name + ' | ' + doctype + ' | ' + name
frappe.db.commit()
# show progress bar
update_progress_bar("Updating DocTypes for {0}".format(app_name), i, l)
print()
def get_doc_files(files, start_path, force=0, sync_everything = False, verbose=False):
"""walk and sync all doctypes and pages"""
# load in sequence - warning for devs
document_types = ['doctype', 'page', 'report', 'print_format',
'website_theme', 'web_form', 'email_alert', 'print_style',
'data_migration_mapping', 'data_migration_plan']
for doctype in document_types:
doctype_path = os.path.join(start_path, doctype)
if os.path.exists(doctype_path):
for docname in os.listdir(doctype_path):
if os.path.isdir(os.path.join(doctype_path, docname)):
doc_path = os.path.join(doctype_path, docname, docname) + ".json"
if os.path.exists(doc_path):<|fim▁hole|><|fim▁end|> | if not doc_path in files:
files.append(doc_path) |
<|file_name|>PeriodDeltaChain.js<|end_file_name|><|fim▁begin|>/** PeriodLister Class */
function PeriodDeltaChain(params) {
this.period_root_id = params.period_root_id;
this.date_format = this.set_or_default(params.date_format, '');
this.set_due_date(params.due_date);
this.period_class = this.set_or_default(params.period_class, 'period');
}
PeriodDeltaChain.prototype.refresh = function() {
var current_time = this.due_date;
var format = this.date_format;
var period_selector = '#' + this.period_root_id + ' .' + this.period_class;
var me = this;
jQuery(period_selector).each(function() {
var from_time_node = this.querySelector('.PeriodDeltaChain_FromTime');
var to_time_node = this.querySelector('.PeriodDeltaChain_ToTime');
var hours_value = this.querySelector('.PeriodDeltaChain_Hours').value;
var from_time = moment(current_time, me.date_format);
var to_time = moment(current_time, me.date_format);
jQuery(from_time_node).html(from_time.format(format));
jQuery(to_time_node).html(to_time.add('hours', hours_value).format(format));
current_time = to_time;
});
if (jQuery(period_selector).length < 2) {
jQuery(period_selector + ' a').hide();
} else {
jQuery(period_selector + ' a').show();<|fim▁hole|> delete this.due_date;
this.due_date = convert_date(new_due_date);
}
PeriodDeltaChain.prototype.set_or_default = function(value, default_value) {
if (typeof value == 'undefined') {
return default_value;
}
return value;
}
/** Converts date string to an actual Date object. */
function convert_date(due_date) {
if (due_date.indexOf(' ') > -1) {
var arr_date = due_date.split(/[ T]/).filter(function (s) {
return s !== '';
});
due_date = arr_date[0] + ' ' + arr_date[1] + ' ' + arr_date[2];
}
return due_date;
}<|fim▁end|> | }
}
PeriodDeltaChain.prototype.set_due_date = function(new_due_date) { |
<|file_name|>0058_memberprofile1.py<|end_file_name|><|fim▁begin|># Generated by Django 2.2.10 on 2021-06-08 14:37
import django.db.models.deletion
from django.db import (
migrations,
models,
)
from accelerator.utils import copy_m2m_fields
def migrate_member_profile_data(apps, schema_editor):
MemberProfile = apps.get_model('accelerator', 'MemberProfile')
MemberProfile1 = apps.get_model('accelerator', 'MemberProfile1')
CoreProfile = apps.get_model('accelerator', 'CoreProfile')
exclude = CoreProfile.objects.all().values_list('user_id', flat=True)
m2m_fields = ['gender_identity', 'interest_categories', 'program_families',
'ethno_racial_identification', 'additional_industries',
'functional_expertise', 'mentoring_specialties']
for profile in MemberProfile.objects.exclude(user_id__in=exclude):
profile_dict = profile.__dict__.copy()
profile_dict.pop("_state")
profile_dict.pop("id")
new_profile = MemberProfile1.objects.create(**profile_dict)
copy_m2m_fields(profile, new_profile, m2m_fields)<|fim▁hole|> dependencies = [
('accelerator', '0057_update_polymorphic_ctype'),
]
operations = [
migrations.CreateModel(
name='MemberProfile1',
fields=[
('coreprofile_ptr',
models.OneToOneField(
auto_created=True,
on_delete=django.db.models.deletion.CASCADE,
parent_link=True,
primary_key=True,
serialize=False,
to='accelerator.CoreProfile')),
],
options={
'db_table': 'accelerator_memberprofile1',
},
bases=('accelerator.coreprofile',),
),
migrations.RunPython(migrate_member_profile_data,
migrations.RunPython.noop),
]<|fim▁end|> |
class Migration(migrations.Migration): |
<|file_name|>visualize.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Project : LM4paper
# Created by igor on 17-3-14
import os
import sys
import time
import json
<|fim▁hole|>import numpy as np
import tensorflow as tf
from tensorflow.contrib.tensorboard.plugins import projector
from mixlm.lm_train import *
from mixlm.clstmdnn import CLSTMDNN
from bmlm.common import CheckpointLoader
def load_from_checkpoint(saver, logdir):
sess = tf.get_default_session()
ckpt = tf.train.get_checkpoint_state(logdir)
if ckpt and ckpt.model_checkpoint_path:
if os.path.isabs(ckpt.model_checkpoint_path):
saver.restore(sess, ckpt.model_checkpoint_path)
else:
saver.restore(sess, os.path.join(logdir, ckpt.model_checkpoint_path))
return True
return False
class Model():
def __init__(self, logdir):
hps = CLSTMDNN.get_default_hparams().parse(FLAGS.hpconfig)
hps.num_gpus = FLAGS.num_gpus
hps.batch_size = 1
self.word_vocab = Vocabulary.from_file(os.path.join(FLAGS.vocabdir, "1b_word_vocab.txt"))
self.char_vocab = Vocabulary.from_file(os.path.join(FLAGS.vocabdir, "1b_char_vocab.txt"))
with tf.variable_scope("model"):
hps.num_sampled = 0
hps.keep_prob = 1.0
self.model = CLSTMDNN(hps, "eval", "/cpu:0")
if hps.average_params:
print("Averaging parameters for evaluation.")
self.saver = tf.train.Saver(self.model.avg_dict)
else:
self.saver = tf.train.Saver()
# Use only 4 threads for the evaluation
config = tf.ConfigProto(allow_soft_placement=True,
intra_op_parallelism_threads=20,
inter_op_parallelism_threads=1)
self.sess = tf.Session(config=config)
with self.sess.as_default():
if load_from_checkpoint(self.saver, logdir):
global_step = self.model.global_step.eval()
print("Successfully loaded model at step=%s." % global_step)
else:
print("Can't restore model from %s" % logdir)
self.hps = hps
def get_char_embedding(self, char):
id = self.char_vocab.get_id(char)
x = np.zeros(shape=(4, 20, 16))
x[:, :, :] = id
vector = self.sess.run([self.model.char_embedding.outputs],
feed_dict={self.model.char_x: x})
# print(self.model.char_embedding)
return vector[0][0][0]
def get_word_embedding(self, word):
id = self.word_vocab.get_id(word)
x = np.zeros(shape=(4, 20))
x[:, :] = id
vector = self.sess.run([self.model.word_embedding.outputs],
feed_dict={self.model.word_x: x})
return vector[0][0][0]
def visualize_char(model, path="/home/aegis/igor/LM4paper/tests/textchar.txt", ):
chars = open(path, 'r').read().splitlines()
embedding = np.empty(shape=(len(chars), model.hps.emb_char_size), dtype=np.float32)
for i, char in enumerate(chars):
embedding[i] = model.get_char_embedding(char)
print(embedding)
print(embedding.shape)
logdir = "/data/visualog/char/"
metadata = os.path.join(logdir, "metadata.tsv")
with open(metadata, "w") as metadata_file:
for c in chars:
metadata_file.write("%s\n" % c)
tf.reset_default_graph()
with tf.Session() as sess:
X = tf.Variable([0.0], name='embedding')
place = tf.placeholder(tf.float32, shape=embedding.shape)
set_x = tf.assign(X, place, validate_shape=False)
sess.run(tf.global_variables_initializer())
sess.run(set_x, feed_dict={place: embedding})
saver = tf.train.Saver([X])
saver.save(sess, os.path.join(logdir, 'char.ckpt'))
config = projector.ProjectorConfig()
# One can add multiple embeddings.
embedding = config.embeddings.add()
embedding.tensor_name = X.name
# Link this tensor to its metadata file (e.g. labels).
embedding.metadata_path = metadata
# Saves a config file that TensorBoard will read during startup.
projector.visualize_embeddings(tf.summary.FileWriter(logdir), config)
def visualize_word(model, path="/home/aegis/igor/LM4paper/tests/testdata.txt"):
words = open(path, 'r').read().splitlines()
embedding = np.empty(shape=(len(words), model.hps.emb_word_size), dtype=np.float32)
for i, w in enumerate(words):
embedding[i] = model.get_word_embedding(w)
print(embedding)
print(embedding.shape)
logdir = "/data/visualog/word/"
metadata = os.path.join(logdir, "metadata.tsv")
with open(metadata, "w") as metadata_file:
for w in words:
metadata_file.write("%s\n" % w)
tf.reset_default_graph()
with tf.Session() as sess:
X = tf.Variable([0.0], name='embedding')
place = tf.placeholder(tf.float32, shape=embedding.shape)
set_x = tf.assign(X, place, validate_shape=False)
sess.run(tf.global_variables_initializer())
sess.run(set_x, feed_dict={place: embedding})
saver = tf.train.Saver([X])
saver.save(sess, os.path.join(logdir, 'word.ckpt'))
config = projector.ProjectorConfig()
# One can add multiple embeddings.
embedding = config.embeddings.add()
embedding.tensor_name = X.name
# Link this tensor to its metadata file (e.g. labels).
embedding.metadata_path = metadata
# Saves a config file that TensorBoard will read during startup.
projector.visualize_embeddings(tf.summary.FileWriter(logdir), config)
if __name__ == '__main__':
model = Model(logdir="/data/lmlog/train")
# vector = model.get_word_embedding("hello")
# print(vector)
visualize_word(model, path="/home/aegis/igor/LM4paper/tests/testword.txt")<|fim▁end|> | |
<|file_name|>test_open_graph.py<|end_file_name|><|fim▁begin|>import lassie
from .base import LassieBaseTestCase
class LassieOpenGraphTestCase(LassieBaseTestCase):
def test_open_graph_all_properties(self):
url = 'http://lassie.it/open_graph/all_properties.html'
data = lassie.fetch(url)
self.assertEqual(data['url'], url)
self.assertEqual(data['title'], 'Lassie Open Graph All Properies Test')
self.assertEqual(data['description'], 'Just a test template with OG data!')
self.assertEqual(data['locale'], 'en_US')
self.assertEqual(data['site_name'], 'Lassie')
self.assertEqual(len(data['images']), 1)
image = data['images'][0]
self.assertEqual(image['src'], 'http://i.imgur.com/cvoR7zv.jpg')
self.assertEqual(image['width'], 550)<|fim▁hole|>
self.assertEqual(len(data['videos']), 1)
video = data['videos'][0]
self.assertEqual(video['src'], 'http://www.youtube.com/v/dQw4w9WgXcQ?version=3&autohide=1')
self.assertEqual(video['width'], 640)
self.assertEqual(video['height'], 480)
self.assertEqual(video['type'], 'application/x-shockwave-flash')
def test_open_graph_no_og_title_no_og_url(self):
url = 'http://lassie.it/open_graph/no_og_title_no_og_url.html'
data = lassie.fetch(url)
self.assertEqual(data['url'], url)
self.assertEqual(data['title'], 'Lassie Open Graph Test | No og:title, No og:url')
def test_open_graph_og_image_plus_two_body_images(self):
url = 'http://lassie.it/open_graph/og_image_plus_two_body_images.html'
data = lassie.fetch(url)
# Try without passing "all_images", then pass it
self.assertEqual(len(data['images']), 1)
data = lassie.fetch(url, all_images=True)
self.assertEqual(len(data['images']), 3)
image_0 = data['images'][0]
image_1 = data['images'][1]
image_2 = data['images'][2]
self.assertEqual(image_0['type'], 'og:image')
self.assertEqual(image_1['type'], 'body_image')
self.assertEqual(image_2['type'], 'body_image')
def test_open_graph_og_image_relative_url(self):
url = 'http://lassie.it/open_graph/og_image_relative_url.html'
data = lassie.fetch(url)
self.assertEqual(
data['images'][0]['src'], 'http://lassie.it/open_graph/name.jpg')<|fim▁end|> | self.assertEqual(image['height'], 365)
self.assertEqual(image['type'], 'og:image') |
<|file_name|>__main__.py<|end_file_name|><|fim▁begin|>import sys
## Make sure pyqtgraph is importable
p = os.path.dirname(os.path.abspath(__file__))
p = os.path.join(p, '..', '..')
sys.path.insert(0, p)
from pyqtgraph.Qt import QtCore, QtGui
from DockArea import *
from Dock import *
app = QtGui.QApplication([])<|fim▁hole|>from Dock import Dock
d1 = Dock("Dock1", size=(200,200))
d2 = Dock("Dock2", size=(100,100))
d3 = Dock("Dock3", size=(1,1))
d4 = Dock("Dock4", size=(50,50))
d5 = Dock("Dock5", size=(100,100))
d6 = Dock("Dock6", size=(300,300))
area.addDock(d1, 'left')
area.addDock(d2, 'right')
area.addDock(d3, 'bottom')
area.addDock(d4, 'right')
area.addDock(d5, 'left', d1)
area.addDock(d6, 'top', d4)
area.moveDock(d6, 'above', d4)
d3.hideTitleBar()
print "===build complete===="
for d in [d1, d2, d3, d4, d5]:
w = QtGui.QWidget()
l = QtGui.QVBoxLayout()
w.setLayout(l)
btns = []
for i in range(4):
btns.append(QtGui.QPushButton("%s Button %d"%(d.name(), i)))
l.addWidget(btns[-1])
d.w = (w, l, btns)
d.addWidget(w)
import pyqtgraph as pg
p = pg.PlotWidget()
d6.addWidget(p)
print "===widgets added==="
#s = area.saveState()
#print "\n\n-------restore----------\n\n"
#area.restoreState(s)
s = None
def save():
global s
s = area.saveState()
def load():
global s
area.restoreState(s)
#d6.container().setCurrentIndex(0)
#d2.label.setTabPos(40)
#win2 = QtGui.QMainWindow()
#area2 = DockArea()
#win2.setCentralWidget(area2)
#win2.resize(800,800)
win.show()
#win2.show()<|fim▁end|> | win = QtGui.QMainWindow()
area = DockArea()
win.setCentralWidget(area)
win.resize(800,800) |
<|file_name|>tagging.py<|end_file_name|><|fim▁begin|>from django import template
from django.contrib.contenttypes.models import ContentType
from django.template.loader import render_to_string
from ..models import TaggedItem, Tag
register = template.Library()
@register.assignment_tag
def get_tagged_items_for(object):<|fim▁hole|> '''
return TaggedItem.objects.get_for_object(object)
@register.assignment_tag
def get_tags_for(object):
'''retrieve tags which relative with the specific object.
:syntax: {% get_tags_for <object> as <variable> %}
'''
return Tag.objects.get_for_object(object)
@register.assignment_tag
def get_content_type_for(object):
'''retrieve content type object for the specific object.
:syntax: {% get_content_type_for <object> as <variable> %}
'''
return ContentType.objects.get_for_model(object)
@register.simple_tag
def render_generic_tagging_head_tag():
return render_to_string('generic_tagging/head.html')
@register.simple_tag
def render_generic_tagging_component_tag_for(object):
return render_to_string('generic_tagging/component.html', {'object': object})<|fim▁end|> | '''retrieve tagged items which relative with the specific object.
:syntax: {% get_tagged_items_for <object> as <variable> %} |
<|file_name|>power.model.ts<|end_file_name|><|fim▁begin|>export class Power{
public id:string;
public code:string;
public url:string;
public title:string;
public explain:string;
public menuId:string;
public type:string;
public isValid:boolean;
public isChecked:boolean=false;
public operation:Array<string>=new Array<string>();
public operationChecked:Array<string>=new Array<string>();
public operationMap:Array<string>=new Array<string>();
public checkboxList:Array<CheckboxList>=[]
}
export class RolePower extends Power{
public roleId:string;
}
export class NavMenu{
public id:string;
public code:string;
public url:string;
public isValid:boolean=true;
public isLeaf:boolean=false;
public title:string;
public isChecked:boolean=false;
}
<|fim▁hole|> public isUPDATE:boolean;
public isDELETE:boolean;
public isCHECK:boolean;
}
export class RoleInfo{
public id:string;
public roleName:string;
public name:string;
public desc:string;
}
export class Tree{
public id:string;
public pid:string;
public name:string;
public isLeaf:boolean;
public IsSubMenu:boolean;
public subTrees:Array<Tree>=[];
constructor(id:string,pid:string,name:string,isLeaf:boolean){
this.id=id;
this.pid=pid;
this.name=name;
this.isLeaf=isLeaf;
}
}<|fim▁end|> | export class PowerFun{
public isSHOW:boolean;
public isADD:boolean; |
<|file_name|>scroll-top.component.ts<|end_file_name|><|fim▁begin|>import {Component, HostListener, Inject, OnInit} from '@angular/core';
import {DOCUMENT} from "@angular/common";
@Component({
selector: 'scroll-to-top',<|fim▁hole|>
windowScrolled: boolean;
constructor(@Inject(DOCUMENT) private document: Document) {
}
@HostListener("window:scroll", [])
onWindowScroll() {
if (window.pageYOffset || document.documentElement.scrollTop || document.body.scrollTop > 100) {
this.windowScrolled = true;
} else if (this.windowScrolled && window.pageYOffset || document.documentElement.scrollTop || document.body.scrollTop < 10) {
this.windowScrolled = false;
}
}
scrollToTop() {
window.scrollTo(0,0);
}
ngOnInit(): void {
}
}<|fim▁end|> | templateUrl: './scroll-top.component.html',
styleUrls: ['./scroll-top.component.css']
})
export class ScrollTopComponent implements OnInit { |
<|file_name|>required_with.rs<|end_file_name|><|fim▁begin|>use params::{Map, Value};
pub fn validate_required_with(values: &Map,
field: &[&str],
others: &[Vec<&str>])
-> Result<Option<Value>, String> {
let mut required = false;
for other in others {
if required {
break;
}
required = match values.find(other) {
None |
Some(&Value::Null) => false,
Some(&Value::String(ref value)) if value.is_empty() => false,
Some(&Value::Array(ref value)) if value.is_empty() => false,
Some(&Value::Map(ref value)) if value.is_empty() => false,
_ => true,
};
}
if required {
match values.find(field) {
Some(&Value::String(ref value)) if value.is_empty() => {
Err(format!("The {} field is required.",
field.last()
.unwrap()
.to_lowercase()
.replace("_", " ")))
}
Some(&Value::Array(ref value)) if value.is_empty() => {
Err(format!("The {} field is required.",
field.last()
.unwrap()
.to_lowercase()
.replace("_", " ")))<|fim▁hole|> Err(format!("The {} field is required.",
field.last()
.unwrap()
.to_lowercase()
.replace("_", " ")))
}
Some(&Value::Null) |
None => {
Err(format!("The {} field is required.",
field.last()
.unwrap()
.to_lowercase()
.replace("_", " ")))
}
_ => Ok(None),
}
} else {
Ok(None)
}
}<|fim▁end|> | }
Some(&Value::Map(ref value)) if value.is_empty() => { |
<|file_name|>cli.py<|end_file_name|><|fim▁begin|># Copyright (c) 2010 by Dan Jacob.
#
# Some rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# * The names of the contributors may not be used to endorse or
# promote products derived from this software without specific
# prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import getpass
try:
assert raw_input
except NameError:
raw_input = input
def prompt(name, default=None):
"""
Grab user input from command line.
:param name: prompt text
:param default: default value if no input provided.
"""
prompt = name + (default and ' [%s]' % default or '')
prompt += name.endswith('?') and ' ' or ': '
while True:
rv = raw_input(prompt)
if rv:
return rv
if default is not None:
return default
def prompt_pass(name, default=None):
"""
Grabs hidden (password) input from command line.
:param name: prompt text
:param default: default value if no input provided.
"""
prompt = name + (default and ' [%s]' % default or '')
prompt += name.endswith('?') and ' ' or ': '
while True:
rv = getpass.getpass(prompt)
if rv:
return rv
if default is not None:
return default
def prompt_bool(name, default=False, yes_choices=None, no_choices=None):
"""
Grabs user input from command line and converts to boolean
value.
:param name: prompt text
:param default: default value if no input provided.
:param yes_choices: default 'y', 'yes', '1', 'on', 'true', 't'<|fim▁hole|> :param no_choices: default 'n', 'no', '0', 'off', 'false', 'f'
"""
yes_choices = yes_choices or ('y', 'yes', '1', 'on', 'true', 't')
no_choices = no_choices or ('n', 'no', '0', 'off', 'false', 'f')
while True:
rv = prompt(name + '?', default and yes_choices[0] or no_choices[0])
if rv.lower() in yes_choices:
return True
elif rv.lower() in no_choices:
return False
def prompt_choices(name, choices, default=None, no_choice=('none',)):
"""
Grabs user input from command line from set of provided choices.
:param name: prompt text
:param choices: list or tuple of available choices.
:param default: default value if no input provided.
:param no_choice: acceptable list of strings for "null choice"
"""
_choices = []
options = []
for choice in choices:
options.append(choice)
_choices.append(choice)
while True:
rv = prompt(name + '? - (%s)' % ', '.join(options), default)
rv = rv.lower()
if rv in no_choice:
return None
if rv in _choices:
return rv<|fim▁end|> | |
<|file_name|>outlive.rs<|end_file_name|><|fim▁begin|>extern crate rc;
use rc::Rc;<|fim▁hole|> let boxed_fn: Box<Fn() -> i32> = Box::new(|| { i });
//~^ error `i` does not live long enough
Rc::from(boxed_fn)
};
}<|fim▁end|> |
fn main() {
let rc_fn = {
let i = 0; |
<|file_name|>static-vec-repeat-not-constant.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.<|fim▁hole|>//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn foo() -> int { 23 }
static a: [int, ..2] = [foo(), ..2]; //~ ERROR: function calls in constants are limited to struct and enum constructors
fn main() {}<|fim▁end|> | |
<|file_name|>heart.py<|end_file_name|><|fim▁begin|>from dolfin import error, info
class Heart:
def __init__(self, cell_model):
self._cell_model = cell_model
# Mandatory stuff
def mesh(self):
error("Need to prescribe domain")
def conductivities(self):
error("Need to prescribe conducitivites")
# Optional stuff
def applied_current(self):
return None
def end_time(self):
info("Using default end time (T = 1.0)")
return 1.0
def essential_boundaries(self):
return None
<|fim▁hole|> return None
def initial_conditions(self):
return None
def neumann_boundaries(self):
return None
def boundary_current(self):
return None
# Peculiar stuff (for now)
def is_dynamic(self):
return True
# Helper functions
def cell_model(self):
return self._cell_model<|fim▁end|> | def essential_boundary_values(self): |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>extern crate orbital;
use orbital::Color;
use window::ConsoleWindow;
mod window;<|fim▁hole|> let mut window = ConsoleWindow::new(-1, -1, 576, 400, "Terminal");
loop {
window.print("# ", Color::rgb(255, 255, 255));
if let Some(line) = window.read() {
window.print(&format!("{}\n", line), Color::rgb(224, 224, 224));
} else {
break;
}
}
}<|fim▁end|> |
#[no_mangle] pub fn main() { |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
calabar.tunnels
This module encapsulates various tunnel processes and their management.
"""
import signal
import os
import sys
import psi.process
TUN_TYPE_STR = 'tunnel_type' # Configuration/dictionary key for the type of tunnel
# Should match the tunnel_type argument to Tunnel __init__ methods
PROC_NOT_RUNNING = [
psi.process.PROC_STATUS_DEAD,
psi.process.PROC_STATUS_ZOMBIE,
psi.process.PROC_STATUS_STOPPED
]
def is_really_running(tunnel):
pt = psi.process.ProcessTable()
try:
proc = pt.get(tunnel.proc.pid, None)
except AttributeError:
# we might not actually have a tunnel.proc or it might poof while we're checking
return False
if proc:
status = proc.status
if not status in PROC_NOT_RUNNING:
return True
return False
class TunnelsAlreadyLoadedException(Exception):
"""Once tunnels are loaded the first time, other methods must be used to
update them"""
pass
class ExecutableNotFound(Exception):
"""
The given tunnel executable wasn't found or isn't executable.
"""
pass
class TunnelTypeDoesNotMatch(Exception):
"""
The given ``tun_type`` doesn't match expected Tunnel.
"""
pass
class TunnelManager():
"""
A class for working with multiple :class:`calabar.tunnels.base.TunnelBase`
tunnels.
Creating this tunnels registers it for SIG_CHLD signals, so only ONE
TunnelManager can exist at a time for purposes of keeping the other tunnels
running.
"""
def __init__(self):
self.tunnels = []
self._register_for_close()
def load_tunnels(self, config):
"""
Load config information to create all required tunnels.
"""
if self.tunnels:
raise TunnelsAlreadyLoadedException("TunnelManager.load_tunnels can't be called after tunnels have already been loaded. Use update_tunnels() instead")
tun_confs_d = get_tunnels(config)
for name, tun_conf_d in tun_confs_d.items():
t = self._load_tunnel(name, tun_conf_d)
self.tunnels.append(t)
def _load_tunnel(self, tunnel_name, tun_conf_d):
"""
Create and return a tunnel instance from a ``tun_conf_d`` dictionary.
``tun_conf_d`` is a dictionary matching the output of a tunnel's
implementation of :mod:`calabar.tunnels.base.TunnelBase:parse_configuration`
method.
"""
from calabar.conf import TUNNELS
tun_type = tun_conf_d[TUN_TYPE_STR]
for tunnel in TUNNELS:
if tunnel.TUNNEL_TYPE == tun_type:
t = tunnel(name=tunnel_name, **tun_conf_d)
return t
raise NotImplementedError()
def start_tunnels(self):
"""
Start all of the configured tunnels and register to keep them running.
"""
for t in self.tunnels:
try:
t.open()
except ExecutableNotFound, e:
print >> sys.stderr, e
def continue_tunnels(self):
"""
Ensure that all of the tunnels are still running.
"""
for t in self.tunnels:
if not t.is_running():
print "TUNNEL [%s] EXITED" % t.name
print "RESTARTING"
try:
t.open()
except ExecutableNotFound, e:
print >> sys.stderr, e
else:
print "[%s]:%s running" % (t.name, t.proc.pid)
def _register_for_close(self):
"""
Register the child tunnel process for a close event. This keeps process
from becoming defunct.
"""
signal.signal(signal.SIGCHLD, self._handle_child_close)
# Register for a termination signal so we can clean up children
signal.signal(signal.SIGTERM, self._handle_terminate)
def _handle_terminate(self, signum, frame):
for t in self.tunnels:
t.close(wait=False)
exit()
def _handle_child_close(self, signum, frame):
"""
Handle a closed child.
Call :mod:os.wait() on the process so that it's not defunct.
"""
assert signum == signal.SIGCHLD
print "CHILD TUNNEL CLOSED"
pid, exit_status = os.wait()
for t in self.tunnels:
# For all of the "closing" tunnels, if they've stopped running, handle the close
if t.closing and not t.is_running():
# Assume the same exit_status
t.handle_closed(exit_status)
TUNNEL_PREFIX = 'tunnel:'
def get_tunnels(config):
"""
Return a dictionary of dictionaries containg tunnel configurations based on the
given SafeConfigParser instance.
An example return value might be::
{
'foo':
{
'tunnel_type': 'vpnc',
'conf_file': '/etc/calabar/foo.conf',
'ips': [10.10.254.1]
},
'bar':
{
'tunnel_type': 'ssh',
'from': '[email protected]:386',
'to': '127.0.0.1:387
}
}
"""
tun_confs_d = {}
for section in config.sections():
if section.startswith(TUNNEL_PREFIX):
tun_conf_d = parse_tunnel(config, section)
tun_name = section[len(TUNNEL_PREFIX):]
tun_confs_d[tun_name] = tun_conf_d
return tun_confs_d
def parse_tunnel(config, section):
"""
Parse the given ``section`` in the given ``config``
:mod:`ConfigParser.ConfigParser` object to generate a tunnel configuration
dictionary using all configured tunnel types and their configuration
parsers.
"""<|fim▁hole|> tun_type = config.get(section, TUN_TYPE_STR)
for tunnel in TUNNELS:
if tun_type == tunnel.TUNNEL_TYPE:
tun_conf_d = tunnel.parse_configuration(config, section)
return tun_conf_d
raise NotImplementedError("The tunnel type [%s] isn't supported" % tun_type)<|fim▁end|> | from calabar.conf import TUNNELS |
<|file_name|>customEvent.js<|end_file_name|><|fim▁begin|>/**
* Custom events to control showing and hiding of tooltip
*
* @attributes
* - `event` {String}
* - `eventOff` {String}
*/
export const checkStatus = function(dataEventOff, e) {
const { show } = this.state;
const { id } = this.props;
const isCapture = this.isCapture(e.currentTarget);
const currentItem = e.currentTarget.getAttribute('currentItem');
if (!isCapture) e.stopPropagation();
if (show && currentItem === 'true') {
if (!dataEventOff) this.hideTooltip(e);
} else {
e.currentTarget.setAttribute('currentItem', 'true');
setUntargetItems(e.currentTarget, this.getTargetArray(id));
this.showTooltip(e);
}
};
const setUntargetItems = function(currentTarget, targetArray) {
for (let i = 0; i < targetArray.length; i++) {
if (currentTarget !== targetArray[i]) {
targetArray[i].setAttribute('currentItem', 'false');
} else {
targetArray[i].setAttribute('currentItem', 'true');
}
}
};
const customListeners = {
id: '9b69f92e-d3fe-498b-b1b4-c5e63a51b0cf',
set(target, event, listener) {
if (this.id in target) {
const map = target[this.id];
map[event] = listener;
} else {
// this is workaround for WeakMap, which is not supported in older browsers, such as IE
Object.defineProperty(target, this.id, {
configurable: true,
value: { [event]: listener }
});
}
},
get(target, event) {
const map = target[this.id];
if (map !== undefined) {<|fim▁hole|> return map[event];
}
}
};
export default function(target) {
target.prototype.isCustomEvent = function(ele) {
const { event } = this.state;
return event || !!ele.getAttribute('data-event');
};
/* Bind listener for custom event */
target.prototype.customBindListener = function(ele) {
const { event, eventOff } = this.state;
const dataEvent = ele.getAttribute('data-event') || event;
const dataEventOff = ele.getAttribute('data-event-off') || eventOff;
dataEvent.split(' ').forEach(event => {
ele.removeEventListener(event, customListeners.get(ele, event));
const customListener = checkStatus.bind(this, dataEventOff);
customListeners.set(ele, event, customListener);
ele.addEventListener(event, customListener, false);
});
if (dataEventOff) {
dataEventOff.split(' ').forEach(event => {
ele.removeEventListener(event, this.hideTooltip);
ele.addEventListener(event, this.hideTooltip, false);
});
}
};
/* Unbind listener for custom event */
target.prototype.customUnbindListener = function(ele) {
const { event, eventOff } = this.state;
const dataEvent = event || ele.getAttribute('data-event');
const dataEventOff = eventOff || ele.getAttribute('data-event-off');
ele.removeEventListener(dataEvent, customListeners.get(ele, event));
if (dataEventOff) ele.removeEventListener(dataEventOff, this.hideTooltip);
};
}<|fim▁end|> | |
<|file_name|>hr_employee.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import models, _
class HrEmployee(models.Model):
_inherit = 'hr.employee'
def action_open_work_entries(self):
self.ensure_one()
return {<|fim▁hole|> 'context': {'default_employee_id': self.id},
'domain': [('employee_id', '=', self.id)],
}<|fim▁end|> | 'type': 'ir.actions.act_window',
'name': _('%s work entries', self.display_name),
'view_mode': 'calendar,gantt,tree,form',
'res_model': 'hr.work.entry', |
<|file_name|>extractSupportQuery.js<|end_file_name|><|fim▁begin|><|fim▁hole|>export default function extractSupportQuery(ruleSet: string): string {
return ruleSet
.split('{')[0]
.slice(9)
.trim()
}<|fim▁end|> | /* @flow */ |
<|file_name|>actual.js<|end_file_name|><|fim▁begin|>function foo() {
var x = 2;
var xxx = 1;
if (xxx) {
console.log(xxx + x);
}<|fim▁hole|><|fim▁end|> | } |
<|file_name|>UpdateByExampleWithoutBLOBsMethodGenerator.java<|end_file_name|><|fim▁begin|>/**
* Copyright 2006-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mybatis.generator.codegen.ibatis2.dao.elements;
import java.util.Set;
import java.util.TreeSet;
import org.mybatis.generator.api.dom.java.FullyQualifiedJavaType;
import org.mybatis.generator.api.dom.java.Interface;
import org.mybatis.generator.api.dom.java.JavaVisibility;
import org.mybatis.generator.api.dom.java.Method;
import org.mybatis.generator.api.dom.java.Parameter;
import org.mybatis.generator.api.dom.java.TopLevelClass;
/**
*
* @author Jeff Butler
*
*/
public class UpdateByExampleWithoutBLOBsMethodGenerator extends
AbstractDAOElementGenerator {
public UpdateByExampleWithoutBLOBsMethodGenerator() {
super();
}
@Override
public void addImplementationElements(TopLevelClass topLevelClass) {
Set<FullyQualifiedJavaType> importedTypes = new TreeSet<FullyQualifiedJavaType>();
Method method = getMethodShell(importedTypes);
method
.addBodyLine("UpdateByExampleParms parms = new UpdateByExampleParms(record, example);"); //$NON-NLS-1$
StringBuilder sb = new StringBuilder();
sb.append("int rows = "); //$NON-NLS-1$
sb.append(daoTemplate.getUpdateMethod(introspectedTable
.getIbatis2SqlMapNamespace(), introspectedTable
.getUpdateByExampleStatementId(), "parms")); //$NON-NLS-1$
method.addBodyLine(sb.toString());
method.addBodyLine("return rows;"); //$NON-NLS-1$
if (context.getPlugins()
.clientUpdateByExampleWithoutBLOBsMethodGenerated(method,
topLevelClass, introspectedTable)) {
topLevelClass.addImportedTypes(importedTypes);
topLevelClass.addMethod(method);
}
}
@Override
public void addInterfaceElements(Interface interfaze) {
if (getExampleMethodVisibility() == JavaVisibility.PUBLIC) {
Set<FullyQualifiedJavaType> importedTypes = new TreeSet<FullyQualifiedJavaType>();
Method method = getMethodShell(importedTypes);
if (context.getPlugins()
.clientUpdateByExampleWithoutBLOBsMethodGenerated(method,
interfaze, introspectedTable)) {
interfaze.addImportedTypes(importedTypes);
interfaze.addMethod(method);
}
}
}
private Method getMethodShell(Set<FullyQualifiedJavaType> importedTypes) {
FullyQualifiedJavaType parameterType;
if (introspectedTable.getRules().generateBaseRecordClass()) {
parameterType = new FullyQualifiedJavaType(introspectedTable
.getBaseRecordType());
} else {
parameterType = new FullyQualifiedJavaType(introspectedTable
.getPrimaryKeyType());
}
importedTypes.add(parameterType);
Method method = new Method();
method.setVisibility(getExampleMethodVisibility());<|fim▁hole|> method.addParameter(new Parameter(new FullyQualifiedJavaType(
introspectedTable.getExampleType()), "example")); //$NON-NLS-1$
for (FullyQualifiedJavaType fqjt : daoTemplate.getCheckedExceptions()) {
method.addException(fqjt);
importedTypes.add(fqjt);
}
context.getCommentGenerator().addGeneralMethodComment(method,
introspectedTable);
return method;
}
}<|fim▁end|> | method.setReturnType(FullyQualifiedJavaType.getIntInstance());
method.setName(getDAOMethodNameCalculator()
.getUpdateByExampleWithoutBLOBsMethodName(introspectedTable));
method.addParameter(new Parameter(parameterType, "record")); //$NON-NLS-1$ |
<|file_name|>HTML.java<|end_file_name|><|fim▁begin|>package ms.aurora.browser.wrapper;
import org.apache.log4j.Logger;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.tidy.Tidy;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.transform.Result;
import javax.xml.transform.Source;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import javax.xml.xpath.*;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;<|fim▁hole|> * A parser / access layer for HTML pages
* @author Rick
*/
public final class HTML {
private static Logger logger = Logger.getLogger(HTML.class);
private Document dom;
public HTML(Document dom) {
this.dom = dom;
}
public Document getDOM() {
return dom;
}
public List<Node> searchXPath(String expression) {
List<Node> matchingElements = new ArrayList<Node>();
try {
XPathExpression expressionObj = getExpression(expression);
NodeList resultingNodeList = (NodeList) expressionObj.evaluate(dom,
XPathConstants.NODESET);
for (int index = 0; index < resultingNodeList.getLength(); index++) {
matchingElements.add(resultingNodeList.item(index));
}
} catch (XPathExpressionException e) {
logger.error("Incorrect XPath expression", e);
}
return matchingElements;
}
public List<Node> searchXPath(Node base, String expression) {
List<Node> matchingElements = new ArrayList<Node>();
try {
XPathExpression expressionObj = getExpression(expression);
NodeList resultingNodeList = (NodeList) expressionObj.evaluate(base,
XPathConstants.NODESET);
for (int index = 0; index < resultingNodeList.getLength(); index++) {
matchingElements.add(resultingNodeList.item(index));
}
} catch (XPathExpressionException e) {
logger.error("Incorrect XPath expression", e);
}
return matchingElements;
}
private XPathExpression getExpression(String expression) throws XPathExpressionException {
XPath xpath = XPathFactory.newInstance().newXPath();
return xpath.compile(expression);
}
public static HTML fromStream(InputStream stream) {
try {
/*
* UGLY ASS W3C API IS UGLY
*/
Tidy tidy = new Tidy();
tidy.setXHTML(true);
Document dom = tidy.parseDOM(stream, null);
dom.getDocumentElement().normalize();
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
DocumentBuilder db = dbf.newDocumentBuilder();
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
Source xmlSource = new DOMSource(dom);
Result outputTarget = new StreamResult(outputStream);
TransformerFactory.newInstance().newTransformer().transform(xmlSource, outputTarget);
InputStream is = new ByteArrayInputStream(outputStream.toByteArray());
return new HTML(db.parse(is));
} catch (Exception e) {
logger.error("Failed to parse HTML properly", e);
}
return null;
}
}<|fim▁end|> | import java.util.ArrayList;
import java.util.List;
/** |
<|file_name|>RetrospectiveEvent.java<|end_file_name|><|fim▁begin|><|fim▁hole|>package de.beuth.sp.screbo.eventBus.events;
import de.beuth.sp.screbo.database.Retrospective;
/**
* Superclass for all retrospective based events.
*
* @author volker.gronau
*
*/
@SuppressWarnings("serial")
public class RetrospectiveEvent extends ScreboEvent {
protected Retrospective retrospective;
public RetrospectiveEvent(Retrospective retrospective) {
super();
this.retrospective = retrospective;
}
public Retrospective getRetrospective() {
return retrospective;
}
@Override
public String toString() {
return getClass().getSimpleName() + " [retrospective=" + retrospective + "]";
}
}<|fim▁end|> | |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>require("sdk/system/unload").when(function() {
let prefService = require("sdk/preferences/service");
prefService.reset("general.useragent.site_specific_overrides");
prefService.reset("general.useragent.override.youtube.com");
prefService.reset("media.mediasource.enabled");
});<|fim▁hole|>prefService.set('general.useragent.site_specific_overrides', true);
prefService.set("general.useragent.override.youtube.com",
"Mozilla/5.0 (Linux; Android 4.4.2; Nexus 4 Build/KOT49H) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.114 Mobile Safari/537.36"
);
prefService.set("media.mediasource.enabled", true);<|fim▁end|> |
let prefService = require("sdk/preferences/service"); |
<|file_name|>trivial.rs<|end_file_name|><|fim▁begin|>// No-op test
#[test="test_trivial"]
fn test_trivial()
{
bb0: {
ASSIGN retval = ();
} RETURN;
}
// Dead code elimination, should remove any useless code
// - Unused assignments
// - Unused locals
// - Unused drop flags
// - Unused blocks
#[test="dce_exp"]
fn dce()
{
let df0 = false;
let useless: ();
bb0: {
ASSIGN useless = ();
ASSIGN useless = useless; // Never read, so will be removed
ASSIGN retval = ();
//ASSIGN retval = (); // Note: won't be de-duplicated (TODO)
} RETURN;
// Never referenced, will be removed
bb_unused: {
} GOTO bb_unused_indir;
// Indirectly unused, will still be removed
bb_unused_indir: {
} DIVERGE;
// References itself without other reference, will be removed
bb_unused_self: {
} GOTO bb_unused_self;
}
fn dce_exp()
{<|fim▁hole|>}
// Inlining
#[test="inlining_exp"]
fn inlining()
{
bb0: {
} CALL retval = ::""::inlining_target() => bb1 else bb2;
bb1: {
} RETURN;
bb2: {
} DIVERGE;
}
fn inlining_target()
{
bb0: {
ASSIGN retval = ();
} RETURN;
}
fn inlining_exp()
{
bb0: {
ASSIGN retval = ();
} RETURN;
}
// Constant propagation leading to DCE
#[test="constprop_exp"]
fn constprop()
{
let v1: bool;
bb0: {
ASSIGN v1 = true;
} IF v1 => bb1 else bb2;
bb1: {
ASSIGN retval = ();
} RETURN;
bb2: {
} DIVERGE;
}
fn constprop_exp()
{
bb0: {
ASSIGN retval = ();
} RETURN;
}<|fim▁end|> | bb0: {
ASSIGN retval = ();
} RETURN; |
<|file_name|>ch08-arrays--splice.js<|end_file_name|><|fim▁begin|>// ch08-arrays--splice.js
'use strict';
// ADDING OR REMOVING ELEMENTS AT ANY POSITION
// SPLICE
// The first argument is the index you want to start modifying
// The second argument is the number of elements to remove (use 0 if you don’t want to remove any elements)
// The remaining arguments are the elements to be added
const arr = [1, 5, 7];
console.log(`arr: \t${arr}\tlength: \t${arr.length}`);
// arr: 1,5,7 length: 3
const arr1 = arr.splice(1, 0, 2, 3, 4);
console.log(`arr: \t${arr}\tlength: ${arr.length}\tarr1: \t${arr1}\tlength: ${arr1.length}`);
// arr: 1,2,3,4,5,7 length: 6 arr1: [] length: 0
<|fim▁hole|>
const arr2 = arr.splice(5, 0, 6);
console.log(`arr: \t${arr}\tlength: ${arr.length}\tarr2: \t${arr2}\tlength: ${arr2.length}`);
// arr: 1,2,3,4,5,6,7 length: 7 arr2: [] length: 0
const arr3 = arr.splice(1, 2);
console.log(`arr: \t${arr}\tlength: ${arr.length}\tarr3: \t${arr3}\tlength: ${arr3.length}`);
// arr: 1,4,5,6,7 length: 5 arr3: 2,3 length: 2
const arr4 = arr.splice(2, 1, 'a', 'b');
console.log(`arr: \t${arr}\tlength: ${arr.length}\tarr4: \t${arr4}\tlength: ${arr4.length}`);
// arr: 1,4,a,b,6,7 length: 6 arr4: 5 length: 1<|fim▁end|> | |
<|file_name|>test_geoip.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
import socket
import unittest
import warnings
from unittest import skipUnless
from django.conf import settings
from django.contrib.gis.geoip import HAS_GEOIP
from django.contrib.gis.geos import HAS_GEOS, GEOSGeometry
from django.test import ignore_warnings
from django.utils import six
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.encoding import force_text
if HAS_GEOIP:
from django.contrib.gis.geoip import GeoIP, GeoIPException
from django.contrib.gis.geoip.prototypes import GeoIP_lib_version
# Note: Requires use of both the GeoIP country and city datasets.
# The GEOIP_DATA path should be the only setting set (the directory
# should contain links or the actual database files 'GeoIP.dat' and
# 'GeoLiteCity.dat'.
@skipUnless(
HAS_GEOIP and getattr(settings, "GEOIP_PATH", None),
"GeoIP is required along with the GEOIP_PATH setting."
)
@ignore_warnings(category=RemovedInDjango20Warning)
class GeoIPTest(unittest.TestCase):
addr = '162.242.220.127'
fqdn = 'www.djangoproject.com'
def _is_dns_available(self, domain):
# Naive check to see if there is DNS available to use.
# Used to conditionally skip fqdn geoip checks.
# See #25407 for details.
ErrClass = socket.error if six.PY2 else OSError
try:
socket.gethostbyname(domain)
return True
except ErrClass:
return False
def test01_init(self):
"Testing GeoIP initialization."
g1 = GeoIP() # Everything inferred from GeoIP path
path = settings.GEOIP_PATH
g2 = GeoIP(path, 0) # Passing in data path explicitly.
g3 = GeoIP.open(path, 0) # MaxMind Python API syntax.
for g in (g1, g2, g3):
self.assertTrue(g._country)
self.assertTrue(g._city)
# Only passing in the location of one database.
city = os.path.join(path, 'GeoLiteCity.dat')
cntry = os.path.join(path, 'GeoIP.dat')
g4 = GeoIP(city, country='')
self.assertIsNone(g4._country)
g5 = GeoIP(cntry, city='')
self.assertIsNone(g5._city)
# Improper parameters.
bad_params = (23, 'foo', 15.23)
for bad in bad_params:
with self.assertRaises(GeoIPException):
GeoIP(cache=bad)
if isinstance(bad, six.string_types):
e = GeoIPException
else:
e = TypeError
with self.assertRaises(e):
GeoIP(bad, 0)
def test02_bad_query(self):
"Testing GeoIP query parameter checking."
cntry_g = GeoIP(city='<foo>')
# No city database available, these calls should fail.
with self.assertRaises(GeoIPException):
cntry_g.city('google.com')
with self.assertRaises(GeoIPException):
cntry_g.coords('yahoo.com')
# Non-string query should raise TypeError
with self.assertRaises(TypeError):
cntry_g.country_code(17)
with self.assertRaises(TypeError):
cntry_g.country_name(GeoIP)
def test03_country(self):
"Testing GeoIP country querying methods."
g = GeoIP(city='<foo>')
queries = [self.addr]
if self._is_dns_available(self.fqdn):
queries.append(self.fqdn)
for query in queries:
for func in (g.country_code, g.country_code_by_addr, g.country_code_by_name):
self.assertEqual('US', func(query), 'Failed for func %s and query %s' % (func, query))
for func in (g.country_name, g.country_name_by_addr, g.country_name_by_name):
self.assertEqual('United States', func(query), 'Failed for func %s and query %s' % (func, query))
self.assertEqual({'country_code': 'US', 'country_name': 'United States'},
g.country(query))
@skipUnless(HAS_GEOS, "Geos is required")
def test04_city(self):
"Testing GeoIP city querying methods."
g = GeoIP(country='<foo>')
queries = [self.addr]
if self._is_dns_available(self.fqdn):<|fim▁hole|> self.assertEqual('US', func(query))
for func in (g.country_name, g.country_name_by_addr, g.country_name_by_name):
self.assertEqual('United States', func(query))
self.assertEqual({'country_code': 'US', 'country_name': 'United States'},
g.country(query))
# City information dictionary.
d = g.city(query)
self.assertEqual('USA', d['country_code3'])
self.assertEqual('San Antonio', d['city'])
self.assertEqual('TX', d['region'])
self.assertEqual(210, d['area_code'])
geom = g.geos(query)
self.assertIsInstance(geom, GEOSGeometry)
lon, lat = (-98, 29)
lat_lon = g.lat_lon(query)
lat_lon = (lat_lon[1], lat_lon[0])
for tup in (geom.tuple, g.coords(query), g.lon_lat(query), lat_lon):
self.assertAlmostEqual(lon, tup[0], 0)
self.assertAlmostEqual(lat, tup[1], 0)
def test05_unicode_response(self):
"Testing that GeoIP strings are properly encoded, see #16553."
g = GeoIP()
fqdn = "messe-duesseldorf.com"
if self._is_dns_available(fqdn):
d = g.city(fqdn)
self.assertEqual('Düsseldorf', d['city'])
d = g.country('200.26.205.1')
# Some databases have only unaccented countries
self.assertIn(d['country_name'], ('Curaçao', 'Curacao'))
def test_deprecation_warning(self):
with warnings.catch_warnings(record=True) as warns:
warnings.simplefilter('always')
GeoIP()
self.assertEqual(len(warns), 1)
msg = str(warns[0].message)
self.assertIn('django.contrib.gis.geoip is deprecated', msg)
def test_repr(self):
path = settings.GEOIP_PATH
g = GeoIP(path=path)
country_path = g._country_file
city_path = g._city_file
if GeoIP_lib_version:
expected = '<GeoIP [v%(version)s] _country_file="%(country)s", _city_file="%(city)s">' % {
'version': force_text(GeoIP_lib_version()),
'country': country_path,
'city': city_path,
}
else:
expected = '<GeoIP _country_file="%(country)s", _city_file="%(city)s">' % {
'country': country_path,
'city': city_path,
}
self.assertEqual(repr(g), expected)<|fim▁end|> | queries.append(self.fqdn)
for query in queries:
# Country queries should still work.
for func in (g.country_code, g.country_code_by_addr, g.country_code_by_name): |
<|file_name|>test_multiple-networks.py<|end_file_name|><|fim▁begin|>import pytest
def test_unknown_virtual_host(docker_compose, nginxproxy):
r = nginxproxy.get("http://nginx-proxy/")
assert r.status_code == 503
def test_forwards_to_web1(docker_compose, nginxproxy):
r = nginxproxy.get("http://web1.nginx-proxy.local/port")
assert r.status_code == 200 <|fim▁hole|> r = nginxproxy.get("http://web2.nginx-proxy.local/port")
assert r.status_code == 200
assert r.text == "answer from port 82\n"<|fim▁end|> | assert r.text == "answer from port 81\n"
def test_forwards_to_web2(docker_compose, nginxproxy): |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>import re, datetime
from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.http import Http404
from django.views.generic import date_based, list_detail
from basic.events.models import *
def event_list(request, page=0):
return list_detail.object_list(
request,
queryset=EventTime.objects.all(),<|fim▁hole|>event_list.__doc__ = list_detail.object_list.__doc__
def event_archive_year(request, year):
return date_based.archive_year(
request,
year=year,
date_field='start',
queryset=EventTime.objects.all(),
make_object_list=True,
allow_future=True,
)
event_archive_year.__doc__ = date_based.archive_year.__doc__
def event_archive_month(request, year, month):
return date_based.archive_month(
request,
year=year,
month=month,
date_field='start',
queryset=EventTime.objects.all(),
allow_future=True,
)
event_archive_month.__doc__ = date_based.archive_month.__doc__
def event_archive_day(request, year, month, day):
return date_based.archive_day(
request,
year=year,
month=month,
day=day,
date_field='start',
queryset=EventTime.objects.all(),
allow_future=True,
)
event_archive_day.__doc__ = date_based.archive_day.__doc__
def event_detail(request, slug, year, month, day, id):
return date_based.object_detail(
request,
year=year,
month=month,
day=day,
date_field='start',
object_id=id,
queryset=EventTime.objects.all(),
allow_future=True,
)
event_detail.__doc__ = date_based.object_detail.__doc__<|fim▁end|> | paginate_by=20,
page=page,
) |
<|file_name|>wsgi.py<|end_file_name|><|fim▁begin|>from werkzeug.contrib.fixers import ProxyFix<|fim▁hole|><|fim▁end|> | from app import app
app.wsgi_app = ProxyFix(app.wsgi_app) |
<|file_name|>model.js<|end_file_name|><|fim▁begin|>var Models = {};
module.exports = Models;
/**
* Creates a new instance of the Model class
* @constructor
*/
Models.Model = function(id, name, status, type, normalValue, wierdValue) {
/** @type {string} */
this._id = id || '';
/** @type {string} */
this.name = name || '';
/** @type {string} */
this.status = status || 'disable';
/** @type {string} */
this.type = type || 'normal';
/** @type {number} */
<|fim▁hole|>
/** @type {number} */
this.wierdValue = wierdValue || 0;
};
/**
* Creates a new instance of a PublicModel
* @constructor
*/
Models.PublicModel = function(name, value) {
/** @type {string} */
this.name = name || '';
/** @type {number} */
this.value = value || 0;
};<|fim▁end|> | this.normalValue = normalValue || 0;
|
<|file_name|>InactiveSpringBootAppExcludeFilter.java<|end_file_name|><|fim▁begin|>package de.felixroske.jfxsupport.util;
<|fim▁hole|>import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.core.type.classreading.MetadataReader;
import org.springframework.core.type.classreading.MetadataReaderFactory;
import java.io.IOException;
/**
* Created by Krystian Kałużny on 03.07.2017.
*/
class InactiveSpringBootAppExcludeFilter extends ExcludeFilter {
@Override
public boolean exclude(MetadataReader metadataReader, MetadataReaderFactory metadataReaderFactory) throws IOException {
try {
if (isAnnotated(metadataReader, SpringBootApplication.class)) {
return !activeSpringBootClass.getName().equals(metadataReader.getClassMetadata().getClassName());
}
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
}
return false;
}
}<|fim▁end|> | |
<|file_name|>appdirs.rs<|end_file_name|><|fim▁begin|>use std::path::PathBuf;
use dirs;
/// OS specific path to the application cache directory.
pub fn cache(app_name: &str) -> Option<PathBuf>
{
if app_name.is_empty() {
return None;
}
cache_home().map(|mut dir| { dir.push(app_name); dir })
}
/// OS specific path for caches.
pub fn cache_home() -> Option<PathBuf>
{
#[cfg(unix)]
fn _cache_home() -> Option<PathBuf>
{
dirs::home_dir().map(|mut dir| { dir.push(".cache"); dir })
}
#[cfg(windows)]
fn _cache_home() -> Option<PathBuf>
{
dirs::home_dir().map(|mut dir| {
dir.push("Local Settings");
dir.push("Cache");
dir
})
}
_cache_home()<|fim▁hole|>#[test]
#[cfg(test)]
fn tests()
{
#[cfg(unix)]
fn _tests()
{
match dirs::home_dir() {
Some(mut dir) => {
dir.push(".cache");
dir.push("blub");
let dir_str = format!("{}", dir.display());
let cache_str = format!("{}", cache("blub").unwrap().display());
assert_eq!(dir_str, cache_str);
}
None => assert!(false, "Couldn't get homedir!")
}
}
_tests()
}<|fim▁end|> | }
|
<|file_name|>Single Number.cpp<|end_file_name|><|fim▁begin|>class Solution {
public:
int singleNumber(vector<int>& nums) {
const int n = nums.size();
if (n == 1) return nums[0];
int result = nums[0];
for (int i = 1; i < n; ++i){
result ^= nums[i];
}<|fim▁hole|><|fim▁end|> | return result;
}
}; |
<|file_name|>AbstractFieldElementSetValueReadOnly.java<|end_file_name|><|fim▁begin|>package com.vaadin.tests.elements.abstracttextfield;
import com.vaadin.server.VaadinRequest;
import com.vaadin.tests.components.AbstractTestUI;
import com.vaadin.ui.AbstractField;
import com.vaadin.ui.AbstractMultiSelect;
import com.vaadin.ui.AbstractSingleSelect;
import com.vaadin.ui.CheckBox;
import com.vaadin.ui.CheckBoxGroup;<|fim▁hole|>import com.vaadin.ui.ListSelect;
import com.vaadin.ui.NativeSelect;
import com.vaadin.ui.PasswordField;
import com.vaadin.ui.RadioButtonGroup;
import com.vaadin.ui.RichTextArea;
import com.vaadin.ui.Slider;
import com.vaadin.ui.TextArea;
import com.vaadin.ui.TextField;
import com.vaadin.ui.TwinColSelect;
public class AbstractFieldElementSetValueReadOnly extends AbstractTestUI {
private AbstractField<?>[] fields = { new TextArea(), new TextField(),
new DateField(), new PasswordField(), new CheckBox(),
new RichTextArea(), new Slider() };
private AbstractMultiSelect<?>[] multiSelects = { new ListSelect(),
new CheckBoxGroup(), new TwinColSelect() };
private AbstractSingleSelect<?>[] singleSelects = { new ComboBox(),
new NativeSelect(), new RadioButtonGroup() };
@Override
protected void setup(VaadinRequest request) {
for (AbstractField field : fields) {
field.setReadOnly(true);
addComponent(field);
}
for (AbstractMultiSelect multiSelect : multiSelects) {
multiSelect.setReadOnly(true);
addComponent(multiSelect);
}
for (AbstractSingleSelect singleSelect : singleSelects) {
singleSelect.setReadOnly(true);
addComponent(singleSelect);
}
}
@Override
protected String getTestDescription() {
return "When vaadin element is set ReadOnly, setValue() method should raise an exception";
}
@Override
protected Integer getTicketNumber() {
return 14068;
}
}<|fim▁end|> | import com.vaadin.ui.ComboBox;
import com.vaadin.ui.DateField; |
<|file_name|>testkmeans.py<|end_file_name|><|fim▁begin|>"""Run tests for the kmeans portion of the kmeans module"""
import kmeans.kmeans.kmeans as kmeans
import numpy as np
import random
def test_1dim_distance():
"""See if this contraption works in 1 dimension"""
num1 = random.random()
num2 = random.random()
assert kmeans.ndim_euclidean_distance(num1, num2) == abs(num1-num2)
<|fim▁hole|> rand = random.random
point1 = [rand(), rand(), rand(), rand(), rand(), rand()]
point2 = [point1[0]+1] + point1[1:] # just shift x to the right by 1
assert int(round(kmeans.ndim_euclidean_distance(point1, point2))) == 1
def test_maxiters():
"""ensure the iteration ceiling works"""
# assert kmeans.should_iter([], [], iterations=29) == True
assert kmeans.should_iter([], [], iterations=30) == False
assert kmeans.should_iter([], [], iterations=31) == False
def test_random_centroid_dimensions():
"""ensure the correct number of dimensions"""
dimensions = random.randrange(1, 100)
k = random.randrange(1, 100)
centroids = kmeans.random_centroids(k, dimensions)
for centroid in centroids:
assert len(centroid) == dimensions
def test_iterated_centroid():
"""ensure that the average across each dimension is returned"""
new_centroid = kmeans.iterated_centroid([[1, 1, 1], [2, 2, 2]],\
[[100, 200, 300]], [(0, 0), (1, 0)])
np.testing.assert_allclose(new_centroid, np.array([[1.5, 1.5, 1.5]]),\
rtol=1e-5)<|fim▁end|> | def test_ndim_distance():
"""Test to see if changing val by 1 does what it ought to do
convert to float to integer because floating arithmetic makes testing
analytic functions a mess""" |
<|file_name|>ol3-layerswitcher.js<|end_file_name|><|fim▁begin|>/**
* OpenLayers 3 Layer Switcher Control.
* See [the examples](./examples) for usage.
* @constructor
* @extends {ol.control.Control}
* @param {Object} opt_options Control options, extends olx.control.ControlOptions adding:
* **`tipLabel`** `String` - the button tooltip.
*/
ol.control.LayerSwitcher = function(opt_options) {
var options = opt_options || {};
var tipLabel = options.tipLabel ?
options.tipLabel : 'Legend';
this.mapListeners = [];
this.hiddenClassName = 'ol-unselectable ol-control layer-switcher';
this.shownClassName = this.hiddenClassName + ' shown';
var element = document.createElement('div');
element.className = this.hiddenClassName;
var button = document.createElement('button');
button.setAttribute('title', tipLabel);
element.appendChild(button);
this.panel = document.createElement('div');
this.panel.className = 'panel';
element.appendChild(this.panel);
var this_ = this;
element.onmouseover = function(e) {
this_.showPanel();
};
button.onclick = function(e) {
this_.showPanel();
};
element.onmouseout = function(e) {
e = e || window.event;
if (!element.contains(e.toElement)) {
this_.hidePanel();
}
};
ol.control.Control.call(this, {
element: element,
target: options.target
});
};
ol.inherits(ol.control.LayerSwitcher, ol.control.Control);
/**
* Show the layer panel.
*/
ol.control.LayerSwitcher.prototype.showPanel = function() {
if (this.element.className != this.shownClassName) {
this.element.className = this.shownClassName;
this.renderPanel();
}
};
/**
* Hide the layer panel.
*/
ol.control.LayerSwitcher.prototype.hidePanel = function() {
if (this.element.className != this.hiddenClassName) {
this.element.className = this.hiddenClassName;
}
};
/**
* Re-draw the layer panel to represent the current state of the layers.
*/
ol.control.LayerSwitcher.prototype.renderPanel = function() {
    this.ensureTopVisibleBaseLayerShown_();
    // Empty the panel before rebuilding the layer tree from scratch.
    var panel = this.panel;
    while (panel.firstChild) {
        panel.removeChild(panel.firstChild);
    }
    var list = document.createElement('ul');
    panel.appendChild(list);
    this.renderLayers_(this.getMap(), list);
};
/**
* Set the map instance the control is associated with.
* @param {ol.Map} map The map instance.
*/
ol.control.LayerSwitcher.prototype.setMap = function(map) {
    // Detach listeners registered on the previous map; getMap() still
    // returns the old map until the superclass call below swaps it.
    var previousMap = this.getMap();
    this.mapListeners.forEach(function(listenerKey) {
        previousMap.unByKey(listenerKey);
    });
    this.mapListeners.length = 0;

    // Wire up listeners etc. and store reference to new map
    ol.control.Control.prototype.setMap.call(this, map);
    if (map) {
        var self = this;
        this.mapListeners.push(map.on('pointerdown', function() {
            self.hidePanel();
        }));
        this.renderPanel();
    }
};
<|fim▁hole|> * @private
*/
ol.control.LayerSwitcher.prototype.ensureTopVisibleBaseLayerShown_ = function() {
    // Track the last visible base layer seen during traversal, i.e. the
    // top-most one in iteration order.
    var topVisibleBase = null;
    ol.control.LayerSwitcher.forEachRecursive(this.getMap(), function(layer, idx, arr) {
        if (layer.get('type') === 'base' && layer.getVisible()) {
            topVisibleBase = layer;
        }
    });
    if (topVisibleBase) {
        this.setVisible_(topVisibleBase, true);
    }
};
/**
* Toggle the visible state of a layer.
* Takes care of hiding other layers in the same exclusive group if the layer
* is toggle to visible.
* @private
* @param {ol.layer.Base} The layer whos visibility will be toggled.
*/
ol.control.LayerSwitcher.prototype.setVisible_ = function(lyr, visible) {
    lyr.setVisible(visible);
    // Only switching a base layer ON has a side effect: every other base
    // layer is hidden, regardless of grouping.
    if (!visible || lyr.get('type') !== 'base') {
        return;
    }
    ol.control.LayerSwitcher.forEachRecursive(this.getMap(), function(other, idx, arr) {
        if (other !== lyr && other.get('type') === 'base') {
            other.setVisible(false);
        }
    });
};
/**
* Render all layers that are children of a group.
* @private
* @param {ol.layer.Base} lyr Layer to be rendered (should have a title property).
* @param {Number} idx Position in parent group list.
*/
ol.control.LayerSwitcher.prototype.renderLayer_ = function(lyr, idx) {
    var this_ = this;
    var li = document.createElement('li');
    var lyrTitle = lyr.get('title');
    // Derive a DOM id from the title. Collapse ALL whitespace runs to a
    // single hyphen: String.replace with a plain-string pattern only
    // replaces the first occurrence, which produced malformed ids (and
    // broken label/input association) for titles with more than one space.
    var lyrId = lyr.get('title').replace(/\s+/g, '-') + '_' + idx;
    var label = document.createElement('label');
    if (lyr.getLayers) {
        // Group layer: render a heading plus a nested list, no input.
        li.className = 'group';
        label.innerHTML = lyrTitle;
        li.appendChild(label);
        var ul = document.createElement('ul');
        li.appendChild(ul);
        this.renderLayers_(lyr, ul);
    } else {
        var input = document.createElement('input');
        if (lyr.get('type') === 'base') {
            // Base layers are mutually exclusive, so they share one radio group.
            input.type = 'radio';
            input.name = 'base';
        } else {
            input.type = 'checkbox';
        }
        input.id = lyrId;
        input.checked = lyr.get('visible');
        input.onchange = function(e) {
            this_.setVisible_(lyr, e.target.checked);
        };
        li.appendChild(input);
        label.htmlFor = lyrId;
        label.innerHTML = lyrTitle;
        li.appendChild(label);
    }
    return li;
};
/**
* Render all layers that are children of a group.
* @private
* @param {ol.layer.Group} lyr Group layer whos children will be rendered.
* @param {Element} elm DOM element that children will be appended to.
*/
ol.control.LayerSwitcher.prototype.renderLayers_ = function(lyr, elm) {
    // Copy then reverse so the top-most map layer appears first in the panel.
    var children = lyr.getLayers().getArray().slice().reverse();
    var self = this;
    children.forEach(function(child, i) {
        // Only layers carrying a title are user-facing entries.
        if (child.get('title')) {
            elm.appendChild(self.renderLayer_(child, i));
        }
    });
};
/**
* **Static** Call the supplied function for each layer in the passed layer group
* recursing nested groups.
* @param {ol.layer.Group} lyr The layer group to start iterating from.
* @param {Function} fn Callback which will be called for each `ol.layer.Base`
* found under `lyr`. The signature for `fn` is the same as `ol.Collection#forEach`
*/
ol.control.LayerSwitcher.forEachRecursive = function(lyr, fn) {
    lyr.getLayers().forEach(function(child, idx, arr) {
        fn(child, idx, arr);
        // Group layers expose getLayers(); descend into them as well.
        if (child.getLayers) {
            ol.control.LayerSwitcher.forEachRecursive(child, fn);
        }
    });
};
* Ensure only the top-most base layer is visible if more than one is visible. |
<|file_name|>AdvertisingModule.cpp<|end_file_name|><|fim▁begin|>/**
Copyright (c) 2014-2015 "M-Way Solutions GmbH"
FruityMesh - Bluetooth Low Energy mesh protocol [http://mwaysolutions.com/]
This file is part of FruityMesh
FruityMesh is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <adv_packets.h>
#include <AdvertisingController.h>
#include <AdvertisingModule.h>
#include <Logger.h>
#include <Utility.h>
#include <Storage.h>
extern "C"{
#include <app_error.h>
}
//This module allows a number of advertising messages to be configured.
//These will be broadcasted periodically
/*
TODO: Who's responsible for restoring the mesh-advertising packet? This module or the Node?
* */
/**
 * Constructs the advertising module and triggers loading of its persisted
 * configuration.
 * @param moduleId     Identifier assigned to this module.
 * @param node         The mesh node this module belongs to.
 * @param cm           Connection manager used to inspect mesh connections.
 * @param name         Terminal name of the module.
 * @param storageSlot  Persistent storage slot for the configuration.
 */
AdvertisingModule::AdvertisingModule(u8 moduleId, Node* node, ConnectionManager* cm, const char* name, u16 storageSlot)
	: Module(moduleId, node, cm, name, storageSlot)
{
	//Register callbacks n' stuff

	//Save configuration to base class variables so the base class can
	//load/store it; without these, LoadModuleConfiguration() has no target.
	//sizeof configuration must be a multiple of 4 bytes
	configurationPointer = &configuration;
	configurationLength = sizeof(AdvertisingModuleConfiguration);

	//Start module configuration loading
	LoadModuleConfiguration();
}
/**
 * Called once the module configuration has been loaded from storage.
 * Delegates basic validation to the Module base class and leaves a hook
 * for migrating older persisted configuration layouts.
 */
void AdvertisingModule::ConfigurationLoadedHandler()
{
	//Does basic testing on the loaded configuration
	Module::ConfigurationLoadedHandler();

	//Version migration can be added here; version 1 is current, nothing to migrate
	if(configuration.moduleVersion == 1){/* ... */};

	//Do additional initialization upon loading the config
	//Start the Module...
	logt("ADVMOD", "Config set");
}
/**
 * Periodic tick from the node's timer.
 * @param passedTime  Time elapsed since the last invocation (units defined
 *                    by the Module base class — TODO confirm).
 * @param appTimer    Current application timer value.
 * Intentionally empty: advertising content is static once configured.
 */
void AdvertisingModule::TimerEventHandler(u16 passedTime, u32 appTimer)
{
	//Do stuff on timer...
}
void AdvertisingModule::ResetToDefaultConfiguration()
{
//Set default configuration values
configuration.moduleId = moduleId;
configuration.moduleActive = true;
configuration.moduleVersion = 1;
memset(configuration.messageData[0].messageData, 0, 31);
advStructureFlags flags;
advStructureName name;
flags.len = SIZEOF_ADV_STRUCTURE_FLAGS-1;
flags.type = BLE_GAP_AD_TYPE_FLAGS;
flags.flags = BLE_GAP_ADV_FLAG_LE_GENERAL_DISC_MODE | BLE_GAP_ADV_FLAG_BR_EDR_NOT_SUPPORTED;
name.len = SIZEOF_ADV_STRUCTURE_NAME-1;
name.type = BLE_GAP_AD_TYPE_COMPLETE_LOCAL_NAME;
name.name[0] = 'A';
name.name[1] = 'B';
configuration.advertisingIntervalMs = 100;
configuration.messageCount = 1;
configuration.messageData[0].messageLength = 31;
memcpy(configuration.messageData[0].messageData, &flags, SIZEOF_ADV_STRUCTURE_FLAGS);
memcpy(configuration.messageData[0].messageData+SIZEOF_ADV_STRUCTURE_FLAGS, &name, SIZEOF_ADV_STRUCTURE_NAME);
}
/**
 * Reacts to changes of the node's discovery state machine.
 * When discovery backs off or turns off, this module takes over the
 * advertisement slot (either with a debug packet or with the first
 * configured message); when discovery resumes, the node's own JOIN_ME
 * advertising is restored.
 * @param newState  The discovery state the node just entered.
 */
void AdvertisingModule::NodeStateChangedHandler(discoveryState newState)
{
	if(newState == discoveryState::BACK_OFF || newState == discoveryState::DISCOVERY_OFF){
		//Activate our advertising
		//This is a small packet for debugging a node's state
		if(Config->advertiseDebugPackets){
			u8 buffer[31];
			memset(buffer, 0, 31);

			//AD structure 1: flags, written at buffer offset 0
			advStructureFlags* flags = (advStructureFlags*)buffer;
			flags->len = SIZEOF_ADV_STRUCTURE_FLAGS-1;
			flags->type = BLE_GAP_AD_TYPE_FLAGS;
			flags->flags = BLE_GAP_ADV_FLAG_LE_GENERAL_DISC_MODE | BLE_GAP_ADV_FLAG_BR_EDR_NOT_SUPPORTED;

			//AD structure 2: manufacturer-specific data at offset 3
			advStructureManufacturer* manufacturer = (advStructureManufacturer*)(buffer+3);
			manufacturer->len = 26;
			manufacturer->type = BLE_GAP_AD_TYPE_MANUFACTURER_SPECIFIC_DATA;
			manufacturer->companyIdentifier = 0x24D;

			//Debug payload starts at offset 7, after the manufacturer header
			AdvertisingModuleDebugMessage* msg = (AdvertisingModuleDebugMessage*)(buffer+7);
			msg->debugPacketIdentifier = 0xDE;
			msg->senderId = node->persistentConfig.nodeId;
			msg->connLossCounter = node->persistentConfig.connectionLossCounter;

			//Report partner id, average RSSI and dropped packets per connection;
			//slots without a completed handshake are zeroed
			for(int i=0; i<Config->meshMaxConnections; i++){
				if(cm->connections[i]->handshakeDone()){
					msg->partners[i] = cm->connections[i]->partnerId;
					msg->rssiVals[i] = cm->connections[i]->rssiAverage;
					msg->droppedVals[i] = cm->connections[i]->droppedPackets;
				} else {
					msg->partners[i] = 0;
					msg->rssiVals[i] = 0;
					msg->droppedVals[i] = 0;
				}
			}

			//Log the raw packet for debugging
			char strbuffer[200];
			Logger::getInstance().convertBufferToHexString(buffer, 31, strbuffer, 200);
			logt("ADVMOD", "ADV set to %s", strbuffer);

			//Hand the payload to the SoftDevice
			u32 err = sd_ble_gap_adv_data_set(buffer, 31, NULL, 0);
			if(err != NRF_SUCCESS){
				logt("ADVMOD", "Debug Packet corrupt");
			}

			AdvertisingController::SetAdvertisingState(advState::ADV_STATE_HIGH);
		}
		else if(configuration.messageCount > 0){
			//No debug packet: advertise the first configured message instead
			u32 err = sd_ble_gap_adv_data_set(configuration.messageData[0].messageData, configuration.messageData[0].messageLength, NULL, 0);
			if(err != NRF_SUCCESS){
				logt("ADVMOD", "Adv msg corrupt");
			}

			char buffer[200];
			Logger::getInstance().convertBufferToHexString((u8*)configuration.messageData[0].messageData, 31, buffer, 200);
			logt("ADVMOD", "ADV set to %s", buffer);

			if(configuration.messageData[0].forceNonConnectable)
			{
				AdvertisingController::SetNonConnectable();
			}

			//Now, start advertising
			//TODO: Use advertising parameters from config to advertise
			AdvertisingController::SetAdvertisingState(advState::ADV_STATE_HIGH);
		}
	} else if (newState == discoveryState::DISCOVERY) {
		//Do not trigger custom advertisings anymore, reset to node's advertising
		node->UpdateJoinMePacket();
	}
}
/**
 * Handles terminal commands addressed to this module.
 * Supported: "action <moduleName> broadcast_debug" — toggles broadcasting
 * of the debug packet.
 * @param commandName  First token of the terminal command.
 * @param commandArgs  Remaining tokens; commandArgs[1] selects the module.
 * @return true if the command was consumed, otherwise the base class result.
 */
bool AdvertisingModule::TerminalCommandHandler(string commandName, vector<string> commandArgs)
{
	//Require at least 3 args before touching commandArgs[2]: the original
	//code indexed commandArgs[2] with only size() >= 2 guaranteed, which is
	//out-of-bounds (undefined behavior) for "action advertise" alone. The
	//duplicate commandArgs[1] re-check was redundant and is folded in here.
	if(commandName == "action" && commandArgs.size() >= 3 && commandArgs[1] == moduleName)
	{
		if(commandArgs[2] == "broadcast_debug")
		{
			Config->advertiseDebugPackets = !Config->advertiseDebugPackets;
			logt("ADVMOD", "Debug Packets are now set to %u", Config->advertiseDebugPackets);
			return true;
		}
	}

	//Must be called to allow the module to get and set the config
	return Module::TerminalCommandHandler(commandName, commandArgs);
}
configurationPointer = &configuration;
configurationLength = sizeof(AdvertisingModuleConfiguration);
|
<|file_name|>toaster-container.component.spec.ts<|end_file_name|><|fim▁begin|>import { Component, NgModule } from '@angular/core';
import { TestBed } from '@angular/core/testing';
import { ComponentFixture } from '@angular/core/testing';
import { Toast, ToastType } from './toast';
import { ToasterService } from './toaster.service';
import { ToasterContainerComponent } from './toaster-container.component';
import { ToasterConfig } from './toaster-config';
import { BodyOutputType } from './bodyOutputType';
import { ToasterModule } from './toaster.module';
import { BrowserModule } from '@angular/platform-browser';
import { BrowserAnimationsModule } from '@angular/platform-browser/animations';
// Widens ToastType so tests can pass a custom or empty type string while
// still satisfying the Toast interface's type field.
type ExtendedToastType = ('customtype' | '') & ToastType;
// Mock component for bootstrapping <toaster-container></toaster-container>
// Host component that bootstraps a single <toaster-container> bound to
// `toasterconfig` below; exposes the injected ToasterService to the specs.
@Component({
  selector: 'test-component',
  template: '<toaster-container [toasterconfig]="toasterconfig"></toaster-container>',
})
export class TestComponent {
  toasterService: ToasterService;
  // Config bound in the template: close button shown, tap-to-dismiss off,
  // no auto-timeout, container id 1.
  public toasterconfig: ToasterConfig =
    new ToasterConfig({ showCloseButton: true, tapToDismiss: false, timeout: 0, toastContainerId: 1 });
  // Secondary config targeting container id 2; not bound in the template.
  public toasterconfig2: ToasterConfig =
    new ToasterConfig({ showCloseButton: true, tapToDismiss: false, timeout: 0, toastContainerId: 2 });
  constructor(toasterService: ToasterService) {
    this.toasterService = toasterService;
  }
}
// Module wrapper providing the Toaster root providers and animations for
// TestBed configurations that import a module instead of declarations.
@NgModule({
  imports: [ToasterModule.forRoot(), BrowserAnimationsModule],
  declarations: [TestComponent]
})
export class TestComponentModule { }
// Mock component for testing bodyOutputType Component rendering
// Minimal component rendered as a toast body via BodyOutputType.Component.
@Component({
  selector: 'test-dynamic-component',
  template: `<div>loaded via component</div>`
})
export class TestDynamicComponent { }
// Declares TestDynamicComponent as a bootstrap entry so it can be
// instantiated dynamically when used as a toast body.
@NgModule({
  imports: [ToasterModule.forChild()],
  bootstrap: [TestDynamicComponent],
  declarations: [TestDynamicComponent]
})
export class TestDynamicComponentModule { }
// Dynamic toast body with its own state (`someValue`) and a click handler
// that mutates the owning toast's title — used to verify interaction
// between a rendered body component and its toast instance.
@Component({
  selector: 'bound-dynamic-component',
  template: '<div>{{someValue}} loaded via component<button (click)="clickHandler()" id="click"></button></div>'
})
export class TestBoundDynamicComponent {
  someValue = 'Some value';
  // Assigned externally when the component is attached to a toast.
  public toast: Toast = null;
  clickHandler() {
    this.toast.title = 'updated title';
  }
}
// Bootstrap module for the data-bound dynamic toast body component.
@NgModule({
  bootstrap: [TestBoundDynamicComponent],
  declarations: [TestBoundDynamicComponent]
})
export class TestBoundDynamicComponentModule { }
// Primary suite: exercises pop/clear, subscription lifecycle, duplicate
// prevention, limits, close-button resolution, callbacks and toastId
// generation of ToasterContainerComponent with a synchronously created
// TestBed fixture.
describe('ToasterContainerComponent with sync ToasterService', () => {
  let toasterService: ToasterService,
    toasterContainer: ToasterContainerComponent,
    fixture: ComponentFixture<TestComponent>;

  beforeEach(() => {
    TestBed.configureTestingModule({
      declarations: [TestComponent],
      imports: [ToasterModule.forRoot(), BrowserModule, BrowserAnimationsModule]
    }).compileComponents();

    // The container is the first (only) child of the host component.
    fixture = TestBed.createComponent<TestComponent>(TestComponent);
    toasterContainer = fixture.debugElement.children[0].componentInstance;
    toasterService = fixture.componentInstance.toasterService;
    return fixture;
  });

  it('should pop toast synchronously', () => {
    toasterContainer.ngOnInit();
    toasterService.pop('success', 'test', 'test');
    expect(toasterContainer.toasts.length).toBe(1);
  });

  // NOTE(review): mixing `async` with the `done` callback is rejected by
  // newer Jasmine versions — consider converting to a pure Promise test.
  it('should pop toast asynchronously', async (done) => {
    // create test-specific fixture to protect against
    // container being overwritten by other tests since this
    // test now executes fully asynchronously
    const fixtureInstance = TestBed.createComponent<TestComponent>(TestComponent);
    const toasterContainerInstance = fixtureInstance.debugElement.children[0].componentInstance;
    const toasterServiceInstance = fixtureInstance.componentInstance.toasterService;

    fixtureInstance.detectChanges();
    // this will initialize the component.
    // a call to ngOnInit is redundant.
    await fixtureInstance.whenStable();

    toasterServiceInstance.popAsync('success', 'test', 'test')
      .subscribe(toast => {
        expect(toast).toBeDefined();
        expect(toast.type).toBe('success');
        expect(toasterContainerInstance.toasts.length).toBe(1);
        expect(toast.toastId).toBe(toasterContainerInstance.toasts[0].toastId);
        done()
      });
  });

  // NOTE(review): same async/done mix as above.
  it('should pop toast asynchronously multiple times', async (done) => {
    // create test-specific fixture to protect against
    // container being overwritten by other tests since this
    // test now executes fully asynchronously
    const fixtureInstance = TestBed.createComponent<TestComponent>(TestComponent);
    const toasterContainerInstance = fixtureInstance.debugElement.children[0].componentInstance;
    const toasterServiceInstance = fixtureInstance.componentInstance.toasterService;

    fixtureInstance.detectChanges();
    // this will initialize the component.
    // a call to ngOnInit is redundant.
    await fixtureInstance.whenStable();

    toasterServiceInstance.popAsync('success', 'test', 'test');
    toasterServiceInstance.popAsync('success', 'test', 'test');
    toasterServiceInstance.popAsync('success', 'test', 'test')
      .subscribe(toast => {
        expect(toast).toBeDefined();
        expect(toast.type).toBe('success');

        // Locate the toast delivered by the observer among the rendered ones.
        let locatedToast;
        for (let i = 0; i < toasterContainerInstance.toasts.length; i++) {
          if (toasterContainerInstance.toasts[i].toastId === toast.toastId) {
            locatedToast = toasterContainerInstance.toasts[i];
            break;
          }
        }

        expect(locatedToast).toBeDefined();
        done();
      });
  });

  it('should retrieve toast instance from pop observer', () => {
    toasterContainer.ngOnInit();
    let toast: Toast = {
      type: 'success',
      title: 'observer toast'
    };

    expect(toasterContainer.toasts.length).toBe(0);

    // pop() returns the (id-stamped) toast instance it queued.
    toast = toasterService.pop(toast);

    expect(toast).toBeDefined();
    expect(toast.type).toBe(toast.type);
    expect(toast.title).toBe(toast.title);
    expect(toast.toastId).toBe(toasterContainer.toasts[0].toastId);
  });

  it('should clear toast synchronously', () => {
    toasterContainer.ngOnInit();

    toasterService.pop('success', 'test', 'test');
    expect(toasterContainer.toasts.length).toBe(1);

    toasterService.clear();
    expect(toasterContainer.toasts.length).toBe(0);
  });

  it('should throw exception if toast is popped without any subscribers being registered', () => {
    let hasError = false;

    try {
      toasterService.pop('success', 'test', 'test');
    } catch (e) {
      hasError = true;
      expect(e.message).toBe('No Toaster Containers have been initialized to receive toasts.');
    }

    expect(toasterContainer.toasts.length).toBe(0);
    expect(hasError).toBe(true);
  });

  it('should remove subscribers when ngOnDestroy is called', () => {
    toasterContainer.ngOnInit();

    toasterService.pop('success', 'test', 'test');
    expect(toasterContainer.toasts.length).toBe(1);

    // After destroy, neither pop nor clear should reach the container.
    toasterContainer.ngOnDestroy();

    toasterService.pop('success', 'test 2', 'test 2');
    toasterService.clear();
    expect(toasterContainer.toasts.length).toBe(1);
  });

  it('will not attempt to remove subscribers when ngOnDestroy is called if ngOnInit is not called', () => {
    spyOn(toasterContainer, 'ngOnInit').and.callThrough();
    spyOn(toasterContainer, 'ngOnDestroy').and.callThrough();
    expect(toasterContainer.ngOnInit).not.toHaveBeenCalled();

    toasterContainer.ngOnDestroy();

    expect(toasterContainer.ngOnDestroy).toHaveBeenCalled();
  });

  it('addToast should not add toast if toasterContainerId is provided and it does not match', () => {
    toasterContainer.toasterconfig = new ToasterConfig({ toastContainerId: 2 })
    const toast: Toast = { type: 'success', toastContainerId: 1 };
    toasterContainer.ngOnInit();

    toasterService.pop(toast);
    expect(toasterContainer.toasts.length).toBe(0);
  });

  it('addToast should use defaultTypeClass if type is empty string', () => {
    toasterContainer.ngOnInit();

    toasterService.pop(<ExtendedToastType>'', '', '');

    expect(toasterContainer.toasterconfig.defaultToastType).toBe('info');
    expect(toasterContainer.toasts.length).toBe(1);
    expect(toasterContainer.toasts[0].type).toBe('info');
  });

  it('addToast should not add toast if preventDuplicates and the same toastId exists', () => {
    toasterContainer.toasterconfig = new ToasterConfig({ preventDuplicates: true, toastContainerId: 30 });
    toasterContainer.ngOnInit();
    const toast: Toast = { type: 'info', toastContainerId: 30 };

    toasterService.pop(toast);
    expect(toasterContainer.toasts.length).toBe(1);
    toasterService.pop(toast);
    expect(toasterContainer.toasts.length).toBe(1);
  });

  it('addToast should not add toast if preventDuplicates and toastId does not exist and the same body exists', () => {
    toasterContainer.toasterconfig = new ToasterConfig({ preventDuplicates: true });
    toasterContainer.ngOnInit();
    const toast: Toast = { type: 'info', body: 'test' };
    const toast2: Toast = { type: 'info', body: 'test2' };
    const toast3: Toast = { type: 'info', body: 'test2' };

    toasterService.pop(toast);
    expect(toasterContainer.toasts.length).toBe(1);
    toasterService.pop(toast2);
    expect(toasterContainer.toasts.length).toBe(2);
    toasterService.pop(toast3);
    expect(toasterContainer.toasts.length).toBe(2);
  });

  it('addToast uses toast.showCloseButton if defined', () => {
    toasterContainer.ngOnInit();
    const toast: Toast = { type: 'info', showCloseButton: true };

    toasterService.pop(toast);
    fixture.detectChanges();
    expect(toasterContainer.toasts[0].showCloseButton).toBe(true);
  });

  it('addToast uses toasterconfig.showCloseButton object if defined and toast.showCloseButton is undefined', () => {
    // Per-type close-button map: only 'info' toasts get a close button.
    toasterContainer.toasterconfig = new ToasterConfig({ showCloseButton: { 'info': true } });
    toasterContainer.ngOnInit();
    const toast: Toast = { type: 'info' };
    const toast2: Toast = { type: 'success' };

    toasterService.pop(toast);
    toasterService.pop(toast2);
    const infoToast = toasterContainer.toasts.filter(t => t.type === 'info')[0];
    const successToast = toasterContainer.toasts.filter(t => t.type === 'success')[0];

    expect(infoToast.showCloseButton).toBe(true);
    expect(successToast.showCloseButton).toBeUndefined();
  });

  it('addToast uses toast.showCloseButton if defined as an empty string', () => {
    toasterContainer.toasterconfig = new ToasterConfig({ showCloseButton: false });
    // Deliberately corrupt the config value to a non-boolean empty string.
    (<any>toasterContainer.toasterconfig.showCloseButton) = '';

    toasterContainer.ngOnInit();
    const toast: Toast = { type: 'info' };

    toasterService.pop(toast);
    expect(toasterContainer.toasts[0].showCloseButton).toBeUndefined();
  });

  it('addToast removes toast from bottom if toasterconfig.newestOnTop and limit exceeded', () => {
    toasterContainer.toasterconfig = new ToasterConfig({ limit: 2 });
    toasterContainer.ngOnInit();

    expect(toasterContainer.toasterconfig.newestOnTop).toBe(true);
    expect(toasterContainer.toasterconfig.limit).toBe(2);

    const toast1: Toast = { type: 'info', title: '1', body: '1' };
    const toast2: Toast = { type: 'info', title: '2', body: '2' };
    const toast3: Toast = { type: 'info', title: '3', body: '3' };
    const toast4: Toast = { type: 'info', title: '4', body: '4' };

    toasterService.pop(toast1);
    toasterService.pop(toast2);
    toasterService.pop(toast3);
    toasterService.pop(toast4);

    expect(toasterContainer.toasts.length).toBe(2);
    expect(toasterContainer.toasts[0].title).toBe('4');
    expect(toasterContainer.toasts[0]).toBe(toast4);
  });

  // NOTE(review): despite the name, this asserts that an SVG body IS
  // rendered via TrustedHtml — the title looks copied from another test.
  it('addToast will not populate body with TrustedHtml if body is null', () => {
    toasterContainer.toasterconfig = new ToasterConfig();
    toasterContainer.ngOnInit();

    const testSvg = '<svg width="400" height="110"><rect width="300" height="100" style="fill:rgb(0,0,255);stroke-width:3;stroke:rgb(0,0,0)"></rect></svg>';
    const toast1: Toast = {
      type: 'info',
      title: '1',
      body: testSvg,
      bodyOutputType: BodyOutputType.TrustedHtml
    };

    toasterService.pop(toast1);
    fixture.detectChanges();

    const closeButtonEle = fixture.nativeElement.querySelector('.toast-message');
    expect(closeButtonEle.innerHTML).toContain(testSvg);
  });

  it('addToast will not populate safeCloseHtml if closeHtml is null', () => {
    toasterContainer.toasterconfig = new ToasterConfig();
    toasterContainer.toasterconfig.closeHtml = null;
    toasterContainer.ngOnInit();

    const toast1: Toast = { type: 'info', title: '1', body: '1', showCloseButton: true };
    toasterService.pop(toast1);
    fixture.detectChanges();

    const closeButtonEle = fixture.nativeElement.querySelector('.toast-close-button');
    expect(closeButtonEle.innerHTML).toBe('');
  });

  it('addToast will populate safeCloseHtml with default html', () => {
    toasterContainer.toasterconfig = new ToasterConfig();
    toasterContainer.ngOnInit();

    const toast1: Toast = { type: 'info', title: '1', body: '1', showCloseButton: true };
    toasterService.pop(toast1);
    fixture.detectChanges();

    const closeButtonEle = fixture.nativeElement.querySelector('.toast-close-button');
    expect(closeButtonEle.innerHTML).toBe('<span>×</span>');
  });

  it('addToast removes toast from top if !toasterconfig.newestOnTop and limit exceeded', () => {
    toasterContainer.toasterconfig = new ToasterConfig({ newestOnTop: false, limit: 2 });
    toasterContainer.ngOnInit();

    expect(toasterContainer.toasterconfig.newestOnTop).toBe(false);
    expect(toasterContainer.toasterconfig.limit).toBe(2);

    const toast1: Toast = { type: 'info', title: '1', body: '1' };
    const toast2: Toast = { type: 'info', title: '2', body: '2' };
    const toast3: Toast = { type: 'info', title: '3', body: '3' };
    const toast4: Toast = { type: 'info', title: '4', body: '4' };

    toasterService.pop(toast1);
    toasterService.pop(toast2);
    toasterService.pop(toast3);
    toasterService.pop(toast4);

    expect(toasterContainer.toasts.length).toBe(2);
    expect(toasterContainer.toasts[0].title).toBe('3');
    expect(toasterContainer.toasts[0]).toBe(toast3);
  });

  it('addToast calls onShowCallback if it exists', () => {
    toasterContainer.ngOnInit();

    const toast: Toast = { type: 'info', title: 'default', onShowCallback: (toaster) => toaster.title = 'updated' };
    toasterService.pop(toast);

    expect(toasterContainer.toasts[0].title).toBe('updated');
  });

  it('removeToast will not remove the toast if it is not found in the toasters array', () => {
    toasterContainer.ngOnInit();

    const toast: Toast = { type: 'info' };
    toasterService.pop(toast);

    expect(toasterContainer.toasts.length).toBe(1);
    toasterService.clear('faketoastid');
    expect(toasterContainer.toasts.length).toBe(1);
  });

  it('removeToast calls onHideCallback if it exists', () => {
    toasterContainer.ngOnInit();

    let status = 'not updated';
    const toast: Toast = { type: 'info', title: 'default', onHideCallback: (toastInstance) => status = 'updated' };
    toasterService.pop(toast);
    toasterService.clear(toast.toastId);

    expect(status).toBe('updated');
  });

  it('removeToast notifies the removeToast subscribers', (done) => {
    toasterContainer.ngOnInit();

    const toast: Toast = { type: 'info', title: 'default' };
    toasterService.pop(toast);

    toasterService.removeToast.subscribe(t => {
      expect(t.toastId).toEqual(toast.toastId);
      expect(t.toastContainerId).toEqual(toast.toastContainerId);
      done();
    });

    toasterService.clear(toast.toastId);
  });

  it('clearToasts will clear toasts from all containers if toastContainerId is undefined', () => {
    toasterContainer.ngOnInit();

    const toast: Toast = { type: 'info' };
    toasterService.pop(toast);

    expect(toasterContainer.toasts.length).toBe(1);
    toasterService.clear(null, undefined);
    expect(toasterContainer.toasts.length).toBe(0);
  });

  it('clearToasts will clear toasts from specified container if toastContainerId is number', () => {
    toasterContainer.toasterconfig = new ToasterConfig({ toastContainerId: 1 });
    toasterContainer.ngOnInit();

    const toast: Toast = { type: 'info', toastContainerId: 1 };
    toasterService.pop(toast);

    expect(toasterContainer.toasts.length).toBe(1);
    toasterService.clear(null, 1);
    expect(toasterContainer.toasts.length).toBe(0);
  });

  it('createGuid should create unique Guids', () => {
    toasterContainer.toasterconfig = new ToasterConfig({ toastContainerId: 1 });
    toasterContainer.ngOnInit();
    let toastIds = [];

    // Generate a large number of ids and check for collisions.
    for (let i = 0; i < 10000; i++) {
      const toast = toasterService.pop('success', 'toast');
      toastIds.push(toast.toastId);
      toasterService.clear();
    }

    let valuesSoFar = Object.create(null);
    let dupFound = false;
    for (let i = 0; i < toastIds.length; ++i) {
      const value = toastIds[i];
      if (value in valuesSoFar) {
        dupFound = true;
        break;
      }
      valuesSoFar[value] = true;
    }

    expect(dupFound).toBe(false);
    toastIds = null;
    valuesSoFar = null;
  });

  it('toastIdOrDefault should return empty string if toast.toastId is null', () => {
    let toast: Toast = { type: 'info', toastId: null };

    const toastId = toasterContainer['toastIdOrDefault'](toast);

    expect(toastId).toBe('');
  });

  it('toastIdOrDefault should return empty string if toast.toastId is undefined', () => {
    let toast: Toast = { type: 'info', toastId: undefined };

    const toastId = toasterContainer['toastIdOrDefault'](toast);

    expect(toastId).toBe('');
  });

  it('toastIdOrDefault should return empty string if toast.toastId is empty string', () => {
    let toast: Toast = { type: 'info', toastId: '' };

    const toastId = toasterContainer['toastIdOrDefault'](toast);

    expect(toastId).toBe('');
  });

  it('should use toast.toastId parameter if passed', () => {
    toasterContainer.ngOnInit();
    let toast: Toast = { type: 'success', title: '', body: '', toastId: '12345' };

    toasterService.pop(toast);

    expect(toasterContainer.toasts.length).toBe(1);
    expect(toasterContainer.toasts[0].toastId).toBe('12345');
  });
});
describe('ToasterContainerComponent with sync ToasterService', () => {
let toasterService: ToasterService,
toasterContainer: ToasterContainerComponent,
fixture: ComponentFixture<TestComponent>;
beforeEach(() => {
TestBed.configureTestingModule({
declarations: [TestComponent],
imports: [ToasterModule.forRoot(), BrowserModule, BrowserAnimationsModule]
});
fixture = TestBed.createComponent<TestComponent>(TestComponent);
toasterContainer = fixture.debugElement.children[0].componentInstance;
toasterService = fixture.componentInstance.toasterService;
return fixture;
});
it('addToast does not populate data if not not defined', () => {
toasterContainer.toasterconfig = new ToasterConfig();
toasterContainer.ngOnInit();
const toast: Toast = { type: 'info' };
toasterService.pop(toast);
expect(toasterContainer.toasts[0].data).toBeUndefined();
});
it('addToast sets data if type number', () => {
toasterContainer.toasterconfig = new ToasterConfig();
toasterContainer.ngOnInit();
const toast: Toast = { type: 'info', data: 1 };
toasterService.pop(toast);
expect(toasterContainer.toasts[0].data).toBe(1);
});
it('clearToasts will not clear toasts from specified container if toastContainerId does not match', () => {
toasterContainer.toasterconfig = new ToasterConfig({ toastContainerId: 1 });
toasterContainer.ngOnInit();
const toast: Toast = { type: 'info', toastContainerId: 1 };
<|fim▁hole|> toasterService.clear(null, 2);
expect(toasterContainer.toasts.length).toBe(1);
});
});
describe('ToasterContainerComponent when included as a component', () => {
let fixture: ComponentFixture<TestComponent>;
beforeEach(() => {
TestBed.configureTestingModule({
declarations: [TestComponent],
imports: [ToasterModule.forRoot(), TestDynamicComponentModule, BrowserAnimationsModule]
});
fixture = TestBed.createComponent<TestComponent>(TestComponent);
});
it('should use the bound toasterconfig object if provided', () => {
fixture.detectChanges();
expect(fixture.componentInstance).toBeDefined();
const container = fixture.debugElement.children[0].componentInstance;
expect(container).toBeDefined();
expect(container.toasterconfig).toBeDefined();
expect(container.toasterconfig.showCloseButton).toBe(true);
expect(container.toasterconfig.tapToDismiss).toBe(false);
expect(container.toasterconfig.timeout).toBe(0);
});
it('should invoke the click event when a toast is clicked but not remove toast if !tapToDismiss', () => {
fixture.detectChanges();
const container = fixture.debugElement.children[0].componentInstance;
expect(container.toasterconfig.tapToDismiss).toBe(false);
fixture.componentInstance.toasterService.pop('success', 'test', 'test');
fixture.detectChanges();
expect(container.toasts.length).toBe(1);
const toast = fixture.nativeElement.querySelector('div.toast');
toast.click();
fixture.detectChanges();
expect(container.toasts.length).toBe(1);
});
it('should invoke the click event when a toast is clicked and remove toast if tapToDismiss', () => {
fixture.componentInstance.toasterconfig.tapToDismiss = true;
fixture.detectChanges();
expect(fixture.componentInstance).toBeDefined();
const container = fixture.debugElement.children[0].componentInstance;
expect(container.toasterconfig.tapToDismiss).toBe(true);
fixture.componentInstance.toasterService.pop('success', 'test', 'test');
fixture.detectChanges();
expect(container.toasts.length).toBe(1);
const toast = fixture.nativeElement.querySelector('div.toast');
toast.click();
fixture.detectChanges();
expect(container.toasts.length).toBe(0);
});
it('should invoke the click event when the close button is clicked even if !tapToDismiss', () => {
fixture.detectChanges();
const container = fixture.debugElement.children[0].componentInstance;
expect(container.toasterconfig.tapToDismiss).toBe(false);
fixture.componentInstance.toasterService.pop('success', 'test', 'test');
fixture.detectChanges();
expect(container.toasts.length).toBe(1);
const toastButton = fixture.nativeElement.querySelector('.toast-close-button');
toastButton.click();
fixture.detectChanges();
expect(container.toasts.length).toBe(0);
});
it('should call onClickHandler if it exists on toast', () => {
fixture.detectChanges();
const container = fixture.debugElement.children[0].componentInstance;
const toast: Toast = {
type: 'success',
onClickCallback: () => {
return true;
},
tapToDismiss: false
};
spyOn(toast, 'onClickCallback');
fixture.componentInstance.toasterService.pop(toast);
fixture.detectChanges();
expect(container.toasts.length).toBe(1);
const toastButton = fixture.nativeElement.querySelector('.toast');
toastButton.click();
fixture.detectChanges();
expect(container.toasts.length).toBe(1);
expect(toast.onClickCallback).toHaveBeenCalled();
});
it('should call onClickHandler if it exists on toast before closing toast', () => {
fixture.detectChanges();
const container = fixture.debugElement.children[0].componentInstance;
const toast: Toast = {
type: 'success',
onClickCallback: () => {
return true;
},
tapToDismiss: false
};
spyOn(toast, 'onClickCallback');
fixture.componentInstance.toasterService.pop(toast);
fixture.detectChanges();
expect(container.toasts.length).toBe(1);
const toastButton = fixture.nativeElement.querySelector('.toast-close-button');
toastButton.click();
fixture.detectChanges();
expect(container.toasts.length).toBe(0);
expect(toast.onClickCallback).toHaveBeenCalled();
});
it('should not call onClickHandler if it does not exist on toast', () => {
fixture.detectChanges();
const container = fixture.debugElement.children[0].componentInstance;
const toast: Toast = {
type: 'success',
tapToDismiss: false
};
fixture.componentInstance.toasterService.pop(toast);
fixture.detectChanges();
expect(container.toasts.length).toBe(1);
const toastButton = fixture.nativeElement.querySelector('.toast');
toastButton.click();
fixture.detectChanges();
fixture.detectChanges();
expect(container.toasts.length).toBe(1);
expect(toast.onClickCallback).not.toBeNull();
});
it('addToast should render component if it exists', () => {
fixture.detectChanges();
const container = fixture.debugElement.children[0].componentInstance;
const toast: Toast = {
type: 'success',
title: 'Yay',
body: TestDynamicComponent,
bodyOutputType: BodyOutputType.Component
};
fixture.componentInstance.toasterService.pop(toast);
fixture.detectChanges();
expect(container.toasts.length).toBe(1);
const renderedToast = fixture.nativeElement.querySelector('test-dynamic-component');
expect(renderedToast.innerHTML).toBe('<div>loaded via component</div>');
});
  // NOTE(review): this spec is a byte-for-byte duplicate of the preceding
  // "addToast should render component if it exists" test — it pops a toast
  // whose body is a Component, not a module.  Either the title is wrong or
  // the module-rendering scenario was never implemented; confirm intent
  // before relying on this test for coverage.
  it('addToast should render module if it exists', () => {
    fixture.detectChanges();
    const container = fixture.debugElement.children[0].componentInstance;
    const toast: Toast = {
      type: 'success',
      title: 'Yay',
      body: TestDynamicComponent,
      bodyOutputType: BodyOutputType.Component
    };
    fixture.componentInstance.toasterService.pop(toast);
    fixture.detectChanges();
    expect(container.toasts.length).toBe(1);
    const renderedToast = fixture.nativeElement.querySelector('test-dynamic-component');
    expect(renderedToast.innerHTML).toBe('<div>loaded via component</div>');
  });
it('addToast should render html passed in toast.body if bodyOutputType is TrustedHtml', () => {
const textContent = 'here is test text';
const htmlContent = '<h4>' + textContent + '</h4>';
fixture.detectChanges();
const container = fixture.debugElement.children[0].componentInstance;
const toast: Toast = {
type: 'success',
title: 'Yay',
body: htmlContent,
bodyOutputType: BodyOutputType.TrustedHtml
};
fixture.componentInstance.toasterService.pop(toast);
fixture.detectChanges();
expect(container.toasts.length).toBe(1);
const renderedToast = fixture.nativeElement.querySelector('.toast-message');
const innerBody = renderedToast.querySelector('div');
expect(innerBody.innerHTML).toBe(htmlContent);
expect(innerBody.textContent).toBe(textContent);
expect(innerBody.innerHTML).not.toBe(innerBody.textContent);
});
it('addToast will not render html if bodyOutputType is TrustedHtml and body is null', () => {
fixture.detectChanges();
const container = fixture.debugElement.children[0].componentInstance;
const toast: Toast = {
type: 'success',
title: 'Yay',
body: null,
bodyOutputType: BodyOutputType.TrustedHtml
};
fixture.componentInstance.toasterService.pop(toast);
fixture.detectChanges();
expect(container.toasts.length).toBe(1);
const renderedToast = fixture.nativeElement.querySelector('.toast-message');
const innerBody = renderedToast.querySelector('div');
expect(innerBody.innerHTML).toBe('');
});
it('addToast will render encoded text instead of html if bodyOutputType is Default', () => {
const textContent = 'here is test text';
const htmlContent = '<h4>' + textContent + '</h4>';
const encodedString = '<h4>here is test text</h4>';
fixture.detectChanges();
const container = fixture.debugElement.children[0].componentInstance;
const toast: Toast = {
type: 'success',
title: 'Yay',
body: htmlContent,
bodyOutputType: BodyOutputType.Default
};
fixture.componentInstance.toasterService.pop(toast);
fixture.detectChanges();
expect(container.toasts.length).toBe(1);
const renderedToast = fixture.nativeElement.querySelector('.toast-message');
const innerBody = renderedToast.querySelector('div');
expect(innerBody.innerHTML).toBe(encodedString);
expect(innerBody.textContent).toBe(htmlContent);
});
});
describe('Multiple ToasterContainerComponent components', () => {
let fixture: ComponentFixture<TestComponent>;
beforeEach(() => {
TestBed.configureTestingModule({
declarations: [TestComponent],
imports: [ToasterModule.forRoot(), TestDynamicComponentModule, BrowserAnimationsModule]
});
TestBed.overrideComponent(TestComponent,
{
set: {
template: `<toaster-container [toasterconfig]="toasterconfig"></toaster-container>
<toaster-container [toasterconfig]="toasterconfig2"></toaster-container>`
}
}
);
fixture = TestBed.createComponent<TestComponent>(TestComponent);
});
it('should create multiple container instances', () => {
fixture.componentInstance.toasterconfig.toastContainerId = 1;
fixture.componentInstance.toasterconfig2.toastContainerId = 2;
fixture.detectChanges();
expect(fixture).toBeDefined();
expect(fixture.componentInstance.toasterconfig).toBeDefined();
expect(fixture.componentInstance.toasterconfig2).toBeDefined();
});
it('should only receive toasts targeted for that container', () => {
fixture.componentInstance.toasterconfig.toastContainerId = 1;
fixture.componentInstance.toasterconfig2.toastContainerId = 2;
fixture.detectChanges();
const toast1: Toast = {
type: 'success',
title: 'fixture 1',
toastContainerId: 1
};
const toast2: Toast = {
type: 'success',
title: 'fixture 2',
toastContainerId: 2
};
fixture.componentInstance.toasterService.pop(toast1);
fixture.componentInstance.toasterService.pop(toast2);
fixture.detectChanges();
const container1 = fixture.debugElement.children[0].componentInstance;
const container2 = fixture.debugElement.children[1].componentInstance;
expect(container1.toasts.length).toBe(1);
expect(container2.toasts.length).toBe(1);
expect(container1.toasts[0].title).toBe('fixture 1');
expect(container2.toasts[0].title).toBe('fixture 2');
});
});
describe('ToasterContainerComponent when included as a component with bindings', () => {
let fixture: ComponentFixture<TestComponent>;
beforeEach(() => {
TestBed.configureTestingModule({
declarations: [TestComponent],
imports: [ToasterModule.forRoot(), TestBoundDynamicComponentModule, BrowserAnimationsModule]
});
fixture = TestBed.createComponent<TestComponent>(TestComponent);
});
it('should use the bound toasterconfig object if provided', () => {
fixture.detectChanges();
expect(fixture.componentInstance).toBeDefined();
const container = fixture.debugElement.children[0].componentInstance;
expect(container).toBeDefined();
expect(container.toasterconfig).toBeDefined();
expect(container.toasterconfig.showCloseButton).toBe(true);
expect(container.toasterconfig.tapToDismiss).toBe(false);
expect(container.toasterconfig.timeout).toBe(0);
});
it('should render the dynamic bound content', () => {
fixture.detectChanges();
const container = fixture.debugElement.children[0].componentInstance;
const toast: Toast = {
type: 'success',
title: 'Yay',
body: TestBoundDynamicComponent,
bodyOutputType: BodyOutputType.Component
};
fixture.componentInstance.toasterService.pop(toast);
fixture.detectChanges();
expect(container.toasts.length).toBe(1);
const renderedToast = fixture.nativeElement.querySelector('bound-dynamic-component');
expect(renderedToast.innerHTML).toBe('<div>Some value loaded via component<button id="click"></button></div>');
});
it('should propagate the toast instance to the component', () => {
fixture.detectChanges();
const container = fixture.debugElement.children[0].componentInstance;
const toast: Toast = {
type: 'success',
title: 'test',
body: TestBoundDynamicComponent,
bodyOutputType: BodyOutputType.Component
};
const toastInstance = fixture.componentInstance.toasterService.pop(toast);
fixture.detectChanges();
expect(container.toasts.length).toBe(1);
expect(toastInstance.title).toBe('test');
const clickButton = fixture.nativeElement.querySelector('#click');
clickButton.click();
fixture.detectChanges();
expect(toastInstance.title).toBe('updated title');
});
});<|fim▁end|> | toasterService.pop(toast);
expect(toasterContainer.toasts.length).toBe(1);
|
<|file_name|>operations.py<|end_file_name|><|fim▁begin|>from __future__ import print_function
import re
import logging
logging.basicConfig(level=logging.INFO)
class Executor(object):
    """Dispatch CHIP-8 opcodes to handler functions.

    ``op_map`` maps opcode templates such as ``'6XYY'`` to callables.  In a
    template, ``?`` matches any hex digit, while runs of ``x``, ``y`` or
    ``z`` become capture groups whose hex value is passed to the handler
    as an int.
    """
    def __init__(self, op_map):
        processed = {}
        # items() instead of the Python-2-only iteritems(); behavior is
        # identical and the module already opts into print_function.
        for pattern, f in op_map.items():
            s = self._build_pattern_groups(pattern.lower())
            processed[re.compile(s)] = f
        self.operations = processed
    def execute(self, context, op):
        """Decode the 16-bit opcode *op* and invoke its handler."""
        s = "%04x" % op
        for pattern, f in self.operations.items():
            m = pattern.match(s)
            if m:
                return f(context, *[int(v, base=16) for v in m.groups()])
        assert False, s  # unknown opcode
    def _build_pattern_groups(self, pattern):
        """Translate an opcode template into an anchored regex string."""
        s = pattern.replace('?', '.')
        for marker in ['x', 'y', 'z']:
            m = re.search('%s+' % marker, s)
            if m:
                s = s[:m.start()] + ('(.{%s})' % (m.end() - m.start())) + s[m.end():]
        return '^' + s + '$'
def set_mem_v0_vx(context, x):
    """FX55: dump registers V0..VX (inclusive) into memory starting at I.

    CHIP-8 stores x+1 registers; the original ``range(x)`` stopped one
    short and never wrote V[x] (compare fill_v0_vx, which uses x+1).
    """
    for i in range(x + 1):
        context.memory.write_byte(context.index_reg + i, context.v[i])
    context.pc += 2
def fill_v0_vx(context, x):
    """FX65: load registers V0..VX (inclusive) from memory starting at I."""
    for offset in range(x + 1):
        context.v[offset] = context.memory.get_byte(context.index_reg + offset)
    context.pc += 2
def set_bcd_vx(context, x):
    """FX33: store the BCD digits of V[X] at I, I+1, I+2 (hundreds first).

    Uses floor division (//): plain ``/`` is float division under
    Python 3 and would store fractional "digits".
    """
    val = int(context.v[x])
    context.memory.write_byte(context.index_reg, val // 100)
    context.memory.write_byte(context.index_reg + 1, val % 100 // 10)
    context.memory.write_byte(context.index_reg + 2, val % 100 % 10)
    context.pc += 2
def set_i_font(context, x):
    """FX29: point the index register at the font sprite for the digit in V[X]."""
    context.index_reg = context.memory.get_font_address(context.v[x])
    context.pc += 2
def add_reg_ind(context, x):
    """FX1E: add V[X] to the index register."""
    context.index_reg += context.v[x]
    context.pc += 2
def set_delay_timer(context, x):
    """FX15: load the delay timer from V[X]."""
    context.delay_timer = context.v[x]
    context.pc += 2
def set_sound_timer(context, x):
    """FX18: load the sound timer from V[X]."""
    context.sound_timer = context.v[x]
    context.pc += 2
def set_vx_key_pressed(context, x):
    """FX0A: block until a key is pressed and store its value in V[X]."""
    context.v[x] = context.keypad.wait_for_keypress()
    context.pc += 2
def set_vx_delay_timer(context, x):
    """FX07: copy the delay timer into V[X]."""
    context.v[x] = context.delay_timer
    context.pc += 2
def skip_key_vx(context, x, result=True):
    """Skip the next instruction when key V[X]'s pressed-state equals *result*."""
    pressed = context.keypad.is_keypressed(context.v[x])
    if result == pressed:
        context.pc += 2
    context.pc += 2
def draw_sprite(context, x, y, n):
    """DXYN: draw the n-byte sprite at memory[I..I+n) at (V[X], V[Y]).

    V[F] receives the collision flag returned by the screen.
    """
    sprite = [context.memory.get_byte(context.index_reg + row) for row in range(n)]
    context.v[15] = context.screen.draw(context.v[x], context.v[y], sprite)
    context.pc += 2
def jump_nnn_v0(context, nnn):
    """BNNN: jump to address NNN + V0."""
    context.pc = nnn + context.v[0]
def set_vx_rand(context, x, nn):
    """CXNN: V[X] = (random byte) AND NN."""
    import random
    context.v[x] = random.randint(0, 0xFF) & nn
    context.pc += 2
def jump_noteq(context, x, y):
    """9XY0: skip the next instruction when V[X] != V[Y]."""
    if context.v[x] != context.v[y]:
        context.pc += 2
    context.pc += 2
def shift_vy_left(context, x, y):
    """8XYE: V[X] = V[Y] << 1 kept in 8 bits; V[F] = MSB of V[Y] pre-shift.

    Fixes two defects: the original read the MSB of V[F] itself instead of
    the source register V[Y], and wrapped the shift with % 255 instead of
    % 256 (e.g. 0x81 << 1 gave 3 instead of 2).
    """
    context.v[15] = (context.v[y] >> 7) & 0x1
    context.v[x] = (context.v[y] << 1) % 256
    context.pc += 2
def shift_right(context, x, y):
    """8XY6: V[X] = V[Y] >> 1; V[F] = LSB of V[Y] before the shift."""
    low_bit = context.v[y] & 0x1
    context.v[15] = low_bit
    context.v[x] = context.v[y] >> 1
    context.pc += 2
def sub_vx_vy_vf(context, x, y):
    """8XY7: V[X] = V[Y] - V[X] (8-bit); V[F] = 1 when V[Y] > V[X].

    The original subtracted in the wrong direction (V[X] - V[Y], same as
    8XY5) and could leave a negative value in the register; the result is
    now reduced mod 256.  The V[F] rule already matched the spec.
    """
    logging.info('Setting V[X] = V[Y] - V[X], V[F] = 1 if V[Y] > V[X]')
    context.v[15] = 1 if context.v[y] > context.v[x] else 0
    context.v[x] = (context.v[y] - context.v[x]) % 256
    context.pc += 2
def add_vx_vy(context, x, y):
    """8XY4: V[X] += V[Y] kept in 8 bits; V[F] = 1 on carry."""
    logging.info('Setting V[X] = V[X] + V[Y]')
    total = context.v[x] + context.v[y]
    context.v[15] = int(total > 255)
    context.v[x] = total % 256
    context.pc += 2
def sub_vx_vy(context, x, y):
    """8XY5: V[X] -= V[Y] kept in 8 bits; V[F] = 1 when there is NO borrow.

    CHIP-8 sets VF = NOT borrow for 8XY5 (VF = 1 when V[X] >= V[Y]).  The
    original set VF = 1 *on* borrow — the opposite of the spec and of the
    carry convention this file itself uses in add_vx_vy.
    """
    logging.info('Setting V[X] = V[X] - V[Y]')
    val = context.v[x] - context.v[y]
    context.v[15] = 0 if val < 0 else 1
    context.v[x] = val % 256
    context.pc += 2
def set_vx_or_vy(context, x, y):
    """8XY1: bitwise OR of V[X] and V[Y] into V[X]."""
    logging.info('Setting V[X] = V[X] | V[Y]')
    context.v[x] |= context.v[y]
    context.pc += 2
def set_vx_xor_vy(context, x, y):
    """8XY3: bitwise XOR of V[X] and V[Y] into V[X]."""
    logging.info('Setting V[X] = V[X] ^ V[Y]')
    context.v[x] ^= context.v[y]
    context.pc += 2
def set_vx_and_vy(context, x, y):
    """8XY2: bitwise AND of V[X] and V[Y] into V[X]."""
    logging.info('Setting V[X] = V[X] & V[Y]')
    context.v[x] &= context.v[y]
    context.pc += 2
def set_vx_vy(context, x, y):
    """8XY0: copy V[Y] into V[X]."""
    logging.info('Setting V[X] = V[Y]')
    context.v[x] = context.v[y]
    context.pc += 2
def add_reg(context, x, nnn):
    """7XNN: add NN to V[X] in 8 bits (no carry flag for this opcode)."""
    logging.info('Adding NNN to V[X]')
    context.v[x] = (context.v[x] + nnn) % 256
    context.pc += 2
def set_i(context, nnn):
    """ANNN: load NNN into the index register."""
    logging.info('Setting NNN to index_reg')
    context.index_reg = nnn
    context.pc += 2
def pop_stack(context):
    """00EE: return from a subroutine (the pushed value is already past the CALL)."""
    logging.info('Returning from a subroutine')
    context.pc = context.stack.pop()
def call_rca1082(context, address): #TODO
    """0NNN: call machine-code routine at NNN — not implemented, just skipped.

    Every CHIP-8 instruction is two bytes, so the PC must advance by 2;
    the original advanced by 1, leaving the PC misaligned mid-opcode.
    """
    print("operation not implemented yet:", address)
    context.pc += 2
def clear(context):
    """00E0: clear the display."""
    logging.info('Clearing screen')
    context.screen.clear()
    context.pc += 2
def jump(context, address):
    """1NNN: unconditional jump."""
    logging.info('Jump at 0x%2x address' % address)
    context.pc = address
def call(context, address):
    """2NNN: push the return address, then jump to the subroutine."""
    logging.info('Calling subroutine at 0x%2x address' % address)
    return_addr = context.pc + 2
    context.stack.append(return_addr)
    context.pc = address
def skip_equal(context, x, nnn, ifeq=True):
    """3XNN/4XNN: skip the next instruction when (V[X] == NN) matches *ifeq*."""
    logging.info('Skip if V[X] === NNN is %s' % ifeq)
    skip = (context.v[x] == nnn) == ifeq
    context.pc += 4 if skip else 2
<|fim▁hole|> logging.info('Skip if V[X] === V[Y]')
if context.v[x] == context.v[y]:
context.pc += 2
context.pc += 2
def set_reg(context, x, nnn):
    """6XNN: load NN into V[X]."""
    logging.info('Set NNN to cpu reg V[x]')
    context.v[x] = nnn
    context.pc += 2
op_map = {
'0?E0': clear,
'0?EE': pop_stack,
'0XXX': call_rca1082,
'1XXX': jump,
'2XXX': call,
'3XYY': skip_equal,
'4XYY': lambda context, x, nn: skip_equal(context, x, nn, ifeq = False),
'5XY0': skip_eq_reg,
'6XYY': set_reg,
'7XYY': add_reg,
'8XY0': set_vx_vy,
'8XY1': set_vx_or_vy,
'8XY2': set_vx_and_vy,
'8XY3': set_vx_xor_vy,
'8XY4': add_vx_vy,
'8XY5': sub_vx_vy,
'8XY6': shift_right,
'8XY7': sub_vx_vy_vf,
'8XYE': shift_vy_left,
'9XY0': jump_noteq,
'AXXX': set_i,
'BXXX': jump_nnn_v0,
'CXYY': set_vx_rand,
'DXYZ': draw_sprite,
'EX9E': lambda context, x: skip_key_vx(x, result=False),
'EXA1': skip_key_vx,
'FX07': set_vx_delay_timer,
'FX0A': set_vx_key_pressed,
'FX15': set_delay_timer,
'FX18': set_sound_timer,
'FX1E': add_reg_ind,
'FX29': set_i_font,
'FX33': set_bcd_vx,
'FX55': set_mem_v0_vx,
'FX65': fill_v0_vx
}<|fim▁end|> | def skip_eq_reg(context, x, y): |
<|file_name|>try.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# Copyright (c) Arni Mar Jonsson.
# See LICENSE for details.
import rocksdb, pointless, random, string, itertools, collections
from twisted.internet import reactor, defer, threads
def compare(a, b):
    # cmp() no longer exists in Python 3; (a > b) - (a < b) is the standard
    # replacement and returns the same -1/0/1 for comparable values.
    return (a > b) - (a < b)
c = 'bytewise'
c = ('rocksdb.BytewiseComparator', compare)
kw = {
'create_if_missing': True,
'error_if_exists': False,
'paranoid_checks': False,
'block_cache_size': 8 * (2 << 20),
'write_buffer_size': 2 * (2 << 20),
'block_size': 4096,
'max_open_files': 1000,
'block_restart_interval': 16,
'comparator': c
}
def random_value(n):
    """Return a bytearray of *n* random ASCII letters.

    Built from an iterable of ordinals so it runs unchanged on both
    Python 2 and 3 (xrange and bytearray(str) are Python-2-only); the
    sequence of random.choice calls — and hence the output — is identical.
    """
    return bytearray(ord(random.choice(string.ascii_letters)) for _ in range(n))
def generate_data():
    """Build 1M random 8-byte key/value pairs and serialize them to data.map.

    Depends on the third-party `pointless` module.  Uses range() instead of
    the Python-2-only xrange (identical behavior for iteration).
    """
    random.seed(0)
    k_ = []
    v_ = []
    for _ in range(1000000):
        k = random_value(8)
        v = random_value(8)
        k_.append(k)
        v_.append(v)
    pointless.serialize([k_, v_], 'data.map')
@defer.inlineCallbacks
def insert_alot(db, kv, ops, stop):
    """Keep writing random key/value pairs from *kv* into *db* until stop[0] is set.

    Runs Put calls in the reactor thread pool; counts writes in ops['n_insert'].
    """
    while not stop[0]:
        k = random.choice(kv[0])
        v = random.choice(kv[1])
        yield threads.deferToThread(db.Put, k, v)
        ops['n_insert'] += 1
        if ops['n_insert'] > 0 and ops['n_insert'] % 1000 == 0:
            # print as a function call: valid on Python 2, required on Python 3
            print('INFO: n_insert: %iK' % (ops['n_insert'] // 1000,))
@defer.inlineCallbacks
def scan_alot(db, kv, ops, stop):
    """Repeatedly scan random key ranges of *db* until stop[0] is set.

    Counts completed scans in ops['n_scans'].
    """
    while not stop[0]:
        k_a = random.choice(kv[0])
        k_b = random.choice(kv[0])
        # BUG FIX: the original assigned min() to BOTH ends of the range,
        # so every RangeIter was empty and the scan loop never iterated.
        k_a, k_b = min(k_a, k_b), max(k_a, k_b)
        i = db.RangeIter(k_a, k_b)
        n_max = random.randint(100, 10000)
        for c in itertools.count():
            try:
                # (renamed from `next`, which shadowed the builtin)
                item = yield threads.deferToThread(i.next)
            except StopIteration:
                break
            if c > n_max:
                break
        ops['n_scans'] += 1
        if ops['n_scans'] > 0 and ops['n_scans'] % 1000 == 0:
            print('INFO: n_scans: %iK' % (ops['n_scans'] // 1000,))
def main():
#generate_data()
reactor.suggestThreadPoolSize(20)<|fim▁hole|> stop = [False]
def do_stop():
stop[0] = True
reactor.stop()
ops = collections.defaultdict(int)
for i in xrange(10):
reactor.callWhenRunning(insert_alot, db, kv, ops, stop)
reactor.callWhenRunning(scan_alot, db, kv, ops, stop)
reactor.callLater(10000.0, do_stop)
reactor.run()
if __name__ == '__main__':
main()<|fim▁end|> |
kv = pointless.Pointless('/home/arni/py-rocksdb/data.map', allow_print = False).GetRoot()
db = rocksdb.RocksDB('./db', **kw)
|
<|file_name|>rake.py<|end_file_name|><|fim▁begin|># Implementation of RAKE - Rapid Automtic Keyword Exraction algorithm
# as described in:
# Rose, S., D. Engel, N. Cramer, and W. Cowley (2010).
# Automatic keyword extraction from indi-vidual documents.
# In M. W. Berry and J. Kogan (Eds.), Text Mining: Applications and Theory.unknown: John Wiley and Sons, Ltd.
import re
import operator
import os
BASE_DIR = (os.path.dirname(os.path.abspath(__file__)))
debug = False
def is_number(s):
    """Return True when *s* parses as an int (or as a float if it contains '.')."""
    try:
        if '.' in s:
            float(s)
        else:
            int(s)
        return True
    except ValueError:
        return False
def load_stop_words(stop_word_file):
    """
    Utility function to load stop words from a file and return as a list of words
    @param stop_word_file Path and file name of a file containing stop words.
    @return list A list of stop words.
    """
    stop_words = []
    # `with` closes the handle; the original leaked an open file object.
    with open(stop_word_file) as f:
        for line in f:
            if line.strip()[0:1] != "#":  # skip comment lines
                for word in line.split():  # in case more than one per line
                    stop_words.append(word)
    return stop_words
def separate_words(text, min_word_return_size):
"""
Utility function to return a list of all words that are have a length greater than a specified number of characters.
@param text The text that must be split in to words.
@param min_word_return_size The minimum no of characters a word must have to be included.
"""<|fim▁hole|> for single_word in splitter.split(text):
current_word = single_word.strip().lower()
# leave numbers in phrase, but don't count as words, since they tend to invalidate scores of their phrases
if len(current_word) > min_word_return_size and current_word != '' and not is_number(current_word):
words.append(current_word)
return words
def split_sentences(text):
"""
Utility function to return a list of sentences.
@param text The text that must be split in to sentences.
"""
sentence_delimiters = re.compile(r'[!\?;:\[\]\t\"\(\)]|\s\-\s|[^0-9],[^a-zA-Z0-9]|\.[^a-zA-Z0-9]|\.$')
sentences = sentence_delimiters.split(text)
return sentences
def build_stop_word_regex(stop_word_file_path):
stop_word_list = load_stop_words(stop_word_file_path)
stop_word_regex_list = []
for word in stop_word_list:
word_regex = r'\b'+word+r'(?![\w-])' # added look ahead for hyphen
stop_word_regex_list.append(word_regex)
stop_word_pattern = re.compile('|'.join(stop_word_regex_list), re.IGNORECASE)
return stop_word_pattern
def generate_candidate_keywords(sentence_list, stopword_pattern):
phrase_list = []
for s in sentence_list:
tmp = re.sub(stopword_pattern, '|', s.strip())
phrases = tmp.split("|")
for phrase in phrases:
phrase = phrase.strip().lower()
if phrase != "":
phrase_list.append(phrase)
return phrase_list
def calculate_word_scores(phraseList):
word_frequency = {}
word_degree = {}
for phrase in phraseList:
word_list = separate_words(phrase, 0)
word_list_length = len(word_list)
word_list_degree = word_list_length-1
# if word_list_degree > 3: word_list_degree = 3 #exp.
for word in word_list:
word_frequency.setdefault(word, 0)
word_frequency[word] += 1
word_degree.setdefault(word, 0)
word_degree[word] += word_list_degree # orig.
# word_degree[word] += 1/(word_list_length*1.0) #exp.
for item in word_frequency:
word_degree[item] = word_degree[item]+word_frequency[item]
# Calculate Word scores = deg(w)/frew(w)
word_score = {}
for item in word_frequency:
word_score.setdefault(item, 0)
word_score[item] = word_degree[item]/(word_frequency[item]*1.0) # orig.
# word_score[item] = word_frequency[item]/(word_degree[item] * 1.0) #exp.
return word_score
def generate_candidate_keyword_scores(phrase_list, word_score):
keyword_candidates = {}
for phrase in phrase_list:
keyword_candidates.setdefault(phrase, 0)
word_list = separate_words(phrase, 0)
candidate_score = 0
for word in word_list:
candidate_score += word_score[word]
keyword_candidates[phrase] = candidate_score
return keyword_candidates
class Rake(object):
def __init__(self, stop_words_path=os.path.join(BASE_DIR, "SmartStoplist.txt")):
self.stop_words_path = stop_words_path
self.__stop_words_pattern = build_stop_word_regex(stop_words_path)
def run(self, text):
sentence_list = split_sentences(text)
phrase_list = generate_candidate_keywords(sentence_list, self.__stop_words_pattern)
word_scores = calculate_word_scores(phrase_list)
keyword_candidates = generate_candidate_keyword_scores(phrase_list, word_scores)
sorted_keywords = sorted(keyword_candidates.iteritems(), key=operator.itemgetter(1), reverse=True)
return sorted_keywords
if __name__ == "__main__":
text = "Compatibility of systems of linear constraints over the set of natural numbers. Criteria of compatibility of a system of linear Diophantine equations, strict inequations, and nonstrict inequations are considered. Upper bounds for components of a minimal set of solutions and algorithms of construction of minimal generating sets of solutions for all types of systems are given. These criteria and the corresponding algorithms for constructing a minimal supporting set of solutions can be used in solving all the considered types of systems and systems of mixed types."
# Split text into sentences
sentenceList = split_sentences(text)
# stoppath = "FoxStoplist.txt" #Fox stoplist contains "numbers", so it will not find "natural numbers" like in Table 1.1
stoppath = os.path.join(BASE_DIR, "SmartStoplist.txt") # SMART stoplist misses some of the lower-scoring keywords in Figure 1.5, which means that the top 1/3 cuts off one of the 4.0 score words in Table 1.1
stopwordpattern = build_stop_word_regex(stoppath)
# generate candidate keywords
phraseList = generate_candidate_keywords(sentenceList, stopwordpattern)
# calculate individual word scores
wordscores = calculate_word_scores(phraseList)
# generate candidate keyword scores
keywordcandidates = generate_candidate_keyword_scores(phraseList, wordscores)
if debug: print keywordcandidates
sortedKeywords = sorted(keywordcandidates.iteritems(), key=operator.itemgetter(1), reverse=True)
if debug: print sortedKeywords
totalKeywords = len(sortedKeywords)
if debug: print totalKeywords
print sortedKeywords[0:(totalKeywords/3)]
rake = Rake(stoppath)
keywords = rake.run(text)
print keywords<|fim▁end|> | splitter = re.compile(r'[^a-zA-Z0-9_\+\-/]')
words = [] |
<|file_name|>SMESH_Controls.cpp<|end_file_name|><|fim▁begin|>// Copyright (C) 2007-2015 CEA/DEN, EDF R&D, OPEN CASCADE
//
// Copyright (C) 2003-2007 OPEN CASCADE, EADS/CCR, LIP6, CEA/DEN,
// CEDRAT, EDF R&D, LEG, PRINCIPIA R&D, BUREAU VERITAS
//
// This library is free software; you can redistribute it and/or
// modify it under the terms of the GNU Lesser General Public
// License as published by the Free Software Foundation; either
// version 2.1 of the License, or (at your option) any later version.
//
// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public
// License along with this library; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
//
// See http://www.salome-platform.org/ or email : [email protected]
//
#include "SMESH_ControlsDef.hxx"
#include "SMDS_BallElement.hxx"
#include "SMDS_Iterator.hxx"
#include "SMDS_Mesh.hxx"
#include "SMDS_MeshElement.hxx"
#include "SMDS_MeshNode.hxx"
#include "SMDS_QuadraticEdge.hxx"
#include "SMDS_QuadraticFaceOfNodes.hxx"
#include "SMDS_VolumeTool.hxx"
#include "SMESHDS_GroupBase.hxx"
#include "SMESHDS_GroupOnFilter.hxx"
#include "SMESHDS_Mesh.hxx"
#include "SMESH_MeshAlgos.hxx"
#include "SMESH_OctreeNode.hxx"
#include <Basics_Utils.hxx>
#include <BRepAdaptor_Surface.hxx>
#include <BRepClass_FaceClassifier.hxx>
#include <BRep_Tool.hxx>
#include <Geom_CylindricalSurface.hxx>
#include <Geom_Plane.hxx>
#include <Geom_Surface.hxx>
#include <Precision.hxx>
#include <TColStd_MapIteratorOfMapOfInteger.hxx>
#include <TColStd_MapOfInteger.hxx>
#include <TColStd_SequenceOfAsciiString.hxx>
#include <TColgp_Array1OfXYZ.hxx>
#include <TopAbs.hxx>
#include <TopExp.hxx>
#include <TopoDS.hxx>
#include <TopoDS_Edge.hxx>
#include <TopoDS_Face.hxx>
#include <TopoDS_Iterator.hxx>
#include <TopoDS_Shape.hxx>
#include <TopoDS_Vertex.hxx>
#include <gp_Ax3.hxx>
#include <gp_Cylinder.hxx>
#include <gp_Dir.hxx>
#include <gp_Pln.hxx>
#include <gp_Pnt.hxx>
#include <gp_Vec.hxx>
#include <gp_XYZ.hxx>
#include <vtkMeshQuality.h>
#include <set>
#include <limits>
/*
AUXILIARY METHODS
*/
namespace {
const double theEps = 1e-100;
const double theInf = 1e+100;
inline gp_XYZ gpXYZ(const SMDS_MeshNode* aNode )
{
return gp_XYZ(aNode->X(), aNode->Y(), aNode->Z() );
}
inline double getAngle( const gp_XYZ& P1, const gp_XYZ& P2, const gp_XYZ& P3 )
{
gp_Vec v1( P1 - P2 ), v2( P3 - P2 );
return v1.Magnitude() < gp::Resolution() ||
v2.Magnitude() < gp::Resolution() ? 0 : v1.Angle( v2 );
}
inline double getArea( const gp_XYZ& P1, const gp_XYZ& P2, const gp_XYZ& P3 )
{
gp_Vec aVec1( P2 - P1 );
gp_Vec aVec2( P3 - P1 );
return ( aVec1 ^ aVec2 ).Magnitude() * 0.5;
}
inline double getArea( const gp_Pnt& P1, const gp_Pnt& P2, const gp_Pnt& P3 )
{
return getArea( P1.XYZ(), P2.XYZ(), P3.XYZ() );
}
inline double getDistance( const gp_XYZ& P1, const gp_XYZ& P2 )
{
double aDist = gp_Pnt( P1 ).Distance( gp_Pnt( P2 ) );
return aDist;
}
int getNbMultiConnection( const SMDS_Mesh* theMesh, const int theId )
{
if ( theMesh == 0 )
return 0;
const SMDS_MeshElement* anEdge = theMesh->FindElement( theId );
if ( anEdge == 0 || anEdge->GetType() != SMDSAbs_Edge/* || anEdge->NbNodes() != 2 */)
return 0;
// for each pair of nodes in anEdge (there are 2 pairs in a quadratic edge)
// count elements containing both nodes of the pair.
// Note that there may be such cases for a quadratic edge (a horizontal line):
//
// Case 1 Case 2
// | | | | |
// | | | | |
// +-----+------+ +-----+------+
// | | | |
// | | | |
// result sould be 2 in both cases
//
int aResult0 = 0, aResult1 = 0;
// last node, it is a medium one in a quadratic edge
const SMDS_MeshNode* aLastNode = anEdge->GetNode( anEdge->NbNodes() - 1 );
const SMDS_MeshNode* aNode0 = anEdge->GetNode( 0 );
const SMDS_MeshNode* aNode1 = anEdge->GetNode( 1 );
if ( aNode1 == aLastNode ) aNode1 = 0;
SMDS_ElemIteratorPtr anElemIter = aLastNode->GetInverseElementIterator();
while( anElemIter->more() ) {
const SMDS_MeshElement* anElem = anElemIter->next();
if ( anElem != 0 && anElem->GetType() != SMDSAbs_Edge ) {
SMDS_ElemIteratorPtr anIter = anElem->nodesIterator();
while ( anIter->more() ) {
if ( const SMDS_MeshElement* anElemNode = anIter->next() ) {
if ( anElemNode == aNode0 ) {
aResult0++;
if ( !aNode1 ) break; // not a quadratic edge
}
else if ( anElemNode == aNode1 )
aResult1++;
}
}
}
}
int aResult = std::max ( aResult0, aResult1 );
return aResult;
}
  // Return the unit normal of a face built from its first three (and, for
  // faces with more than 3 nodes, fourth) corner nodes.  When the computed
  // normal has (near-)zero length it is returned unnormalized and *ok is
  // set to false.
  gp_XYZ getNormale( const SMDS_MeshFace* theFace, bool* ok=0 )
  {
    int aNbNode = theFace->NbNodes();

    gp_XYZ q1 = gpXYZ( theFace->GetNode(1)) - gpXYZ( theFace->GetNode(0));
    gp_XYZ q2 = gpXYZ( theFace->GetNode(2)) - gpXYZ( theFace->GetNode(0));
    gp_XYZ n  = q1 ^ q2;
    if ( aNbNode > 3 ) {
      // quad (or larger): add the second triangle's cross product
      gp_XYZ q3 = gpXYZ( theFace->GetNode(3)) - gpXYZ( theFace->GetNode(0));
      n += q2 ^ q3;
    }
    double len = n.Modulus();
    // NOTE(review): numeric_limits is unqualified here — presumably a
    // using-directive earlier in the file; confirm.
    bool zeroLen = ( len <= numeric_limits<double>::min());
    if ( !zeroLen )
      n /= len;

    if (ok) *ok = !zeroLen;

    return n;
  }
}
using namespace SMESH::Controls;
/*
* FUNCTORS
*/
//================================================================================
/*
Class : NumericalFunctor
Description : Base class for numerical functors
*/
//================================================================================
NumericalFunctor::NumericalFunctor():
myMesh(NULL)
{
myPrecision = -1;
}
void NumericalFunctor::SetMesh( const SMDS_Mesh* theMesh )
{
myMesh = theMesh;
}
bool NumericalFunctor::GetPoints(const int theId,
TSequenceOfXYZ& theRes ) const
{
theRes.clear();
if ( myMesh == 0 )
return false;
const SMDS_MeshElement* anElem = myMesh->FindElement( theId );
if ( !anElem || anElem->GetType() != this->GetType() )
return false;
return GetPoints( anElem, theRes );
}
bool NumericalFunctor::GetPoints(const SMDS_MeshElement* anElem,
TSequenceOfXYZ& theRes )
{
theRes.clear();
if ( anElem == 0 )
return false;
theRes.reserve( anElem->NbNodes() );
theRes.setElement( anElem );
// Get nodes of the element
SMDS_ElemIteratorPtr anIter;
if ( anElem->IsQuadratic() ) {
switch ( anElem->GetType() ) {
case SMDSAbs_Edge:
anIter = dynamic_cast<const SMDS_VtkEdge*>
(anElem)->interlacedNodesElemIterator();
break;
case SMDSAbs_Face:
anIter = dynamic_cast<const SMDS_VtkFace*>
(anElem)->interlacedNodesElemIterator();
break;
default:
anIter = anElem->nodesIterator();
}
}
else {
anIter = anElem->nodesIterator();
}
if ( anIter ) {
double xyz[3];
while( anIter->more() ) {
if ( const SMDS_MeshNode* aNode = static_cast<const SMDS_MeshNode*>( anIter->next() ))
{
aNode->GetXYZ( xyz );
theRes.push_back( gp_XYZ( xyz[0], xyz[1], xyz[2] ));
}
}
}
return true;
}
long NumericalFunctor::GetPrecision() const
{
return myPrecision;
}
void NumericalFunctor::SetPrecision( const long thePrecision )
{
myPrecision = thePrecision;
myPrecisionValue = pow( 10., (double)( myPrecision ) );
}
double NumericalFunctor::GetValue( long theId )
{
double aVal = 0;
myCurrElement = myMesh->FindElement( theId );
TSequenceOfXYZ P;
if ( GetPoints( theId, P )) // elem type is checked here
aVal = Round( GetValue( P ));
return aVal;
}
double NumericalFunctor::Round( const double & aVal )
{
return ( myPrecision >= 0 ) ? floor( aVal * myPrecisionValue + 0.5 ) / myPrecisionValue : aVal;
}
//================================================================================
/*!
* \brief Return histogram of functor values
* \param nbIntervals - number of intervals
* \param nbEvents - number of mesh elements having values within i-th interval
* \param funValues - boundaries of intervals
* \param elements - elements to check vulue of; empty list means "of all"
* \param minmax - boundaries of diapason of values to divide into intervals
*/
//================================================================================
void NumericalFunctor::GetHistogram(int nbIntervals,
                                    std::vector<int>& nbEvents,
                                    std::vector<double>& funValues,
                                    const vector<int>& elements,
                                    const double* minmax,
                                    const bool isLogarithmic)
{
  if ( nbIntervals < 1 ||
       !myMesh ||
       !myMesh->GetMeshInfo().NbElements( GetType() ))
    return;
  nbEvents.resize( nbIntervals, 0 );
  funValues.resize( nbIntervals+1 );

  // get all values sorted
  std::multiset< double > values;
  if ( elements.empty() )
  {
    SMDS_ElemIteratorPtr elemIt = myMesh->elementsIterator( GetType() );
    while ( elemIt->more() )
      values.insert( GetValue( elemIt->next()->GetID() ));
  }
  else
  {
    vector<int>::const_iterator id = elements.begin();
    for ( ; id != elements.end(); ++id )
      values.insert( GetValue( *id ));
  }

  // interval bounds: either imposed by the caller or spanning the data
  if ( minmax )
  {
    funValues[0] = minmax[0];
    funValues[nbIntervals] = minmax[1];
  }
  else
  {
    funValues[0] = *values.begin();
    funValues[nbIntervals] = *values.rbegin();
  }

  // case nbIntervals == 1
  if ( nbIntervals == 1 )
  {
    nbEvents[0] = values.size();
    return;
  }

  // case of 1 value
  if (funValues.front() == funValues.back())
  {
    nbEvents.resize( 1 );
    nbEvents[0] = values.size();
    funValues[1] = funValues.back();
    funValues.resize( 2 );
    // BUG FIX: without this return we fell through into the generic loop
    // below, which writes funValues[i+1] for i < nbIntervals — past the
    // end of the just-shrunken vector (undefined behavior).
    return;
  }

  // generic case
  std::multiset< double >::iterator min = values.begin(), max;
  for ( int i = 0; i < nbIntervals; ++i )
  {
    // find end value of i-th interval
    double r = (i+1) / double(nbIntervals);
    if (isLogarithmic && funValues.front() > 1e-07 && funValues.back() > 1e-07) {
      double logmin = log10(funValues.front());
      double lval = logmin + r * (log10(funValues.back()) - logmin);
      funValues[i+1] = pow(10.0, lval);
    }
    else {
      funValues[i+1] = funValues.front() * (1-r) + funValues.back() * r;
    }

    // count values in the i-th interval if there are any
    if ( min != values.end() && *min <= funValues[i+1] )
    {
      // find the first value out of the interval
      max = values.upper_bound( funValues[i+1] ); // max is greater than funValues[i+1], or end()
      nbEvents[i] = std::distance( min, max );
      min = max;
    }
  }
  // add values larger than minmax[1]
  nbEvents.back() += std::distance( min, values.end() );
}
//=======================================================================
/*
Class : Volume
Description : Functor calculating volume of a 3D element
*/
//================================================================================
double Volume::GetValue( long theElementId )
{
if ( theElementId && myMesh ) {
SMDS_VolumeTool aVolumeTool;
if ( aVolumeTool.Set( myMesh->FindElement( theElementId )))
return aVolumeTool.GetSize();
}
return 0;
}
double Volume::GetBadRate( double Value, int /*nbNodes*/ ) const
{
  // Not a quality control: the raw value itself serves as the rating.
  return Value;
}
SMDSAbs_ElementType Volume::GetType() const
{
  // This functor applies to volumic (3D) elements only.
  return SMDSAbs_Volume;
}
//=======================================================================
/*
Class : MaxElementLength2D
Description : Functor calculating maximum length of 2D element
*/
//================================================================================
// Returns the maximal side/diagonal length of a 2D element given its nodes.
// For quadratic elements a "side" is measured as the sum of the two
// half-edges through the medium node. Result may be rounded to myPrecision.
double MaxElementLength2D::GetValue( const TSequenceOfXYZ& P )
{
  if(P.size() == 0)
    return 0.;
  double aVal = 0;
  int len = P.size();
  if( len == 3 ) { // triangles
    double L1 = getDistance(P( 1 ),P( 2 ));
    double L2 = getDistance(P( 2 ),P( 3 ));
    double L3 = getDistance(P( 3 ),P( 1 ));
    aVal = Max(L1,Max(L2,L3));
  }
  else if( len == 4 ) { // quadrangles
    double L1 = getDistance(P( 1 ),P( 2 ));
    double L2 = getDistance(P( 2 ),P( 3 ));
    double L3 = getDistance(P( 3 ),P( 4 ));
    double L4 = getDistance(P( 4 ),P( 1 ));
    // diagonals are taken into account as well
    double D1 = getDistance(P( 1 ),P( 3 ));
    double D2 = getDistance(P( 2 ),P( 4 ));
    aVal = Max(Max(Max(L1,L2),Max(L3,L4)),Max(D1,D2));
  }
  else if( len == 6 ) { // quadratic triangles
    double L1 = getDistance(P( 1 ),P( 2 )) + getDistance(P( 2 ),P( 3 ));
    double L2 = getDistance(P( 3 ),P( 4 )) + getDistance(P( 4 ),P( 5 ));
    double L3 = getDistance(P( 5 ),P( 6 )) + getDistance(P( 6 ),P( 1 ));
    aVal = Max(L1,Max(L2,L3));
  }
  else if( len == 8 || len == 9 ) { // quadratic quadrangles
    double L1 = getDistance(P( 1 ),P( 2 )) + getDistance(P( 2 ),P( 3 ));
    double L2 = getDistance(P( 3 ),P( 4 )) + getDistance(P( 4 ),P( 5 ));
    double L3 = getDistance(P( 5 ),P( 6 )) + getDistance(P( 6 ),P( 7 ));
    double L4 = getDistance(P( 7 ),P( 8 )) + getDistance(P( 8 ),P( 1 ));
    double D1 = getDistance(P( 1 ),P( 5 ));
    double D2 = getDistance(P( 3 ),P( 7 ));
    aVal = Max(Max(Max(L1,L2),Max(L3,L4)),Max(D1,D2));
  }
  // Diagonals are undefined for concave polygons
  // else if ( P.getElementEntity() == SMDSEntity_Quad_Polygon && P.size() > 2 ) // quad polygon
  // {
  //   // sides
  //   aVal = getDistance( P( 1 ), P( P.size() )) + getDistance( P( P.size() ), P( P.size()-1 ));
  //   for ( size_t i = 1; i < P.size()-1; i += 2 )
  //   {
  //     double L = getDistance( P( i ), P( i+1 )) + getDistance( P( i+1 ), P( i+2 ));
  //     aVal = Max( aVal, L );
  //   }
  //   // diagonals
  //   for ( int i = P.size()-5; i > 0; i -= 2 )
  //     for ( int j = i + 4; j < P.size() + i - 2; i += 2 )
  //     {
  //       double D = getDistance( P( i ), P( j ));
  //       aVal = Max( aVal, D );
  //     }
  // }
  // { // polygons
  // }

  if( myPrecision >= 0 )
  {
    // round to myPrecision decimal digits
    double prec = pow( 10., (double)myPrecision );
    aVal = floor( aVal * prec + 0.5 ) / prec;
  }
  return aVal;
}
double MaxElementLength2D::GetValue( long theElementId )
{
TSequenceOfXYZ P;
return GetPoints( theElementId, P ) ? GetValue(P) : 0.0;
}
double MaxElementLength2D::GetBadRate( double Value, int /*nbNodes*/ ) const
{
  // Not a quality control: the raw value itself serves as the rating.
  return Value;
}
SMDSAbs_ElementType MaxElementLength2D::GetType() const
{
  // This functor applies to faces (2D elements) only.
  return SMDSAbs_Face;
}
//=======================================================================
/*
Class : MaxElementLength3D
Description : Functor calculating maximum length of 3D element
*/
//================================================================================
double MaxElementLength3D::GetValue( long theElementId )
{
  // Returns the maximal edge/diagonal length of a 3D element.
  // For quadratic elements an "edge" is the sum of the two half-edges
  // through the medium node. Result may be rounded to myPrecision digits.
  TSequenceOfXYZ P;
  if( GetPoints( theElementId, P ) ) {
    double aVal = 0;
    const SMDS_MeshElement* aElem = myMesh->FindElement( theElementId );
    SMDSAbs_ElementType aType = aElem->GetType();
    int len = P.size();
    switch( aType ) {
    case SMDSAbs_Volume:
      if( len == 4 ) { // tetras
        double L1 = getDistance(P( 1 ),P( 2 ));
        double L2 = getDistance(P( 2 ),P( 3 ));
        double L3 = getDistance(P( 3 ),P( 1 ));
        double L4 = getDistance(P( 1 ),P( 4 ));
        double L5 = getDistance(P( 2 ),P( 4 ));
        double L6 = getDistance(P( 3 ),P( 4 ));
        aVal = Max(Max(Max(L1,L2),Max(L3,L4)),Max(L5,L6));
        break;
      }
      else if( len == 5 ) { // pyramids
        double L1 = getDistance(P( 1 ),P( 2 ));
        double L2 = getDistance(P( 2 ),P( 3 ));
        double L3 = getDistance(P( 3 ),P( 4 ));
        double L4 = getDistance(P( 4 ),P( 1 ));
        double L5 = getDistance(P( 1 ),P( 5 ));
        double L6 = getDistance(P( 2 ),P( 5 ));
        double L7 = getDistance(P( 3 ),P( 5 ));
        double L8 = getDistance(P( 4 ),P( 5 ));
        aVal = Max(Max(Max(L1,L2),Max(L3,L4)),Max(L5,L6));
        aVal = Max(aVal,Max(L7,L8));
        break;
      }
      else if( len == 6 ) { // pentas
        double L1 = getDistance(P( 1 ),P( 2 ));
        double L2 = getDistance(P( 2 ),P( 3 ));
        double L3 = getDistance(P( 3 ),P( 1 ));
        double L4 = getDistance(P( 4 ),P( 5 ));
        double L5 = getDistance(P( 5 ),P( 6 ));
        double L6 = getDistance(P( 6 ),P( 4 ));
        double L7 = getDistance(P( 1 ),P( 4 ));
        double L8 = getDistance(P( 2 ),P( 5 ));
        double L9 = getDistance(P( 3 ),P( 6 ));
        aVal = Max(Max(Max(L1,L2),Max(L3,L4)),Max(L5,L6));
        aVal = Max(aVal,Max(Max(L7,L8),L9));
        break;
      }
      else if( len == 8 ) { // hexas
        double L1 = getDistance(P( 1 ),P( 2 ));
        double L2 = getDistance(P( 2 ),P( 3 ));
        double L3 = getDistance(P( 3 ),P( 4 ));
        double L4 = getDistance(P( 4 ),P( 1 ));
        double L5 = getDistance(P( 5 ),P( 6 ));
        double L6 = getDistance(P( 6 ),P( 7 ));
        double L7 = getDistance(P( 7 ),P( 8 ));
        double L8 = getDistance(P( 8 ),P( 5 ));
        double L9 = getDistance(P( 1 ),P( 5 ));
        double L10= getDistance(P( 2 ),P( 6 ));
        double L11= getDistance(P( 3 ),P( 7 ));
        double L12= getDistance(P( 4 ),P( 8 ));
        // the 4 volume diagonals are taken into account as well
        double D1 = getDistance(P( 1 ),P( 7 ));
        double D2 = getDistance(P( 2 ),P( 8 ));
        double D3 = getDistance(P( 3 ),P( 5 ));
        double D4 = getDistance(P( 4 ),P( 6 ));
        aVal = Max(Max(Max(L1,L2),Max(L3,L4)),Max(L5,L6));
        aVal = Max(aVal,Max(Max(L7,L8),Max(L9,L10)));
        aVal = Max(aVal,Max(L11,L12));
        aVal = Max(aVal,Max(Max(D1,D2),Max(D3,D4)));
        break;
      }
      else if( len == 12 ) { // hexagonal prism
        // Take the maximal distance over all node pairs.
        // NB: the previous code incremented/tested i1 in the inner loop,
        // so only the pairs ( i1, 2 ) were ever measured.
        for ( int i1 = 1; i1 < 12; ++i1 )
          for ( int i2 = i1+1; i2 <= 12; ++i2 )
            aVal = Max( aVal, getDistance(P( i1 ),P( i2 )));
        break;
      }
      else if( len == 10 ) { // quadratic tetras
        double L1 = getDistance(P( 1 ),P( 5 )) + getDistance(P( 5 ),P( 2 ));
        double L2 = getDistance(P( 2 ),P( 6 )) + getDistance(P( 6 ),P( 3 ));
        double L3 = getDistance(P( 3 ),P( 7 )) + getDistance(P( 7 ),P( 1 ));
        double L4 = getDistance(P( 1 ),P( 8 )) + getDistance(P( 8 ),P( 4 ));
        double L5 = getDistance(P( 2 ),P( 9 )) + getDistance(P( 9 ),P( 4 ));
        double L6 = getDistance(P( 3 ),P( 10 )) + getDistance(P( 10 ),P( 4 ));
        aVal = Max(Max(Max(L1,L2),Max(L3,L4)),Max(L5,L6));
        break;
      }
      else if( len == 13 ) { // quadratic pyramids
        double L1 = getDistance(P( 1 ),P( 6 )) + getDistance(P( 6 ),P( 2 ));
        double L2 = getDistance(P( 2 ),P( 7 )) + getDistance(P( 7 ),P( 3 ));
        double L3 = getDistance(P( 3 ),P( 8 )) + getDistance(P( 8 ),P( 4 ));
        double L4 = getDistance(P( 4 ),P( 9 )) + getDistance(P( 9 ),P( 1 ));
        double L5 = getDistance(P( 1 ),P( 10 )) + getDistance(P( 10 ),P( 5 ));
        double L6 = getDistance(P( 2 ),P( 11 )) + getDistance(P( 11 ),P( 5 ));
        double L7 = getDistance(P( 3 ),P( 12 )) + getDistance(P( 12 ),P( 5 ));
        double L8 = getDistance(P( 4 ),P( 13 )) + getDistance(P( 13 ),P( 5 ));
        aVal = Max(Max(Max(L1,L2),Max(L3,L4)),Max(L5,L6));
        aVal = Max(aVal,Max(L7,L8));
        break;
      }
      else if( len == 15 ) { // quadratic pentas
        double L1 = getDistance(P( 1 ),P( 7 )) + getDistance(P( 7 ),P( 2 ));
        double L2 = getDistance(P( 2 ),P( 8 )) + getDistance(P( 8 ),P( 3 ));
        double L3 = getDistance(P( 3 ),P( 9 )) + getDistance(P( 9 ),P( 1 ));
        double L4 = getDistance(P( 4 ),P( 10 )) + getDistance(P( 10 ),P( 5 ));
        double L5 = getDistance(P( 5 ),P( 11 )) + getDistance(P( 11 ),P( 6 ));
        double L6 = getDistance(P( 6 ),P( 12 )) + getDistance(P( 12 ),P( 4 ));
        double L7 = getDistance(P( 1 ),P( 13 )) + getDistance(P( 13 ),P( 4 ));
        double L8 = getDistance(P( 2 ),P( 14 )) + getDistance(P( 14 ),P( 5 ));
        double L9 = getDistance(P( 3 ),P( 15 )) + getDistance(P( 15 ),P( 6 ));
        aVal = Max(Max(Max(L1,L2),Max(L3,L4)),Max(L5,L6));
        aVal = Max(aVal,Max(Max(L7,L8),L9));
        break;
      }
      else if( len == 20 || len == 27 ) { // quadratic hexas
        double L1 = getDistance(P( 1 ),P( 9 )) + getDistance(P( 9 ),P( 2 ));
        double L2 = getDistance(P( 2 ),P( 10 )) + getDistance(P( 10 ),P( 3 ));
        double L3 = getDistance(P( 3 ),P( 11 )) + getDistance(P( 11 ),P( 4 ));
        double L4 = getDistance(P( 4 ),P( 12 )) + getDistance(P( 12 ),P( 1 ));
        double L5 = getDistance(P( 5 ),P( 13 )) + getDistance(P( 13 ),P( 6 ));
        double L6 = getDistance(P( 6 ),P( 14 )) + getDistance(P( 14 ),P( 7 ));
        double L7 = getDistance(P( 7 ),P( 15 )) + getDistance(P( 15 ),P( 8 ));
        double L8 = getDistance(P( 8 ),P( 16 )) + getDistance(P( 16 ),P( 5 ));
        double L9 = getDistance(P( 1 ),P( 17 )) + getDistance(P( 17 ),P( 5 ));
        double L10= getDistance(P( 2 ),P( 18 )) + getDistance(P( 18 ),P( 6 ));
        double L11= getDistance(P( 3 ),P( 19 )) + getDistance(P( 19 ),P( 7 ));
        double L12= getDistance(P( 4 ),P( 20 )) + getDistance(P( 20 ),P( 8 ));
        // straight-line diagonals between opposite corner nodes
        double D1 = getDistance(P( 1 ),P( 7 ));
        double D2 = getDistance(P( 2 ),P( 8 ));
        double D3 = getDistance(P( 3 ),P( 5 ));
        double D4 = getDistance(P( 4 ),P( 6 ));
        aVal = Max(Max(Max(L1,L2),Max(L3,L4)),Max(L5,L6));
        aVal = Max(aVal,Max(Max(L7,L8),Max(L9,L10)));
        aVal = Max(aVal,Max(L11,L12));
        aVal = Max(aVal,Max(Max(D1,D2),Max(D3,D4)));
        break;
      }
      else if( len > 1 && aElem->IsPoly() ) { // polys
        // get the maximum distance between all pairs of nodes
        for( int i = 1; i <= len; i++ ) {
          for( int j = 1; j <= len; j++ ) {
            if( j > i ) { // optimization of the loop
              double D = getDistance( P(i), P(j) );
              aVal = Max( aVal, D );
            }
          }
        }
      }
    }

    if( myPrecision >= 0 )
    {
      // round to myPrecision decimal digits
      double prec = pow( 10., (double)myPrecision );
      aVal = floor( aVal * prec + 0.5 ) / prec;
    }
    return aVal;
  }
  return 0.;
}
double MaxElementLength3D::GetBadRate( double Value, int /*nbNodes*/ ) const
{
  // Not a quality control: the raw value itself serves as the rating.
  return Value;
}
SMDSAbs_ElementType MaxElementLength3D::GetType() const
{
  // This functor applies to volumic (3D) elements only.
  return SMDSAbs_Volume;
}
//=======================================================================
/*
Class : MinimumAngle
Description : Functor for calculation of minimum angle
*/
//================================================================================
double MinimumAngle::GetValue( const TSequenceOfXYZ& P )
{
double aMin;
if (P.size() <3)
return 0.;
aMin = getAngle(P( P.size() ), P( 1 ), P( 2 ));
aMin = Min(aMin,getAngle(P( P.size()-1 ), P( P.size() ), P( 1 )));
for ( int i = 2; i < P.size(); i++ )
{
double A0 = getAngle( P( i-1 ), P( i ), P( i+1 ) );
aMin = Min(aMin,A0);
}
return aMin * 180.0 / M_PI;
}
double MinimumAngle::GetBadRate( double Value, int nbNodes ) const
{
  //const double aBestAngle = PI / nbNodes;
  // Corner angle of a regular nbNodes-gon, in degrees; the rate is the
  // deviation from that ideal.
  const double aBestAngle = 180.0 - ( 360.0 / double(nbNodes) );
  return ( fabs( aBestAngle - Value ));
}
SMDSAbs_ElementType MinimumAngle::GetType() const
{
  // This functor applies to faces (2D elements) only.
  return SMDSAbs_Face;
}
//================================================================================
/*
Class : AspectRatio
Description : Functor for calculating aspect ratio
*/
//================================================================================
double AspectRatio::GetValue( long theId )
{
  double aVal = 0;
  myCurrElement = myMesh->FindElement( theId );
  if ( myCurrElement && myCurrElement->GetVtkType() == VTK_QUAD )
  {
    // issue 21723: linear quadrangles are delegated to vtkMeshQuality so
    // that SMESH and ParaView report the same aspect ratio.
    vtkUnstructuredGrid* grid = SMDS_Mesh::_meshList[myCurrElement->getMeshId()]->getGrid();
    if ( vtkCell* avtkCell = grid->GetCell( myCurrElement->getVtkId() ))
      aVal = Round( vtkMeshQuality::QuadAspectRatio( avtkCell ));
  }
  else
  {
    // all other face types are rated by the local point-based formula
    TSequenceOfXYZ P;
    if ( GetPoints( myCurrElement, P ))
      aVal = Round( GetValue( P ));
  }
  return aVal;
}
double AspectRatio::GetValue( const TSequenceOfXYZ& P )
{
// According to "Mesh quality control" by Nadir Bouhamau referring to
// Pascal Jean Frey and Paul-Louis George. Maillages, applications aux elements finis.
// Hermes Science publications, Paris 1999 ISBN 2-7462-0024-4
// PAL10872
int nbNodes = P.size();
if ( nbNodes < 3 )
return 0;
// Compute aspect ratio
if ( nbNodes == 3 ) {
// Compute lengths of the sides
std::vector< double > aLen (nbNodes);
for ( int i = 0; i < nbNodes - 1; i++ )
aLen[ i ] = getDistance( P( i + 1 ), P( i + 2 ) );
aLen[ nbNodes - 1 ] = getDistance( P( 1 ), P( nbNodes ) );
// Q = alfa * h * p / S, where
//
// alfa = sqrt( 3 ) / 6
// h - length of the longest edge
// p - half perimeter
// S - triangle surface
const double alfa = sqrt( 3. ) / 6.;
double maxLen = Max( aLen[ 0 ], Max( aLen[ 1 ], aLen[ 2 ] ) );
double half_perimeter = ( aLen[0] + aLen[1] + aLen[2] ) / 2.;
double anArea = getArea( P( 1 ), P( 2 ), P( 3 ) );
if ( anArea <= theEps )
return theInf;
return alfa * maxLen * half_perimeter / anArea;
}
else if ( nbNodes == 6 ) { // quadratic triangles
// Compute lengths of the sides
std::vector< double > aLen (3);
aLen[0] = getDistance( P(1), P(3) );
aLen[1] = getDistance( P(3), P(5) );
aLen[2] = getDistance( P(5), P(1) );
// Q = alfa * h * p / S, where
//
// alfa = sqrt( 3 ) / 6
// h - length of the longest edge
// p - half perimeter
// S - triangle surface
const double alfa = sqrt( 3. ) / 6.;
double maxLen = Max( aLen[ 0 ], Max( aLen[ 1 ], aLen[ 2 ] ) );
double half_perimeter = ( aLen[0] + aLen[1] + aLen[2] ) / 2.;
double anArea = getArea( P(1), P(3), P(5) );
if ( anArea <= theEps )
return theInf;
return alfa * maxLen * half_perimeter / anArea;
}
else if( nbNodes == 4 ) { // quadrangle
// Compute lengths of the sides
std::vector< double > aLen (4);
aLen[0] = getDistance( P(1), P(2) );
aLen[1] = getDistance( P(2), P(3) );
aLen[2] = getDistance( P(3), P(4) );
aLen[3] = getDistance( P(4), P(1) );
// Compute lengths of the diagonals
std::vector< double > aDia (2);
aDia[0] = getDistance( P(1), P(3) );
aDia[1] = getDistance( P(2), P(4) );
// Compute areas of all triangles which can be built
// taking three nodes of the quadrangle
std::vector< double > anArea (4);
anArea[0] = getArea( P(1), P(2), P(3) );
anArea[1] = getArea( P(1), P(2), P(4) );
anArea[2] = getArea( P(1), P(3), P(4) );
anArea[3] = getArea( P(2), P(3), P(4) );
// Q = alpha * L * C1 / C2, where
//
// alpha = sqrt( 1/32 )
// L = max( L1, L2, L3, L4, D1, D2 )
// C1 = sqrt( ( L1^2 + L1^2 + L1^2 + L1^2 ) / 4 )
// C2 = min( S1, S2, S3, S4 )
// Li - lengths of the edges
// Di - lengths of the diagonals
// Si - areas of the triangles
const double alpha = sqrt( 1 / 32. );
double L = Max( aLen[ 0 ],
Max( aLen[ 1 ],
Max( aLen[ 2 ],
Max( aLen[ 3 ],
Max( aDia[ 0 ], aDia[ 1 ] ) ) ) ) );
double C1 = sqrt( ( aLen[0] * aLen[0] +
aLen[1] * aLen[1] +
aLen[2] * aLen[2] +
aLen[3] * aLen[3] ) / 4. );
double C2 = Min( anArea[ 0 ],
Min( anArea[ 1 ],
Min( anArea[ 2 ], anArea[ 3 ] ) ) );
if ( C2 <= theEps )
return theInf;
return alpha * L * C1 / C2;
}
else if( nbNodes == 8 || nbNodes == 9 ) { // nbNodes==8 - quadratic quadrangle
// Compute lengths of the sides
std::vector< double > aLen (4);
aLen[0] = getDistance( P(1), P(3) );
aLen[1] = getDistance( P(3), P(5) );
aLen[2] = getDistance( P(5), P(7) );
aLen[3] = getDistance( P(7), P(1) );
// Compute lengths of the diagonals
std::vector< double > aDia (2);
aDia[0] = getDistance( P(1), P(5) );
aDia[1] = getDistance( P(3), P(7) );
// Compute areas of all triangles which can be built
// taking three nodes of the quadrangle
std::vector< double > anArea (4);
anArea[0] = getArea( P(1), P(3), P(5) );
anArea[1] = getArea( P(1), P(3), P(7) );
anArea[2] = getArea( P(1), P(5), P(7) );
anArea[3] = getArea( P(3), P(5), P(7) );
// Q = alpha * L * C1 / C2, where
//
// alpha = sqrt( 1/32 )
// L = max( L1, L2, L3, L4, D1, D2 )
// C1 = sqrt( ( L1^2 + L1^2 + L1^2 + L1^2 ) / 4 )
// C2 = min( S1, S2, S3, S4 )
// Li - lengths of the edges
// Di - lengths of the diagonals
// Si - areas of the triangles
const double alpha = sqrt( 1 / 32. );
double L = Max( aLen[ 0 ],
Max( aLen[ 1 ],
Max( aLen[ 2 ],
Max( aLen[ 3 ],
Max( aDia[ 0 ], aDia[ 1 ] ) ) ) ) );
double C1 = sqrt( ( aLen[0] * aLen[0] +
aLen[1] * aLen[1] +
aLen[2] * aLen[2] +
aLen[3] * aLen[3] ) / 4. );
double C2 = Min( anArea[ 0 ],
Min( anArea[ 1 ],
Min( anArea[ 2 ], anArea[ 3 ] ) ) );
if ( C2 <= theEps )
return theInf;
return alpha * L * C1 / C2;
}
return 0;
}
double AspectRatio::GetBadRate( double Value, int /*nbNodes*/ ) const
{
  // the aspect ratio is in the range [1.0,infinity]
  // < 1.0 = very bad, zero area
  // 1.0 = good
  // infinity = bad
  // Near-zero-area faces get a large penalty; otherwise scale down.
  return ( Value < 0.9 ) ? 1000 : Value / 1000.;
}
SMDSAbs_ElementType AspectRatio::GetType() const
{
  // This functor applies to faces (2D elements) only.
  return SMDSAbs_Face;
}
//================================================================================
/*
Class : AspectRatio3D
Description : Functor for calculating aspect ratio
*/
//================================================================================
namespace{
  // Local helpers used by AspectRatio3D for tetrahedron metrics.

  // Half perimeter of a triangle given its three side lengths.
  inline double getHalfPerimeter(double theTria[3]){
    return (theTria[0] + theTria[1] + theTria[2])/2.0;
  }

  // Heron's formula: triangle area from the half perimeter and side lengths.
  inline double getArea(double theHalfPerim, double theTria[3]){
    return sqrt(theHalfPerim*
                (theHalfPerim-theTria[0])*
                (theHalfPerim-theTria[1])*
                (theHalfPerim-theTria[2]));
  }

  // Tetrahedron volume from the 6 edge lengths.
  // NOTE(review): only referenced from commented-out code below; the R term
  // squares one factor and looks asymmetric — verify against the
  // Cayley-Menger determinant before reusing this helper.
  inline double getVolume(double theLen[6]){
    double a2 = theLen[0]*theLen[0];
    double b2 = theLen[1]*theLen[1];
    double c2 = theLen[2]*theLen[2];
    double d2 = theLen[3]*theLen[3];
    double e2 = theLen[4]*theLen[4];
    double f2 = theLen[5]*theLen[5];
    double P = 4.0*a2*b2*d2;
    double Q = a2*(b2+d2-e2)-b2*(a2+d2-f2)-d2*(a2+b2-c2);
    double R = (b2+d2-e2)*(a2+d2-f2)*(a2+d2-f2);
    return sqrt(P-Q+R)/12.0;
  }

  // Alternative edge-length based volume formula (currently unused).
  inline double getVolume2(double theLen[6]){
    double a2 = theLen[0]*theLen[0];
    double b2 = theLen[1]*theLen[1];
    double c2 = theLen[2]*theLen[2];
    double d2 = theLen[3]*theLen[3];
    double e2 = theLen[4]*theLen[4];
    double f2 = theLen[5]*theLen[5];

    double P = a2*e2*(b2+c2+d2+f2-a2-e2);
    double Q = b2*f2*(a2+c2+d2+e2-b2-f2);
    double R = c2*d2*(a2+b2+e2+f2-c2-d2);
    double S = a2*b2*d2+b2*c2*e2+a2*c2*f2+d2*e2*f2;

    return sqrt(P+Q+R-S)/12.0;
  }

  // Tetrahedron volume from its 4 corner points:
  // |((P2-P1) x (P3-P1)) . (P4-P1)| / 6.
  inline double getVolume(const TSequenceOfXYZ& P){
    gp_Vec aVec1( P( 2 ) - P( 1 ) );
    gp_Vec aVec2( P( 3 ) - P( 1 ) );
    gp_Vec aVec3( P( 4 ) - P( 1 ) );
    gp_Vec anAreaVec( aVec1 ^ aVec2 );
    return fabs(aVec3 * anAreaVec) / 6.0;
  }

  // Longest of the 6 edge lengths (used as the "height" in the quality formula).
  inline double getMaxHeight(double theLen[6])
  {
    double aHeight = std::max(theLen[0],theLen[1]);
    aHeight = std::max(aHeight,theLen[2]);
    aHeight = std::max(aHeight,theLen[3]);
    aHeight = std::max(aHeight,theLen[4]);
    aHeight = std::max(aHeight,theLen[5]);
    return aHeight;
  }
}
double AspectRatio3D::GetValue( long theId )
{
  double aVal = 0;
  myCurrElement = myMesh->FindElement( theId );
  if ( myCurrElement && myCurrElement->GetVtkType() == VTK_TETRA )
  {
    // Action from CoTech | ACTION 31.3:
    // EURIWARE BO: Homogenize the formulas used to calculate the Controls in SMESH to fit with
    // those of ParaView. The library used by ParaView for those calculations can be reused in SMESH.
    // Linear tetrahedra are therefore delegated to vtkMeshQuality.
    vtkUnstructuredGrid* grid = SMDS_Mesh::_meshList[myCurrElement->getMeshId()]->getGrid();
    if ( vtkCell* avtkCell = grid->GetCell( myCurrElement->getVtkId() ))
      aVal = Round( vtkMeshQuality::TetAspectRatio( avtkCell ));
  }
  else
  {
    // all other volume types are rated by the local point-based formula
    TSequenceOfXYZ P;
    if ( GetPoints( myCurrElement, P ))
      aVal = Round( GetValue( P ));
  }
  return aVal;
}
double AspectRatio3D::GetValue( const TSequenceOfXYZ& P )
{
double aQuality = 0.0;
if(myCurrElement->IsPoly()) return aQuality;
int nbNodes = P.size();
if(myCurrElement->IsQuadratic()) {
if(nbNodes==10) nbNodes=4; // quadratic tetrahedron
else if(nbNodes==13) nbNodes=5; // quadratic pyramid
else if(nbNodes==15) nbNodes=6; // quadratic pentahedron
else if(nbNodes==20) nbNodes=8; // quadratic hexahedron
else if(nbNodes==27) nbNodes=8; // quadratic hexahedron
else return aQuality;
}
switch(nbNodes) {
case 4:{
double aLen[6] = {
getDistance(P( 1 ),P( 2 )), // a
getDistance(P( 2 ),P( 3 )), // b
getDistance(P( 3 ),P( 1 )), // c
getDistance(P( 2 ),P( 4 )), // d
getDistance(P( 3 ),P( 4 )), // e
getDistance(P( 1 ),P( 4 )) // f
};
double aTria[4][3] = {
{aLen[0],aLen[1],aLen[2]}, // abc
{aLen[0],aLen[3],aLen[5]}, // adf
{aLen[1],aLen[3],aLen[4]}, // bde
{aLen[2],aLen[4],aLen[5]} // cef
};
double aSumArea = 0.0;
double aHalfPerimeter = getHalfPerimeter(aTria[0]);
double anArea = getArea(aHalfPerimeter,aTria[0]);
aSumArea += anArea;
aHalfPerimeter = getHalfPerimeter(aTria[1]);
anArea = getArea(aHalfPerimeter,aTria[1]);
aSumArea += anArea;
aHalfPerimeter = getHalfPerimeter(aTria[2]);
anArea = getArea(aHalfPerimeter,aTria[2]);
aSumArea += anArea;
aHalfPerimeter = getHalfPerimeter(aTria[3]);
anArea = getArea(aHalfPerimeter,aTria[3]);
aSumArea += anArea;
double aVolume = getVolume(P);
//double aVolume = getVolume(aLen);
double aHeight = getMaxHeight(aLen);
static double aCoeff = sqrt(2.0)/12.0;
if ( aVolume > DBL_MIN )
aQuality = aCoeff*aHeight*aSumArea/aVolume;
break;
}
case 5:{
{
gp_XYZ aXYZ[4] = {P( 1 ),P( 2 ),P( 3 ),P( 5 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 1 ),P( 3 ),P( 4 ),P( 5 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 1 ),P( 2 ),P( 4 ),P( 5 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 2 ),P( 3 ),P( 4 ),P( 5 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
break;
}
case 6:{
{
gp_XYZ aXYZ[4] = {P( 1 ),P( 2 ),P( 4 ),P( 6 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 1 ),P( 2 ),P( 4 ),P( 3 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 1 ),P( 2 ),P( 5 ),P( 6 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 1 ),P( 2 ),P( 5 ),P( 3 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 2 ),P( 5 ),P( 4 ),P( 6 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 2 ),P( 5 ),P( 4 ),P( 3 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
break;
}
case 8:{
{
gp_XYZ aXYZ[4] = {P( 1 ),P( 2 ),P( 5 ),P( 3 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 1 ),P( 2 ),P( 5 ),P( 4 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 1 ),P( 2 ),P( 5 ),P( 7 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 1 ),P( 2 ),P( 5 ),P( 8 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 1 ),P( 2 ),P( 6 ),P( 3 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 1 ),P( 2 ),P( 6 ),P( 4 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 1 ),P( 2 ),P( 6 ),P( 7 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 1 ),P( 2 ),P( 6 ),P( 8 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 2 ),P( 6 ),P( 5 ),P( 3 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 2 ),P( 6 ),P( 5 ),P( 4 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 2 ),P( 6 ),P( 5 ),P( 7 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 2 ),P( 6 ),P( 5 ),P( 8 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 3 ),P( 4 ),P( 8 ),P( 1 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 3 ),P( 4 ),P( 8 ),P( 2 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 3 ),P( 4 ),P( 8 ),P( 5 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 3 ),P( 4 ),P( 8 ),P( 6 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 3 ),P( 4 ),P( 7 ),P( 1 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 3 ),P( 4 ),P( 7 ),P( 2 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 3 ),P( 4 ),P( 7 ),P( 5 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 3 ),P( 4 ),P( 7 ),P( 6 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 4 ),P( 8 ),P( 7 ),P( 1 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 4 ),P( 8 ),P( 7 ),P( 2 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 4 ),P( 8 ),P( 7 ),P( 5 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 4 ),P( 8 ),P( 7 ),P( 6 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 4 ),P( 8 ),P( 7 ),P( 2 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 4 ),P( 5 ),P( 8 ),P( 2 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 1 ),P( 4 ),P( 5 ),P( 3 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 3 ),P( 6 ),P( 7 ),P( 1 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 2 ),P( 3 ),P( 6 ),P( 4 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 5 ),P( 6 ),P( 8 ),P( 3 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 7 ),P( 8 ),P( 6 ),P( 1 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 1 ),P( 2 ),P( 4 ),P( 7 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
{
gp_XYZ aXYZ[4] = {P( 3 ),P( 4 ),P( 2 ),P( 5 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[4])),aQuality);
}
break;
}
case 12:
{
gp_XYZ aXYZ[8] = {P( 1 ),P( 2 ),P( 4 ),P( 5 ),P( 7 ),P( 8 ),P( 10 ),P( 11 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[8])),aQuality);
}
{
gp_XYZ aXYZ[8] = {P( 2 ),P( 3 ),P( 5 ),P( 6 ),P( 8 ),P( 9 ),P( 11 ),P( 12 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[8])),aQuality);
}
{
gp_XYZ aXYZ[8] = {P( 3 ),P( 4 ),P( 6 ),P( 1 ),P( 9 ),P( 10 ),P( 12 ),P( 7 )};
aQuality = std::max(GetValue(TSequenceOfXYZ(&aXYZ[0],&aXYZ[8])),aQuality);
}
break;
} // switch(nbNodes)
if ( nbNodes > 4 ) {
// avaluate aspect ratio of quadranle faces
AspectRatio aspect2D;
SMDS_VolumeTool::VolumeType type = SMDS_VolumeTool::GetType( nbNodes );
int nbFaces = SMDS_VolumeTool::NbFaces( type );
TSequenceOfXYZ points(4);
for ( int i = 0; i < nbFaces; ++i ) { // loop on faces of a volume
if ( SMDS_VolumeTool::NbFaceNodes( type, i ) != 4 )
continue;
const int* pInd = SMDS_VolumeTool::GetFaceNodesIndices( type, i, true );
for ( int p = 0; p < 4; ++p ) // loop on nodes of a quadranle face
points( p + 1 ) = P( pInd[ p ] + 1 );
aQuality = std::max( aQuality, aspect2D.GetValue( points ));
}
}
return aQuality;
}
double AspectRatio3D::GetBadRate( double Value, int /*nbNodes*/ ) const
{
  // the aspect ratio is in the range [1.0,infinity]
  // 1.0 = good
  // infinity = bad
  return Value / 1000.;
}
SMDSAbs_ElementType AspectRatio3D::GetType() const
{
  // This functor applies to volumic (3D) elements only.
  return SMDSAbs_Volume;
}
//================================================================================
/*
Class : Warping
Description : Functor for calculating warping
*/
//================================================================================
double Warping::GetValue( const TSequenceOfXYZ& P )
{
if ( P.size() != 4 )
return 0;
gp_XYZ G = ( P( 1 ) + P( 2 ) + P( 3 ) + P( 4 ) ) / 4.;
double A1 = ComputeA( P( 1 ), P( 2 ), P( 3 ), G );
double A2 = ComputeA( P( 2 ), P( 3 ), P( 4 ), G );
double A3 = ComputeA( P( 3 ), P( 4 ), P( 1 ), G );
double A4 = ComputeA( P( 4 ), P( 1 ), P( 2 ), G );
double val = Max( Max( A1, A2 ), Max( A3, A4 ) );
const double eps = 0.1; // val is in degrees
return val < eps ? 0. : val;
}
// Deviation angle (degrees) of the triangle (thePnt1, thePnt2, thePnt3)
// relative to the quadrangle's gravity center theG.
double Warping::ComputeA( const gp_XYZ& thePnt1,
                          const gp_XYZ& thePnt2,
                          const gp_XYZ& thePnt3,
                          const gp_XYZ& theG ) const
{
  double aLen1 = gp_Pnt( thePnt1 ).Distance( gp_Pnt( thePnt2 ) );
  double aLen2 = gp_Pnt( thePnt2 ).Distance( gp_Pnt( thePnt3 ) );
  // reference length: half the shorter of the two edges
  double L = Min( aLen1, aLen2 ) * 0.5;
  if ( L < theEps )
    return theInf; // degenerate edge

  // normal of the plane through the two edge midpoints and theG
  gp_XYZ GI = ( thePnt2 + thePnt1 ) / 2. - theG;
  gp_XYZ GJ = ( thePnt3 + thePnt2 ) / 2. - theG;
  gp_XYZ N  = GI.Crossed( GJ );

  if ( N.Modulus() < gp::Resolution() )
    return M_PI / 2; // midpoints are collinear with theG

  N.Normalize();

  // H = distance of the corner node from that plane
  double H = ( thePnt2 - theG ).Dot( N );
  return asin( fabs( H / L ) ) * 180. / M_PI;
}
double Warping::GetBadRate( double Value, int /*nbNodes*/ ) const
{
  // the warp is in the range [0.0,PI/2]
  // 0.0 = good (no warp)
  // PI/2 = bad (face pliee)
  return Value;
}
SMDSAbs_ElementType Warping::GetType() const
{
  // This functor applies to faces (2D elements) only.
  return SMDSAbs_Face;
}
//================================================================================
/*
Class : Taper
Description : Functor for calculating taper
*/
//================================================================================
double Taper::GetValue( const TSequenceOfXYZ& P )
{
if ( P.size() != 4 )
return 0.;
// Compute taper
double J1 = getArea( P( 4 ), P( 1 ), P( 2 ) );
double J2 = getArea( P( 3 ), P( 1 ), P( 2 ) );
double J3 = getArea( P( 2 ), P( 3 ), P( 4 ) );
double J4 = getArea( P( 3 ), P( 4 ), P( 1 ) );
double JA = 0.25 * ( J1 + J2 + J3 + J4 );
if ( JA <= theEps )
return theInf;
double T1 = fabs( ( J1 - JA ) / JA );
double T2 = fabs( ( J2 - JA ) / JA );
double T3 = fabs( ( J3 - JA ) / JA );
double T4 = fabs( ( J4 - JA ) / JA );
double val = Max( Max( T1, T2 ), Max( T3, T4 ) );
const double eps = 0.01;
return val < eps ? 0. : val;
}
double Taper::GetBadRate( double Value, int /*nbNodes*/ ) const
{
  // the taper is in the range [0.0,1.0]
  // 0.0 = good (no taper)
  // 1.0 = bad (les cotes opposes sont allignes)
  return Value;
}
SMDSAbs_ElementType Taper::GetType() const
{
  // This functor applies to faces (2D elements) only.
  return SMDSAbs_Face;
}
//================================================================================
/*
Class : Skew
Description : Functor for calculating skew in degrees
*/
//================================================================================
// Angle (radians) between the medians of a triangle built on the three
// edge midpoints; 0 is returned when a median is degenerate.
static inline double skewAngle( const gp_XYZ& p1, const gp_XYZ& p2, const gp_XYZ& p3 )
{
  gp_XYZ p12 = ( p2 + p1 ) / 2.;
  gp_XYZ p23 = ( p3 + p2 ) / 2.;
  gp_XYZ p31 = ( p3 + p1 ) / 2.;
  gp_Vec v1( p31 - p2 ), v2( p12 - p23 );
  return v1.Magnitude() < gp::Resolution() || v2.Magnitude() < gp::Resolution() ? 0. : v1.Angle( v2 );
}
// Skew (degrees) of a triangle or quadrangle: deviation of the angle
// between its bimedians (quad) / medians (tri) from 90 degrees.
double Skew::GetValue( const TSequenceOfXYZ& P )
{
  if ( P.size() != 3 && P.size() != 4 )
    return 0.;

  // Compute skew
  const double PI2 = M_PI / 2.;
  if ( P.size() == 3 )
  {
    // worst deviation over the three corner configurations
    double A0 = fabs( PI2 - skewAngle( P( 3 ), P( 1 ), P( 2 ) ) );
    double A1 = fabs( PI2 - skewAngle( P( 1 ), P( 2 ), P( 3 ) ) );
    double A2 = fabs( PI2 - skewAngle( P( 2 ), P( 3 ), P( 1 ) ) );

    return Max( A0, Max( A1, A2 ) ) * 180. / M_PI;
  }
  else
  {
    // bimedians of the quadrangle connect opposite edge midpoints
    gp_XYZ p12 = ( P( 1 ) + P( 2 ) ) / 2.;
    gp_XYZ p23 = ( P( 2 ) + P( 3 ) ) / 2.;
    gp_XYZ p34 = ( P( 3 ) + P( 4 ) ) / 2.;
    gp_XYZ p41 = ( P( 4 ) + P( 1 ) ) / 2.;

    gp_Vec v1( p34 - p12 ), v2( p23 - p41 );
    double A = v1.Magnitude() <= gp::Resolution() || v2.Magnitude() <= gp::Resolution()
      ? 0. : fabs( PI2 - v1.Angle( v2 ) );

    double val = A * 180. / M_PI;

    const double eps = 0.1; // val is in degrees

    return val < eps ? 0. : val;
  }
}
double Skew::GetBadRate( double Value, int /*nbNodes*/ ) const
{
  // Skew is reported in [0, PI/2]: 0 is good, PI/2 is bad, so the raw
  // value is already a badness rate.
  return Value;
}

SMDSAbs_ElementType Skew::GetType() const
{
  // skew applies to 2D elements only
  return SMDSAbs_Face;
}
//================================================================================
/*
Class : Area
Description : Functor for calculating area
*/
//================================================================================
double Area::GetValue( const TSequenceOfXYZ& P )
{
  // Area of a polygonal face: the polygon is triangulated as a fan rooted at
  // P(1); the cross products of the fan triangles are summed as vectors and
  // half the magnitude of the sum is returned.  Fewer than 3 points -> 0.
  double val = 0.0;
  if ( P.size() > 2 )
  {
    gp_Vec aVec1( P(2) - P(1) );
    gp_Vec aVec2( P(3) - P(1) );
    gp_Vec SumVec = aVec1 ^ aVec2;  // '^' is the gp_Vec cross product
    // NOTE(review): loop index is int while P.size() may be unsigned — confirm
    // TSequenceOfXYZ::size() return type to rule out a sign-compare warning
    for (int i=4; i<=P.size(); i++)
    {
      // these intentionally shadow the outer aVec1/aVec2, which only seed
      // SumVec with the first fan triangle
      gp_Vec aVec1( P(i-1) - P(1) );
      gp_Vec aVec2( P(i) - P(1) );
      gp_Vec tmp = aVec1 ^ aVec2;
      SumVec.Add(tmp);
    }
    val = SumVec.Magnitude() * 0.5;
  }
  return val;
}
double Area::GetBadRate( double Value, int /*nbNodes*/ ) const
{
  // Area is a measurement, not a quality control: pass the value through.
  return Value;
}

SMDSAbs_ElementType Area::GetType() const
{
  return SMDSAbs_Face;
}
//================================================================================
/*
Class : Length
Description : Functor for calculating length of edge
*/
//================================================================================
double Length::GetValue( const TSequenceOfXYZ& P )
{
switch ( P.size() ) {
case 2: return getDistance( P( 1 ), P( 2 ) );
case 3: return getDistance( P( 1 ), P( 2 ) ) + getDistance( P( 2 ), P( 3 ) );
default: return 0.;
}
}
double Length::GetBadRate( double Value, int /*nbNodes*/ ) const
{
// meaningless as it is not quality control functor
return Value;
}
SMDSAbs_ElementType Length::GetType() const
{
return SMDSAbs_Edge;
}
//================================================================================
/*
Class : Length2D
Description : Functor for calculating minimal length of edge
*/
//================================================================================
double Length2D::GetValue( long theElementId )
{
  // Shortest edge length of the element theElementId, handled per entity
  // type for both linear and quadratic cells.  A quadratic edge is measured
  // as the sum of its two half-segments through the medium node (the
  // distance sums below imply the point sequence is interlaced
  // corner/medium/corner... — TODO confirm against GetPoints()).
  // Returns 0 for an unknown id, an unsupported entity type, or a point
  // count that does not match the type.
  TSequenceOfXYZ P;
  if ( GetPoints( theElementId, P ))
  {
    double aVal = 0;
    int len = P.size();
    SMDSAbs_EntityType aType = P.getElementEntity();
    switch (aType) {
    case SMDSEntity_Edge:
      if (len == 2)
        aVal = getDistance( P( 1 ), P( 2 ) );
      break;
    case SMDSEntity_Quad_Edge:
      if (len == 3) // quadratic edge
        aVal = getDistance(P( 1 ),P( 3 )) + getDistance(P( 3 ),P( 2 ));
      break;
    case SMDSEntity_Triangle:
      if (len == 3){ // triangles
        double L1 = getDistance(P( 1 ),P( 2 ));
        double L2 = getDistance(P( 2 ),P( 3 ));
        double L3 = getDistance(P( 3 ),P( 1 ));
        aVal = Min(L1,Min(L2,L3));
      }
      break;
    case SMDSEntity_Quadrangle:
      if (len == 4){ // quadrangles
        double L1 = getDistance(P( 1 ),P( 2 ));
        double L2 = getDistance(P( 2 ),P( 3 ));
        double L3 = getDistance(P( 3 ),P( 4 ));
        double L4 = getDistance(P( 4 ),P( 1 ));
        aVal = Min(Min(L1,L2),Min(L3,L4));
      }
      break;
    case SMDSEntity_Quad_Triangle:
    case SMDSEntity_BiQuad_Triangle:
      if (len >= 6){ // quadratic triangles
        double L1 = getDistance(P( 1 ),P( 2 )) + getDistance(P( 2 ),P( 3 ));
        double L2 = getDistance(P( 3 ),P( 4 )) + getDistance(P( 4 ),P( 5 ));
        double L3 = getDistance(P( 5 ),P( 6 )) + getDistance(P( 6 ),P( 1 ));
        aVal = Min(L1,Min(L2,L3));
      }
      break;
    case SMDSEntity_Quad_Quadrangle:
    case SMDSEntity_BiQuad_Quadrangle:
      if (len >= 8){ // quadratic quadrangles
        double L1 = getDistance(P( 1 ),P( 2 )) + getDistance(P( 2 ),P( 3 ));
        double L2 = getDistance(P( 3 ),P( 4 )) + getDistance(P( 4 ),P( 5 ));
        double L3 = getDistance(P( 5 ),P( 6 )) + getDistance(P( 6 ),P( 7 ));
        double L4 = getDistance(P( 7 ),P( 8 )) + getDistance(P( 8 ),P( 1 ));
        aVal = Min(Min(L1,L2),Min(L3,L4));
      }
      break;
    case SMDSEntity_Tetra:
      if (len == 4){ // tetrahedra
        double L1 = getDistance(P( 1 ),P( 2 ));
        double L2 = getDistance(P( 2 ),P( 3 ));
        double L3 = getDistance(P( 3 ),P( 1 ));
        double L4 = getDistance(P( 1 ),P( 4 ));
        double L5 = getDistance(P( 2 ),P( 4 ));
        double L6 = getDistance(P( 3 ),P( 4 ));
        aVal = Min(Min(Min(L1,L2),Min(L3,L4)),Min(L5,L6));
      }
      break;
    case SMDSEntity_Pyramid:
      if (len == 5){ // pyramids
        double L1 = getDistance(P( 1 ),P( 2 ));
        double L2 = getDistance(P( 2 ),P( 3 ));
        double L3 = getDistance(P( 3 ),P( 4 ));
        double L4 = getDistance(P( 4 ),P( 1 ));
        double L5 = getDistance(P( 1 ),P( 5 ));
        double L6 = getDistance(P( 2 ),P( 5 ));
        double L7 = getDistance(P( 3 ),P( 5 ));
        double L8 = getDistance(P( 4 ),P( 5 ));
        aVal = Min(Min(Min(L1,L2),Min(L3,L4)),Min(L5,L6));
        aVal = Min(aVal,Min(L7,L8));
      }
      break;
    case SMDSEntity_Penta:
      if (len == 6) { // pentahedra (prisms)
        double L1 = getDistance(P( 1 ),P( 2 ));
        double L2 = getDistance(P( 2 ),P( 3 ));
        double L3 = getDistance(P( 3 ),P( 1 ));
        double L4 = getDistance(P( 4 ),P( 5 ));
        double L5 = getDistance(P( 5 ),P( 6 ));
        double L6 = getDistance(P( 6 ),P( 4 ));
        double L7 = getDistance(P( 1 ),P( 4 ));
        double L8 = getDistance(P( 2 ),P( 5 ));
        double L9 = getDistance(P( 3 ),P( 6 ));
        aVal = Min(Min(Min(L1,L2),Min(L3,L4)),Min(L5,L6));
        aVal = Min(aVal,Min(Min(L7,L8),L9));
      }
      break;
    case SMDSEntity_Hexa:
      if (len == 8){ // hexahedron
        double L1 = getDistance(P( 1 ),P( 2 ));
        double L2 = getDistance(P( 2 ),P( 3 ));
        double L3 = getDistance(P( 3 ),P( 4 ));
        double L4 = getDistance(P( 4 ),P( 1 ));
        double L5 = getDistance(P( 5 ),P( 6 ));
        double L6 = getDistance(P( 6 ),P( 7 ));
        double L7 = getDistance(P( 7 ),P( 8 ));
        double L8 = getDistance(P( 8 ),P( 5 ));
        double L9 = getDistance(P( 1 ),P( 5 ));
        double L10= getDistance(P( 2 ),P( 6 ));
        double L11= getDistance(P( 3 ),P( 7 ));
        double L12= getDistance(P( 4 ),P( 8 ));
        aVal = Min(Min(Min(L1,L2),Min(L3,L4)),Min(L5,L6));
        aVal = Min(aVal,Min(Min(L7,L8),Min(L9,L10)));
        aVal = Min(aVal,Min(L11,L12));
      }
      break;
    case SMDSEntity_Quad_Tetra:
      if (len == 10){ // quadratic tetrahedra
        double L1 = getDistance(P( 1 ),P( 5 )) + getDistance(P( 5 ),P( 2 ));
        double L2 = getDistance(P( 2 ),P( 6 )) + getDistance(P( 6 ),P( 3 ));
        double L3 = getDistance(P( 3 ),P( 7 )) + getDistance(P( 7 ),P( 1 ));
        double L4 = getDistance(P( 1 ),P( 8 )) + getDistance(P( 8 ),P( 4 ));
        double L5 = getDistance(P( 2 ),P( 9 )) + getDistance(P( 9 ),P( 4 ));
        double L6 = getDistance(P( 3 ),P( 10 )) + getDistance(P( 10 ),P( 4 ));
        aVal = Min(Min(Min(L1,L2),Min(L3,L4)),Min(L5,L6));
      }
      break;
    case SMDSEntity_Quad_Pyramid:
      if (len == 13){ // quadratic pyramids
        double L1 = getDistance(P( 1 ),P( 6 )) + getDistance(P( 6 ),P( 2 ));
        double L2 = getDistance(P( 2 ),P( 7 )) + getDistance(P( 7 ),P( 3 ));
        double L3 = getDistance(P( 3 ),P( 8 )) + getDistance(P( 8 ),P( 4 ));
        double L4 = getDistance(P( 4 ),P( 9 )) + getDistance(P( 9 ),P( 1 ));
        double L5 = getDistance(P( 1 ),P( 10 )) + getDistance(P( 10 ),P( 5 ));
        double L6 = getDistance(P( 2 ),P( 11 )) + getDistance(P( 11 ),P( 5 ));
        double L7 = getDistance(P( 3 ),P( 12 )) + getDistance(P( 12 ),P( 5 ));
        double L8 = getDistance(P( 4 ),P( 13 )) + getDistance(P( 13 ),P( 5 ));
        aVal = Min(Min(Min(L1,L2),Min(L3,L4)),Min(L5,L6));
        aVal = Min(aVal,Min(L7,L8));
      }
      break;
    case SMDSEntity_Quad_Penta:
      if (len == 15){ // quadratic pentahedra
        double L1 = getDistance(P( 1 ),P( 7 )) + getDistance(P( 7 ),P( 2 ));
        double L2 = getDistance(P( 2 ),P( 8 )) + getDistance(P( 8 ),P( 3 ));
        double L3 = getDistance(P( 3 ),P( 9 )) + getDistance(P( 9 ),P( 1 ));
        double L4 = getDistance(P( 4 ),P( 10 )) + getDistance(P( 10 ),P( 5 ));
        double L5 = getDistance(P( 5 ),P( 11 )) + getDistance(P( 11 ),P( 6 ));
        double L6 = getDistance(P( 6 ),P( 12 )) + getDistance(P( 12 ),P( 4 ));
        double L7 = getDistance(P( 1 ),P( 13 )) + getDistance(P( 13 ),P( 4 ));
        double L8 = getDistance(P( 2 ),P( 14 )) + getDistance(P( 14 ),P( 5 ));
        double L9 = getDistance(P( 3 ),P( 15 )) + getDistance(P( 15 ),P( 6 ));
        aVal = Min(Min(Min(L1,L2),Min(L3,L4)),Min(L5,L6));
        aVal = Min(aVal,Min(Min(L7,L8),L9));
      }
      break;
    case SMDSEntity_Quad_Hexa:
    case SMDSEntity_TriQuad_Hexa:
      if (len >= 20) { // quadratic hexahedra
        double L1 = getDistance(P( 1 ),P( 9 )) + getDistance(P( 9 ),P( 2 ));
        double L2 = getDistance(P( 2 ),P( 10 )) + getDistance(P( 10 ),P( 3 ));
        double L3 = getDistance(P( 3 ),P( 11 )) + getDistance(P( 11 ),P( 4 ));
        double L4 = getDistance(P( 4 ),P( 12 )) + getDistance(P( 12 ),P( 1 ));
        double L5 = getDistance(P( 5 ),P( 13 )) + getDistance(P( 13 ),P( 6 ));
        double L6 = getDistance(P( 6 ),P( 14 )) + getDistance(P( 14 ),P( 7 ));
        double L7 = getDistance(P( 7 ),P( 15 )) + getDistance(P( 15 ),P( 8 ));
        double L8 = getDistance(P( 8 ),P( 16 )) + getDistance(P( 16 ),P( 5 ));
        double L9 = getDistance(P( 1 ),P( 17 )) + getDistance(P( 17 ),P( 5 ));
        double L10= getDistance(P( 2 ),P( 18 )) + getDistance(P( 18 ),P( 6 ));
        double L11= getDistance(P( 3 ),P( 19 )) + getDistance(P( 19 ),P( 7 ));
        double L12= getDistance(P( 4 ),P( 20 )) + getDistance(P( 20 ),P( 8 ));
        aVal = Min(Min(Min(L1,L2),Min(L3,L4)),Min(L5,L6));
        aVal = Min(aVal,Min(Min(L7,L8),Min(L9,L10)));
        aVal = Min(aVal,Min(L11,L12));
      }
      break;
    case SMDSEntity_Polygon:
      if ( len > 1 ) {
        // closing edge first, then all consecutive edges
        aVal = getDistance( P(1), P( P.size() ));
        for ( size_t i = 1; i < P.size(); ++i )
          aVal = Min( aVal, getDistance( P( i ), P( i+1 )));
      }
      break;
#ifndef VTK_NO_QUAD_POLY
    case SMDSEntity_Quad_Polygon:
      if ( len > 2 ) {
        // edges taken two half-segments at a time (corner-medium-corner)
        aVal = getDistance( P(1), P( P.size() )) + getDistance( P(P.size()), P( P.size()-1 ));
        for ( size_t i = 1; i < P.size()-1; i += 2 )
          aVal = Min( aVal, getDistance( P( i ), P( i+1 )) + getDistance( P( i+1 ), P( i+2 )));
      }
      break;
#endif
    case SMDSEntity_Hexagonal_Prism:
      if (len == 12) { // hexagonal prism
        double L1 = getDistance(P( 1 ),P( 2 ));
        double L2 = getDistance(P( 2 ),P( 3 ));
        double L3 = getDistance(P( 3 ),P( 4 ));
        double L4 = getDistance(P( 4 ),P( 5 ));
        double L5 = getDistance(P( 5 ),P( 6 ));
        double L6 = getDistance(P( 6 ),P( 1 ));
        double L7 = getDistance(P( 7 ), P( 8 ));
        double L8 = getDistance(P( 8 ), P( 9 ));
        double L9 = getDistance(P( 9 ), P( 10 ));
        double L10= getDistance(P( 10 ),P( 11 ));
        double L11= getDistance(P( 11 ),P( 12 ));
        double L12= getDistance(P( 12 ),P( 7 ));
        double L13 = getDistance(P( 1 ),P( 7 ));
        double L14 = getDistance(P( 2 ),P( 8 ));
        double L15 = getDistance(P( 3 ),P( 9 ));
        double L16 = getDistance(P( 4 ),P( 10 ));
        double L17 = getDistance(P( 5 ),P( 11 ));
        double L18 = getDistance(P( 6 ),P( 12 ));
        aVal = Min(Min(Min(L1,L2),Min(L3,L4)),Min(L5,L6));
        aVal = Min(aVal, Min(Min(Min(L7,L8),Min(L9,L10)),Min(L11,L12)));
        aVal = Min(aVal, Min(Min(Min(L13,L14),Min(L15,L16)),Min(L17,L18)));
      }
      break;
    case SMDSEntity_Polyhedra:
      {
        // NOTE(review): polyhedra are intentionally not handled here and
        // yield 0 — confirm whether this is a known limitation
      }
      break;
    default:
      return 0;
    }

    if (aVal < 0 ) {
      return 0.;
    }

    // round to myPrecision decimal digits when a precision is set
    if ( myPrecision >= 0 )
    {
      double prec = pow( 10., (double)( myPrecision ) );
      aVal = floor( aVal * prec + 0.5 ) / prec;
    }

    return aVal;
  }
  return 0.;
}
double Length2D::GetBadRate( double Value, int /*nbNodes*/ ) const
{
  // Length2D is a measurement, not a quality control: value passes through.
  return Value;
}

SMDSAbs_ElementType Length2D::GetType() const
{
  // the functor works on face links
  return SMDSAbs_Face;
}
Length2D::Value::Value(double theLength,long thePntId1, long thePntId2):
  myLength(theLength)
{
  // store the node ids in ascending order so that the link (a,b) and the
  // link (b,a) compare equal in the associated set
  if ( thePntId1 <= thePntId2 ) {
    myPntId[0] = thePntId1; myPntId[1] = thePntId2;
  }
  else {
    myPntId[0] = thePntId2; myPntId[1] = thePntId1;
  }
}
bool Length2D::Value::operator<(const Length2D::Value& x) const
{
  // lexicographic order on the normalized (smaller id, larger id) pair;
  // myLength deliberately does not participate in the ordering
  if ( myPntId[0] != x.myPntId[0] )
    return myPntId[0] < x.myPntId[0];
  return myPntId[1] < x.myPntId[1];
}
void Length2D::GetValues(TValues& theValues)
{
  // Fill theValues with one Value per face link (pair of consecutive nodes),
  // carrying a length and the normalized node-id pair.
  // NOTE(review): local 'aValues' is never used — looks like dead code.
  TValues aValues;
  SMDS_FaceIteratorPtr anIter = myMesh->facesIterator();
  for(; anIter->more(); ){
    const SMDS_MeshFace* anElem = anIter->next();

    if(anElem->IsQuadratic()) {
      const SMDS_VtkFace* F =
        dynamic_cast<const SMDS_VtkFace*>(anElem);
      // use special nodes iterator
      // NOTE(review): this inner 'anIter' intentionally(?) shadows the face
      // iterator above — worth renaming for readability
      SMDS_ElemIteratorPtr anIter = F->interlacedNodesElemIterator();
      long aNodeId[4];
      gp_Pnt P[4];

      double aLength;
      const SMDS_MeshElement* aNode;
      // prime P[0]/P[1] and the id slots with the first (corner) node;
      // NOTE(review): if the iterator is empty these stay uninitialized and
      // are read below — presumably faces always have nodes, but confirm
      if(anIter->more()){
        aNode = anIter->next();
        const SMDS_MeshNode* aNodes = (SMDS_MeshNode*) aNode;
        P[0] = P[1] = gp_Pnt(aNodes->X(),aNodes->Y(),aNodes->Z());
        aNodeId[0] = aNodeId[1] = aNode->GetID();
        aLength = 0;
      }
      // consume (medium, corner) pairs; each full quadratic edge produces
      // two Value entries sharing the same accumulated length
      // NOTE(review): both aValue1 and aValue2 get the length of the whole
      // two-segment chunk — confirm this is the intended per-link length
      for(; anIter->more(); ){
        const SMDS_MeshNode* N1 = static_cast<const SMDS_MeshNode*> (anIter->next());
        P[2] = gp_Pnt(N1->X(),N1->Y(),N1->Z());
        aNodeId[2] = N1->GetID();
        aLength = P[1].Distance(P[2]);
        if(!anIter->more()) break;
        const SMDS_MeshNode* N2 = static_cast<const SMDS_MeshNode*> (anIter->next());
        P[3] = gp_Pnt(N2->X(),N2->Y(),N2->Z());
        aNodeId[3] = N2->GetID();
        aLength += P[2].Distance(P[3]);
        Value aValue1(aLength,aNodeId[1],aNodeId[2]);
        Value aValue2(aLength,aNodeId[2],aNodeId[3]);
        P[1] = P[3];
        aNodeId[1] = aNodeId[3];
        theValues.insert(aValue1);
        theValues.insert(aValue2);
      }
      // close the loop back to the first node
      aLength += P[2].Distance(P[0]);
      Value aValue1(aLength,aNodeId[1],aNodeId[2]);
      Value aValue2(aLength,aNodeId[2],aNodeId[0]);
      theValues.insert(aValue1);
      theValues.insert(aValue2);
    }
    else {
      // linear face: one Value per consecutive node pair plus the closing link
      SMDS_ElemIteratorPtr aNodesIter = anElem->nodesIterator();
      long aNodeId[2];
      gp_Pnt P[3];

      double aLength;
      const SMDS_MeshElement* aNode;
      if(aNodesIter->more()){
        aNode = aNodesIter->next();
        const SMDS_MeshNode* aNodes = (SMDS_MeshNode*) aNode;
        P[0] = P[1] = gp_Pnt(aNodes->X(),aNodes->Y(),aNodes->Z());
        aNodeId[0] = aNodeId[1] = aNode->GetID();
        aLength = 0;
      }
      for(; aNodesIter->more(); ){
        aNode = aNodesIter->next();
        const SMDS_MeshNode* aNodes = (SMDS_MeshNode*) aNode;
        long anId = aNode->GetID();
        P[2] = gp_Pnt(aNodes->X(),aNodes->Y(),aNodes->Z());
        aLength = P[1].Distance(P[2]);
        Value aValue(aLength,aNodeId[1],anId);
        aNodeId[1] = anId;
        P[1] = P[2];
        theValues.insert(aValue);
      }
      // closing link: last node back to the first one
      aLength = P[0].Distance(P[1]);
      Value aValue(aLength,aNodeId[0],aNodeId[1]);
      theValues.insert(aValue);
    }
  }
}
//================================================================================
/*
Class : MultiConnection
  Description : Functor for calculating the number of faces connected to the edge
*/
//================================================================================
double MultiConnection::GetValue( const TSequenceOfXYZ& /*P*/ )
{
  // multi-connection cannot be computed from raw points;
  // see the id-based overload below
  return 0;
}

double MultiConnection::GetValue( long theId )
{
  // multi-connection count of edge theId (delegated to getNbMultiConnection)
  return getNbMultiConnection( myMesh, theId );
}

double MultiConnection::GetBadRate( double Value, int /*nbNodes*/ ) const
{
  // not a quality control functor: the raw count is the rate
  return Value;
}

SMDSAbs_ElementType MultiConnection::GetType() const
{
  return SMDSAbs_Edge;
}
//================================================================================
/*
Class : MultiConnection2D
  Description : Functor for calculating the number of faces connected to the edge
*/
//================================================================================
double MultiConnection2D::GetValue( const TSequenceOfXYZ& /*P*/ )
{
  // cannot be computed from raw points; see the id-based overload
  return 0;
}
double MultiConnection2D::GetValue( long theElementId )
{
int aResult = 0;
const SMDS_MeshElement* aFaceElem = myMesh->FindElement(theElementId);
SMDSAbs_ElementType aType = aFaceElem->GetType();
switch (aType) {
case SMDSAbs_Face:
{
int i = 0, len = aFaceElem->NbNodes();
SMDS_ElemIteratorPtr anIter = aFaceElem->nodesIterator();
if (!anIter) break;
const SMDS_MeshNode *aNode, *aNode0;
TColStd_MapOfInteger aMap, aMapPrev;
for (i = 0; i <= len; i++) {
aMapPrev = aMap;
aMap.Clear();
int aNb = 0;
if (anIter->more()) {
aNode = (SMDS_MeshNode*)anIter->next();
} else {
if (i == len)
aNode = aNode0;
else
break;
}
if (!aNode) break;
if (i == 0) aNode0 = aNode;
SMDS_ElemIteratorPtr anElemIter = aNode->GetInverseElementIterator();
while (anElemIter->more()) {
const SMDS_MeshElement* anElem = anElemIter->next();
if (anElem != 0 && anElem->GetType() == SMDSAbs_Face) {
int anId = anElem->GetID();
aMap.Add(anId);
if (aMapPrev.Contains(anId)) {
aNb++;
}
}
}
aResult = Max(aResult, aNb);
}
}
break;
default:
aResult = 0;
}
return aResult;
}
double MultiConnection2D::GetBadRate( double Value, int /*nbNodes*/ ) const
{
  // not a quality control functor: the raw count is the rate
  return Value;
}

SMDSAbs_ElementType MultiConnection2D::GetType() const
{
  return SMDSAbs_Face;
}
MultiConnection2D::Value::Value(long thePntId1, long thePntId2)
{
  // normalize the pair: smaller id first, so a link is identified
  // independently of the traversal direction
  if ( thePntId1 <= thePntId2 ) {
    myPntId[0] = thePntId1; myPntId[1] = thePntId2;
  }
  else {
    myPntId[0] = thePntId2; myPntId[1] = thePntId1;
  }
}
bool MultiConnection2D::Value::operator<(const MultiConnection2D::Value& x) const
{
  // lexicographic order on the normalized node-id pair
  if ( myPntId[0] != x.myPntId[0] )
    return myPntId[0] < x.myPntId[0];
  return myPntId[1] < x.myPntId[1];
}
void MultiConnection2D::GetValues(MValues& theValues)
{
  // For every link (pair of consecutive nodes) of every face, count in
  // theValues how many faces contain that link.
  if ( !myMesh ) return;
  SMDS_FaceIteratorPtr anIter = myMesh->facesIterator();
  for(; anIter->more(); ){
    const SMDS_MeshFace* anElem = anIter->next();
    SMDS_ElemIteratorPtr aNodesIter;
    // quadratic faces must be walked in interlaced (geometric) node order
    if ( anElem->IsQuadratic() )
      aNodesIter = dynamic_cast<const SMDS_VtkFace*>
        (anElem)->interlacedNodesElemIterator();
    else
      aNodesIter = anElem->nodesIterator();
    long aNodeId[3];

    //int aNbConnects=0;
    const SMDS_MeshNode* aNode0;
    const SMDS_MeshNode* aNode1;
    const SMDS_MeshNode* aNode2;
    // prime with the first node; aNodeId[0] remembers it for the closing link
    // NOTE(review): if a face had no nodes, aNodeId/aNode2 would be read
    // uninitialized below — presumably impossible, but worth confirming
    if(aNodesIter->more()){
      aNode0 = (SMDS_MeshNode*) aNodesIter->next();
      aNode1 = aNode0;
      const SMDS_MeshNode* aNodes = (SMDS_MeshNode*) aNode1;
      aNodeId[0] = aNodeId[1] = aNodes->GetID();
    }
    for(; aNodesIter->more(); ) {
      aNode2 = (SMDS_MeshNode*) aNodesIter->next();
      long anId = aNode2->GetID();
      aNodeId[2] = anId;

      // bump the counter of the link (previous node, current node)
      Value aValue(aNodeId[1],aNodeId[2]);
      MValues::iterator aItr = theValues.find(aValue);
      if (aItr != theValues.end()){
        aItr->second += 1;
        //aNbConnects = nb;
      }
      else {
        theValues[aValue] = 1;
        //aNbConnects = 1;
      }
      //cout << "NodeIds: "<<aNodeId[1]<<","<<aNodeId[2]<<" nbconn="<<aNbConnects<<endl;
      aNodeId[1] = aNodeId[2];
      aNode1 = aNode2;
    }
    // closing link: last node back to the first one
    Value aValue(aNodeId[0],aNodeId[2]);
    MValues::iterator aItr = theValues.find(aValue);
    if (aItr != theValues.end()) {
      aItr->second += 1;
      //aNbConnects = nb;
    }
    else {
      theValues[aValue] = 1;
      //aNbConnects = 1;
    }
    //cout << "NodeIds: "<<aNodeId[0]<<","<<aNodeId[2]<<" nbconn="<<aNbConnects<<endl;
  }
}
//================================================================================
/*
Class : BallDiameter
Description : Functor returning diameter of a ball element
*/
//================================================================================
double BallDiameter::GetValue( long theId )
{
double diameter = 0;
if ( const SMDS_BallElement* ball =
dynamic_cast<const SMDS_BallElement*>( myMesh->FindElement( theId )))
{
diameter = ball->GetDiameter();
}
return diameter;
}
double BallDiameter::GetBadRate( double Value, int /*nbNodes*/ ) const
{
// meaningless as it is not a quality control functor
return Value;
}
SMDSAbs_ElementType BallDiameter::GetType() const
{
return SMDSAbs_Ball;
}
/*
PREDICATES
*/
//================================================================================
/*
Class : BadOrientedVolume
Description : Predicate bad oriented volumes
*/
//================================================================================
BadOrientedVolume::BadOrientedVolume()
{
  myMesh = 0; // no mesh until SetMesh() is called
}

void BadOrientedVolume::SetMesh( const SMDS_Mesh* theMesh )
{
  myMesh = theMesh;
}

bool BadOrientedVolume::IsSatisfy( long theId )
{
  // a volume is "bad oriented" when SMDS_VolumeTool reports it not forward
  if ( !myMesh )
    return false;
  SMDS_VolumeTool vTool( myMesh->FindElement( theId ));
  return !vTool.IsForward();
}

SMDSAbs_ElementType BadOrientedVolume::GetType() const
{
  return SMDSAbs_Volume;
}
/*
Class : BareBorderVolume
*/
bool BareBorderVolume::IsSatisfy(long theElementId )
{
  // A volume has a "bare border" when at least one of its free faces
  // (faces not shared with another volume) is not covered by a face
  // element existing in the mesh.
  SMDS_VolumeTool  myTool;
  if ( myTool.Set( myMesh->FindElement(theElementId)))
  {
    for ( int iF = 0; iF < myTool.NbFaces(); ++iF )
      if ( myTool.IsFreeFace( iF ))
      {
        const SMDS_MeshNode** n = myTool.GetFaceNodes(iF);
        vector< const SMDS_MeshNode*> nodes( n, n+myTool.NbFaceNodes(iF));
        // look for a face element built on exactly these nodes
        if ( !myMesh->FindElement( nodes, SMDSAbs_Face, /*noMedium=*/false))
          return true;
      }
  }
  return false;
}
//================================================================================
/*
Class : BareBorderFace
*/
//================================================================================
bool BareBorderFace::IsSatisfy(long theElementId )
{
  // A face has a "bare border" when at least one of its links (pairs of
  // consecutive corner nodes) is on the mesh boundary (not shared with
  // another face) and no edge element covers that link.
  bool ok = false;
  if ( const SMDS_MeshElement* face = myMesh->FindElement(theElementId))
  {
    if ( face->GetType() == SMDSAbs_Face )
    {
      int nbN = face->NbCornerNodes();
      for ( int i = 0; i < nbN && !ok; ++i )
      {
        // check if a link is shared by another face
        const SMDS_MeshNode* n1 = face->GetNode( i );
        const SMDS_MeshNode* n2 = face->GetNode( (i+1)%nbN );
        SMDS_ElemIteratorPtr fIt = n1->GetInverseElementIterator( SMDSAbs_Face );
        bool isShared = false;
        while ( !isShared && fIt->more() )
        {
          const SMDS_MeshElement* f = fIt->next();
          isShared = ( f != face && f->GetNodeIndex(n2) != -1 );
        }
        if ( !isShared )
        {
          // free link: it is "bare" unless an edge element covers it;
          // for quadratic faces the medium node (stored after the corners)
          // takes part in the edge lookup
          const int iQuad = face->IsQuadratic();
          myLinkNodes.resize( 2 + iQuad);
          myLinkNodes[0] = n1;
          myLinkNodes[1] = n2;
          if ( iQuad )
            myLinkNodes[2] = face->GetNode( i+nbN );
          ok = !myMesh->FindElement( myLinkNodes, SMDSAbs_Edge, /*noMedium=*/false);
        }
      }
    }
  }
  return ok;
}
//================================================================================
/*
Class : OverConstrainedVolume
*/
//================================================================================
bool OverConstrainedVolume::IsSatisfy(long theElementId )
{
// An element is over-constrained if it has N-1 free borders where
// N is the number of edges/faces for a 2D/3D element.
SMDS_VolumeTool myTool;
if ( myTool.Set( myMesh->FindElement(theElementId)))
{
int nbSharedFaces = 0;
for ( int iF = 0; iF < myTool.NbFaces(); ++iF )
if ( !myTool.IsFreeFace( iF ) && ++nbSharedFaces > 1 )
break;
return ( nbSharedFaces == 1 );
}
return false;
}
//================================================================================
/*
Class : OverConstrainedFace
*/
//================================================================================
bool OverConstrainedFace::IsSatisfy(long theElementId )
{
  // An element is over-constrained if it has N-1 free borders where
  // N is the number of edges/faces for a 2D/3D element — i.e. exactly one
  // of the face's links is shared with another face.
  if ( const SMDS_MeshElement* face = myMesh->FindElement(theElementId))
    if ( face->GetType() == SMDSAbs_Face )
    {
      int nbSharedBorders = 0;
      int nbN = face->NbCornerNodes();
      for ( int i = 0; i < nbN; ++i )
      {
        // check if a link is shared by another face
        const SMDS_MeshNode* n1 = face->GetNode( i );
        const SMDS_MeshNode* n2 = face->GetNode( (i+1)%nbN );
        SMDS_ElemIteratorPtr fIt = n1->GetInverseElementIterator( SMDSAbs_Face );
        bool isShared = false;
        while ( !isShared && fIt->more() )
        {
          const SMDS_MeshElement* f = fIt->next();
          // another face that also contains n2 shares the link (n1,n2)
          isShared = ( f != face && f->GetNodeIndex(n2) != -1 );
        }
        // more than one shared border -> not over-constrained, stop early
        if ( isShared && ++nbSharedBorders > 1 )
          break;
      }
      return ( nbSharedBorders == 1 );
    }
  return false;
}
//================================================================================
/*
Class : CoincidentNodes
Description : Predicate of Coincident nodes
*/
//================================================================================
CoincidentNodes::CoincidentNodes()
{
  myToler = 1e-5; // default coincidence tolerance
}

bool CoincidentNodes::IsSatisfy( long theElementId )
{
  // true when the node id was collected as coincident by SetMesh()
  return myCoincidentIDs.Contains( theElementId );
}

SMDSAbs_ElementType CoincidentNodes::GetType() const
{
  return SMDSAbs_Node;
}
void CoincidentNodes::SetMesh( const SMDS_Mesh* theMesh )
{
  // Rebuild the cached set of coincident node ids, but only when the mesh
  // actually changed since the last call (tracked by myMeshModifTracer).
  myMeshModifTracer.SetMesh( theMesh );
  if ( myMeshModifTracer.IsMeshModified() )
  {
    TIDSortedNodeSet nodesToCheck;
    SMDS_NodeIteratorPtr nIt = theMesh->nodesIterator(/*idIncreasingOrder=*/true);
    while ( nIt->more() )
      nodesToCheck.insert( nodesToCheck.end(), nIt->next() );

    // group nodes that lie within myToler of each other
    list< list< const SMDS_MeshNode*> > nodeGroups;
    SMESH_OctreeNode::FindCoincidentNodes ( nodesToCheck, &nodeGroups, myToler );

    // flatten the groups into a plain id set for fast IsSatisfy() lookups
    myCoincidentIDs.Clear();
    list< list< const SMDS_MeshNode*> >::iterator groupIt = nodeGroups.begin();
    for ( ; groupIt != nodeGroups.end(); ++groupIt )
    {
      list< const SMDS_MeshNode*>& coincNodes = *groupIt;
      list< const SMDS_MeshNode*>::iterator n = coincNodes.begin();
      for ( ; n != coincNodes.end(); ++n )
        myCoincidentIDs.Add( (*n)->GetID() );
    }
  }
}
//================================================================================
/*
Class : CoincidentElements
Description : Predicate of Coincident Elements
Note : This class is suitable only for visualization of Coincident Elements
*/
//================================================================================
CoincidentElements::CoincidentElements()
{
  myMesh = 0; // no mesh until SetMesh() is called
}

void CoincidentElements::SetMesh( const SMDS_Mesh* theMesh )
{
  myMesh = theMesh;
}
bool CoincidentElements::IsSatisfy( long theElementId )
{
  // True when another element of the same type is built on the same set of
  // nodes as element theElementId.
  if ( !myMesh ) return false;
  if ( const SMDS_MeshElement* e = myMesh->FindElement( theElementId ))
  {
    if ( e->GetType() != GetType() ) return false;
    set< const SMDS_MeshNode* > elemNodes( e->begin_nodes(), e->end_nodes() );
    const int nbNodes = e->NbNodes();
    // only elements sharing the first node can possibly coincide
    SMDS_ElemIteratorPtr invIt = (*elemNodes.begin())->GetInverseElementIterator( GetType() );
    while ( invIt->more() )
    {
      const SMDS_MeshElement* e2 = invIt->next();
      if ( e2 == e || e2->NbNodes() != nbNodes ) continue;
      bool sameNodes = true;
      // NOTE(review): the loop bound is the *set* size, which is smaller than
      // nbNodes if e contains duplicate nodes — confirm such degenerate
      // elements cannot occur, otherwise this may over-report coincidence
      for ( size_t i = 0; i < elemNodes.size() && sameNodes; ++i )
        sameNodes = ( elemNodes.count( e2->GetNode( i )));
      if ( sameNodes )
        return true;
    }
  }
  return false;
}
SMDSAbs_ElementType CoincidentElements1D::GetType() const
{
  return SMDSAbs_Edge;   // 1D: edges
}

SMDSAbs_ElementType CoincidentElements2D::GetType() const
{
  return SMDSAbs_Face;   // 2D: faces
}

SMDSAbs_ElementType CoincidentElements3D::GetType() const
{
  return SMDSAbs_Volume; // 3D: volumes
}
//================================================================================
/*
Class : FreeBorders
Description : Predicate for free borders
*/
//================================================================================
FreeBorders::FreeBorders()
{
  myMesh = 0; // no mesh until SetMesh() is called
}

void FreeBorders::SetMesh( const SMDS_Mesh* theMesh )
{
  myMesh = theMesh;
}

bool FreeBorders::IsSatisfy( long theId )
{
  // an edge is a free border when exactly one element is connected to it
  return getNbMultiConnection( myMesh, theId ) == 1;
}

SMDSAbs_ElementType FreeBorders::GetType() const
{
  return SMDSAbs_Edge;
}
//================================================================================
/*
Class : FreeEdges
Description : Predicate for free Edges
*/
//================================================================================
FreeEdges::FreeEdges()
{
  myMesh = 0; // no mesh until SetMesh() is called
}

void FreeEdges::SetMesh( const SMDS_Mesh* theMesh )
{
  myMesh = theMesh;
}
bool FreeEdges::IsFreeEdge( const SMDS_MeshNode** theNodes, const int theFaceId )
{
TColStd_MapOfInteger aMap;
for ( int i = 0; i < 2; i++ )
{
SMDS_ElemIteratorPtr anElemIter = theNodes[ i ]->GetInverseElementIterator(SMDSAbs_Face);
while( anElemIter->more() )
{
if ( const SMDS_MeshElement* anElem = anElemIter->next())
{
const int anId = anElem->GetID();
if ( anId != theFaceId && !aMap.Add( anId ))
return false;
}
}
}
return true;
}
bool FreeEdges::IsSatisfy( long theId )
{
  // True when face theId has at least one free edge (an edge not shared
  // with any other face).
  if ( myMesh == 0 )
    return false;
  const SMDS_MeshElement* aFace = myMesh->FindElement( theId );
  if ( aFace == 0 || aFace->GetType() != SMDSAbs_Face || aFace->NbNodes() < 3 )
    return false;
  // interlaced order puts quadratic medium nodes between their corners
  SMDS_NodeIteratorPtr anIter = aFace->interlacedNodesIterator();
  if ( !anIter )
    return false;

  int i = 0, nbNodes = aFace->NbNodes();
  std::vector <const SMDS_MeshNode*> aNodes( nbNodes+1 );
  // the assignment inside the condition also bails out on a null node
  while( anIter->more() )
    if ( ! ( aNodes[ i++ ] = anIter->next() ))
      return false;
  // close the polygon so each consecutive pair forms an edge
  aNodes[ nbNodes ] = aNodes[ 0 ];

  for ( i = 0; i < nbNodes; i++ )
    if ( IsFreeEdge( &aNodes[ i ], theId ) )
      return true;

  return false;
}
SMDSAbs_ElementType FreeEdges::GetType() const
{
  // the predicate classifies faces (by their free edges)
  return SMDSAbs_Face;
}
FreeEdges::Border::Border(long theElemId, long thePntId1, long thePntId2):
  myElemId(theElemId)
{
  // keep the node ids ordered so that (a,b) and (b,a) compare equal
  if ( thePntId1 <= thePntId2 ) {
    myPntId[0] = thePntId1; myPntId[1] = thePntId2;
  }
  else {
    myPntId[0] = thePntId2; myPntId[1] = thePntId1;
  }
}

bool FreeEdges::Border::operator<(const FreeEdges::Border& x) const
{
  // lexicographic order on the normalized node-id pair;
  // myElemId intentionally does not participate
  if ( myPntId[0] != x.myPntId[0] )
    return myPntId[0] < x.myPntId[0];
  return myPntId[1] < x.myPntId[1];
}
inline void UpdateBorders(const FreeEdges::Border& theBorder,
                          FreeEdges::TBorders& theRegistry,
                          FreeEdges::TBorders& theContainer)
{
  // first encounter -> candidate free border; second encounter -> the edge
  // is shared by two faces, so withdraw it from the result container
  if ( theRegistry.find(theBorder) != theRegistry.end() ) {
    theContainer.erase(theBorder);
  }
  else {
    theRegistry.insert(theBorder);
    theContainer.insert(theBorder);
  }
}
// NOTE(review): the method name "GetBoreders" is a long-standing typo for
// "GetBorders"; it is part of the public interface and cannot be renamed here.
void FreeEdges::GetBoreders(TBorders& theBorders)
{
  // Collect in theBorders every face edge that belongs to exactly one face:
  // each edge is registered on first sight and withdrawn when seen again
  // (see UpdateBorders above).
  TBorders aRegistry;
  SMDS_FaceIteratorPtr anIter = myMesh->facesIterator();
  for(; anIter->more(); ){
    const SMDS_MeshFace* anElem = anIter->next();
    long anElemId = anElem->GetID();
    SMDS_ElemIteratorPtr aNodesIter;
    // quadratic faces must be walked in interlaced (geometric) node order
    if ( anElem->IsQuadratic() )
      aNodesIter = static_cast<const SMDS_VtkFace*>(anElem)->
        interlacedNodesElemIterator();
    else
      aNodesIter = anElem->nodesIterator();
    long aNodeId[2];
    const SMDS_MeshElement* aNode;
    // prime with the first node; aNodeId[0] remembers it for the closing edge
    if(aNodesIter->more()){
      aNode = aNodesIter->next();
      aNodeId[0] = aNodeId[1] = aNode->GetID();
    }
    for(; aNodesIter->more(); ){
      aNode = aNodesIter->next();
      long anId = aNode->GetID();
      Border aBorder(anElemId,aNodeId[1],anId);
      aNodeId[1] = anId;
      UpdateBorders(aBorder,aRegistry,theBorders);
    }
    // closing edge: last node back to the first one
    Border aBorder(anElemId,aNodeId[0],aNodeId[1]);
    UpdateBorders(aBorder,aRegistry,theBorders);
  }
}
//================================================================================
/*
Class : FreeNodes
Description : Predicate for free nodes
*/
//================================================================================
FreeNodes::FreeNodes()
{
  myMesh = 0; // no mesh until SetMesh() is called
}

void FreeNodes::SetMesh( const SMDS_Mesh* theMesh )
{
  myMesh = theMesh;
}

bool FreeNodes::IsSatisfy( long theNodeId )
{
  // A node is "free" when no element references it.
  // fix: guard against an unset mesh — sibling predicates (FreeEdges,
  // BadOrientedVolume, ...) perform the same check before dereferencing.
  if ( !myMesh )
    return false;
  const SMDS_MeshNode* aNode = myMesh->FindNode( theNodeId );
  if (!aNode)
    return false;
  return (aNode->NbInverseElements() < 1);
}

SMDSAbs_ElementType FreeNodes::GetType() const
{
  return SMDSAbs_Node;
}
//================================================================================
/*
Class : FreeFaces
Description : Predicate for free faces
*/
//================================================================================
FreeFaces::FreeFaces()
{
  myMesh = 0; // no mesh until SetMesh() is called
}

void FreeFaces::SetMesh( const SMDS_Mesh* theMesh )
{
  myMesh = theMesh;
}
bool FreeFaces::IsSatisfy( long theId )
{
  // A face is "free" when fewer than two volumes are built on all of its
  // nodes (an interior face always separates exactly two volumes).
  if (!myMesh) return false;
  // check that face nodes refer to less than two common volumes
  const SMDS_MeshElement* aFace = myMesh->FindElement( theId );
  if ( !aFace || aFace->GetType() != SMDSAbs_Face )
    return false;

  int nbNode = aFace->NbNodes();

  // collect volumes to check that number of volumes with count equal nbNode not less than 2
  typedef map< SMDS_MeshElement*, int > TMapOfVolume; // map of volume counters
  typedef map< SMDS_MeshElement*, int >::iterator TItrMapOfVolume; // iterator
  TMapOfVolume mapOfVol;

  // count, per adjacent volume, how many of the face's nodes it contains
  SMDS_ElemIteratorPtr nodeItr = aFace->nodesIterator();
  while ( nodeItr->more() ) {
    const SMDS_MeshNode* aNode = static_cast<const SMDS_MeshNode*>(nodeItr->next());
    if ( !aNode ) continue;
    SMDS_ElemIteratorPtr volItr = aNode->GetInverseElementIterator(SMDSAbs_Volume);
    while ( volItr->more() ) {
      SMDS_MeshElement* aVol = (SMDS_MeshElement*)volItr->next();
      TItrMapOfVolume    itr = mapOfVol.insert(make_pair(aVol, 0)).first;
      (*itr).second++;
    }
  }
  int nbVol = 0;
  TItrMapOfVolume volItr = mapOfVol.begin();
  TItrMapOfVolume volEnd = mapOfVol.end();
  // a volume containing all nbNode nodes of the face is built on the face
  for ( ; volItr != volEnd; ++volItr )
    if ( (*volItr).second >= nbNode )
       nbVol++;
  // face is not free if the number of volumes constructed on their nodes is more than one
  return (nbVol < 2);
}
SMDSAbs_ElementType FreeFaces::GetType() const
{
  return SMDSAbs_Face;
}
//================================================================================
/*
Class : LinearOrQuadratic
Description : Predicate to verify whether a mesh element is linear
*/
//================================================================================
LinearOrQuadratic::LinearOrQuadratic()
{
  myMesh = 0;
  // fix: myType was left uninitialized, so IsSatisfy()/GetType() read an
  // indeterminate value if called before SetType(); default to checking
  // elements of any type.
  myType = SMDSAbs_All;
}

void LinearOrQuadratic::SetMesh( const SMDS_Mesh* theMesh )
{
  myMesh = theMesh;
}

bool LinearOrQuadratic::IsSatisfy( long theId )
{
  // true when the element exists, matches the requested type filter and
  // is linear (not quadratic)
  if (!myMesh) return false;
  const SMDS_MeshElement* anElem = myMesh->FindElement( theId );
  if ( !anElem || (myType != SMDSAbs_All && anElem->GetType() != myType) )
    return false;
  return (!anElem->IsQuadratic());
}

void LinearOrQuadratic::SetType( SMDSAbs_ElementType theType )
{
  myType = theType;
}

SMDSAbs_ElementType LinearOrQuadratic::GetType() const
{
  return myType;
}
//================================================================================
/*
Class : GroupColor
  Description : Functor for checking the color of the group to which a mesh element belongs
*/
//================================================================================
GroupColor::GroupColor()
{
  // fix: myType was left uninitialized, so IsSatisfy()/GetType() could read
  // an indeterminate value if called before SetType(); default to matching
  // elements of any type.
  myType = SMDSAbs_All;
}

bool GroupColor::IsSatisfy( long theId )
{
  // true when the id was collected by SetMesh() from a matching-color group
  return myIDs.count( theId );
}

void GroupColor::SetType( SMDSAbs_ElementType theType )
{
  myType = theType;
}

SMDSAbs_ElementType GroupColor::GetType() const
{
  return myType;
}
static bool isEqual( const Quantity_Color& theColor1,
const Quantity_Color& theColor2 )
{
// tolerance to compare colors
const double tol = 5*1e-3;
return ( fabs( theColor1.Red() - theColor2.Red() ) < tol &&
fabs( theColor1.Green() - theColor2.Green() ) < tol &&
fabs( theColor1.Blue() - theColor2.Blue() ) < tol );
}
// Rebuild the cache of IDs (myIDs) of all entities that belong to a group
// whose color equals myColor and whose type is compatible with myType.
void GroupColor::SetMesh( const SMDS_Mesh* theMesh )
{
  myIDs.clear();
  // Groups are only available on a SMESHDS_Mesh; a plain SMDS_Mesh has none.
  const SMESHDS_Mesh* aMesh = dynamic_cast<const SMESHDS_Mesh*>(theMesh);
  if ( !aMesh )
    return;
  int nbGrp = aMesh->GetNbGroups();
  if ( !nbGrp )
    return;
  // iterates on groups and find necessary elements ids
  const std::set<SMESHDS_GroupBase*>& aGroups = aMesh->GetGroups();
  set<SMESHDS_GroupBase*>::const_iterator GrIt = aGroups.begin();
  for (; GrIt != aGroups.end(); GrIt++)
  {
    SMESHDS_GroupBase* aGrp = (*GrIt);
    if ( !aGrp )
      continue;
    // check type and color of group
    if ( !isEqual( myColor, aGrp->GetColor() ))
      continue;
    // IPAL52867 (prevent infinite recursion via GroupOnFilter)
    if ( SMESHDS_GroupOnFilter * gof = dynamic_cast< SMESHDS_GroupOnFilter* >( aGrp ))
      if ( gof->GetPredicate().get() == this )
        continue;
    SMDSAbs_ElementType aGrpElType = (SMDSAbs_ElementType)aGrp->GetType();
    if ( myType == aGrpElType || (myType == SMDSAbs_All && aGrpElType != SMDSAbs_Node) ) {
      // add elements IDS into control
      int aSize = aGrp->Extent();
      for (int i = 0; i < aSize; i++)
        myIDs.insert( aGrp->GetID(i+1) );  // group IDs are 1-based
    }
  }
}
// Parse a color from a string of the form "R;G;B" (components in [0,1]).
// Whitespace is stripped and empty tokens collapse; a missing or non-numeric
// component defaults to 0.
void GroupColor::SetColorStr( const TCollection_AsciiString& theStr )
{
  // Force "C" locale so RealValue() parses '.' as decimal separator.
  Kernel_Utils::Localizer loc;
  TCollection_AsciiString aStr = theStr;
  aStr.RemoveAll( ' ' );
  aStr.RemoveAll( '\t' );
  // Collapse empty fields ";;" -> ";" so Token() numbering stays consistent.
  for ( int aPos = aStr.Search( ";;" ); aPos != -1; aPos = aStr.Search( ";;" ) )
    aStr.Remove( aPos, 2 );
  Standard_Real clr[3];
  clr[0] = clr[1] = clr[2] = 0.;
  for ( int i = 0; i < 3; i++ ) {
    TCollection_AsciiString tmpStr = aStr.Token( ";", i+1 );
    if ( !tmpStr.IsEmpty() && tmpStr.IsRealValue() )
      clr[i] = tmpStr.RealValue();
  }
  myColor = Quantity_Color( clr[0], clr[1], clr[2], Quantity_TOC_RGB );
}
//=======================================================================
// name    : GetColorStr
// Purpose : Get the stored color as a string "R;G;B"
//           (the inverse of SetColorStr)
//=======================================================================
void GroupColor::GetColorStr( TCollection_AsciiString& theResStr ) const
{
  theResStr.Clear();
  theResStr += TCollection_AsciiString( myColor.Red() );
  theResStr += TCollection_AsciiString( ";" ) + TCollection_AsciiString( myColor.Green() );
  theResStr += TCollection_AsciiString( ";" ) + TCollection_AsciiString( myColor.Blue() );
}
//================================================================================
/*
Class : ElemGeomType
Description : Predicate to check element geometry type
*/
//================================================================================
// Predicate: element has a given geometry type (triangle, quadrangle, ...).
ElemGeomType::ElemGeomType()
{
  myMesh = 0;
  myType = SMDSAbs_All;        // no element-type filtering by default
  myGeomType = SMDSGeom_TRIANGLE;
}
void ElemGeomType::SetMesh( const SMDS_Mesh* theMesh )
{
  myMesh = theMesh;
}
// True if the element exists, matches myType (or myType==SMDSAbs_All)
// and its geometry equals myGeomType.
bool ElemGeomType::IsSatisfy( long theId )
{
  if (!myMesh) return false;
  const SMDS_MeshElement* anElem = myMesh->FindElement( theId );
  if ( !anElem )
    return false;
  const SMDSAbs_ElementType anElemType = anElem->GetType();
  if ( myType != SMDSAbs_All && anElemType != myType )
    return false;
  bool isOk = ( anElem->GetGeomType() == myGeomType );
  return isOk;
}
void ElemGeomType::SetType( SMDSAbs_ElementType theType )
{
  myType = theType;
}
SMDSAbs_ElementType ElemGeomType::GetType() const
{
  return myType;
}
// Geometry type (SMDSGeom_*) the predicate checks for.
void ElemGeomType::SetGeomType( SMDSAbs_GeometryType theType )
{
  myGeomType = theType;
}
SMDSAbs_GeometryType ElemGeomType::GetGeomType() const
{
  return myGeomType;
}
//================================================================================
/*
Class : ElemEntityType
Description : Predicate to check element entity type
*/
//================================================================================
// Predicate: element has a given entity type (SMDSEntity_*).
ElemEntityType::ElemEntityType():
  myMesh( 0 ),
  myType( SMDSAbs_All ),
  myEntityType( SMDSEntity_0D )
{
}
void ElemEntityType::SetMesh( const SMDS_Mesh* theMesh )
{
  myMesh = theMesh;
}
// For nodes, mere existence is enough; for other elements the entity
// type must match myEntityType exactly.
bool ElemEntityType::IsSatisfy( long theId )
{
  if ( !myMesh ) return false;
  if ( myType == SMDSAbs_Node )
    return myMesh->FindNode( theId );
  const SMDS_MeshElement* anElem = myMesh->FindElement( theId );
  return ( anElem &&
           myEntityType == anElem->GetEntityType() );
}
void ElemEntityType::SetType( SMDSAbs_ElementType theType )
{
  myType = theType;
}
SMDSAbs_ElementType ElemEntityType::GetType() const
{
  return myType;
}
void ElemEntityType::SetElemEntityType( SMDSAbs_EntityType theEntityType )
{
  myEntityType = theEntityType;
}
SMDSAbs_EntityType ElemEntityType::GetElemEntityType() const
{
  return myEntityType;
}
//================================================================================
/*!
* \brief Class ConnectedElements
*/
//================================================================================
// Predicate: element belongs to the same connected "domain" as a seed node/point.
ConnectedElements::ConnectedElements():
  myNodeID(0), myType( SMDSAbs_All ), myOkIDsReady( false ) {}
SMDSAbs_ElementType ConnectedElements::GetType() const
{ return myType; }
int ConnectedElements::GetNode() const
{ return myXYZ.empty() ? myNodeID : 0; } // myNodeID can be found by myXYZ
std::vector<double> ConnectedElements::GetPoint() const
{ return myXYZ; }
// Invalidate the cached set of IDs belonging to the seed's domain.
void ConnectedElements::clearOkIDs()
{ myOkIDsReady = false; myOkIDs.clear(); }
void ConnectedElements::SetType( SMDSAbs_ElementType theType )
{
  // A type change (or a modified mesh) invalidates the cached domain.
  if ( myType != theType || myMeshModifTracer.IsMeshModified() )
    clearOkIDs();
  myType = theType;
}
void ConnectedElements::SetMesh( const SMDS_Mesh* theMesh )
{
  myMeshModifTracer.SetMesh( theMesh );
  if ( myMeshModifTracer.IsMeshModified() )
  {
    clearOkIDs();
    if ( !myXYZ.empty() )
      SetPoint( myXYZ[0], myXYZ[1], myXYZ[2] ); // find a node near myXYZ it in a new mesh
  }
}
// Set the seed node by ID. The cached domain is kept only when the new seed
// lies in the same connected domain as the previous one.
void ConnectedElements::SetNode( int nodeID )
{
  myNodeID = nodeID;
  myXYZ.clear();
  bool isSameDomain = false;
  if ( myOkIDsReady && myMeshModifTracer.GetMesh() && !myMeshModifTracer.IsMeshModified() )
    if ( const SMDS_MeshNode* n = myMeshModifTracer.GetMesh()->FindNode( myNodeID ))
    {
      // the seed stays in the old domain if any element around it satisfies
      SMDS_ElemIteratorPtr eIt = n->GetInverseElementIterator( myType );
      while ( !isSameDomain && eIt->more() )
        isSameDomain = IsSatisfy( eIt->next()->GetID() );
    }
  if ( !isSameDomain )
    clearOkIDs();
}
// Set the seed as a 3D point: the nearest element found at (x,y,z) supplies
// the seed node. The cached domain survives if the point falls in it.
void ConnectedElements::SetPoint( double x, double y, double z )
{
  myXYZ.resize(3);
  myXYZ[0] = x;
  myXYZ[1] = y;
  myXYZ[2] = z;
  myNodeID = 0;
  bool isSameDomain = false;
  // find myNodeID by myXYZ if possible
  if ( myMeshModifTracer.GetMesh() )
  {
    // NOTE(review): std::auto_ptr is deprecated/removed in modern C++;
    // switch to std::unique_ptr once the project build standard allows it.
    auto_ptr<SMESH_ElementSearcher> searcher
      ( SMESH_MeshAlgos::GetElementSearcher( (SMDS_Mesh&) *myMeshModifTracer.GetMesh() ));
    vector< const SMDS_MeshElement* > foundElems;
    searcher->FindElementsByPoint( gp_Pnt(x,y,z), SMDSAbs_All, foundElems );
    if ( !foundElems.empty() )
    {
      // take the first node of the first element found at the point
      myNodeID = foundElems[0]->GetNode(0)->GetID();
      if ( myOkIDsReady && !myMeshModifTracer.IsMeshModified() )
        isSameDomain = IsSatisfy( foundElems[0]->GetID() );
    }
  }
  if ( !isSameDomain )
    clearOkIDs();
}
// True if the element lies in the connected domain of the seed node.
// On first call (or after invalidation) performs a BFS over node/element
// adjacency and caches the whole domain in myOkIDs.
bool ConnectedElements::IsSatisfy( long theElementId )
{
  // Here we do NOT check if the mesh has changed, we do it in Set...() only!!!
  if ( !myOkIDsReady )
  {
    if ( !myMeshModifTracer.GetMesh() )
      return false;
    const SMDS_MeshNode* node0 = myMeshModifTracer.GetMesh()->FindNode( myNodeID );
    if ( !node0 )
      return false;
    list< const SMDS_MeshNode* > nodeQueue( 1, node0 );
    std::set< int > checkedNodeIDs;
    // algo:
    //   foreach node in nodeQueue:
    //     foreach element sharing a node:
    //       add ID of an element of myType to myOkIDs;
    //       push all element nodes absent from checkedNodeIDs to nodeQueue;
    while ( !nodeQueue.empty() )
    {
      const SMDS_MeshNode* node = nodeQueue.front();
      nodeQueue.pop_front();
      // loop on elements sharing the node
      SMDS_ElemIteratorPtr eIt = node->GetInverseElementIterator();
      while ( eIt->more() )
      {
        // keep elements of myType
        const SMDS_MeshElement* element = eIt->next();
        if ( element->GetType() == myType )
          myOkIDs.insert( myOkIDs.end(), element->GetID() );
        // enqueue nodes of the element
        SMDS_ElemIteratorPtr nIt = element->nodesIterator();
        while ( nIt->more() )
        {
          const SMDS_MeshNode* n = static_cast< const SMDS_MeshNode* >( nIt->next() );
          if ( checkedNodeIDs.insert( n->GetID() ).second )
            nodeQueue.push_back( n );
        }
      }
    }
    // for nodes, the visited-node set IS the answer
    if ( myType == SMDSAbs_Node )
      std::swap( myOkIDs, checkedNodeIDs );
    // if the whole mesh is one domain, an empty set means "everything matches"
    size_t totalNbElems = myMeshModifTracer.GetMesh()->GetMeshInfo().NbElements( myType );
    if ( myOkIDs.size() == totalNbElems )
      myOkIDs.clear();
    myOkIDsReady = true;
  }
  return myOkIDs.empty() ? true : myOkIDs.count( theElementId );
}
//================================================================================
/*!
* \brief Class CoplanarFaces
*/
//================================================================================
// Predicate: face is coplanar (within an angular tolerance, degrees) with a
// given start face, following face-to-face connectivity.
CoplanarFaces::CoplanarFaces()
  : myFaceID(0), myToler(0)
{
}
// Rebuild the set of coplanar face IDs by flood-fill from the start face:
// neighbours (sharing a link) are accepted while the angle between normals
// stays within myToler.
void CoplanarFaces::SetMesh( const SMDS_Mesh* theMesh )
{
  myMeshModifTracer.SetMesh( theMesh );
  if ( myMeshModifTracer.IsMeshModified() )
  {
    // Build a set of coplanar face ids
    myCoplanarIDs.clear();
    if ( !myMeshModifTracer.GetMesh() || !myFaceID || !myToler )
      return;
    const SMDS_MeshElement* face = myMeshModifTracer.GetMesh()->FindElement( myFaceID );
    if ( !face || face->GetType() != SMDSAbs_Face )
      return;
    bool normOK;
    gp_Vec myNorm = getNormale( static_cast<const SMDS_MeshFace*>(face), &normOK );
    if (!normOK)
      return;
    const double radianTol = myToler * M_PI / 180.;  // tolerance is given in degrees
    std::set< SMESH_TLink > checkedLinks;            // each link is crossed once
    std::list< pair< const SMDS_MeshElement*, gp_Vec > > faceQueue;
    faceQueue.push_back( make_pair( face, myNorm ));
    while ( !faceQueue.empty() )
    {
      face   = faceQueue.front().first;
      myNorm = faceQueue.front().second;
      faceQueue.pop_front();
      for ( int i = 0, nbN = face->NbCornerNodes(); i < nbN; ++i )
      {
        const SMDS_MeshNode*  n1 = face->GetNode( i );
        const SMDS_MeshNode*  n2 = face->GetNode(( i+1 )%nbN);
        if ( !checkedLinks.insert( SMESH_TLink( n1, n2 )).second )
          continue;
        SMDS_ElemIteratorPtr fIt = n1->GetInverseElementIterator(SMDSAbs_Face);
        while ( fIt->more() )
        {
          const SMDS_MeshElement* f = fIt->next();
          if ( f->GetNodeIndex( n2 ) > -1 )  // f shares the link (n1,n2)
          {
            gp_Vec norm = getNormale( static_cast<const SMDS_MeshFace*>(f), &normOK );
            // degenerate faces (normal not computable) are accepted too
            if (!normOK || myNorm.Angle( norm ) <= radianTol)
            {
              myCoplanarIDs.insert( f->GetID() );
              faceQueue.push_back( make_pair( f, norm ));
            }
          }
        }
      }
    }
  }
}
// A face passes if SetMesh() collected it into the coplanar set.
bool CoplanarFaces::IsSatisfy( long theElementId )
{
  return myCoplanarIDs.find( theElementId ) != myCoplanarIDs.end();
}
/*
*Class : RangeOfIds
*Description : Predicate for Range of Ids.
* Range may be specified with two ways.
* 1. Using AddToRange method
* 2. With SetRangeStr method. Parameter of this method is a string
* like as "1,2,3,50-60,63,67,70-"
*/
//=======================================================================
// name : RangeOfIds
// Purpose : Constructor
//=======================================================================
//=======================================================================
// name    : RangeOfIds
// Purpose : Constructor
//=======================================================================
RangeOfIds::RangeOfIds()
{
  myMesh = 0;
  myType = SMDSAbs_All;  // accept any entity type by default
}
//=======================================================================
// name    : SetMesh
// Purpose : Set mesh
//=======================================================================
void RangeOfIds::SetMesh( const SMDS_Mesh* theMesh )
{
  myMesh = theMesh;
}
//=======================================================================
// name    : AddToRange
// Purpose : Register one explicit ID in the range; always succeeds.
//=======================================================================
bool RangeOfIds::AddToRange( long theEntityId )
{
  myIds.Add( theEntityId );
  return true;
}
//=======================================================================
// name    : GetRangeStr
// Purpose : Get range as a string; single IDs and intervals are merged
//           and emitted sorted by their lower bound.
//           Example: "1,2,3,50-60,63,67,70-"
//=======================================================================
void RangeOfIds::GetRangeStr( TCollection_AsciiString& theResStr )
{
  theResStr.Clear();
  // Collect explicit IDs (as both sort keys and their string form).
  TColStd_SequenceOfInteger     anIntSeq;
  TColStd_SequenceOfAsciiString aStrSeq;
  TColStd_MapIteratorOfMapOfInteger anIter( myIds );
  for ( ; anIter.More(); anIter.Next() )
  {
    int anId = anIter.Key();
    TCollection_AsciiString aStr( anId );
    anIntSeq.Append( anId );
    aStrSeq.Append( aStr );
  }
  // Format each interval as "min-max"; an open bound is left empty.
  for ( int i = 1, n = myMin.Length(); i <= n; i++ )
  {
    int aMinId = myMin( i );
    int aMaxId = myMax( i );
    TCollection_AsciiString aStr;
    if ( aMinId != IntegerFirst() )
      aStr += aMinId;
    aStr += "-";
    if ( aMaxId != IntegerLast() )
      aStr += aMaxId;
    // find position of the string in result sequence and insert string in it
    if ( anIntSeq.Length() == 0 )
    {
      anIntSeq.Append( aMinId );
      aStrSeq.Append( aStr );
    }
    else
    {
      if ( aMinId < anIntSeq.First() )
      {
        anIntSeq.Prepend( aMinId );
        aStrSeq.Prepend( aStr );
      }
      else if ( aMinId > anIntSeq.Last() )
      {
        anIntSeq.Append( aMinId );
        aStrSeq.Append( aStr );
      }
      else
        // insertion sort by the interval's lower bound
        for ( int j = 1, k = anIntSeq.Length(); j <= k; j++ )
          if ( aMinId < anIntSeq( j ) )
          {
            anIntSeq.InsertBefore( j, aMinId );
            aStrSeq.InsertBefore( j, aStr );
            break;
          }
    }
  }
  // Join all fragments with commas.
  if ( aStrSeq.Length() == 0 )
    return;
  theResStr = aStrSeq( 1 );
  for ( int j = 2, k = aStrSeq.Length(); j <= k; j++ )
  {
    theResStr += ",";
    theResStr += aStrSeq( j );
  }
}
//=======================================================================
// name    : SetRangeStr
// Purpose : Define range with string. Tokens are comma/whitespace separated;
//           "a-b" defines an interval, a bare number an explicit ID, and a
//           missing bound means IntegerFirst()/IntegerLast().
//           Returns false on any non-numeric token.
// Example of entry string: "1,2,3,50-60,63,67,70-"
//=======================================================================
bool RangeOfIds::SetRangeStr( const TCollection_AsciiString& theStr )
{
  myMin.Clear();
  myMax.Clear();
  myIds.Clear();
  TCollection_AsciiString aStr = theStr;
  //aStr.RemoveAll( ' ' );
  //aStr.RemoveAll( '\t' );
  // normalize: any whitespace becomes a separator, doubled separators collapse
  for ( int i = 1; i <= aStr.Length(); ++i )
    if ( isspace( aStr.Value( i )))
      aStr.SetValue( i, ',');
  for ( int aPos = aStr.Search( ",," ); aPos != -1; aPos = aStr.Search( ",," ) )
    aStr.Remove( aPos, 1 );
  // walk comma-separated tokens until an empty one is met
  TCollection_AsciiString tmpStr = aStr.Token( ",", 1 );
  int i = 1;
  while ( tmpStr != "" )
  {
    tmpStr = aStr.Token( ",", i++ );
    int aPos = tmpStr.Search( '-' );
    if ( aPos == -1 )
    {
      // single ID
      if ( tmpStr.IsIntegerValue() )
        myIds.Add( tmpStr.IntegerValue() );
      else
        return false;
    }
    else
    {
      // interval "min-max"; either bound may be absent (open range)
      TCollection_AsciiString aMaxStr = tmpStr.Split( aPos );
      TCollection_AsciiString aMinStr = tmpStr;
      while ( aMinStr.Search( "-" ) != -1 ) aMinStr.RemoveAll( '-' );
      while ( aMaxStr.Search( "-" ) != -1 ) aMaxStr.RemoveAll( '-' );
      if ( (!aMinStr.IsEmpty() && !aMinStr.IsIntegerValue()) ||
           (!aMaxStr.IsEmpty() && !aMaxStr.IsIntegerValue()) )
        return false;
      myMin.Append( aMinStr.IsEmpty() ? IntegerFirst() : aMinStr.IntegerValue() );
      myMax.Append( aMaxStr.IsEmpty() ? IntegerLast()  : aMaxStr.IntegerValue() );
    }
  }
  return true;
}
//=======================================================================
// name    : GetType
// Purpose : Get type of supported entities
//=======================================================================
SMDSAbs_ElementType RangeOfIds::GetType() const
{
  return myType;
}
//=======================================================================
// name    : SetType
// Purpose : Set type of supported entities
//=======================================================================
void RangeOfIds::SetType( SMDSAbs_ElementType theType )
{
  myType = theType;
}
//=======================================================================
// name : IsSatisfy
// Purpose : Verify whether entity satisfies to this rpedicate
//=======================================================================
bool RangeOfIds::IsSatisfy( long theId )
{
if ( !myMesh )
return false;
if ( myType == SMDSAbs_Node )
{
if ( myMesh->FindNode( theId ) == 0 )
return false;
}
else
{
const SMDS_MeshElement* anElem = myMesh->FindElement( theId );
if ( anElem == 0 || (myType != anElem->GetType() && myType != SMDSAbs_All ))
return false;
}
if ( myIds.Contains( theId ) )
return true;
for ( int i = 1, n = myMin.Length(); i <= n; i++ )
if ( theId >= myMin( i ) && theId <= myMax( i ) )
return true;
return false;
}
/*
Class : Comparator
Description : Base class for comparators
*/
// Base class for comparators: applies a numerical functor to an entity and
// compares the value against myMargin.
Comparator::Comparator():
  myMargin(0)
{}
Comparator::~Comparator()
{}
void Comparator::SetMesh( const SMDS_Mesh* theMesh )
{
  if ( myFunctor )
    myFunctor->SetMesh( theMesh );
}
// Threshold the functor value is compared against.
void Comparator::SetMargin( double theValue )
{
  myMargin = theValue;
}
void Comparator::SetNumFunctor( NumericalFunctorPtr theFunct )
{
  myFunctor = theFunct;
}
SMDSAbs_ElementType Comparator::GetType() const
{
  return myFunctor ? myFunctor->GetType() : SMDSAbs_All;
}
double Comparator::GetMargin()
{
  return myMargin;
}
/*
  Class       : LessThan
  Description : Comparator "<"
*/
bool LessThan::IsSatisfy( long theId )
{
  return myFunctor && myFunctor->GetValue( theId ) < myMargin;
}
/*
  Class       : MoreThan
  Description : Comparator ">"
*/
bool MoreThan::IsSatisfy( long theId )
{
  return myFunctor && myFunctor->GetValue( theId ) > myMargin;
}
/*
  Class       : EqualTo
  Description : Comparator "=" (equality within myToler)
*/
EqualTo::EqualTo():
  myToler(Precision::Confusion())
{}
bool EqualTo::IsSatisfy( long theId )
{
  return myFunctor && fabs( myFunctor->GetValue( theId ) - myMargin ) < myToler;
}
void EqualTo::SetTolerance( double theToler )
{
  myToler = theToler;
}
double EqualTo::GetTolerance()
{
  return myToler;
}
/*
  Class       : LogicalNOT
  Description : Logical NOT predicate
*/
LogicalNOT::LogicalNOT()
{}
LogicalNOT::~LogicalNOT()
{}
// Note: with no sub-predicate set, the result is false (not true).
bool LogicalNOT::IsSatisfy( long theId )
{
  return myPredicate && !myPredicate->IsSatisfy( theId );
}
void LogicalNOT::SetMesh( const SMDS_Mesh* theMesh )
{
  if ( myPredicate )
    myPredicate->SetMesh( theMesh );
}
void LogicalNOT::SetPredicate( PredicatePtr thePred )
{
  myPredicate = thePred;
}
SMDSAbs_ElementType LogicalNOT::GetType() const
{
  return myPredicate ? myPredicate->GetType() : SMDSAbs_All;
}
/*
  Class       : LogicalBinary
  Description : Base class for binary logical predicate
*/
LogicalBinary::LogicalBinary()
{}
LogicalBinary::~LogicalBinary()
{}
void LogicalBinary::SetMesh( const SMDS_Mesh* theMesh )
{
  if ( myPredicate1 )
    myPredicate1->SetMesh( theMesh );
  if ( myPredicate2 )
    myPredicate2->SetMesh( theMesh );
}
void LogicalBinary::SetPredicate1( PredicatePtr thePredicate )
{
  myPredicate1 = thePredicate;
}
void LogicalBinary::SetPredicate2( PredicatePtr thePredicate )
{
  myPredicate2 = thePredicate;
}
// The combined type is the sub-predicates' common type, or SMDSAbs_All
// if they disagree or one is missing.
SMDSAbs_ElementType LogicalBinary::GetType() const
{
  if ( !myPredicate1 || !myPredicate2 )
    return SMDSAbs_All;
  SMDSAbs_ElementType aType1 = myPredicate1->GetType();
  SMDSAbs_ElementType aType2 = myPredicate2->GetType();
  return aType1 == aType2 ? aType1 : SMDSAbs_All;
}
/*
Class : LogicalAND
Description : Logical AND
*/
// Both sub-predicates must be set and accept the entity (short-circuits).
bool LogicalAND::IsSatisfy( long theId )
{
  if ( !myPredicate1 || !myPredicate2 )
    return false;
  return myPredicate1->IsSatisfy( theId ) && myPredicate2->IsSatisfy( theId );
}
/*
Class : LogicalOR
Description : Logical OR
*/
// Both sub-predicates must be set; at least one must accept the entity.
bool LogicalOR::IsSatisfy( long theId )
{
  if ( !myPredicate1 || !myPredicate2 )
    return false;
  return myPredicate1->IsSatisfy( theId ) || myPredicate2->IsSatisfy( theId );
}
/*
FILTER
*/
// #ifdef WITH_TBB
// #include <tbb/parallel_for.h>
// #include <tbb/enumerable_thread_specific.h>
// namespace Parallel
// {
// typedef tbb::enumerable_thread_specific< TIdSequence > TIdSeq;
// struct Predicate
// {
// const SMDS_Mesh* myMesh;
// PredicatePtr myPredicate;
// TIdSeq & myOKIds;
// Predicate( const SMDS_Mesh* m, PredicatePtr p, TIdSeq & ids ):
// myMesh(m), myPredicate(p->Duplicate()), myOKIds(ids) {}
// void operator() ( const tbb::blocked_range<size_t>& r ) const
// {
// for ( size_t i = r.begin(); i != r.end(); ++i )
// if ( myPredicate->IsSatisfy( i ))
// myOKIds.local().push_back();
// }
// }
// }
// #endif
Filter::Filter()
{}
Filter::~Filter()
{}
// Predicate that decides which element IDs GetElementsId() collects.
void Filter::SetPredicate( PredicatePtr thePredicate )
{
  myPredicate = thePredicate;
}
// Collect into theSequence the IDs of all mesh entities of the predicate's
// type that satisfy thePredicate. The sequence is cleared first.
void Filter::GetElementsId( const SMDS_Mesh* theMesh,
                            PredicatePtr     thePredicate,
                            TIdSequence&     theSequence )
{
  theSequence.clear();
  if ( !theMesh || !thePredicate )
    return;
  thePredicate->SetMesh( theMesh );
  SMDS_ElemIteratorPtr elemIt = theMesh->elementsIterator( thePredicate->GetType() );
  if ( !elemIt )
    return;
  while ( elemIt->more() )
  {
    long anId = elemIt->next()->GetID();
    if ( thePredicate->IsSatisfy( anId ))
      theSequence.push_back( anId );
  }
}
// Convenience overload: use the predicate stored by SetPredicate().
void Filter::GetElementsId( const SMDS_Mesh*     theMesh,
                            Filter::TIdSequence& theSequence )
{
  GetElementsId( theMesh, myPredicate, theSequence );
}
/*
ManifoldPart
*/
typedef std::set<SMDS_MeshFace*> TMapOfFacePtr;
/*
  Internal class Link: an undirected edge between two mesh nodes,
  identified by the (pointer) pair of its end nodes.
*/
ManifoldPart::Link::Link( SMDS_MeshNode* theNode1,
                          SMDS_MeshNode* theNode2 )
{
  myNode1 = theNode1;
  myNode2 = theNode2;
}
ManifoldPart::Link::~Link()
{
  // the nodes are not owned, only the references are dropped
  myNode1 = 0;
  myNode2 = 0;
}
// Links are undirected: equal if the node pairs match in either order.
bool ManifoldPart::Link::IsEqual( const ManifoldPart::Link& theLink ) const
{
  return ( myNode1 == theLink.myNode1 && myNode2 == theLink.myNode2 ) ||
         ( myNode1 == theLink.myNode2 && myNode2 == theLink.myNode1 );
}
// Strict weak ordering: lexicographic comparison of the node-pointer pair,
// as required for storing Links in std::set / std::map.
bool ManifoldPart::Link::operator<( const ManifoldPart::Link& x ) const
{
  return myNode1 < x.myNode1 ||
        ( myNode1 == x.myNode1 && myNode2 < x.myNode2 );
}
// Free-standing helper delegating to the symmetric Link equality.
bool ManifoldPart::IsEqual( const ManifoldPart::Link& theLink1,
                            const ManifoldPart::Link& theLink2 )
{
  return theLink1.IsEqual( theLink2 );
}
// Predicate: face belongs to the manifold shell grown from a start face.
ManifoldPart::ManifoldPart()
{
  myMesh = 0;
  myAngToler = Precision::Angular();
  myIsOnlyManifold = true;
  // Fix: myStartElemId was left uninitialized; process() reads it, so calling
  // SetMesh() before SetStartElem() was undefined behavior. -1 is never a
  // valid element ID, making process() fail cleanly instead.
  myStartElemId = -1;
}
ManifoldPart::~ManifoldPart()
{
  myMesh = 0;
}
// Storing the mesh immediately (re)builds the manifold face set.
void ManifoldPart::SetMesh( const SMDS_Mesh* theMesh )
{
  myMesh = theMesh;
  process();
}
SMDSAbs_ElementType ManifoldPart::GetType() const
{ return SMDSAbs_Face; }
// A face passes if process() collected it into the manifold set.
bool ManifoldPart::IsSatisfy( long theElementId )
{
  return myMapIds.Contains( theElementId );
}
// Max angle (radians) between neighbour face normals to stay in the shell.
void ManifoldPart::SetAngleTolerance( const double theAngToler )
{ myAngToler = theAngToler; }
double ManifoldPart::GetAngleTolerance() const
{ return myAngToler; }
// If true, growth stops at links shared by more than two faces.
void ManifoldPart::SetIsOnlyManifold( const bool theIsOnly )
{ myIsOnlyManifold = theIsOnly; }
// ID of the face the shell is grown from (set before SetMesh()).
void ManifoldPart::SetStartElem( const long theStartId )
{ myStartElemId = theStartId; }
bool ManifoldPart::process()
{
myMapIds.Clear();
myMapBadGeomIds.Clear();
myAllFacePtr.clear();
myAllFacePtrIntDMap.clear();
if ( !myMesh )
return false;
// collect all faces into own map
SMDS_FaceIteratorPtr anFaceItr = myMesh->facesIterator();
for (; anFaceItr->more(); )
{
SMDS_MeshFace* aFacePtr = (SMDS_MeshFace*)anFaceItr->next();
myAllFacePtr.push_back( aFacePtr );
myAllFacePtrIntDMap[aFacePtr] = myAllFacePtr.size()-1;
}
SMDS_MeshFace* aStartFace = (SMDS_MeshFace*)myMesh->FindElement( myStartElemId );
if ( !aStartFace )
return false;
// the map of non manifold links and bad geometry
TMapOfLink aMapOfNonManifold;
TColStd_MapOfInteger aMapOfTreated;
// begin cycle on faces from start index and run on vector till the end
// and from begin to start index to cover whole vector
const int aStartIndx = myAllFacePtrIntDMap[aStartFace];
bool isStartTreat = false;
for ( int fi = aStartIndx; !isStartTreat || fi != aStartIndx ; fi++ )
{
if ( fi == aStartIndx )
isStartTreat = true;
// as result next time when fi will be equal to aStartIndx
SMDS_MeshFace* aFacePtr = myAllFacePtr[ fi ];
if ( aMapOfTreated.Contains( aFacePtr->GetID() ) )
continue;
aMapOfTreated.Add( aFacePtr->GetID() );
TColStd_MapOfInteger aResFaces;
if ( !findConnected( myAllFacePtrIntDMap, aFacePtr,
aMapOfNonManifold, aResFaces ) )
continue;
TColStd_MapIteratorOfMapOfInteger anItr( aResFaces );
for ( ; anItr.More(); anItr.Next() )
{
int aFaceId = anItr.Key();
aMapOfTreated.Add( aFaceId );
myMapIds.Add( aFaceId );
}
if ( fi == ( myAllFacePtr.size() - 1 ) )
fi = 0;
} // end run on vector of faces
return !myMapIds.IsEmpty();
}
// Append to theLinks node-pair links built from the face's node sequence.
// NOTE(review): the iterator consumes TWO nodes per loop turn, so for faces
// with more than 3 nodes only every other boundary edge is produced (e.g. a
// quad yields (n0,n1) and (n2,n0)); this looks intentional-legacy — confirm
// before relying on it for full edge coverage.
static void getLinks( const SMDS_MeshFace* theFace,
                      ManifoldPart::TVectorOfLink& theLinks )
{
  int aNbNode = theFace->NbNodes();
  SMDS_ElemIteratorPtr aNodeItr = theFace->nodesIterator();
  int i = 1;
  SMDS_MeshNode* aNode = 0;  // remembers the first node to close the loop
  for ( ; aNodeItr->more() && i <= aNbNode; )
  {
    SMDS_MeshNode* aN1 = (SMDS_MeshNode*)aNodeItr->next();
    if ( i == 1 )
      aNode = aN1;
    i++;
    // past the last node, close the link back to the first node
    SMDS_MeshNode* aN2 = ( i >= aNbNode ) ? aNode : (SMDS_MeshNode*)aNodeItr->next();
    i++;
    ManifoldPart::Link aLink( aN1, aN2 );
    theLinks.push_back( aLink );
  }
}
// Grow from theStartFace the set of face IDs (theResFaces) connected through
// shared links whose normals stay within myAngToler. Links shared by more
// than two faces are recorded in theNonManifold and (in only-manifold mode)
// are not crossed. Faces with a degenerate normal go to myMapBadGeomIds.
// Returns true if any face was collected.
bool ManifoldPart::findConnected
                 ( const ManifoldPart::TDataMapFacePtrInt& theAllFacePtrInt,
                  SMDS_MeshFace*                           theStartFace,
                  ManifoldPart::TMapOfLink&                theNonManifold,
                  TColStd_MapOfInteger&                    theResFaces )
{
  theResFaces.Clear();
  if ( !theAllFacePtrInt.size() )
    return false;
  if ( getNormale( theStartFace ).SquareModulus() <= gp::Resolution() )
  {
    // start face has no usable normal: remember it and give up
    myMapBadGeomIds.Add( theStartFace->GetID() );
    return false;
  }
  ManifoldPart::TMapOfLink aMapOfBoundary, aMapToSkip;
  ManifoldPart::TVectorOfLink aSeqOfBoundary;
  theResFaces.Add( theStartFace->GetID() );
  ManifoldPart::TDataMapOfLinkFacePtr aDMapLinkFace;
  expandBoundary( aMapOfBoundary, aSeqOfBoundary,
                  aDMapLinkFace, theNonManifold, theStartFace );
  bool isDone = false;
  // iterate until a full pass over the boundary adds no new face
  while ( !isDone && aMapOfBoundary.size() != 0 )
  {
    bool isToReset = false;  // set when the boundary changed mid-pass
    ManifoldPart::TVectorOfLink::iterator pLink = aSeqOfBoundary.begin();
    for ( ; !isToReset && pLink != aSeqOfBoundary.end(); ++pLink )
    {
      ManifoldPart::Link aLink = *pLink;
      if ( aMapToSkip.find( aLink ) != aMapToSkip.end() )
        continue;
      // each link could be treated only once
      aMapToSkip.insert( aLink );
      ManifoldPart::TVectorOfFacePtr aFaces;
      // find next
      if ( myIsOnlyManifold &&
           (theNonManifold.find( aLink ) != theNonManifold.end()) )
        continue;
      else
      {
        getFacesByLink( aLink, aFaces );
        // filter the element to keep only indicated elements
        ManifoldPart::TVectorOfFacePtr aFiltered;
        ManifoldPart::TVectorOfFacePtr::iterator pFace = aFaces.begin();
        for ( ; pFace != aFaces.end(); ++pFace )
        {
          SMDS_MeshFace* aFace = *pFace;
          if ( myAllFacePtrIntDMap.find( aFace ) != myAllFacePtrIntDMap.end() )
            aFiltered.push_back( aFace );
        }
        aFaces = aFiltered;
        if ( aFaces.size() < 2 ) // no neihgbour faces
          continue;
        else if ( myIsOnlyManifold && aFaces.size() > 2 ) // non manifold case
        {
          theNonManifold.insert( aLink );
          continue;
        }
      }
      // compare normal with normals of neighbor element
      SMDS_MeshFace* aPrevFace = aDMapLinkFace[ aLink ];
      ManifoldPart::TVectorOfFacePtr::iterator pFace = aFaces.begin();
      for ( ; pFace != aFaces.end(); ++pFace )
      {
        SMDS_MeshFace* aNextFace = *pFace;
        if ( aPrevFace == aNextFace )
          continue;
        int anNextFaceID = aNextFace->GetID();
        if ( myIsOnlyManifold && theResFaces.Contains( anNextFaceID ) )
         // should not be with non manifold restriction. probably bad topology
          continue;
        // check if face was treated and skipped
        if ( myMapBadGeomIds.Contains( anNextFaceID ) ||
             !isInPlane( aPrevFace, aNextFace ) )
          continue;
        // add new element to connected and extend the boundaries.
        theResFaces.Add( anNextFaceID );
        expandBoundary( aMapOfBoundary, aSeqOfBoundary,
                        aDMapLinkFace, theNonManifold, aNextFace );
        isToReset = true;  // boundary mutated: restart the pass
      }
    }
    isDone = !isToReset;
  }
  return !theResFaces.IsEmpty();
}
// True if the normals of the two faces are parallel within myAngToler.
// A face whose normal cannot be computed is recorded in myMapBadGeomIds.
bool ManifoldPart::isInPlane( const SMDS_MeshFace* theFace1,
                              const SMDS_MeshFace* theFace2 )
{
  gp_Dir aNorm1 = gp_Dir( getNormale( theFace1 ) );
  gp_XYZ aNorm2XYZ = getNormale( theFace2 );
  if ( aNorm2XYZ.SquareModulus() <= gp::Resolution() )
  {
    // degenerate second face: remember and reject it
    myMapBadGeomIds.Add( theFace2->GetID() );
    return false;
  }
  if ( aNorm1.IsParallel( gp_Dir( aNorm2XYZ ), myAngToler ) )
    return true;
  return false;
}
// Merge the links of theNextFace into the current shell boundary:
// a link already on the boundary becomes interior (removed, in manifold
// mode), a new link is appended and remembered with its owner face.
void ManifoldPart::expandBoundary
                 ( ManifoldPart::TMapOfLink&            theMapOfBoundary,
                   ManifoldPart::TVectorOfLink&         theSeqOfBoundary,
                   ManifoldPart::TDataMapOfLinkFacePtr& theDMapLinkFacePtr,
                   ManifoldPart::TMapOfLink&            theNonManifold,
                   SMDS_MeshFace*                       theNextFace ) const
{
  ManifoldPart::TVectorOfLink aLinks;
  getLinks( theNextFace, aLinks );
  int aNbLink = (int)aLinks.size();
  for ( int i = 0; i < aNbLink; i++ )
  {
    ManifoldPart::Link aLink = aLinks[ i ];
    // non-manifold links are never part of the boundary in manifold mode
    if ( myIsOnlyManifold && (theNonManifold.find( aLink ) != theNonManifold.end()) )
      continue;
    if ( theMapOfBoundary.find( aLink ) != theMapOfBoundary.end() )
    {
      if ( myIsOnlyManifold )
      {
        // the link is now shared by two shell faces: remove from boundary
        theMapOfBoundary.erase( aLink );
        ManifoldPart::TVectorOfLink::iterator pLink = theSeqOfBoundary.begin();
        for ( ; pLink != theSeqOfBoundary.end(); ++pLink )
        {
          ManifoldPart::Link aBoundLink = *pLink;
          if ( aBoundLink.IsEqual( aLink ) )
          {
            theSeqOfBoundary.erase( pLink );
            break;
          }
        }
      }
    }
    else
    {
      theMapOfBoundary.insert( aLink );
      theSeqOfBoundary.push_back( aLink );
      theDMapLinkFacePtr[ aLink ] = theNextFace;
    }
  }
}
// Collect into theFaces the faces that contain BOTH nodes of theLink
// (intersection of the two nodes' inverse-face sets).
void ManifoldPart::getFacesByLink( const ManifoldPart::Link& theLink,
                                   ManifoldPart::TVectorOfFacePtr& theFaces ) const
{
  std::set<SMDS_MeshCell *> aSetOfFaces;
  // take all faces that shared first node
  SMDS_ElemIteratorPtr anItr = theLink.myNode1->facesIterator();
  for ( ; anItr->more(); )
  {
    SMDS_MeshFace* aFace = (SMDS_MeshFace*)anItr->next();
    if ( !aFace )
      continue;
    aSetOfFaces.insert( aFace );
  }
  // take all faces that shared second node
  anItr = theLink.myNode2->facesIterator();
  // find the common part of two sets
  for ( ; anItr->more(); )
  {
    SMDS_MeshFace* aFace = (SMDS_MeshFace*)anItr->next();
    if ( aSetOfFaces.count( aFace ) )
      theFaces.push_back( aFace );
  }
}
/*
Class : BelongToMeshGroup
Description : Verify whether a mesh element is included into a mesh group
*/
// Predicate: entity is a member of a given mesh group. The group may be set
// directly or resolved later from its persistent store name.
BelongToMeshGroup::BelongToMeshGroup(): myGroup( 0 )
{
}
void BelongToMeshGroup::SetGroup( SMESHDS_GroupBase* g )
{
  myGroup = g;
}
// Persistent name used to locate the group inside the mesh in SetMesh().
void BelongToMeshGroup::SetStoreName( const std::string& sn )
{
  myStoreName = sn;
}
void BelongToMeshGroup::SetMesh( const SMDS_Mesh* theMesh )
{
  // drop a group belonging to a different mesh
  if ( myGroup && myGroup->GetMesh() != theMesh )
  {
    myGroup = 0;
  }
  // re-resolve the group by its store name on the new mesh
  if ( !myGroup && !myStoreName.empty() )
  {
    if ( const SMESHDS_Mesh* aMesh = dynamic_cast<const SMESHDS_Mesh*>(theMesh))
    {
      const std::set<SMESHDS_GroupBase*>& grps = aMesh->GetGroups();
      std::set<SMESHDS_GroupBase*>::const_iterator g = grps.begin();
      for ( ; g != grps.end() && !myGroup; ++g )
        if ( *g && myStoreName == (*g)->GetStoreName() )
          myGroup = *g;
    }
  }
  if ( myGroup )
  {
    myGroup->IsEmpty(); // make GroupOnFilter update its predicate
  }
}
bool BelongToMeshGroup::IsSatisfy( long theElementId )
{
  return myGroup ? myGroup->Contains( theElementId ) : false;
}
SMDSAbs_ElementType BelongToMeshGroup::GetType() const
{
  return myGroup ? myGroup->GetType() : SMDSAbs_All;
}
/*
ElementsOnSurface
*/
// Predicate: all nodes of an element lie on a given geometric surface
// within a tolerance.
ElementsOnSurface::ElementsOnSurface()
{
  myIds.Clear();
  myType = SMDSAbs_All;
  mySurf.Nullify();
  myToler = Precision::Confusion();
  myUseBoundaries = false;
}
ElementsOnSurface::~ElementsOnSurface()
{
}
void ElementsOnSurface::SetMesh( const SMDS_Mesh* theMesh )
{
  myMeshModifTracer.SetMesh( theMesh );
  // recompute the cached IDs only when the mesh actually changed
  if ( myMeshModifTracer.IsMeshModified())
    process();
}
bool ElementsOnSurface::IsSatisfy( long theElementId )
{
  return myIds.Contains( theElementId );
}
SMDSAbs_ElementType ElementsOnSurface::GetType() const
{ return myType; }
// NOTE(review): changing the tolerance clears the cached IDs but does not
// re-run process() — results are empty until the next SetMesh()/SetSurface();
// confirm this is the intended contract.
void ElementsOnSurface::SetTolerance( const double theToler )
{
  if ( myToler != theToler )
    myIds.Clear();
  myToler = theToler;
}
double ElementsOnSurface::GetTolerance() const
{ return myToler; }
// Restrict (or not) the projection domain to the face's boundaries;
// changing it re-initializes the projector via SetSurface().
void ElementsOnSurface::SetUseBoundaries( bool theUse )
{
  if ( myUseBoundaries != theUse ) {
    myUseBoundaries = theUse;
    SetSurface( mySurf, myType );
  }
}
// Store the surface (must be a TopoDS face), set up the point projector over
// the face's (restricted or natural) parametric range and rebuild the cache.
void ElementsOnSurface::SetSurface( const TopoDS_Shape& theShape,
                                    const SMDSAbs_ElementType theType )
{
  myIds.Clear();
  myType = theType;
  mySurf.Nullify();
  if ( theShape.IsNull() || theShape.ShapeType() != TopAbs_FACE )
    return;
  mySurf = TopoDS::Face( theShape );
  // myUseBoundaries == true restricts the UV range to the face boundaries
  BRepAdaptor_Surface SA( mySurf, myUseBoundaries );
  Standard_Real
    u1 = SA.FirstUParameter(),
    u2 = SA.LastUParameter(),
    v1 = SA.FirstVParameter(),
    v2 = SA.LastVParameter();
  Handle(Geom_Surface) surf = BRep_Tool::Surface( mySurf );
  myProjector.Init( surf, u1,u2, v1,v2 );
  process();
}
// Re-test every element of myType against the surface and cache the passing IDs.
void ElementsOnSurface::process()
{
  myIds.Clear();
  if ( mySurf.IsNull() )
    return;
  if ( !myMeshModifTracer.GetMesh() )
    return;
  myIds.ReSize( myMeshModifTracer.GetMesh()->GetMeshInfo().NbElements( myType ));
  SMDS_ElemIteratorPtr anIter = myMeshModifTracer.GetMesh()->elementsIterator( myType );
  for(; anIter->more(); )
    process( anIter->next() );
}
// Add the element's ID to the cache if ALL of its nodes lie on the surface;
// bail out on the first node that does not.
void ElementsOnSurface::process( const SMDS_MeshElement* theElemPtr )
{
  SMDS_ElemIteratorPtr aNodeItr = theElemPtr->nodesIterator();
  while ( aNodeItr->more() )
  {
    SMDS_MeshNode* aNode = (SMDS_MeshNode*)aNodeItr->next();
    if ( !isOnSurface( aNode ) )
      return;  // one node off the surface is enough to reject the element
  }
  myIds.Add( theElemPtr->GetID() );
}
// True if the node's distance to the surface (via point projection) is
// within myToler.
bool ElementsOnSurface::isOnSurface( const SMDS_MeshNode* theNode )
{
  if ( mySurf.IsNull() )
    return false;
  gp_Pnt aPnt( theNode->X(), theNode->Y(), theNode->Z() );
  // Earlier analytic shortcuts for planes/cylinders, kept for reference:
  //   double aToler2 = myToler * myToler;
  //   if ( mySurf->IsKind(STANDARD_TYPE(Geom_Plane)))
  //   {
  //     gp_Pln aPln = Handle(Geom_Plane)::DownCast(mySurf)->Pln();
  //     if ( aPln.SquareDistance( aPnt ) > aToler2 )
  //       return false;
  //   }
  //   else if ( mySurf->IsKind(STANDARD_TYPE(Geom_CylindricalSurface)))
  //   {
  //     gp_Cylinder aCyl = Handle(Geom_CylindricalSurface)::DownCast(mySurf)->Cylinder();
  //     double aRad = aCyl.Radius();
  //     gp_Ax3 anAxis = aCyl.Position();
  //     gp_XYZ aLoc = aCyl.Location().XYZ();
  //     double aXDist = anAxis.XDirection().XYZ() * ( aPnt.XYZ() - aLoc );
  //     double aYDist = anAxis.YDirection().XYZ() * ( aPnt.XYZ() - aLoc );
  //     if ( fabs(aXDist*aXDist + aYDist*aYDist - aRad*aRad) > aToler2 )
  //       return false;
  //   }
  //   else
  //     return false;
  myProjector.Perform( aPnt );
  bool isOn = ( myProjector.IsDone() && myProjector.LowerDistance() <= myToler );
  return isOn;
}
/*
ElementsOnShape
*/
// Default state: no mesh, all element types, Precision::Confusion()
// tolerance, "lays on" mode (one node in/on the shape suffices).
ElementsOnShape::ElementsOnShape()
  : //myMesh(0),
    myType(SMDSAbs_All),
    myToler(Precision::Confusion()),
    myAllNodesFlag(false)
{
}

ElementsOnShape::~ElementsOnShape()
{
  clearClassifiers();  // this object owns the TClassifier instances
}

SMDSAbs_ElementType ElementsOnShape::GetType() const
{
  return myType;
}

// A changed tolerance re-creates the classifiers via SetShape(), since each
// TClassifier captures the tolerance at Init() time.
void ElementsOnShape::SetTolerance (const double theToler)
{
  if (myToler != theToler) {
    myToler = theToler;
    SetShape(myShape, myType);
  }
}

double ElementsOnShape::GetTolerance() const
{
  return myToler;
}

// true  -> ALL nodes must be in/on the shape ("belongs to");
// false -> at least one node in/on suffices ("lays on").
void ElementsOnShape::SetAllNodes (bool theAllNodes)
{
  myAllNodesFlag = theAllNodes;
}
// Attach the mesh and (re)size the per-node classification cache.
// myNodeIsChecked / myNodeIsOut are indexed by node ID and memoize the
// in/out answer, so nodes shared between elements are classified once.
void ElementsOnShape::SetMesh (const SMDS_Mesh* theMesh)
{
  myMeshModifTracer.SetMesh( theMesh );
  if ( myMeshModifTracer.IsMeshModified())
  {
    size_t nbNodes = theMesh ? theMesh->NbNodes() : 0;
    if ( myNodeIsChecked.size() == nbNodes )
    {
      std::fill( myNodeIsChecked.begin(), myNodeIsChecked.end(), false );
    }
    else
    {
      SMESHUtils::FreeVector( myNodeIsChecked );  // actually release memory
      SMESHUtils::FreeVector( myNodeIsOut );
      myNodeIsChecked.resize( nbNodes, false );
      myNodeIsOut.resize( nbNodes );
    }
  }
}

// Fetch the memoized in/out state of a node into isOut.
// Returns false if the node has not been classified yet or its ID is beyond
// the cache (NOTE(review): assumes node IDs stay < NbNodes; gaps in the ID
// numbering would merely make the cache partially ineffective).
bool ElementsOnShape::getNodeIsOut( const SMDS_MeshNode* n, bool& isOut )
{
  if ( n->GetID() >= (int) myNodeIsChecked.size() ||
       !myNodeIsChecked[ n->GetID() ])
    return false;

  isOut = myNodeIsOut[ n->GetID() ];
  return true;
}

// Memoize the classification of a node (silently ignored if out of range).
void ElementsOnShape::setNodeIsOut( const SMDS_MeshNode* n, bool isOut )
{
  if ( n->GetID() < (int) myNodeIsChecked.size() )
  {
    myNodeIsChecked[ n->GetID() ] = true;
    myNodeIsOut   [ n->GetID() ] = isOut;
  }
}
// Define the shape to classify against: collect sub-shapes by decreasing
// dimension (SOLIDs, then FACEs/EDGEs/VERTEXes not already inside a
// collected higher-dimension sub-shape) and build one TClassifier each.
void ElementsOnShape::SetShape (const TopoDS_Shape&       theShape,
                                const SMDSAbs_ElementType theType)
{
  myType  = theType;
  myShape = theShape;
  if ( myShape.IsNull() ) return;

  TopTools_IndexedMapOfShape shapesMap;
  TopAbs_ShapeEnum shapeTypes[4] = { TopAbs_SOLID, TopAbs_FACE, TopAbs_EDGE, TopAbs_VERTEX };
  TopExp_Explorer sub;
  for ( int i = 0; i < 4; ++i )
  {
    // while nothing is collected yet, explore without an "avoid" filter
    if ( shapesMap.IsEmpty() )
      for ( sub.Init( myShape, shapeTypes[i] ); sub.More(); sub.Next() )
        shapesMap.Add( sub.Current() );
    // then add only sub-shapes NOT belonging to the previous type
    if ( i > 0 )
      for ( sub.Init( myShape, shapeTypes[i], shapeTypes[i-1] ); sub.More(); sub.Next() )
        shapesMap.Add( sub.Current() );
  }

  clearClassifiers();
  myClassifiers.resize( shapesMap.Extent() );
  for ( int i = 0; i < shapesMap.Extent(); ++i )
    myClassifiers[ i ] = new TClassifier( shapesMap( i+1 ), myToler );

  if ( theType == SMDSAbs_Node )
  {
    // node predicate: each node is queried once, memoization is useless
    SMESHUtils::FreeVector( myNodeIsChecked );
    SMESHUtils::FreeVector( myNodeIsOut );
  }
  else
  {
    // a new shape invalidates all memoized node classifications
    std::fill( myNodeIsChecked.begin(), myNodeIsChecked.end(), false );
  }
}

// Delete the owned classifiers.
void ElementsOnShape::clearClassifiers()
{
  for ( size_t i = 0; i < myClassifiers.size(); ++i )
    delete myClassifiers[ i ];
  myClassifiers.clear();
}
// Classify the entity with ID elemId. A node is "in" when at least one
// sub-shape classifier does not report it out.
// myAllNodesFlag==true : every node must be in ("belongs to");
// myAllNodesFlag==false: one node in is enough ("lays on").
bool ElementsOnShape::IsSatisfy (long elemId)
{
  const SMDS_Mesh*        mesh = myMeshModifTracer.GetMesh();
  const SMDS_MeshElement* elem =
    ( myType == SMDSAbs_Node ? mesh->FindNode( elemId ) : mesh->FindElement( elemId ));
  if ( !elem || myClassifiers.empty() )
    return false;

  bool isSatisfy = myAllNodesFlag, isNodeOut;

  gp_XYZ centerXYZ (0, 0, 0);

  SMDS_ElemIteratorPtr aNodeItr = elem->nodesIterator();
  // stop as soon as the verdict can no longer change
  while (aNodeItr->more() && (isSatisfy == myAllNodesFlag))
  {
    SMESH_TNodeXYZ aPnt( aNodeItr->next() );
    centerXYZ += aPnt;

    isNodeOut = true;
    if ( !getNodeIsOut( aPnt._node, isNodeOut ))
    {
      // not memoized yet: ask classifiers until one keeps the node
      for ( size_t i = 0; i < myClassifiers.size() && isNodeOut; ++i )
        isNodeOut = myClassifiers[i]->IsOut( aPnt );
      setNodeIsOut( aPnt._node, isNodeOut );
    }
    isSatisfy = !isNodeOut;
  }

  // Check the center point for volumes MantisBug 0020168
  // (all corner nodes can lie ON a solid while the element is outside)
  if (isSatisfy &&
      myAllNodesFlag &&
      myClassifiers[0]->ShapeType() == TopAbs_SOLID)
  {
    centerXYZ /= elem->NbNodes();
    isSatisfy = false;
    for ( size_t i = 0; i < myClassifiers.size() && !isSatisfy; ++i )
      isSatisfy = ! myClassifiers[i]->IsOut( centerXYZ );
  }

  return isSatisfy;
}
TopAbs_ShapeEnum ElementsOnShape::TClassifier::ShapeType() const
{
  return myShape.ShapeType();
}

// Dispatch to the shape-type-specific test selected in Init().
bool ElementsOnShape::TClassifier::IsOut(const gp_Pnt& p)
{
  return (this->*myIsOutFun)( p );
}

// Prepare the classification tool matching the shape type and remember it
// as a pointer-to-member, so IsOut() is one indirect call.
// Throws SALOME_Exception for unsupported shape types.
void ElementsOnShape::TClassifier::Init (const TopoDS_Shape& theShape, double theTol)
{
  myShape = theShape;
  myTol   = theTol;
  switch ( myShape.ShapeType() )
  {
  case TopAbs_SOLID: {
    if ( isBox( theShape ))
    {
      // axis-aligned box: a bounding-box test is much cheaper than the
      // generic solid classifier
      myIsOutFun = & ElementsOnShape::TClassifier::isOutOfBox;
    }
    else
    {
      mySolidClfr.Load(theShape);
      myIsOutFun = & ElementsOnShape::TClassifier::isOutOfSolid;
    }
    break;
  }
  case TopAbs_FACE: {
    Standard_Real u1,u2,v1,v2;
    Handle(Geom_Surface) surf = BRep_Tool::Surface( TopoDS::Face( theShape ));
    surf->Bounds( u1,u2,v1,v2 );
    myProjFace.Init(surf, u1,u2, v1,v2, myTol );
    myIsOutFun = & ElementsOnShape::TClassifier::isOutOfFace;
    break;
  }
  case TopAbs_EDGE: {
    Standard_Real u1, u2;
    Handle(Geom_Curve) curve = BRep_Tool::Curve( TopoDS::Edge(theShape), u1, u2);
    myProjEdge.Init(curve, u1, u2);
    myIsOutFun = & ElementsOnShape::TClassifier::isOutOfEdge;
    break;
  }
  case TopAbs_VERTEX:{
    myVertexXYZ = BRep_Tool::Pnt( TopoDS::Vertex( theShape ) );
    myIsOutFun = & ElementsOnShape::TClassifier::isOutOfVertex;
    break;
  }
  default:
    throw SALOME_Exception("Programmer error in usage of ElementsOnShape::TClassifier");
  }
}
// Out of a solid when the 3D classifier reports neither IN nor ON.
bool ElementsOnShape::TClassifier::isOutOfSolid (const gp_Pnt& p)
{
  mySolidClfr.Perform( p, myTol );
  return ( mySolidClfr.State() != TopAbs_IN && mySolidClfr.State() != TopAbs_ON );
}

// Fast path for box-like solids: test against the tolerance-enlarged
// bounding box prepared by isBox().
bool ElementsOnShape::TClassifier::isOutOfBox (const gp_Pnt& p)
{
  return myBox.IsOut( p.XYZ() );
}

// Out of a face when the projection fails, is farther than myTol, or the
// projected UV point falls outside the face's 2D domain.
bool ElementsOnShape::TClassifier::isOutOfFace (const gp_Pnt& p)
{
  myProjFace.Perform( p );
  if ( myProjFace.IsDone() && myProjFace.LowerDistance() <= myTol )
  {
    // check relatively to the face
    Quantity_Parameter u, v;
    myProjFace.LowerDistanceParameters(u, v);
    gp_Pnt2d aProjPnt (u, v);
    BRepClass_FaceClassifier aClsf ( TopoDS::Face( myShape ), aProjPnt, myTol );
    if ( aClsf.State() == TopAbs_IN || aClsf.State() == TopAbs_ON )
      return false;
  }
  return true;
}

// Out of an edge when no projection point exists within myTol.
bool ElementsOnShape::TClassifier::isOutOfEdge (const gp_Pnt& p)
{
  myProjEdge.Perform( p );
  return ! ( myProjEdge.NbPoints() > 0 && myProjEdge.LowerDistance() <= myTol );
}

// Out of a vertex when farther than myTol from it.
bool ElementsOnShape::TClassifier::isOutOfVertex(const gp_Pnt& p)
{
  return ( myVertexXYZ.Distance( p ) > myTol );
}
// Detect whether a solid is (approximately) an axis-aligned box: exactly 8
// vertices, each coordinate of each vertex coinciding, within myTol, with
// the corresponding bounding-box corner coordinate.
// Side effect on success: myBox holds the box enlarged by myTol, ready for
// isOutOfBox().
bool ElementsOnShape::TClassifier::isBox (const TopoDS_Shape& theShape)
{
  TopTools_IndexedMapOfShape vMap;
  TopExp::MapShapes( theShape, TopAbs_VERTEX, vMap );
  if ( vMap.Extent() != 8 )
    return false;

  myBox.Clear();
  for ( int i = 1; i <= 8; ++i )
    myBox.Add( BRep_Tool::Pnt( TopoDS::Vertex( vMap( i ))).XYZ() );

  gp_XYZ pMin = myBox.CornerMin(), pMax = myBox.CornerMax();
  for ( int i = 1; i <= 8; ++i )
  {
    gp_Pnt p = BRep_Tool::Pnt( TopoDS::Vertex( vMap( i )));
    for ( int iC = 1; iC <= 3; ++ iC )
    {
      // each coordinate must match one of the two box corners
      double d1 = Abs( pMin.Coord( iC ) - p.Coord( iC ));
      double d2 = Abs( pMax.Coord( iC ) - p.Coord( iC ));
      if ( Min( d1, d2 ) > myTol )
        return false;
    }
  }
  myBox.Enlarge( myTol );
  return true;
}
/*
Class : BelongToGeom
Description : Predicate for verifying whether entity belongs to
specified geometrical support
*/
// BelongToGeom: all defaults off — no mesh, all types, default tolerance.
BelongToGeom::BelongToGeom()
  : myMeshDS(NULL),
    myType(SMDSAbs_All),
    myIsSubshape(false),
    myTolerance(Precision::Confusion())
{}

// Attach the mesh; only an SMESHDS_Mesh is usable (other SMDS_Mesh kinds
// leave myMeshDS NULL), then re-derive the checking strategy.
void BelongToGeom::SetMesh( const SMDS_Mesh* theMesh )
{
  myMeshDS = dynamic_cast<const SMESHDS_Mesh*>(theMesh);
  init();
}

// Set the geometrical support and re-derive the checking strategy.
void BelongToGeom::SetGeom( const TopoDS_Shape& theShape )
{
  myShape = theShape;
  init();
}
// True if theShape — or, for (COMP)COMPOUNDs, every shape inside it,
// recursively — is contained in theMap of the main shape's sub-shapes.
// NOTE(review): an empty compound is reported as a sub-shape — confirm
// that is the intended answer.
static bool IsSubShape (const TopTools_IndexedMapOfShape& theMap,
                        const TopoDS_Shape& theShape)
{
  if (theMap.Contains(theShape)) return true;

  if (theShape.ShapeType() == TopAbs_COMPOUND ||
      theShape.ShapeType() == TopAbs_COMPSOLID)
  {
    TopoDS_Iterator anIt (theShape, Standard_True, Standard_True);
    for (; anIt.More(); anIt.Next())
    {
      if (!IsSubShape(theMap, anIt.Value())) {
        return false;
      }
    }
    return true;
  }
  return false;
}
// Decide the checking strategy: when myShape is a sub-shape of the meshed
// main shape, IsSatisfy() can use mesh-to-shape links; a geometrical
// ElementsOnShape classifier is prepared in any case, as a fallback for
// entities not bound to geometry.
void BelongToGeom::init()
{
  if (!myMeshDS || myShape.IsNull()) return;

  // is sub-shape of main shape?
  TopoDS_Shape aMainShape = myMeshDS->ShapeToMesh();
  if (aMainShape.IsNull()) {
    myIsSubshape = false;
  }
  else {
    TopTools_IndexedMapOfShape aMap;
    TopExp::MapShapes(aMainShape, aMap);
    myIsSubshape = IsSubShape(aMap, myShape);
  }

  //if (!myIsSubshape) // to be always ready to check an element not bound to geometry
  {
    myElementsOnShapePtr.reset(new ElementsOnShape());
    myElementsOnShapePtr->SetTolerance(myTolerance);
    myElementsOnShapePtr->SetAllNodes(true); // "belong", while false means "lays on"
    myElementsOnShapePtr->SetMesh(myMeshDS);
    myElementsOnShapePtr->SetShape(myShape, myType);
  }
}
// True if theElem is registered in the sub-mesh of any sub-shape of
// theShape having type theFindShapeEnum (sub-shapes inside a
// theAvoidShapeEnum ancestor are skipped).
static bool IsContains( const SMESHDS_Mesh*     theMeshDS,
                        const TopoDS_Shape&     theShape,
                        const SMDS_MeshElement* theElem,
                        TopAbs_ShapeEnum        theFindShapeEnum,
                        TopAbs_ShapeEnum        theAvoidShapeEnum = TopAbs_SHAPE )
{
  TopExp_Explorer anExp( theShape,theFindShapeEnum,theAvoidShapeEnum );

  while( anExp.More() )
  {
    const TopoDS_Shape& aShape = anExp.Current();
    if( SMESHDS_SubMesh* aSubMesh = theMeshDS->MeshElements( aShape ) ){
      if( aSubMesh->Contains( theElem ) )
        return true;
    }
    anExp.Next();
  }
  return false;
}
// Check entity theId. Strategy:
//  - shape is not a sub-shape of the meshed geometry -> geometrical check;
//  - entity has no shape association (getshapeId()<1) -> geometrical check;
//  - otherwise -> mesh-to-shape links, dispatched on the node position
//    type or the element type.
bool BelongToGeom::IsSatisfy (long theId)
{
  if (myMeshDS == 0 || myShape.IsNull())
    return false;

  if (!myIsSubshape)
  {
    return myElementsOnShapePtr->IsSatisfy(theId);
  }

  // Case of submesh
  if (myType == SMDSAbs_Node)
  {
    if( const SMDS_MeshNode* aNode = myMeshDS->FindNode( theId ) )
    {
      if ( aNode->getshapeId() < 1 )
        return myElementsOnShapePtr->IsSatisfy(theId);

      const SMDS_PositionPtr& aPosition = aNode->GetPosition();
      SMDS_TypeOfPosition aTypeOfPosition = aPosition->GetTypeOfPosition();
      switch( aTypeOfPosition )
      {
      case SMDS_TOP_VERTEX : return ( IsContains( myMeshDS,myShape,aNode,TopAbs_VERTEX ));
      case SMDS_TOP_EDGE   : return ( IsContains( myMeshDS,myShape,aNode,TopAbs_EDGE ));
      case SMDS_TOP_FACE   : return ( IsContains( myMeshDS,myShape,aNode,TopAbs_FACE ));
      case SMDS_TOP_3DSPACE: return ( IsContains( myMeshDS,myShape,aNode,TopAbs_SOLID ) ||
                                      IsContains( myMeshDS,myShape,aNode,TopAbs_SHELL ));
      }
    }
  }
  else
  {
    if ( const SMDS_MeshElement* anElem = myMeshDS->FindElement( theId ))
    {
      if ( anElem->getshapeId() < 1 )
        return myElementsOnShapePtr->IsSatisfy(theId);

      if( myType == SMDSAbs_All )
      {
        return ( IsContains( myMeshDS,myShape,anElem,TopAbs_EDGE ) ||
                 IsContains( myMeshDS,myShape,anElem,TopAbs_FACE ) ||
                 IsContains( myMeshDS,myShape,anElem,TopAbs_SOLID )||
                 IsContains( myMeshDS,myShape,anElem,TopAbs_SHELL ));
      }
      else if( myType == anElem->GetType() )
      {
        switch( myType )
        {
        case SMDSAbs_Edge  : return ( IsContains( myMeshDS,myShape,anElem,TopAbs_EDGE ));
        case SMDSAbs_Face  : return ( IsContains( myMeshDS,myShape,anElem,TopAbs_FACE ));
        case SMDSAbs_Volume: return ( IsContains( myMeshDS,myShape,anElem,TopAbs_SOLID )||
                                      IsContains( myMeshDS,myShape,anElem,TopAbs_SHELL ));
        }
      }
    }
  }

  // unknown ID or unhandled position/type combination
  return false;
}
// Restrict the predicate to one element type and re-derive the strategy.
void BelongToGeom::SetType (SMDSAbs_ElementType theType)
{
  myType = theType;
  init();
}

SMDSAbs_ElementType BelongToGeom::GetType() const
{
  return myType;
}

TopoDS_Shape BelongToGeom::GetShape()
{
  return myShape;
}

const SMESHDS_Mesh* BelongToGeom::GetMeshDS() const
{
  return myMeshDS;
}

// Tolerance is used by the geometrical fallback only; hence init() is
// re-run only in the non-sub-shape case.
void BelongToGeom::SetTolerance (double theTolerance)
{
  myTolerance = theTolerance;
  if (!myIsSubshape)
    init();
}

double BelongToGeom::GetTolerance()
{
  return myTolerance;
}
/*
Class : LyingOnGeom
  Description : Predicate for verifying whether an entity lies, fully or
                partially, on the specified geometrical support
*/
// LyingOnGeom: same defaults as BelongToGeom.
LyingOnGeom::LyingOnGeom()
  : myMeshDS(NULL),
    myType(SMDSAbs_All),
    myIsSubshape(false),
    myTolerance(Precision::Confusion())
{}

// Attach the mesh (SMESHDS_Mesh only) and re-derive the strategy.
void LyingOnGeom::SetMesh( const SMDS_Mesh* theMesh )
{
  myMeshDS = dynamic_cast<const SMESHDS_Mesh*>(theMesh);
  init();
}

// Set the geometrical support and re-derive the strategy.
void LyingOnGeom::SetGeom( const TopoDS_Shape& theShape )
{
  myShape = theShape;
  init();
}
// Decide the checking strategy. Sub-shape case: collect once the mesh IDs
// of all sub-shapes, making IsSatisfy() a set lookup. Otherwise: prepare a
// geometrical classifier in "lays on" mode (one node in/on suffices).
void LyingOnGeom::init()
{
  if (!myMeshDS || myShape.IsNull()) return;

  // is sub-shape of main shape?
  TopoDS_Shape aMainShape = myMeshDS->ShapeToMesh();
  if (aMainShape.IsNull()) {
    myIsSubshape = false;
  }
  else {
    myIsSubshape = myMeshDS->IsGroupOfSubShapes( myShape );
  }

  if (myIsSubshape)
  {
    TopTools_IndexedMapOfShape shapes;
    TopExp::MapShapes( myShape, shapes );
    mySubShapesIDs.Clear();
    for ( int i = 1; i <= shapes.Extent(); ++i )
    {
      int subID = myMeshDS->ShapeToIndex( shapes( i ));
      if ( subID > 0 )
        mySubShapesIDs.Add( subID );
    }
  }
  else
  {
    myElementsOnShapePtr.reset(new ElementsOnShape());
    myElementsOnShapePtr->SetTolerance(myTolerance);
    myElementsOnShapePtr->SetAllNodes(false); // lays on, while true means "belong"
    myElementsOnShapePtr->SetMesh(myMeshDS);
    myElementsOnShapePtr->SetShape(myShape, myType);
  }
}
// Check entity theId: either geometrically (when the shape is not a
// sub-shape of the meshed geometry) or via mesh-to-shape links — the
// entity lies on the shape when it, or any of its nodes, is associated
// with one of the shape's sub-shapes.
bool LyingOnGeom::IsSatisfy( long theId )
{
  if ( myMeshDS == 0 || myShape.IsNull() )
    return false;

  if (!myIsSubshape)
  {
    return myElementsOnShapePtr->IsSatisfy(theId);
  }

  // Case of sub-mesh
  const SMDS_MeshElement* elem =
    ( myType == SMDSAbs_Node ) ? myMeshDS->FindNode( theId ) : myMeshDS->FindElement( theId );

  // FIX: FindNode()/FindElement() return 0 for an unknown ID; the previous
  // code dereferenced 'elem' unconditionally (compare the guarded lookups
  // in BelongToGeom::IsSatisfy).
  if ( !elem )
    return false;

  if ( mySubShapesIDs.Contains( elem->getshapeId() ))
    return true;

  if ( elem->GetType() != SMDSAbs_Node )
  {
    SMDS_ElemIteratorPtr nodeItr = elem->nodesIterator();
    while ( nodeItr->more() )
    {
      const SMDS_MeshElement* aNode = nodeItr->next();
      if ( mySubShapesIDs.Contains( aNode->getshapeId() ))
        return true;
    }
  }
  return false;
}
// Restrict the predicate to one element type and re-derive the strategy.
void LyingOnGeom::SetType( SMDSAbs_ElementType theType )
{
  myType = theType;
  init();
}

SMDSAbs_ElementType LyingOnGeom::GetType() const
{
  return myType;
}

TopoDS_Shape LyingOnGeom::GetShape()
{
  return myShape;
}

const SMESHDS_Mesh* LyingOnGeom::GetMeshDS() const
{
  return myMeshDS;
}

// Tolerance only matters for the geometrical fallback, so init() is
// re-run only in the non-sub-shape case.
void LyingOnGeom::SetTolerance (double theTolerance)
{
  myTolerance = theTolerance;
  if (!myIsSubshape)
    init();
}

double LyingOnGeom::GetTolerance()
{
  return myTolerance;
}
// Dead code: the former sub-mesh lookup is kept commented out below for
// reference; the method now always answers false.
// NOTE(review): any caller of Contains() gets false unconditionally —
// consider removing the method or restoring the logic.
bool LyingOnGeom::Contains( const SMESHDS_Mesh*     theMeshDS,
                            const TopoDS_Shape&     theShape,
                            const SMDS_MeshElement* theElem,
                            TopAbs_ShapeEnum        theFindShapeEnum,
                            TopAbs_ShapeEnum        theAvoidShapeEnum )
{
  // if (IsContains(theMeshDS, theShape, theElem, theFindShapeEnum, theAvoidShapeEnum))
  //   return true;

  // TopTools_MapOfShape aSubShapes;
  // TopExp_Explorer exp( theShape, theFindShapeEnum, theAvoidShapeEnum );
  // for ( ; exp.More(); exp.Next() )
  // {
  //   const TopoDS_Shape& aShape = exp.Current();
  //   if ( !aSubShapes.Add( aShape )) continue;
  //   if ( SMESHDS_SubMesh* aSubMesh = theMeshDS->MeshElements( aShape ))
  //   {
  //     if ( aSubMesh->Contains( theElem ))
  //       return true;
  //     SMDS_ElemIteratorPtr nodeItr = theElem->nodesIterator();
  //     while ( nodeItr->more() )
  //     {
  //       const SMDS_MeshElement* aNode = nodeItr->next();
  //       if ( aSubMesh->Contains( aNode ))
  //         return true;
  //     }
  //   }
  // }
  return false;
}
// TSequenceOfXYZ: thin 1-based wrapper over std::vector<gp_XYZ> that also
// remembers the mesh element the points were taken from, so numeric
// controls can query the element's entity type.

TSequenceOfXYZ::TSequenceOfXYZ(): myElem(0)
{}

TSequenceOfXYZ::TSequenceOfXYZ(size_type n) : myArray(n), myElem(0)
{}

TSequenceOfXYZ::TSequenceOfXYZ(size_type n, const gp_XYZ& t) : myArray(n,t), myElem(0)
{}

TSequenceOfXYZ::TSequenceOfXYZ(const TSequenceOfXYZ& theSequenceOfXYZ) : myArray(theSequenceOfXYZ.myArray), myElem(theSequenceOfXYZ.myElem)
{}

// NOTE(review): being defined in a .cpp, this template constructor is only
// usable for iterator types instantiated in this translation unit.
template <class InputIterator>
TSequenceOfXYZ::TSequenceOfXYZ(InputIterator theBegin, InputIterator theEnd): myArray(theBegin,theEnd), myElem(0)
{}

TSequenceOfXYZ::~TSequenceOfXYZ()
{}

TSequenceOfXYZ& TSequenceOfXYZ::operator=(const TSequenceOfXYZ& theSequenceOfXYZ)
{
  myArray = theSequenceOfXYZ.myArray;
  myElem  = theSequenceOfXYZ.myElem;
  return *this;
}

// 1-based access (OCCT convention) mapped onto the 0-based vector.
gp_XYZ& TSequenceOfXYZ::operator()(size_type n)
{
  return myArray[n-1];
}

const gp_XYZ& TSequenceOfXYZ::operator()(size_type n) const
{
  return myArray[n-1];
}

void TSequenceOfXYZ::clear()
{
  myArray.clear();
}

void TSequenceOfXYZ::reserve(size_type n)
{
  myArray.reserve(n);
}

void TSequenceOfXYZ::push_back(const gp_XYZ& v)
{
  myArray.push_back(v);
}

TSequenceOfXYZ::size_type TSequenceOfXYZ::size() const
{
  return myArray.size();
}

// Entity type of the source element, or SMDSEntity_Last when none is set.
SMDSAbs_EntityType TSequenceOfXYZ::getElementEntity() const
{
  return myElem ? myElem->GetEntityType() : SMDSEntity_Last;
}
// TMeshModifTracer: remembers a mesh's modification time so clients can
// detect, via IsMeshModified(), that their cached data became stale.
TMeshModifTracer::TMeshModifTracer():
  myMeshModifTime(0), myMesh(0)
{
}

void TMeshModifTracer::SetMesh( const SMDS_Mesh* theMesh )
{
  if ( theMesh != myMesh )
    myMeshModifTime = 0;  // force IsMeshModified()==true for a new mesh
  myMesh = theMesh;
}

// Returns true once per mesh modification; the stored time is updated on
// every call (so a second call right after returns false).
bool TMeshModifTracer::IsMeshModified()
{
  bool modified = false;
  if ( myMesh )
  {
    modified = ( myMeshModifTime != myMesh->GetMTime() );
    myMeshModifTime = myMesh->GetMTime();
  }
  return modified;
}
<|file_name|>requests_test.go<|end_file_name|><|fim▁begin|>package testing
import (
"testing"
"github.com/gophercloud/gophercloud/openstack/compute/v2/extensions/networks"
"github.com/gophercloud/gophercloud/pagination"
th "github.com/gophercloud/gophercloud/testhelper"
"github.com/gophercloud/gophercloud/testhelper/client"
)
func TestList(t *testing.T) {
th.SetupHTTP()
defer th.TeardownHTTP()
HandleListSuccessfully(t)
count := 0
err := networks.List(client.ServiceClient()).EachPage(func(page pagination.Page) (bool, error) {
count++<|fim▁hole|> th.AssertNoErr(t, err)
th.CheckDeepEquals(t, ExpectedNetworkSlice, actual)
return true, nil
})
th.AssertNoErr(t, err)
th.CheckEquals(t, 1, count)
}
func TestGet(t *testing.T) {
th.SetupHTTP()
defer th.TeardownHTTP()
HandleGetSuccessfully(t)
actual, err := networks.Get(client.ServiceClient(), "20c8acc0-f747-4d71-a389-46d078ebf000").Extract()
th.AssertNoErr(t, err)
th.CheckDeepEquals(t, &SecondNetwork, actual)
}<|fim▁end|> | actual, err := networks.ExtractNetworks(page) |
# -*- coding: utf-8 -*-
# Reconstructed from a FIM-corrupted sample: the <|fim▁hole|> marker stood
# in for the class header, whose text ("class BaseAI(object):") appeared
# after <|fim▁end|>; it is spliced back so the module is valid Python.


class BaseAI(object):
    """Minimal base class for a tile-game discard AI.

    Class-level defaults (overridden per instance):
      player -- the player this AI acts for (set in __init__)
      table  -- the game table; stays None until assigned externally
    """

    player = None
    table = None

    def __init__(self, player):
        # Keep a back-reference to the owning player.
        self.player = player

    def discard_tile(self):
        """Choose a tile to discard; the base implementation does nothing
        and returns None — subclasses are expected to override it."""
        pass
<|file_name|>0125_auto__chg_field_modelfielddata_foreign.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Repoint ModelFieldData.foreign at ModelData (nullable FK);
        the underlying column 'foreign_id' is unchanged."""
        # Changing field 'ModelFieldData.foreign'
        db.alter_column('blogs_modelfielddata', 'foreign_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, to=orm['blogs.ModelData']))

    def backwards(self, orm):
        """Revert ModelFieldData.foreign to its former target
        (ModelFieldData)."""
        # Changing field 'ModelFieldData.foreign'
        db.alter_column('blogs_modelfielddata', 'foreign_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['blogs.ModelFieldData'], null=True))
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'blogs.blog': {
'Meta': {'object_name': 'Blog'},
'analytics_account': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'contributors': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'blogcontributor'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'custom_domain': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'draft_notice': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'exclusion_end': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'}),
'exclusion_start': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'}),
'facebook_link': ('django.db.models.fields.URLField', [], {'max_length': '100', 'blank': 'True'}),
'fb_page_access_token': ('django.db.models.fields.CharField', [], {'max_length': '260', 'blank': 'True'}),
'frequency': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'}),
'has_artists': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'has_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'header_image': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_bootblog': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_online': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_open': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '7', 'blank': 'True'}),
'main_color': ('django.db.models.fields.CharField', [], {'default': "'#C4BDB2'", 'max_length': '10', 'blank': 'True'}),
'moderator_email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
'pinterest_link': ('django.db.models.fields.URLField', [], {'max_length': '100', 'blank': 'True'}),
'short_description': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '30'}),
'template': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Template']", 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '240'}),
'translation': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Blog']", 'null': 'True', 'blank': 'True'}),
'twitter_link': ('django.db.models.fields.URLField', [], {'max_length': '100', 'blank': 'True'}),
'twitter_oauth_token': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'twitter_oauth_token_secret': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
'blogs.category': {
'Meta': {'object_name': 'Category'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'blog': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'Blog_category'", 'null': 'True', 'to': "orm['blogs.Blog']"}),
'cat_image': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'cat_image_caret': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'cat_image_close': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'cat_image_email': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'cat_image_fb': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'cat_image_left': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'cat_image_pint': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'cat_image_right': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'cat_image_tw': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'color': ('django.db.models.fields.CharField', [], {'default': "'#000000'", 'max_length': '10'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '140', 'null': 'True', 'blank': 'True'}),
'parent_category': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'child_category'", 'null': 'True', 'to': "orm['blogs.Category']"}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '140', 'blank': 'True'})
},
'blogs.comment': {
'Meta': {'object_name': 'Comment'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'Comment_author'", 'null': 'True', 'to': "orm['auth.User']"}),
'blog': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Blog']", 'null': 'True'}),
'comment': ('django.db.models.fields.TextField', [], {'max_length': '10000'}),
'comment_status': ('django.db.models.fields.CharField', [], {'default': "'pe'", 'max_length': '2'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '140'}),
'notify_me': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'occupation': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'post': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Post']", 'null': 'True'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '300', 'blank': 'True'})
},
'blogs.info_email': {
'Meta': {'object_name': 'Info_email'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'blog': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Blog']", 'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'frequency': ('django.db.models.fields.CharField', [], {'default': "'We'", 'max_length': '2', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'subject': ('django.db.models.fields.TextField', [], {'max_length': '100', 'blank': 'True'}),
'subscribers': ('django.db.models.fields.CharField', [], {'default': "'A'", 'max_length': '2', 'null': 'True'})
},
'blogs.language': {
'Meta': {'object_name': 'Language'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),<|fim▁hole|> 'Meta': {'object_name': 'Menu'},
'blog': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'Blog_menu'", 'null': 'True', 'to': "orm['blogs.Blog']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_main': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40'})
},
'blogs.menuitem': {
'Meta': {'object_name': 'MenuItem'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Category']", 'null': 'True', 'blank': 'True'}),
'external_link': ('django.db.models.fields.URLField', [], {'max_length': '140', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'menu': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Menu']", 'null': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Page']", 'null': 'True', 'blank': 'True'}),
'position': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '40', 'blank': 'True'})
},
'blogs.model': {
'Meta': {'object_name': 'Model'},
'blog': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'Custom_post'", 'null': 'True', 'to': "orm['blogs.Blog']"}),
'hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '60'})
},
'blogs.modeldata': {
'Meta': {'object_name': 'ModelData'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Model']", 'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "'Unknown'", 'max_length': '140'})
},
'blogs.modelfield': {
'Meta': {'object_name': 'ModelField'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Model']", 'null': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '140', 'null': 'True', 'blank': 'True'}),
'post_type': ('django.db.models.fields.CharField', [], {'default': "'Text'", 'max_length': '40'}),
'rank': ('django.db.models.fields.CharField', [], {'default': "'1'", 'max_length': '2'})
},
'blogs.modelfielddata': {
'Meta': {'object_name': 'ModelFieldData'},
'date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '100', 'blank': 'True'}),
'foreign': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'foreign'", 'null': 'True', 'to': "orm['blogs.ModelData']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'longtext': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'model': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Model']", 'null': 'True'}),
'model_data': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.ModelData']", 'null': 'True'}),
'model_field': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.ModelField']", 'null': 'True'}),
'nullboolean': ('django.db.models.fields.NullBooleanField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'onetofive': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'positiveinteger': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2', 'blank': 'True'}),
'relation': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'relation'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['blogs.ModelData']"}),
'text': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '140', 'blank': 'True'})
},
'blogs.page': {
'Meta': {'object_name': 'Page'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'blog': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'Blog_page'", 'null': 'True', 'to': "orm['blogs.Blog']"}),
'content': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'pub_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '140', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'})
},
'blogs.post': {
'Meta': {'object_name': 'Post'},
'artist': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'base62id': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
'blog': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Blog']", 'null': 'True'}),
'category': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['blogs.Category']", 'null': 'True', 'blank': 'True'}),
'content': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'content_0': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'content_01': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'content_1': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'content_2': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'content_3': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'content_4': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'content_5': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'content_6': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'content_video': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'custom_post': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Model']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_discarded': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_ready': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_sticky': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_top': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'karma': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'layout_type': ('django.db.models.fields.CharField', [], {'default': "'s'", 'max_length': '1'}),
'message': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'pic': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_0': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_04': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_1': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_10': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_11': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_12': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_13': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_14': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_15': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_16': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_17': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_18': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_19': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_2': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_20': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_21': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_22': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_23': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_24': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_25': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_26': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_27': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_28': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_29': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_3': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_30': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_31': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_32': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_33': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_4': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_5': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_6': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_7': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_8': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_9': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pub_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'publish_on_facebook': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '140', 'blank': 'True'}),
'soundcloud_id': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'soundcloud_url': ('django.db.models.fields.URLField', [], {'max_length': '300', 'blank': 'True'}),
'source': ('django.db.models.fields.URLField', [], {'max_length': '300', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'P'", 'max_length': '2', 'null': 'True'}),
'tag': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['blogs.Tag']", 'null': 'True', 'blank': 'True'}),
'temp_tag_field': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'blank': 'True'}),
'text': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
'translated_content': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'translated_title': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
'video': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'video_ogg': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'video_url': ('django.db.models.fields.URLField', [], {'max_length': '300', 'blank': 'True'}),
'views': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'vimeo_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'vimeo_thumb_url': ('django.db.models.fields.URLField', [], {'max_length': '300', 'blank': 'True'}),
'youtube_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'})
},
'blogs.rss': {
'Meta': {'object_name': 'Rss'},
'blog': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'Blog_rss'", 'null': 'True', 'to': "orm['blogs.Blog']"}),
'feed_url': ('django.db.models.fields.URLField', [], {'max_length': '300', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'blogs.subscription': {
'Meta': {'object_name': 'Subscription'},
'blog': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Blog']", 'null': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_new': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'blogs.subuser': {
'Meta': {'object_name': 'Subuser'},
'blog': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'Blog_user'", 'null': 'True', 'to': "orm['blogs.Blog']"}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'})
},
'blogs.tag': {
'Meta': {'object_name': 'Tag'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'blog': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'Blog_tag'", 'null': 'True', 'to': "orm['blogs.Blog']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '140', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '140', 'blank': 'True'})
},
'blogs.template': {
'Meta': {'object_name': 'Template'},
'archives': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'base': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'category': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'single': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'})
},
'blogs.translation': {
'Meta': {'object_name': 'Translation'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'origin_blog': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'Translation_origin_blog'", 'null': 'True', 'to': "orm['blogs.Blog']"}),
'translated_blog': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'Translation_translated_blog'", 'null': 'True', 'to': "orm['blogs.Blog']"})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['blogs']<|fim▁end|> | 'language_code': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
'language_name': ('django.db.models.fields.CharField', [], {'max_length': '40'})
},
'blogs.menu': { |
<|file_name|>app-nav.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit, HostListener, ElementRef } from '@angular/core';
import { Router, NavigationEnd, NavigationExtras } from '@angular/router';
import { AuthService } from '../../services/auth.service';
@Component({
  selector: 'app-nav',
  templateUrl: 'app-nav.component.html'
})
export class AppNavComponent implements OnInit {
  // Whether a user is currently authenticated (read by the template).
  loggedIn: boolean;
  // Whether the user drop-down menu is currently open.
  menuDropped: boolean;
  // Current text of the package search box.
  searchKeyword: string;

  constructor(
    public auth: AuthService,
    private router: Router,
    private elementRef: ElementRef
  ) {
    // Close the drop-down and clear the search box on every completed navigation.
    this.router.events.subscribe(event => {
      if (event instanceof NavigationEnd) {
        this.menuDropped = false;
        this.searchKeyword = '';
      }
    });
    this.searchKeyword = '';
  }

  ngOnInit() {
    // Refresh login state when the component initialises.
    this.auth.checkLogin();
  }

  // Open/close the user drop-down menu.
  toggleMenu() {
    this.menuDropped = !this.menuDropped;
  }

  logout(): void {
    this.auth.logout();
    this.menuDropped = false;
  }

  // Submit handler for the search form: navigate to the search page,
  // passing the keyword as a query parameter.
  searchPackages(e: Event): void {
    e.preventDefault();
    const navigationExtras: NavigationExtras = {
      queryParams: { 'keyword': this.searchKeyword }
    };
    this.router.navigate(['search'], navigationExtras);
  }

  // Close the drop-down when the user clicks anywhere outside of it.
  @HostListener('document:click', ['$event'])
  onBlur(e: MouseEvent) {
    if (!this.menuDropped) {
      return;
    }
    // Clicks on the toggle button itself are handled by toggleMenu().
    let toggleBtn = this.elementRef.nativeElement.querySelector('.drop-menu-act');
    if (e.target === toggleBtn || toggleBtn.contains(<any>e.target)) {
      return;
    }
    // Any click outside the drop-down element closes the menu.
    let dropMenu: HTMLElement = this.elementRef.nativeElement.querySelector('.nav-dropdown');
    if (dropMenu && dropMenu !== e.target && !dropMenu.contains((<any>e.target))) {
      this.menuDropped = false;
    }
  }
<|file_name|>start.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# pylint: disable=line-too-long
""" Start the containers """
import argparse
<|fim▁hole|>
def main():
    """Parse the requested service versions and start the containers."""
    arg_parser = argparse.ArgumentParser(description="Set up testing environment.")
    arg_parser.add_argument("--pg", help="PostgreSQL version")
    arg_parser.add_argument("--es", help="Elasticsearch version")
    options = arg_parser.parse_args()
    banner = "Starting environment with PostgreSQL {} with Elasticsearch {}...".format(
        options.pg, options.es
    )
    print(banner)
    set_up(options.pg, options.es)
if __name__ == "__main__":
    # Entry point when executed as a script.
    main()
|
<|file_name|>FlexTableColumn.spec.js<|end_file_name|><|fim▁begin|>import _ from 'lodash'
import Vue from 'vue'
import test from 'ava'
import FlexTableColumn from '../../src/components/FlexTableColumn.vue'
Vue.config.productionTip = false
// The component must define a mounted() lifecycle hook (a function, not data).
test('has a mounted hook', (t) => {
  t.true(_.isFunction(FlexTableColumn.mounted))
})
test('has data as function', (t) => {
t.true(_.isFunction(FlexTableColumn.data))<|fim▁hole|><|fim▁end|> | }) |
<|file_name|>parser.py<|end_file_name|><|fim▁begin|>_first = dict()
_follow = dict()
_table = dict()
_endsymbol = '$'
_emptysymbol = '#'
# function to remove left recursion from a subgrammar
def recursion(grammar):
    """Rewrite rules of the form A -> A x | y into A -> y Abar, Abar -> x Abar | #."""
    section = grammar[0][0]
    marker = section + 'bar'
    rewritten = []
    left_recursive = []
    for rule in grammar:
        if rule[0] != rule[1]:
            # Non-recursive alternative: keep it, but make it end in the new marker.
            rewritten.append(list(rule) + [marker])
        else:
            left_recursive.append(rule)
    # The helper non-terminal can always derive the empty symbol.
    rewritten.append([marker, _emptysymbol])
    for rule in left_recursive:
        # Drop the leading self-reference and loop back through the marker.
        rewritten.append([rule[0] + 'bar'] + rule[2: ] + [marker])
    return rewritten
# function to left factor a subgrammar
def factoring(grammar):
    """Left-factor a group of rules that all share the same left-hand side.

    Adjacent rules sharing a common prefix are merged into one rule ending in a
    fresh helper non-terminal ("<lhs>star..."), whose alternatives carry the
    differing suffixes.  NOTE: mutates ``grammar`` in place via ``del``.
    """
    section = grammar[0][0]
    factoredgrammar = list()
    index = 0
    while index + 1 < len(grammar):
        # Positions where the current rule and the next one disagree.
        mismatches = [subindex for subindex in range(min(len(grammar[index]), len(grammar[index + 1]))) if grammar[index][subindex] != grammar[index + 1][subindex]]
        # Length of the longest common prefix of the two rules.
        maxmatch = min(mismatches) if mismatches else min(len(grammar[index]), len(grammar[index + 1]))
        if maxmatch == 1:
            # Only the left-hand side matches: nothing to factor here.
            index += 1
            continue
        # Count how many consecutive rules share this same prefix.
        subindex = 2
        while index + subindex < len(grammar):
            if grammar[index][: maxmatch] != grammar[index + subindex][: maxmatch]:
                break
            subindex += 1
        # Emit one rule for the shared prefix, ending in the helper non-terminal.
        factoredgrammar.append(grammar[index][: maxmatch] + [section + 'star' * (index + 1)])
        # Emit one helper alternative per factored rule (empty suffix -> epsilon).
        for subsubindex in range(subindex):
            if grammar[index + subsubindex][maxmatch: ]:
                factoredgrammar.append([section + 'star' * (index + 1)] + grammar[index + subsubindex][maxmatch: ])
            else:
                factoredgrammar.append([section + 'star' * (index + 1)] + [_emptysymbol])
        # Remove the rules just factored; do not advance index, the list shrank.
        del grammar[index : index + subindex]
    return factoredgrammar + grammar
# function to calculate first of expressions
def first(expression, grammar, terminals):
    """Compute FIRST(expression), memoised in the module-level ``_first`` dict."""
    # Memoised result from a previous call.
    if expression in _first:
        return _first[expression]
    # FIRST of a terminal is the terminal itself.
    if expression in terminals:
        return set((expression, ))
    _first[expression] = set()
    for rule in grammar:
        if expression == rule[0]:
            # flag stays True only if every symbol in the production is nullable.
            flag = True
            for production in rule[1: ]:
                foremost = first(production, grammar, terminals)
                _first[expression].update(foremost)
                if _emptysymbol in foremost:
                    # Epsilon from an inner symbol does not propagate directly;
                    # keep scanning the following symbols of the production.
                    _first[expression].remove(_emptysymbol)
                else:
                    flag = False
                    break
            if flag:
                # Entire production can derive epsilon.
                _first[expression].add(_emptysymbol)
    return _first[expression]
# function to calculate follow of expressions
def follow(expression, grammar, terminals, startsymbol):
    """Compute FOLLOW(expression), memoised in the module-level ``_follow`` dict."""
    if expression in _follow:
        return _follow[expression]
    temporary = frozenset()
    # The start symbol is always followed by the end-of-input marker.
    if expression == startsymbol:
        temporary = temporary.union(frozenset([_endsymbol]))
    for rule in grammar:
        if expression in rule[1: ]:
            # Index of the first symbol after this occurrence of expression.
            index = rule[1: ].index(expression)
            index += 2
            flag = True
            for production in rule[index: ]:
                foremost = first(production, grammar, terminals)
                temporary = temporary.union(foremost)
                if _emptysymbol in foremost:
                    # Epsilon itself never appears in a FOLLOW set.
                    temporary = temporary.difference(frozenset([_emptysymbol]))
                else:
                    flag = False
                    break
            # Everything to the right is nullable: inherit FOLLOW of the LHS
            # (guard against direct self-recursion).
            if flag and rule[0] != expression:
                temporary = temporary.union(follow(rule[0], grammar, terminals, startsymbol))
    _follow[expression] = temporary
    return _follow[expression]
# function to create parsing table
def table(grammar, terminals, startsymbol):
    """Fill the module-level LL(1) parse table ``_table`` from FIRST/FOLLOW sets.

    ``_table[nonterminal][terminal]`` is a set of candidate productions
    (tuples); more than one entry signals an LL(1) conflict.
    """
    # Initialise an empty cell for every (non-terminal, terminal) pair.
    for rule in grammar:
        if rule[0] not in _table:
            _table[rule[0]] = dict()
            for terminal in terminals:
                _table[rule[0]][terminal] = set()
            _table[rule[0]][_endsymbol] = set()
    for rule in grammar:
        flag = True
        for production in rule[1: ]:
            foremost = first(production, grammar, terminals)
            # Production is selected on every terminal in FIRST of its symbols.
            for symbol in foremost:
                if symbol in terminals or symbol == _endsymbol:
                    _table[rule[0]][symbol].add(tuple(rule[1: ]))
            if _emptysymbol not in foremost:
                flag = False
                break
        if flag:
            # Nullable production: also selected on FOLLOW of the LHS.
            rearmost = follow(rule[0], grammar, terminals, startsymbol)
            for symbol in rearmost:
                if symbol in terminals or symbol == _endsymbol:
                    _table[rule[0]][symbol].add(tuple(rule[1: ]))
            if _endsymbol in rearmost:
                _table[rule[0]][_endsymbol].add(tuple(rule[1: ]))
    return _table
# function to syntactically parse code
def parser(code, start):
_input = line.strip().split() + [_endsymbol]
_stack = [_endsymbol, start]
while _stack:
if not _input or _stack[-1] == _emptysymbol:
print '[?POP', _stack.pop(), ']', _input, _stack
continue
if _stack[-1] == _input[0]:
print '[POP', _stack.pop(), ']', _input, _stack
_input = _input[1: ]
continue
if not _table[_stack[-1]][_input[0]]:<|fim▁hole|> print '[!POP', _stack.pop(), ']', _input, _stack
continue
ex = list(_table[_stack[-1]][_input[0]])[0]
a = _stack.pop()
_stack += [x for x in reversed(ex)]
print '[RULE', a, '->', ' '.join(ex), ']', _input, _stack
# function to replace rules while removing indirect left recursion
def replace(replacable, replacing):
    """Substitute the bodies of ``replacing`` rules into ``replacable`` rules.

    Wherever a rule in ``replacable`` references the left-hand side of a rule
    in ``replacing``, a copy with the reference expanded is emitted; rules with
    no match are kept unchanged.
    """
    result = []
    for target in replacable:
        matched = False
        for source in replacing:
            head = source[0]
            # Scan the rule body (positions 1..) for a reference to ``head``.
            for position in range(1, len(target)):
                if target[position] == head:
                    result.append(target[: position] + source[1: ] + target[position + 1: ])
                    matched = True
        if not matched:
            result.append(target)
    return result
if __name__ == '__main__':
    # Load the grammar: one rule per line, "<lhs> -> sym sym ..."; drop the arrow.
    grammar = [rule.split() for rule in open('grammar.txt', 'r').readlines()]
    grammar = [[rule[0]] + rule[2: ] for rule in grammar]
    # Pass 1: left-factor each run of rules sharing the same left-hand side.
    temporarygrammar = list()
    index = 0
    while index < len(grammar):
        subindex = 1
        while index + subindex < len(grammar):
            if grammar[index][0] != grammar[index + subindex][0]:
                break
            subindex += 1
        temporarygrammar = temporarygrammar + factoring(grammar[index: index + subindex])
        index += subindex
    grammar = temporarygrammar
    # Pass 2: expand earlier rules into later ones (indirect recursion), then
    # remove the now-direct left recursion from each group.
    temporarygrammar = list()
    index = 0
    while index < len(grammar):
        subindex = 1
        while index + subindex < len(grammar):
            if grammar[index][0] != grammar[index + subindex][0]:
                break
            subindex += 1
        temporarygrammar = temporarygrammar + recursion(replace(grammar[index: index + subindex], temporarygrammar))
        index += subindex
    grammar = temporarygrammar
    startsymbol = 'program'
    print '\n\nGRAMMAR RULES\n\n'
    for rule in grammar:
        print rule
    # Terminals are all symbols that never appear on a left-hand side.
    terms = set([term for rule in grammar for term in rule])
    nonterminals = set([rule[0] for rule in grammar])
    terminals = terms - nonterminals
    print '\n\nFIRSTS\n\n'
    for nonterminal in nonterminals:
        print nonterminal, first(nonterminal, grammar, terminals)
    print '\n\nFOLLOWS\n\n'
    for nonterminal in nonterminals:
        print nonterminal, follow(nonterminal, grammar, terminals, startsymbol)
    # Populate the LL(1) parse table, then dump its non-empty cells.
    table(grammar, terminals, startsymbol)
    print '\n\nTABLE\n\n'
    for left in _table:
        for top in _table[left]:
            if _table[left][top]:
                print left, top, _table[left][top]
    # Parse each line of the intermediate token stream with the built table.
    with open('intermediate.txt', 'r') as filein:
        for line in filein:
            print '\n\nSYNTACTIC PARSE\n\n'
            parser(line.strip().split(), startsymbol)
<|file_name|>UIClientService.java<|end_file_name|><|fim▁begin|>package ca.corefacility.bioinformatics.irida.ria.web.services;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.stream.Collectors;
import javax.validation.ConstraintViolationException;
import org.apache.commons.lang3.RandomStringUtils;
import org.springframework.context.MessageSource;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.jpa.domain.Specification;
import org.springframework.security.oauth2.provider.NoSuchClientException;
import org.springframework.stereotype.Component;
import ca.corefacility.bioinformatics.irida.exceptions.EntityExistsException;
import ca.corefacility.bioinformatics.irida.model.IridaClientDetails;
import ca.corefacility.bioinformatics.irida.repositories.specification.IridaClientDetailsSpecification;
import ca.corefacility.bioinformatics.irida.ria.web.ajax.dto.clients.ClientTableModel;
import ca.corefacility.bioinformatics.irida.ria.web.ajax.dto.clients.ClientTableRequest;
import ca.corefacility.bioinformatics.irida.ria.web.ajax.dto.clients.CreateUpdateClientDetails;
import ca.corefacility.bioinformatics.irida.ria.web.models.tables.TableResponse;
import ca.corefacility.bioinformatics.irida.service.IridaClientDetailsService;
import com.google.common.collect.Sets;
/**
 * UI Service to handle IRIDA Clients
 */
@Component
public class UIClientService {
	private final IridaClientDetailsService clientDetailsService;
	private final MessageSource messageSource;

	// Scope marker meaning the scope is granted without user confirmation.
	private final String AUTO_APPROVE = "auto";
	// OAuth2 scope names supported by IRIDA clients.
	private final String SCOPE_READ = "read";
	private final String SCOPE_WRITE = "write";
	// Grant type that additionally requires a registered redirect URI.
	private final String GRANT_TYPE_AUTH_CODE = "authorization_code";

	public UIClientService(IridaClientDetailsService clientDetailsService, MessageSource messageSource) {
		this.clientDetailsService = clientDetailsService;
		this.messageSource = messageSource;
	}
/**
* Get a listing of clients based on the table request.
*
* @param tableRequest Information about the sort and page of the table.
* @return Current status of the table
*/
public TableResponse<ClientTableModel> getClientList(ClientTableRequest tableRequest) {
Specification<IridaClientDetails> specification = IridaClientDetailsSpecification.searchClient(
tableRequest.getSearch());
Page<IridaClientDetails> page = clientDetailsService.search(specification,
PageRequest.of(tableRequest.getCurrent(), tableRequest.getPageSize(), tableRequest.getSort()));
List<ClientTableModel> models = page.getContent()
.stream()
.map(client -> new ClientTableModel(client, clientDetailsService.countActiveTokensForClient(client)))
.collect(Collectors.toList());
return new TableResponse<>(models, page.getTotalElements());
}
/**
* Revoke all tokens for a specific client
*
* @param id Identifier for a client
*/
public void deleteClientTokens(Long id) {
IridaClientDetails details = clientDetailsService.read(id);
clientDetailsService.revokeTokensForClient(details);
}
/**
* Validate a client identifier for a new client<|fim▁hole|> public void validateClientId(String clientId) throws NoSuchClientException {
clientDetailsService.loadClientByClientId(clientId);
}
	/**
	 * Create a new client or update an existing one.
	 *
	 * @param request Details about the new client
	 * @param locale Current users {@link Locale}
	 * @return A message to the user about the result of the create/update
	 * @throws EntityExistsException thrown if the client id already is used.
	 * @throws ConstraintViolationException thrown if the client id violates any of its constraints
	 */
	public String createOrUpdateClient(CreateUpdateClientDetails request, Locale locale)
			throws EntityExistsException, ConstraintViolationException {
		IridaClientDetails client;
		if (request.getId() != null) {
			// Existing client
			client = clientDetailsService.read(request.getId());
		} else {
			// New client, so need to set up a few things that cannot be mutated in an existing one
			client = new IridaClientDetails();
			client.setClientSecret(generateClientSecret());
			client.setClientId(request.getClientId());
		}
		client.setAccessTokenValiditySeconds(request.getTokenValidity());
		// Let's set up the scopes for this client
		Set<String> scopes = new HashSet<>();
		Set<String> autoScopes = new HashSet<>();
		// 1. Read scope ("read" = granted on request, "auto" = granted without confirmation)
		if (request.getRead()
				.equals(SCOPE_READ)) {
			scopes.add(SCOPE_READ);
		} else if (request.getRead()
				.equals(AUTO_APPROVE)) {
			scopes.add(SCOPE_READ);
			autoScopes.add(SCOPE_READ);
		}
		// 2. Write scope (same "auto" convention as read)
		if (request.getWrite()
				.equals(SCOPE_WRITE)) {
			scopes.add(SCOPE_WRITE);
		} else if (request.getWrite()
				.equals(AUTO_APPROVE)) {
			scopes.add(SCOPE_WRITE);
			autoScopes.add(SCOPE_WRITE);
		}
		client.setScope(scopes);
		client.setAutoApprovableScopes(autoScopes);
		// Set the grant type; only the authorization-code flow needs a redirect URI
		client.setAuthorizedGrantTypes(Sets.newHashSet(request.getGrantType()));
		if (request.getGrantType()
				.equals(GRANT_TYPE_AUTH_CODE)) {
			client.setRegisteredRedirectUri(request.getRedirectURI());
		}
		// See if allowed refresh tokens (> 0 means refresh tokens enabled, value is their validity)
		if (request.getRefreshToken() > 0) {
			client.getAuthorizedGrantTypes().add("refresh_token");
			client.setRefreshTokenValiditySeconds(request.getRefreshToken());
		}
		// A persisted id means this was an update; otherwise create a new record.
		if (client.getId() != null) {
			clientDetailsService.update(client);
			return messageSource.getMessage("server.UpdateClientForm.success", new Object[] { client.getClientId() },
					locale);
		} else {
			client = clientDetailsService.create(client);
			return messageSource.getMessage("server.AddClientForm.success", new Object[] { client.getClientId() },
					locale);
		}
	}
	/**
	 * Delete a client, delegating to the client details service.
	 *
	 * @param id Identifier for the client to delete
	 */
	public void deleteClient(Long id) {
		clientDetailsService.delete(id);
	}
/**
* Generate a new client secret
*
* @param id identifier for a client
*/
public void regenerateClientSecret(Long id) {
IridaClientDetails details = clientDetailsService.read(id);
String secret = generateClientSecret();
details.setClientSecret(secret);
clientDetailsService.update(details);
}
	/**
	 * Generate a random 42-character alphanumeric client secret.
	 * NOTE(review): RandomStringUtils.randomAlphanumeric is not documented here as
	 * cryptographically secure — confirm whether a SecureRandom-backed generator
	 * is required for client secrets.
	 */
	private String generateClientSecret() {
		return RandomStringUtils.randomAlphanumeric(42);
	}
}<|fim▁end|> | *
* @param clientId Identifier to check to see if it exists
* @throws NoSuchClientException thrown if a client does not exist with the given client id.
*/ |
<|file_name|>irc.py<|end_file_name|><|fim▁begin|>import time
from typing import List, Optional
from utils import tasks
from zirc.event import Event
from utils.database import Database
from zirc.wrappers import connection_wrapper
def chunks(l: List, n: int):
    """Yield successive n-sized chunks from l."""
    bounds = range(0, len(l), n)
    for lower in bounds:
        yield l[lower:lower + n]
def set_mode(irc: connection_wrapper, channel: str, users: List[str], mode: str):
    """Apply ``mode`` (e.g. ``+b``) to ``users`` in ``channel``, four nicks per command."""
    for batch in chunks(users, 4):
        # Repeat the mode letters once per nick in this batch.
        repeated = "".join(mode[1:]) * len(batch)
        irc.mode(channel, " ".join(batch), mode[0] + repeated)
def get_users(args: str):
    """Extract the trailing comma-separated user list from a command argument string.

    Always returns a list of user tokens.  BUGFIX: the original returned a bare
    ``str`` (not a one-element list) when ``args`` held a single token, so
    iterating the result yielded characters; it also raised ``IndexError`` when
    the comma was the final character.
    """
    if args.find(",") != -1:
        pos = args.find(",")
        users_str = args[pos:].strip()
        # Users may be written "a,b,c" or "a, b, c"; guard pos + 1 so a
        # trailing comma no longer raises IndexError.
        if pos + 1 < len(args) and args[pos + 1] != " ":
            users = users_str[1:].split(",")
        else:
            users = users_str[2:].split(", ")
        # The token just before the first comma is the first user.
        head = args[:pos].strip().split(" ")
        users.append(head[-1])
    else:
        tokens = args.split(" ")
        if len(tokens) == 1:
            # Fixed: wrap the single token in a list instead of returning a str.
            users = tokens
        else:
            # All tokens except the last (which callers treat separately).
            users = tokens[:-1]
    return users
def get_user_host(userdb: Database, channel: str, nick: str):
    # Thin delegate: look up the stored mask for ``nick`` in ``channel``.
    return userdb.get_user_host(channel, nick)
def get_info_tuple(event: Event, args: List[str], userdb: Optional[Database]=None):
if args[0].startswith("#"):
channel = args[0]
str_args = " ".join(args[1:])
del args[0]
else:
channel = event.target
str_args = " ".join(args)
if str_args.find(",") != -1:
users = get_users(str_args)
else:
users = args[-1:]
if " ".join(args[:-len(users)]) != '':
message = " ".join(args[:-len(users)])<|fim▁hole|> else:
message = f"{event.source.nick}"
for (i, v) in enumerate(users):
if not v.find("!") != -1 and userdb is not None:
users[i] = get_user_host(userdb, event.target, v)
return channel, users, message
def unban_after_duration(irc: connection_wrapper, users: List[str], chan: str, duration: int):
    """Schedule the bans on ``users`` in ``chan`` to be lifted after ``duration`` seconds."""
    deadline = duration + int(time.time())
    def lift_bans(irc: connection_wrapper, users: List[str], chan: str):
        for nick in users:
            irc.unban(chan, nick)
    tasks.run_at(deadline, lift_bans, (irc, users, chan))
def strip_colours(s: str):
    """Return ``s`` with mIRC formatting/colour control codes removed."""
    import re
    control_patterns = (
        '\x0f',  # reset
        '\x16',  # reverse
        '\x1d',  # italic
        '\x1f',  # underline
        '\x02',  # bold
        '\x03([1-9][0-6]?)?,?([1-9][0-6]?)?',  # colour with optional fg,bg digits
    )
    stripped = s
    for pattern in control_patterns:
        stripped = re.sub(pattern, '', stripped)
    return stripped
<|file_name|>ast_clone.rs<|end_file_name|><|fim▁begin|>use std::str::FromStr;
use {
proc_macro2::{Span, TokenStream},
syn::{
self, Data, DataEnum, DataStruct, DeriveInput, Field, Fields, FieldsNamed, FieldsUnnamed,
Generics, Ident, Variant,
},
};
use crate::{
attr,
shared::{map_lifetimes, map_type_params, split_for_impl},
};
pub fn derive(input: TokenStream) -> TokenStream {
let derive_input = syn::parse2(input).expect("Input is checked by rustc");
let container = attr::Container::from_ast(&derive_input);
let DeriveInput {
ident,
data,
generics,
..
} = derive_input;
let tokens = match data {
Data::Struct(ast) => derive_struct(&container, ast, ident, generics),
Data::Enum(ast) => derive_enum(&container, ast, ident, generics),
Data::Union(_) => panic!("Unions are not supported"),
};
tokens.into()
}
/// Generates the `AstClone` impl for a struct: builds the constructor
/// expression for its fields and wraps it in the common impl scaffolding.
fn derive_struct(
    container: &attr::Container,
    ast: DataStruct,
    ident: Ident,
    generics: Generics,
) -> TokenStream {
    // Pick the constructor shape matching the struct kind: named fields,
    // tuple fields, or a bare unit struct.
    let cons = match ast.fields {
        Fields::Named(FieldsNamed { named, .. }) => gen_struct_cons(&ident, named),
        Fields::Unnamed(FieldsUnnamed { unnamed, .. }) => gen_tuple_struct_cons(&ident, unnamed),
        Fields::Unit => quote! { #ident },
    };
    gen_impl(container, ident, generics, cons)
}
/// Builds `Ident { field: <Ty as AstClone>::ast_clone(&self.field, arena), .. }`
/// for a struct with named fields.
fn gen_struct_cons<I>(ident: &Ident, fields: I) -> TokenStream
where
    I: IntoIterator<Item = Field>,
{
    // Clone each field through its own `AstClone` impl, keyed by field name.
    let field_initializers = fields.into_iter().map(|field| {
        let field_ty = &field.ty;
        let ident = field
            .ident
            .as_ref()
            .expect("Struct fields always have names");
        quote! {
            #ident: <#field_ty as gluon_base::ast::AstClone<'ast, Id>>::ast_clone(&self.#ident, arena)
        }
    });
    quote! {
        #ident {
            #(#field_initializers,)*
        }
    }
/// Builds `Ident( <Ty as AstClone>::ast_clone(&self.N, arena), .. )` for a
/// tuple struct. Newtype structs (exactly one field) are special-cased.
///
/// Fix: the generated tokens previously referenced the lifetime `'__vm`,
/// which the impl emitted by `gen_impl` never declares (it declares `'ast`
/// via `split_for_impl`), so derived code for tuple structs could not
/// compile. Now uses `'ast, Id`, consistent with `gen_struct_cons`.
fn gen_tuple_struct_cons<I>(ident: &Ident, fields: I) -> TokenStream
where
    I: IntoIterator<Item = Field>,
{
    let mut fields = fields.into_iter().fuse();

    // Treat newtype structs as just their inner type
    let (first, second) = (fields.next(), fields.next());
    match (&first, &second) {
        (Some(field), None) => {
            let field_ty = &field.ty;
            return quote! {
                #ident (
                    <#field_ty as gluon_base::ast::AstClone<'ast, Id>>::ast_clone(&self.0, arena)
                )
            };
        }
        _ => (),
    }

    // General case: clone each field through its `AstClone` impl, addressed
    // by positional index (tuple structs have no field names).
    let field_initializers = first
        .into_iter()
        .chain(second)
        .chain(fields)
        .enumerate()
        .map(|(idx, field)| {
            let field_ty = &field.ty;
            let idx = syn::Index::from(idx);
            quote! {
                <#field_ty as gluon_base::ast::AstClone<'ast, Id>>::ast_clone(&self. #idx, arena)
            }
        });

    quote! {
        #ident (
            #(#field_initializers,)*
        )
    }
}
/// Generates the `AstClone` impl for an enum: one `match` arm per variant,
/// each rebuilding the variant with cloned fields.
fn derive_enum(
    container: &attr::Container,
    ast: DataEnum,
    ident: Ident,
    generics: Generics,
) -> TokenStream {
    let cons = {
        let variants = ast
            .variants
            .iter()
            .enumerate()
            .map(|(tag, variant)| gen_variant_match(&ident, tag, variant));
        // Dispatch on `self`: each arm destructures one variant and
        // reconstructs it field by field (the enumerate index is currently
        // unused by gen_variant_match).
        quote! {
            match self {
                #(#variants,)*
            }
        }
    };
    gen_impl(container, ident, generics, cons)
}
/// Wraps the generated clone expression in the full
/// `impl<...> AstClone<'ast, Id> for Ident<...>` item, inside a dummy const
/// so the `use crate as gluon_base;` alias stays local.
fn gen_impl(
    container: &attr::Container,
    ident: Ident,
    generics: Generics,
    clone_impl: TokenStream,
) -> TokenStream {
    // Per-lifetime bounds; currently each entry is empty (see
    // create_lifetime_bounds) but the interpolation slot is kept.
    let lifetime_bounds = create_lifetime_bounds(&generics);
    // One `T: AstClone<'ast, Id>` bound per type parameter of the input.
    let ast_clone_bounds = create_ast_clone_bounds(&generics);
    // Extends the input's generics with `Id` and `'ast` for the impl header.
    let (impl_generics, ty_generics, where_clause) = split_for_impl(&generics, &["Id"], &["'ast"]);
    let dummy_const = Ident::new(&format!("_IMPL_AST_CLONE_FOR_{}", ident), Span::call_site());
    // Optional user-supplied extra bounds from the container attribute,
    // parsed verbatim from the attribute string.
    let extra_bounds = container.ast_clone_bounds.as_ref().map(|b| {
        let b = TokenStream::from_str(b).unwrap();
        quote! { #b, }
    });
    quote! {
        #[allow(non_upper_case_globals)]
        const #dummy_const: () = {
            use crate as gluon_base;
            #[automatically_derived]
            #[allow(unused_attributes, unused_variables)]
            impl #impl_generics gluon_base::ast::AstClone<'ast, Id> for #ident #ty_generics
            #where_clause #(#ast_clone_bounds,)* #(#lifetime_bounds),* #extra_bounds Id: Clone
            {
                fn ast_clone(&self, arena: gluon_base::ast::ArenaRef<'_, 'ast, Id>) -> Self {
                    #clone_impl
                }
            }
        };
    }
}
/// Builds one `match` arm for an enum variant: destructure the variant's
/// fields into bindings, then reconstruct the same variant with each field
/// cloned through `AstClone`.
fn gen_variant_match(ident: &Ident, _tag: usize, variant: &Variant) -> TokenStream {
    let variant_ident = &variant.ident;
    // depending on the type of the variant we need to generate different constructors
    // for the enum
    match &variant.fields {
        Fields::Unit => quote! {
            #ident::#variant_ident => #ident::#variant_ident
        },
        // Tuple variants: synthesize positional bindings `_0, _1, ...` so the
        // destructuring pattern and the reconstruction can refer to them.
        Fields::Unnamed(FieldsUnnamed { unnamed, .. }) => {
            let fields: Vec<_> = unnamed
                .iter()
                .enumerate()
                .map(|(idx, _field)| syn::Ident::new(&format!("_{}", idx), Span::call_site()))
                .collect();
            let cons = gen_tuple_variant_cons(unnamed.iter().zip(fields.iter()));
            quote! {
                #ident::#variant_ident ( #(#fields),* ) =>
                    #ident::#variant_ident#cons
            }
        }
        // Struct variants: bind fields by name and rebuild with the same names.
        Fields::Named(FieldsNamed { named, .. }) => {
            let cons = gen_struct_variant_cons(ident, variant_ident, named);
            let named = named.iter().map(|field| field.ident.as_ref().unwrap());
            quote! {
                #ident::#variant_ident { #(#named),* } => #cons
            }
        }
    }
}
/// Builds the `( ... )` payload for a tuple enum variant, cloning each
/// bound field through its `AstClone` impl (lifetime/Id left to inference).
fn gen_tuple_variant_cons<'a, I>(fields: I) -> TokenStream
where
    I: IntoIterator<Item = (&'a syn::Field, &'a syn::Ident)>,
{
    let mut cloned_fields = Vec::new();
    for (field, binding) in fields {
        let field_ty = &field.ty;
        cloned_fields.push(quote! {
            <#field_ty as gluon_base::ast::AstClone<_>>::ast_clone(#binding, arena)
        });
    }

    quote! {
        (#(#cloned_fields),*)
    }
}
/// Builds `Ident::Variant { field: <Ty as AstClone>::ast_clone(field, arena), .. }`
/// for a struct-like enum variant, using the bindings created by the match arm.
fn gen_struct_variant_cons<'a, I>(ident: &Ident, variant_ident: &Ident, fields: I) -> TokenStream
where
    I: IntoIterator<Item = &'a Field>,
{
    let fields = fields.into_iter().map(|field| {
        let field_ty = &field.ty;
        let field_ident = field
            .ident
            .as_ref()
            .expect("Struct fields always have names");
        quote! {
            #field_ident: <#field_ty as gluon_base::ast::AstClone<_>>::ast_clone(#field_ident, arena)
        }
    });
    quote! {
        #ident::#variant_ident { #(#fields),* }
    }
}
/// Emits one `T: AstClone<'ast, Id>` predicate for every type parameter of
/// the deriving item, for splicing into the impl's where clause.
fn create_ast_clone_bounds(generics: &Generics) -> Vec<TokenStream> {
    map_type_params(generics, |param| {
        quote! {
            #param: gluon_base::ast::AstClone<'ast, Id>
        }
    })
}
/// Emits an intentionally empty token stream per lifetime parameter: no
/// lifetime bounds are added, but the returned Vec keeps the
/// `#(#lifetime_bounds),*` interpolation slot in gen_impl well-formed.
fn create_lifetime_bounds(generics: &Generics) -> Vec<TokenStream> {
    map_lifetimes(generics, |_lifetime| {
        quote! {}
    })
}
<|file_name|>ViewMailingListsAction.java<|end_file_name|><|fim▁begin|>/**
* Copyright (c) 2000-2004 Liferay, LLC. All rights reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.dotmarketing.portlets.mailinglists.action;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import com.dotcms.repackage.portlet.javax.portlet.PortletConfig;
import com.dotcms.repackage.portlet.javax.portlet.RenderRequest;
import com.dotcms.repackage.portlet.javax.portlet.RenderResponse;
import com.dotcms.repackage.portlet.javax.portlet.WindowState;
import javax.servlet.jsp.PageContext;
import com.dotcms.repackage.commons_beanutils.org.apache.commons.beanutils.BeanUtils;
import com.dotcms.repackage.struts.org.apache.struts.action.ActionForm;
import com.dotcms.repackage.struts.org.apache.struts.action.ActionForward;
import com.dotcms.repackage.struts.org.apache.struts.action.ActionMapping;
import com.dotmarketing.business.Role;
import com.dotmarketing.portlets.mailinglists.factories.MailingListFactory;
import com.dotmarketing.portlets.mailinglists.model.MailingList;
import com.dotmarketing.portlets.userfilter.factories.UserFilterFactory;
import com.dotmarketing.util.Config;
import com.dotmarketing.util.Logger;
import com.dotmarketing.util.UtilMethods;
import com.dotmarketing.util.WebKeys;
import com.liferay.portal.model.User;
import com.liferay.portal.struts.PortletAction;
import com.liferay.portal.util.Constants;
/**
* <a href="ViewQuestionsAction.java.html"><b><i>View Source</i></b></a>
*
* @author Brian Wing Shun Chan
* @version $Revision: 1.4 $
*
*/
public class ViewMailingListsAction extends PortletAction {
public ActionForward render(
ActionMapping mapping, ActionForm form, PortletConfig config,
RenderRequest req, RenderResponse res)
throws Exception {
Logger.debug(this, "Running ViewMailingListsAction");
try {
//get the user, order, direction
User user = com.liferay.portal.util.PortalUtil.getUser(req);
String orderBy = req.getParameter("orderby");
String direction = req.getParameter("direction");
String condition = req.getParameter("query");
//get their lists
List list = null;
List roles = com.dotmarketing.business.APILocator.getRoleAPI().loadRolesForUser(user.getUserId());
boolean isMarketingAdmin = false;
Iterator rolesIt = roles.iterator();
while (rolesIt.hasNext()) {
Role role = (Role) rolesIt.next();
if (UtilMethods.isSet(role.getRoleKey()) && role.getRoleKey().equals(Config.getStringProperty("MAILINGLISTS_ADMIN_ROLE"))) {
isMarketingAdmin = true;
break;
}
}
if (isMarketingAdmin) {
if (UtilMethods.isSet(orderBy) && UtilMethods.isSet(direction)) {
//list = MailingListFactory.getAllMailingLists(orderBy, direction);
list = MailingListFactory.getAllMailingLists();
list.addAll(UserFilterFactory.getAllUserFilter());
if (orderBy.equals("title")) {
if (direction.equals(" asc"))
Collections.sort(list, new ComparatorTitleAsc());
else
Collections.sort(list, new ComparatorTitleDesc());
}
} else if(UtilMethods.isSet(condition)) {
list = MailingListFactory.getAllMailingListsCondition(condition);
list.addAll(UserFilterFactory.getUserFilterByTitle(condition));
Collections.sort(list, new ComparatorTitleAsc());
} else {
list = MailingListFactory.getAllMailingLists();
list.addAll(UserFilterFactory.getAllUserFilter());
Collections.sort(list, new ComparatorTitleAsc());
}
} else {
if (UtilMethods.isSet(orderBy) && UtilMethods.isSet(direction)) {
//list = MailingListFactory.getMailingListsByUser(user, orderBy, direction);<|fim▁hole|> list.add(MailingListFactory.getUnsubscribersMailingList());
list.addAll(UserFilterFactory.getAllUserFilterByUser(user));
if (orderBy.equals("title")) {
if (direction.equals(" asc"))
Collections.sort(list, new ComparatorTitleAsc());
else
Collections.sort(list, new ComparatorTitleDesc());
}
} else if(UtilMethods.isSet(condition)) {
list = MailingListFactory.getMailingListsByUserCondition(user, condition);
list.add(MailingListFactory.getUnsubscribersMailingList());
list.addAll(UserFilterFactory.getUserFilterByTitleAndUser(condition, user));
Collections.sort(list, new ComparatorTitleAsc());
} else {
list = MailingListFactory.getMailingListsByUser(user);
list.add(MailingListFactory.getUnsubscribersMailingList());
list.addAll(UserFilterFactory.getAllUserFilterByUser(user));
Collections.sort(list, new ComparatorTitleAsc());
}
}
if (req.getWindowState().equals(WindowState.NORMAL)) {
// if (list != null)
// list = orderMailingListByDescDate(list);
req.setAttribute(WebKeys.MAILING_LIST_VIEW_PORTLET, list);
return mapping.findForward("portlet.ext.mailinglists.view");
}
else {
req.setAttribute(WebKeys.MAILING_LIST_VIEW, list);
return mapping.findForward("portlet.ext.mailinglists.view_mailinglists");
}
}
catch (Exception e) {
req.setAttribute(PageContext.EXCEPTION, e);
return mapping.findForward(Constants.COMMON_ERROR);
}
}
private List<MailingList> orderMailingListByDescDate(List<MailingList> list) {
List<MailingList> result = new ArrayList<MailingList>(list.size());
int i;
boolean added;
MailingList mailingList2;
for (MailingList mailingList1: list) {
if (result.size() == 0) {
result.add(mailingList1);
} else {
added = false;
for (i = 0; i < result.size(); ++i) {
mailingList2 = result.get(i);
if (mailingList2.getIDate().before(mailingList1.getIDate())) {
result.add(i, mailingList1);
added = true;
break;
}
}
if (!added)
result.add(mailingList1);
}
}
return result;
}
private class ComparatorTitleAsc implements Comparator {
public int compare(Object o1, Object o2) {
String title1, title2;
try {
if (o1 instanceof MailingList)
title1 = BeanUtils.getProperty(o1, "title");
else
title1 = BeanUtils.getProperty(o1, "userFilterTitle");
} catch (Exception e) {
title1 = "";
}
try {
if (o2 instanceof MailingList)
title2 = BeanUtils.getProperty(o2, "title");
else
title2 = BeanUtils.getProperty(o2, "userFilterTitle");
} catch (Exception e) {
title2 = "";
}
return title1.compareToIgnoreCase(title2);
}
}
private class ComparatorTitleDesc implements Comparator {
public int compare(Object o1, Object o2) {
String title1, title2;
try {
if (o1 instanceof MailingList)
title1 = BeanUtils.getProperty(o1, "title");
else
title1 = BeanUtils.getProperty(o1, "userFilterTitle");
} catch (Exception e) {
title1 = "";
}
try {
if (o2 instanceof MailingList)
title2 = BeanUtils.getProperty(o2, "title");
else
title2 = BeanUtils.getProperty(o2, "userFilterTitle");
} catch (Exception e) {
title2 = "";
}
return title2.compareToIgnoreCase(title1);
}
}
}<|fim▁end|> | list = MailingListFactory.getMailingListsByUser(user); |
<|file_name|>unittest_job_manager.py<|end_file_name|><|fim▁begin|>#-------------------------------------------------------------------------
# The Azure Batch Apps Python Client
#
# Copyright (c) Microsoft Corporation. All rights reserved.
#
# The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the ""Software""), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
#--------------------------------------------------------------------------
"""Unit tests for JobManager"""
import sys
try:
import unittest2 as unittest
except ImportError:
import unittest
try:
from unittest import mock
except ImportError:
import mock
from batchapps.job_manager import JobManager
from batchapps.api import Response
from batchapps.exceptions import RestCallException
from batchapps.files import FileCollection
from batchapps.job import (
JobSubmission,
SubmittedJob)
# pylint: disable=W0212
class TestJobManager(unittest.TestCase):
"""Unit tests for JobManager"""
    @mock.patch('batchapps.credentials.Configuration')
    @mock.patch('batchapps.credentials.Credentials')
    @mock.patch('batchapps.job_manager.BatchAppsApi')
    @mock.patch('batchapps.job_manager.SubmittedJob')
    def test_jobmgr_get_job(self, mock_job, mock_api, mock_creds, mock_cfg):
        """get_job: argument validation, REST-failure propagation, and
        construction of SubmittedJob from the REST payload."""
        mgr = JobManager(mock_creds, cfg=mock_cfg)
        # No job reference supplied at all -> ValueError.
        with self.assertRaises(ValueError):
            mgr.get_job()
        # A failed REST call re-raises the wrapped RestCallException.
        resp = mock.create_autospec(Response)
        resp.success = False
        resp.result = RestCallException(None, "test", None)
        mgr._client.get_job.return_value = resp
        with self.assertRaises(RestCallException):
            mgr.get_job(url="http://test")
        mgr._client.get_job.assert_called_with(url="http://test")
        # Success: id/name/type become positional SubmittedJob ctor args.
        resp.success = True
        resp.result = {'id':'1', 'name':'2', 'type':'3'}
        job = mgr.get_job(url="http://test")
        mgr._client.get_job.assert_called_with(url="http://test")
        mock_job.assert_called_with(mgr._client, '1', '2', '3')
        # Extra payload keys are forwarded as keyword arguments.
        resp.result = {'id':'1', 'name':'2', 'type':'3', 'other':'4'}
        job = mgr.get_job(jobid="test_id")
        mgr._client.get_job.assert_called_with(job_id="test_id")
        mock_job.assert_called_with(mgr._client, '1', '2', '3', other='4')
        # A 'job' argument must be a SubmittedJob, not a plain string.
        with self.assertRaises(ValueError):
            mgr.get_job("test")
        with self.assertRaises(ValueError):
            mgr.get_job(job="test")
        # Passing an existing SubmittedJob returns it unchanged.
        sub = mock.create_autospec(SubmittedJob)
        job = mgr.get_job(sub)
        self.assertEqual(job, sub)
        job = mgr.get_job(job=sub)
        self.assertEqual(job, sub)
    @mock.patch('batchapps.credentials.Configuration')
    @mock.patch('batchapps.credentials.Credentials')
    @mock.patch('batchapps.job_manager.BatchAppsApi')
    @mock.patch('batchapps.job_manager.SubmittedJob')
    def test_jobmgr_get_jobs(self, mock_job, mock_api, mock_creds, mock_cfg):
        """get_jobs: default paging, error propagation, and wrapping of each
        returned job dict in a SubmittedJob."""
        mgr = JobManager(mock_creds, cfg=mock_cfg)
        # A failed REST call re-raises the wrapped exception; defaults are
        # index=0, per_call=10.
        resp = mock.create_autospec(Response)
        resp.success = False
        resp.result = RestCallException(None, "test", None)
        mgr._client.list_jobs.return_value = resp
        with self.assertRaises(RestCallException):
            mgr.get_jobs()
        mgr._client.list_jobs.assert_called_with(0, 10)
        # Arguments are coerced: index "5" -> 5, per_call 5; totalCount is
        # cached and exposed via len(mgr).
        resp.success = True
        resp.result = {'totalCount':10, 'jobs':[]}
        jobs = mgr.get_jobs(10, "5", 5)
        mgr._client.list_jobs.assert_called_with(10, 5, name='5')
        self.assertEqual(jobs, [])
        self.assertEqual(len(mgr), 10)
        # A job dict missing required keys surfaces as a RestCallException.
        resp.result = {'totalCount':10, 'jobs':[{'id':'1', 'name':'2'}]}
        with self.assertRaises(RestCallException):
            mgr.get_jobs(name="test")
        # Complete job dicts are turned into SubmittedJob instances.
        resp.result = {'totalCount':10,
                       'jobs':[{'id':'1',
                                'name':'2',
                                'type':'3',
                                'other':'4'}]}
        jobs = mgr.get_jobs(index="10")
        mock_job.assert_called_with(mgr._client, '1', '2', '3', other='4')
        self.assertEqual(len(jobs), 1)
    @mock.patch('batchapps.credentials.Configuration')
    @mock.patch('batchapps.credentials.Credentials')
    @mock.patch('batchapps.job_manager.BatchAppsApi')
    @mock.patch('batchapps.job_manager.JobSubmission')
    def test_jobmgr_create_job(self, mock_job, mock_api, mock_creds, mock_cfg):
        """create_job: forwards name and keyword args to JobSubmission,
        stringifying a None name."""
        mgr = JobManager(mock_creds, cfg=mock_cfg)
        # All keyword arguments are passed through untouched.
        mgr.create_job("my_job", a='a', b='None', c=[], d=42)
        mock_job.assert_called_with(mgr._client,
                                    "my_job",
                                    a='a',
                                    b='None',
                                    c=[],
                                    d=42)
        # A None name is converted to the string "None".
        mgr.create_job(None)
        mock_job.assert_called_with(mgr._client, "None")
@mock.patch('batchapps.credentials.Configuration')
@mock.patch('batchapps.credentials.Credentials')
@mock.patch('batchapps.job_manager.BatchAppsApi')
def test_jobmgr_submit(self, mock_api, mock_creds, mock_cfg):
"""Test submit"""
_callback = mock.Mock()
job = mock.create_autospec(JobSubmission)
job.name = "test"
job.source = "test"
job.required_files = mock.create_autospec(FileCollection)
mgr = JobManager(mock_creds, cfg=mock_cfg)
mgr.submit(job)
self.assertTrue(job.submit.called)<|fim▁hole|> self.assertTrue(job.submit.called)
job.required_files.upload.assert_called_with(threads=10, callback=_callback, block=11111)
with self.assertRaises(TypeError):
mgr.submit("test")
job.required_files.upload.return_value = ["oops"]
with self.assertRaises(Exception):
mgr.submit(job)
# Allow running this test module directly with the stdlib runner.
if __name__ == '__main__':
    unittest.main()
mgr.submit(job, upload_threads=10, callback=_callback, block=11111) |
<|file_name|>uniquefileidentifierframe.cpp<|end_file_name|><|fim▁begin|>/***************************************************************************
copyright : (C) 2002 - 2008 by Scott Wheeler
email : [email protected]
***************************************************************************/
/***************************************************************************
* This library is free software; you can redistribute it and/or modify *
* it under the terms of the GNU Lesser General Public License version *
* 2.1 as published by the Free Software Foundation. *
* *
* This library is distributed in the hope that it will be useful, but *
* WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU *
* Lesser General Public License for more details. *
* *
* You should have received a copy of the GNU Lesser General Public *
* License along with this library; if not, write to the Free Software *
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA *
* 02110-1301 USA *
* *
* Alternatively, this file is available under the Mozilla Public *
* License Version 1.1. You may obtain a copy of the License at *
* http://www.mozilla.org/MPL/ *
***************************************************************************/
#include <tbytevectorlist.h>
#include <tpropertymap.h>
#include <tdebug.h>
#include "id3v2tag.h"
#include "uniquefileidentifierframe.h"
using namespace TagLib;
using namespace ID3v2;
// Private data holder (d-pointer) keeping the frame's payload members out
// of the public header.
class UniqueFileIdentifierFrame::UniqueFileIdentifierFramePrivate
{
public:
  String owner;          // owner/namespace string, e.g. an email or URL
  ByteVector identifier; // opaque binary identifier data
};
////////////////////////////////////////////////////////////////////////////////
// public methods
////////////////////////////////////////////////////////////////////////////////<|fim▁hole|>UniqueFileIdentifierFrame::UniqueFileIdentifierFrame(const ByteVector &data) :
ID3v2::Frame(data),
d(new UniqueFileIdentifierFramePrivate())
{
setData(data);
}
// Builds a new "UFID" frame directly from an owner string and its binary
// identifier, without parsing any on-disk data.
UniqueFileIdentifierFrame::UniqueFileIdentifierFrame(const String &owner, const ByteVector &id) :
  ID3v2::Frame("UFID"),
  d(new UniqueFileIdentifierFramePrivate())
{
  d->owner = owner;
  d->identifier = id;
}
// Frees the d-pointer allocated by the constructors.
UniqueFileIdentifierFrame::~UniqueFileIdentifierFrame()
{
  delete d;
}
// Returns the owner string parsed from or assigned to this frame.
String UniqueFileIdentifierFrame::owner() const
{
  return d->owner;
}
// Returns the raw identifier bytes stored in this frame.
ByteVector UniqueFileIdentifierFrame::identifier() const
{
  return d->identifier;
}
// Replaces the owner string; takes effect the next time the frame renders.
void UniqueFileIdentifierFrame::setOwner(const String &s)
{
  d->owner = s;
}
// Replaces the raw identifier bytes; takes effect on the next render.
void UniqueFileIdentifierFrame::setIdentifier(const ByteVector &v)
{
  d->identifier = v;
}
// UFID frames carry binary data with no meaningful text rendering, so this
// always returns a null String.
String UniqueFileIdentifierFrame::toString() const
{
  return String();
}
// Maps a MusicBrainz-owned UFID to the MUSICBRAINZ_TRACKID property; any
// other owner is recorded as unsupported data ("UFID/<owner>").
PropertyMap UniqueFileIdentifierFrame::asProperties() const
{
  PropertyMap map;
  if(d->owner == "http://musicbrainz.org") {
    map.insert("MUSICBRAINZ_TRACKID", String(d->identifier));
  }
  else {
    map.unsupportedData().append(frameID() + String("/") + d->owner);
  }
  return map;
}
// Scans the tag's "UFID" frames and returns the first one whose owner
// string equals \a o, or 0 when no such frame exists.
UniqueFileIdentifierFrame *UniqueFileIdentifierFrame::findByOwner(const ID3v2::Tag *tag, const String &o) // static
{
  ID3v2::FrameList frames = tag->frameList("UFID");

  ID3v2::FrameList::ConstIterator it = frames.begin();
  while(it != frames.end()) {
    UniqueFileIdentifierFrame *candidate = dynamic_cast<UniqueFileIdentifierFrame *>(*it);
    if(candidate && candidate->owner() == o)
      return candidate;
    ++it;
  }

  return 0;
}
// Parses the frame body: a Latin-1 owner string up to its terminator,
// followed by the raw identifier bytes. Bodies shorter than one byte are
// rejected and leave the frame's data untouched.
void UniqueFileIdentifierFrame::parseFields(const ByteVector &data)
{
  if(data.size() < 1) {
    debug("An UFID frame must contain at least 1 byte.");
    return;
  }
  int pos = 0;
  d->owner = readStringField(data, String::Latin1, &pos);
  // Everything after the owner's terminator is the opaque identifier.
  d->identifier = data.mid(pos);
}
// Serializes the frame body: Latin-1 owner string, a single NUL terminator,
// then the raw identifier bytes — the inverse of parseFields().
ByteVector UniqueFileIdentifierFrame::renderFields() const
{
  ByteVector data;
  data.append(d->owner.data(String::Latin1));
  data.append(char(0));
  data.append(d->identifier);
  return data;
}
// Internal constructor used when the frame header has already been parsed:
// adopts the header and parses only the field data portion.
UniqueFileIdentifierFrame::UniqueFileIdentifierFrame(const ByteVector &data, Header *h) :
  Frame(h),
  d(new UniqueFileIdentifierFramePrivate())
{
  parseFields(fieldData(data));
}
<|file_name|>0007_merge_20170408_2326.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-04-08 23:26
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [<|fim▁hole|>
operations = [
]<|fim▁end|> | ('app', '0006_author_has_github_task'),
('app', '0006_auto_20170405_1957'),
] |
<|file_name|>playlist.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import ConfigParser, sys, os, urllib2, json, time, shutil, filecmp
import Levenshtein
config = ConfigParser.ConfigParser()
config.read("config.ini")
def clean(chaine):
    """Return *chaine* lower-cased with surrounding whitespace removed."""
    lowered = chaine.lower()
    return lowered.strip()
def decode(chaine):
    """Best-effort ASCII transliteration of *chaine*.

    Curly quotes are first replaced with straight apostrophes, then the
    string is NFKD-decomposed and ASCII-encoded, dropping characters that
    cannot be represented. On any failure the quote-normalised input is
    returned unchanged.
    """
    # Fix: unicodedata was never imported at module level, so normalize()
    # always raised NameError and the bare except silently returned the
    # input untouched.
    import unicodedata
    chaine = chaine.replace(u"\u2018", "'").replace(u"\u2019", "'")
    try:
        chaine = unicodedata.normalize('NFKD', chaine).encode('ascii','ignore')
        return chaine
    except:
        return chaine
def remove_accents(input_str):
    """Strip combining accent marks from *input_str* via NFKD decomposition.

    Returns the input unchanged on any error (e.g. non-text input, or under
    Python 3 where the Python 2 ``unicode`` builtin does not exist).
    """
    # Fix: unicodedata was never imported at module level, so this function
    # always hit the except clause and returned its input with accents intact.
    import unicodedata
    try:
        nkfd_form = unicodedata.normalize('NFKD', unicode(input_str))
        return u"".join([c for c in nkfd_form if not unicodedata.combining(c)])
    except:
        return input_str
def cc(i):
    """Normalise *i* for fuzzy comparisons: strip accents, lower-case and
    trim, then ASCII-transliterate."""
    return decode(clean(remove_accents(i)))
def getKey(item):
    """Sort key for [distance, path] candidate pairs: the distance."""
    return item[0]
class playlist:
    def __init__(self, limit, page=1, period="overall"):
        # last.fm credentials and local music locations come from config.ini.
        self.api_key = config.get("lastfm",'key')
        self.music_dir = config.get("lastfm",'directory')
        self.page = page
        self.mp_dir = config.get("lastfm",'mudir')
        self.user = config.get("lastfm",'user')
        # Top-level artist directories of the local library.
        self.dossier = os.listdir(self.music_dir)
        self.period = period
        # NOTE(review): limit is concatenated into the API URL in toptracks(),
        # so callers must pass it as a string.
        self.limit = limit
        self.notfound = []
#for i in req!
    def lastfm(self, meth):
        """Call the last.fm 2.0 API with the given method/params fragment
        *meth* and return the decoded JSON, or None on an HTTP error."""
        try:
            url = 'http://ws.audioscrobbler.com/2.0/?api_key='+self.api_key+'&autocorrect=1'+meth+'&format=json&page='+str(self.page)
            txt = urllib2.urlopen(url).read()
            return json.loads(txt)
        except urllib2.HTTPError:
            # NOTE(review): only HTTPError is handled; a network-level
            # URLError would still propagate. TODO confirm that is intended.
            #print '\n Error : '+art
            return None
def toptracks(self):
url = '&method=user.gettoptracks&user='+self.user+'&limit='+self.limit+'&period='+self.period;
req = self.lastfm(url)
for i in req["toptracks"]["track"]:<|fim▁hole|>
"""Rechercher le dossier artiste, exacte ou levenshtein inferieure a longueur moins 2"""
    def findartist(self, artist):
        """Locate the local directory in self.dossier matching *artist*
        (already normalised by cc()).

        Tries an exact normalised match first, then Levenshtein distance;
        returns the directory name, or None when there is no unambiguous
        close candidate."""
        textlog = " find (" + artist + "):\n"
        lev = {}
        # Each artist directory in the local library
        for art in self.dossier:
            ar = cc(art)
            # Exact match (case-, accent- and whitespace-insensitive)
            if ar == artist:
                ##print "YES BITCH"
                return art
            # Levenshtein distance: keep candidates that are not too different
            elif abs(len(ar) - len(artist)) < 5:
                l = Levenshtein.distance(ar, artist)
                # NOTE(review): the threshold uses len(art)/2 (the raw
                # directory name), not len(ar) — TODO confirm intent.
                if l < (len(art)/2):
                    if not l in lev.keys():
                        lev[l] = []
                    lev[l].append(art)
        # Process the collected candidates: accept only a unique best match
        textlog += str(lev) + "\n"
        if lev != {} and len( lev[min(lev.keys())] ) == 1:
            ##print lev[min(lev.keys())][0]
            ##print "YES BIS BITCHY BITCH"
            return lev[min(lev.keys())][0]
        else:
            # Ambiguous or no candidates: implicitly returns None
            pass ##print textlog
"""Rechercher le dossier artiste, exacte ou levenshtein inferieure a longueur moins 2"""
    def findtrack(self, artist, track, i=0, lev=False):
        """Yield file paths under self.music_dir/artist matching *track*.

        With lev=False, yields paths whose filename contains the (cleaned)
        track name. With lev=True, yields [distance, path] pairs for close
        Levenshtein matches instead. Recurses into album sub-directories.
        """
        # Walk every entry in the artist directory
        base = self.music_dir + "/" + artist
        for fil in os.listdir(base):
            if os.path.isdir(base +"/"+ fil):
                ##print ("findtrack " + artist + " / " + fil + " - " + track)
                try:
                    for result in self.findtrack(artist + "/" + fil, track, i=i+1, lev=lev):
                        yield result
                except UnicodeDecodeError:
                    pass
            if os.path.isfile(base +"/"+ fil):
                if lev:
                    # Compare against the filename without its 4-char
                    # extension (assumes ".mp3"-style suffix — TODO confirm).
                    nfil = cc(clean(unicode(fil[:-4],'utf-8')))
                    ntr = cc(clean(track))
                    l = Levenshtein.distance(ntr, nfil)
                    if l < len(ntr):
                        ##print "lev |" + ntr + "|" + nfil + "|"
                        ##print str(l) + " - " + str(len(cc(track)))
                        yield [l, base+"/"+fil]
                else:
                    if clean(track) in clean(unicode(fil,'utf-8')):
                        ##print base+"/"+fil
                        yield base+"/"+fil
    def mkdirs(self, li, pat):
        """Create the chain of directories named in *li* under *pat*, one
        level per list element, and return the deepest path created."""
        if li != []:
            dd = os.path.join(pat, li[0])
            if not os.path.isdir( dd ):
                ##print "mkdir(" + dd+")"
                os.mkdir(dd)
            # Recurse into the directory just ensured
            return self.mkdirs(li[1:], dd)
        else:
            return pat
    def move(self, t):
        """Copy track file *t* into the mirror directory self.mp_dir,
        recreating its path relative to self.music_dir.

        Returns 1 (copying nothing) when an identically-sized copy already
        exists; a copy with a differing size is removed and re-copied."""
        dirs = t[len(self.music_dir)+1:].split("/")
        new = self.mkdirs(dirs[:-1], self.mp_dir)
        dst = os.path.join(new, dirs[-1])
        if os.path.isfile( dst ):
            # Equal size is treated as "already copied"; a different size is
            # assumed stale. TODO confirm size alone is a good enough proxy.
            if os.path.getsize(t) != os.path.getsize(dst):
                os.remove(dst)
            else:
                return 1
        shutil.copyfile(t, dst)
        ##print "exist"
        #shutil.copyfile(t, dst)
    def findtrackall(self, a, i):
        """Resolve track i['name'] to a file under artist directory *a*.

        An exact substring match wins; otherwise the closest Levenshtein
        candidate is returned. Returns 0 when nothing matches."""
        for t in self.findtrack(a, i['name']):
            # First exact match wins
            return t
        ##print "### :: " + i['artist'] + '-' + i['name'] + ""
        ties = []
        for t in self.findtrack(a, i['name'], lev=True):
            ties.append(t)
        if len(ties) == 0:
            return 0
        if len(ties) == 1:
            ##print ties[0][1]
            return ties[0][1]
        else:
            # Candidates are [distance, path]; pick the smallest distance
            ties = sorted(ties, key=getKey)
            ##print ties[0][1]
            return ties[0][1]
    def run(self):
        """Build an .m3u playlist of the user's top tracks, resolving each
        track to a local file and optionally mirroring it into self.mp_dir."""
        file = time.strftime("TOP"+self.limit+"_%m%d%H%M.m3u")
        fo = open(file, 'w+')
        number = 0
        for i in self.toptracks():
            number += 1
            print number
            #for i in [{'name':u"The sound of silence",'artist':u"Simon and Garfunkel"}]:
            a = self.findartist(i['artist'])
            t = 0
            if a:
                t = self.findtrackall(a, i)
            if t == 0:
                # Fall back to compilation albums
                t = self.findtrackall("Various Artists", i)
            ##print t
            if t != 0:
                fo.write(t+"\n")
                # Mirror the file only when the target directory exists
                if os.path.isdir( self.mp_dir ):
                    self.move(t)
            else:
                #print "###########"
                #print i['artist'] + '-' + i['name']
                pass
        #print self.notfound
        #print '--finished--'
        fo.close()
# <?xml version="1.0" encoding="UTF-8"?>
# <playlist version="1" xmlns="http://xspf.org/ns/0/">
# <trackList>
# <track><location>file:///media/data/Musique/Cypress Hill/2010 - Rise Up/Cypress Hill - Rise Up - 13 - Armed and Dangerous.mp3</location></track>
# <track><location>file:///media/data/Musique/The Black Keys/Attack & Release/The Black Keys - Psychotic Girl.mp3</location></track>
# <track><location>file:///media/data/Musique/Odezenne/2012 - OVNI edition Louis XIV/13 - Hirondelles.mp3</location></track>
# </trackList>
# </playlist>
pass
if len(sys.argv) == 0 :
print "usage : python playlist.py length page"
else:
if len(sys.argv) <= 1 :
p = playlist(100)
elif len(sys.argv) <= 2 :
p = playlist(sys.argv[1])
elif len(sys.argv) <= 3 :
p = playlist(sys.argv[1], sys.argv[2])
else: p = playlist(sys.argv[1], sys.argv[2], sys.argv[3])
p.run()<|fim▁end|> | #if cc(i['artist']['name']) == "high tone":
yield {'name':i['name'],'artist':cc(i['artist']['name'])} |
<|file_name|>calendar.module.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>
import { RequestService } from "../services";
import { SharedModule } from "../shared/shared.module";
import { PipeModule } from "../pipes/pipe.module";
import * as fromComponents from './components';
import { AuthGuard } from "../auth/auth.guard";
import { CalendarComponent } from "./components/calendar.component";
import { FullCalendarModule } from 'primeng/fullcalendar';
import { CalendarService } from "../services/calendar.service";
// Feature routes: the module root renders CalendarComponent and is guarded
// so only authenticated users can reach it.
const routes: Routes = [
    { path: "", component: CalendarComponent, canActivate: [AuthGuard] },
];

@NgModule({
    imports: [
        // Child routing keeps these routes scoped to the lazy-loaded module.
        RouterModule.forChild(routes),
        SharedModule,
        PipeModule,
        FullCalendarModule,
    ],
    declarations: [
        // All calendar components are collected in ./components/index.
        ...fromComponents.components
    ],
    exports: [
        RouterModule,
    ],
    providers: [
        RequestService,
        CalendarService
    ],
})
export class CalendarModule { }<|fim▁end|> | import { NgModule } from "@angular/core";
import { RouterModule, Routes } from "@angular/router"; |
<|file_name|>network_irc.py<|end_file_name|><|fim▁begin|># Copyright (C) 2013 Claudio "nex" Guarnieri (@botherder)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
<|fim▁hole|> description = "Connects to an IRC server, possibly part of a botnet"
severity = 3
categories = ["irc"]
authors = ["nex"]
minimum = "0.6"
def run(self):
if "irc" in self.results["network"]:
if len(self.results["network"]["irc"]) > 0:
return True
return False<|fim▁end|> | from lib.cuckoo.common.abstracts import Signature
class NetworkIRC(Signature):
name = "network_irc" |
<|file_name|>diagnostic.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub use self::Level::*;
pub use self::RenderSpan::*;
pub use self::ColorConfig::*;
use self::Destination::*;
use codemap::{COMMAND_LINE_SP, COMMAND_LINE_EXPN, Pos, Span};
use codemap;
use diagnostics;
use std::cell::{RefCell, Cell};
use std::cmp;
use std::fmt;
use std::io::prelude::*;
use std::io;
use term::WriterWrapper;
use term;
use libc;
/// maximum number of lines we will print for each error; arbitrary.
/// Spans longer than this are elided with an ellipsis by the renderers.
const MAX_LINES: usize = 6;
/// How the source code covered by a span should be rendered beneath a
/// diagnostic message.
#[derive(Clone)]
pub enum RenderSpan {
    /// A FullSpan renders with both with an initial line for the
    /// message, prefixed by file:linenum, followed by a summary of
    /// the source code covered by the span.
    FullSpan(Span),

    /// Similar to a FullSpan, but the cited position is the end of
    /// the span, instead of the start. Used, at least, for telling
    /// compiletest/runtest to look at the last line of the span
    /// (since `end_highlight_lines` displays an arrow to the end
    /// of the span).
    EndSpan(Span),

    /// A suggestion renders with both with an initial line for the
    /// message, prefixed by file:linenum, followed by a summary
    /// of hypothetical source code, where the `String` is spliced
    /// into the lines in place of the code covered by the span.
    Suggestion(Span, String),

    /// A FileLine renders with just a line for the message prefixed
    /// by file:linenum.
    FileLine(Span),
}

impl RenderSpan {
    /// Returns the `Span` carried by the variant; every variant stores
    /// exactly one span as its first field.
    fn span(&self) -> Span {
        match *self {
            FullSpan(s) |
            Suggestion(s, _) |
            EndSpan(s) |
            FileLine(s) =>
                s
        }
    }
}
/// Whether terminal colors should be used for diagnostic output.
#[derive(Clone, Copy)]
pub enum ColorConfig {
    /// Color only when stderr is a terminal (see `stderr_isatty`).
    Auto,
    /// Always emit color escape sequences.
    Always,
    /// Never emit color.
    Never
}
/// Sink for rendered diagnostics. Implementors decide how a message,
/// optional extended error code, and severity level are written out.
pub trait Emitter {
    /// Emit a diagnostic, optionally citing a source span resolved
    /// against the given `CodeMap`.
    fn emit(&mut self, cmsp: Option<(&codemap::CodeMap, Span)>,
            msg: &str, code: Option<&str>, lvl: Level);
    /// Emit a diagnostic with an explicit span-rendering style.
    fn custom_emit(&mut self, cm: &codemap::CodeMap,
                   sp: RenderSpan, msg: &str, lvl: Level);
}
/// Used as a return value to signify a fatal error occurred. (It is also
/// used as the argument to panic at the moment, but that will eventually
/// not be true.)
// `#[must_use]` ensures callers cannot silently drop the error marker.
#[derive(Copy, Clone)]
#[must_use]
pub struct FatalError;

/// Signifies that the compiler died with an explicit call to `.bug`
/// or `.span_bug` rather than a failed assertion, etc.
// Used as a panic payload so the top-level driver can distinguish
// deliberate ICEs from other panics.
#[derive(Copy, Clone)]
pub struct ExplicitBug;
/// A span-handler is like a handler but also
/// accepts span information for source-location
/// reporting.
pub struct SpanHandler {
    /// Underlying handler that tracks error counts and owns the emitter.
    pub handler: Handler,
    /// Code map used to resolve spans to file/line/column positions.
    pub cm: codemap::CodeMap,
}
impl SpanHandler {
    /// Emits a fatal error at `sp` and returns the `FatalError` token for
    /// the caller to propagate (and eventually panic with).
    pub fn span_fatal(&self, sp: Span, msg: &str) -> FatalError {
        self.handler.emit(Some((&self.cm, sp)), msg, Fatal);
        return FatalError;
    }
    /// Like `span_fatal`, but also prints an extended error code.
    pub fn span_fatal_with_code(&self, sp: Span, msg: &str, code: &str) -> FatalError {
        self.handler.emit_with_code(Some((&self.cm, sp)), msg, code, Fatal);
        return FatalError;
    }
    /// Emits a (non-fatal) error at `sp` and bumps the error count.
    pub fn span_err(&self, sp: Span, msg: &str) {
        self.handler.emit(Some((&self.cm, sp)), msg, Error);
        self.handler.bump_err_count();
    }
    /// Like `span_err`, but also prints an extended error code.
    pub fn span_err_with_code(&self, sp: Span, msg: &str, code: &str) {
        self.handler.emit_with_code(Some((&self.cm, sp)), msg, code, Error);
        self.handler.bump_err_count();
    }
    /// Emits a warning at `sp`; does not affect the error count.
    pub fn span_warn(&self, sp: Span, msg: &str) {
        self.handler.emit(Some((&self.cm, sp)), msg, Warning);
    }
    /// Like `span_warn`, but also prints an extended error code.
    pub fn span_warn_with_code(&self, sp: Span, msg: &str, code: &str) {
        self.handler.emit_with_code(Some((&self.cm, sp)), msg, code, Warning);
    }
    /// Emits a note at `sp`.
    pub fn span_note(&self, sp: Span, msg: &str) {
        self.handler.emit(Some((&self.cm, sp)), msg, Note);
    }
    /// Emits a note whose arrow points at the *end* of `sp`
    /// (see `RenderSpan::EndSpan`).
    pub fn span_end_note(&self, sp: Span, msg: &str) {
        self.handler.custom_emit(&self.cm, EndSpan(sp), msg, Note);
    }
    /// Emits a help message at `sp`.
    pub fn span_help(&self, sp: Span, msg: &str) {
        self.handler.emit(Some((&self.cm, sp)), msg, Help);
    }
    /// Prints out a message with a suggested edit of the code.
    ///
    /// See `diagnostic::RenderSpan::Suggestion` for more information.
    pub fn span_suggestion(&self, sp: Span, msg: &str, suggestion: String) {
        self.handler.custom_emit(&self.cm, Suggestion(sp, suggestion), msg, Help);
    }
    /// Emits a note citing only file:line, without quoting source text.
    pub fn fileline_note(&self, sp: Span, msg: &str) {
        self.handler.custom_emit(&self.cm, FileLine(sp), msg, Note);
    }
    /// Emits a help message citing only file:line, without quoting source.
    pub fn fileline_help(&self, sp: Span, msg: &str) {
        self.handler.custom_emit(&self.cm, FileLine(sp), msg, Help);
    }
    /// Reports an internal compiler error at `sp`, then panics with
    /// `ExplicitBug`.
    pub fn span_bug(&self, sp: Span, msg: &str) -> ! {
        self.handler.emit(Some((&self.cm, sp)), msg, Bug);
        panic!(ExplicitBug);
    }
    /// Reports an "unimplemented" internal error at `sp`; never returns.
    pub fn span_unimpl(&self, sp: Span, msg: &str) -> ! {
        self.span_bug(sp, &format!("unimplemented {}", msg));
    }
    /// Borrows the underlying span-less `Handler`.
    pub fn handler<'a>(&'a self) -> &'a Handler {
        &self.handler
    }
}
/// A handler deals with errors; certain errors
/// (fatal, bug, unimpl) may cause immediate exit,
/// others log errors for later reporting.
pub struct Handler {
    /// Running count of non-fatal errors emitted so far.
    err_count: Cell<usize>,
    /// Destination for rendered diagnostics; boxed so the backend is
    /// pluggable (terminal, raw writer, test capture, ...).
    emit: RefCell<Box<Emitter + Send>>,
    /// When false, `Warning`-level diagnostics are silently dropped.
    pub can_emit_warnings: bool
}
impl Handler {
    /// Emits a fatal error and panics with `FatalError`.
    pub fn fatal(&self, msg: &str) -> ! {
        self.emit.borrow_mut().emit(None, msg, None, Fatal);
        panic!(FatalError);
    }
    /// Emits a (non-fatal) error and bumps the error count.
    pub fn err(&self, msg: &str) {
        self.emit.borrow_mut().emit(None, msg, None, Error);
        self.bump_err_count();
    }
    /// Increments the running error count by one.
    pub fn bump_err_count(&self) {
        self.err_count.set(self.err_count.get() + 1);
    }
    /// Number of errors reported so far.
    pub fn err_count(&self) -> usize {
        self.err_count.get()
    }
    /// True if at least one error has been reported.
    pub fn has_errors(&self) -> bool {
        self.err_count.get() > 0
    }
    /// If any errors were reported, emits a fatal "aborting due to ..."
    /// summary (which panics); otherwise returns normally.
    pub fn abort_if_errors(&self) {
        let s;
        match self.err_count.get() {
            0 => return,
            1 => s = "aborting due to previous error".to_string(),
            _ => {
                s = format!("aborting due to {} previous errors",
                            self.err_count.get());
            }
        }
        self.fatal(&s[..]);
    }
    /// Emits a warning (subject to `can_emit_warnings` via `emit`).
    pub fn warn(&self, msg: &str) {
        self.emit.borrow_mut().emit(None, msg, None, Warning);
    }
    /// Emits a note.
    pub fn note(&self, msg: &str) {
        self.emit.borrow_mut().emit(None, msg, None, Note);
    }
    /// Emits a help message.
    pub fn help(&self, msg: &str) {
        self.emit.borrow_mut().emit(None, msg, None, Help);
    }
    /// Reports an internal compiler error and panics with `ExplicitBug`.
    pub fn bug(&self, msg: &str) -> ! {
        self.emit.borrow_mut().emit(None, msg, None, Bug);
        panic!(ExplicitBug);
    }
    /// Reports an "unimplemented" internal error; never returns.
    pub fn unimpl(&self, msg: &str) -> ! {
        self.bug(&format!("unimplemented {}", msg));
    }
    /// Forwards a diagnostic to the emitter, dropping warnings when
    /// `can_emit_warnings` is false.
    pub fn emit(&self,
                cmsp: Option<(&codemap::CodeMap, Span)>,
                msg: &str,
                lvl: Level) {
        if lvl == Warning && !self.can_emit_warnings { return }
        self.emit.borrow_mut().emit(cmsp, msg, None, lvl);
    }
    /// Like `emit`, but also carries an extended error code.
    pub fn emit_with_code(&self,
                          cmsp: Option<(&codemap::CodeMap, Span)>,
                          msg: &str,
                          code: &str,
                          lvl: Level) {
        if lvl == Warning && !self.can_emit_warnings { return }
        self.emit.borrow_mut().emit(cmsp, msg, Some(code), lvl);
    }
    /// Like `emit`, but with an explicit span-rendering style.
    pub fn custom_emit(&self, cm: &codemap::CodeMap,
                       sp: RenderSpan, msg: &str, lvl: Level) {
        if lvl == Warning && !self.can_emit_warnings { return }
        self.emit.borrow_mut().custom_emit(cm, sp, msg, lvl);
    }
}
/// Couples a `Handler` with a `CodeMap` so diagnostics can cite spans.
pub fn mk_span_handler(handler: Handler, cm: codemap::CodeMap) -> SpanHandler {
    SpanHandler {
        handler: handler,
        cm: cm,
    }
}
/// Builds a `Handler` that renders diagnostics to stderr, honoring
/// `color_config` and the optional extended-error registry.
pub fn default_handler(color_config: ColorConfig,
                       registry: Option<diagnostics::registry::Registry>,
                       can_emit_warnings: bool) -> Handler {
    mk_handler(can_emit_warnings, Box::new(EmitterWriter::stderr(color_config, registry)))
}
/// Builds a `Handler` around an arbitrary emitter, starting with a zero
/// error count.
pub fn mk_handler(can_emit_warnings: bool, e: Box<Emitter + Send>) -> Handler {
    Handler {
        err_count: Cell::new(0),
        emit: RefCell::new(e),
        can_emit_warnings: can_emit_warnings
    }
}
/// Severity of a diagnostic, ordered roughly from most to least severe.
#[derive(Copy, PartialEq, Clone, Debug)]
pub enum Level {
    /// Internal compiler error.
    Bug,
    /// Error that aborts immediately (see `Handler::fatal`).
    Fatal,
    /// Ordinary error; counted and reported, compilation may continue.
    Error,
    Warning,
    Note,
    Help,
}
impl fmt::Display for Level {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use std::fmt::Display;
match *self {
Bug => "error: internal compiler error".fmt(f),
Fatal | Error => "error".fmt(f),
Warning => "warning".fmt(f),
Note => "note".fmt(f),
Help => "help".fmt(f),
}
}
}
impl Level {
fn color(self) -> term::color::Color {
match self {
Bug | Fatal | Error => term::color::BRIGHT_RED,
Warning => term::color::BRIGHT_YELLOW,
Note => term::color::BRIGHT_GREEN,
Help => term::color::BRIGHT_CYAN,
}
}
}
/// Writes `msg` to the emitter's destination, applying `color` only when
/// the destination is a color-capable terminal; raw destinations get the
/// plain bytes unchanged.
fn print_maybe_styled(w: &mut EmitterWriter,
                      msg: &str,
                      color: term::attr::Attr) -> io::Result<()> {
    match w.dst {
        Terminal(ref mut t) => {
            try!(t.attr(color));

            // If `msg` ends in a newline, we need to reset the color before
            // the newline. We're making the assumption that we end up writing
            // to a `LineBufferedWriter`, which means that emitting the reset
            // after the newline ends up buffering the reset until we print
            // another line or exit. Buffering the reset is a problem if we're
            // sharing the terminal with any other programs (e.g. other rustc
            // instances via `make -jN`).
            //
            // Note that if `msg` contains any internal newlines, this will
            // result in the `LineBufferedWriter` flushing twice instead of
            // once, which still leaves the opportunity for interleaved output
            // to be miscolored. We assume this is rare enough that we don't
            // have to worry about it.
            if msg.ends_with("\n") {
                try!(t.write_all(msg[..msg.len()-1].as_bytes()));
                try!(t.reset());
                try!(t.write_all(b"\n"));
            } else {
                try!(t.write_all(msg.as_bytes()));
                try!(t.reset());
            }
            Ok(())
        }
        Raw(ref mut w) => w.write_all(msg.as_bytes()),
    }
}
/// Prints the one-line diagnostic header: an optional `topic` (usually a
/// "file:line" citation), the colored level label, the bold message, and
/// an optional bright-magenta `[code]` suffix, terminated by a newline.
fn print_diagnostic(dst: &mut EmitterWriter, topic: &str, lvl: Level,
                    msg: &str, code: Option<&str>) -> io::Result<()> {
    if !topic.is_empty() {
        try!(write!(&mut dst.dst, "{} ", topic));
    }

    // `lvl` implements Display, so format it directly instead of going
    // through an intermediate `to_string()` allocation.
    try!(print_maybe_styled(dst,
                            &format!("{}: ", lvl),
                            term::attr::ForegroundColor(lvl.color())));
    try!(print_maybe_styled(dst,
                            &format!("{}", msg),
                            term::attr::Bold));

    // `code` is a `&str`; the former `code.clone()` only copied the
    // reference, so it is dropped here.
    if let Some(code) = code {
        let style = term::attr::ForegroundColor(term::color::BRIGHT_MAGENTA);
        try!(print_maybe_styled(dst, &format!(" [{}]", code), style));
    }
    try!(write!(&mut dst.dst, "\n"));
    Ok(())
}
/// Default `Emitter` implementation that renders diagnostics to a
/// terminal (with color) or to an arbitrary raw writer.
pub struct EmitterWriter {
    /// Where output goes (color-capable terminal or plain writer).
    dst: Destination,
    /// Optional registry used to offer `--explain` hints for error codes.
    registry: Option<diagnostics::registry::Registry>
}

/// Output sink: a color-capable terminal or an arbitrary byte writer.
enum Destination {
    Terminal(Box<term::Terminal<WriterWrapper> + Send>),
    Raw(Box<Write + Send>),
}
impl EmitterWriter {
    /// Builds an emitter writing to stderr. Color is decided by
    /// `color_config` (`Auto` probes whether stderr is a TTY); if a color
    /// terminal backend cannot be obtained, falls back to raw output.
    pub fn stderr(color_config: ColorConfig,
                  registry: Option<diagnostics::registry::Registry>) -> EmitterWriter {
        let stderr = io::stderr();

        let use_color = match color_config {
            Always => true,
            Never => false,
            Auto => stderr_isatty(),
        };

        if use_color {
            let dst = match term::stderr() {
                Some(t) => Terminal(t),
                None => Raw(Box::new(stderr)),
            };
            EmitterWriter { dst: dst, registry: registry }
        } else {
            EmitterWriter { dst: Raw(Box::new(stderr)), registry: registry }
        }
    }

    /// Builds an emitter writing uncolored output to an arbitrary sink.
    pub fn new(dst: Box<Write + Send>,
               registry: Option<diagnostics::registry::Registry>) -> EmitterWriter {
        EmitterWriter { dst: Raw(dst), registry: registry }
    }
}
/// True when stderr is attached to a terminal (Unix: `isatty(2)`).
#[cfg(unix)]
fn stderr_isatty() -> bool {
    unsafe { libc::isatty(libc::STDERR_FILENO) != 0 }
}

/// True when stderr is attached to a console (Windows: the standard
/// error handle accepts `GetConsoleMode`).
#[cfg(windows)]
fn stderr_isatty() -> bool {
    const STD_ERROR_HANDLE: libc::DWORD = -12i32 as libc::DWORD;
    extern "system" {
        fn GetStdHandle(which: libc::DWORD) -> libc::HANDLE;
        fn GetConsoleMode(hConsoleHandle: libc::HANDLE,
                          lpMode: libc::LPDWORD) -> libc::BOOL;
    }
    unsafe {
        let handle = GetStdHandle(STD_ERROR_HANDLE);
        let mut out = 0;
        // GetConsoleMode fails (returns 0) when the handle is not a console.
        GetConsoleMode(handle, &mut out) != 0
    }
}
impl Write for Destination {
    /// Forwards the write to whichever backend this destination wraps.
    fn write(&mut self, bytes: &[u8]) -> io::Result<usize> {
        match *self {
            Terminal(ref mut t) => t.write(bytes),
            Raw(ref mut w) => w.write(bytes),
        }
    }
    /// Flushes the wrapped backend.
    fn flush(&mut self) -> io::Result<()> {
        match *self {
            Terminal(ref mut t) => t.flush(),
            Raw(ref mut w) => w.flush(),
        }
    }
}
impl Emitter for EmitterWriter {
    /// Renders the diagnostic. Real spans are shown as full source
    /// snippets; the synthetic `COMMAND_LINE_SP` span is cited as a bare
    /// file-line (there is no source text to quote for command-line
    /// options). A rendering failure is unrecoverable and panics.
    fn emit(&mut self,
            cmsp: Option<(&codemap::CodeMap, Span)>,
            msg: &str, code: Option<&str>, lvl: Level) {
        let error = match cmsp {
            Some((cm, COMMAND_LINE_SP)) => emit(self, cm,
                                                FileLine(COMMAND_LINE_SP),
                                                msg, code, lvl),
            Some((cm, sp)) => emit(self, cm, FullSpan(sp), msg, code, lvl),
            None => print_diagnostic(self, "", lvl, msg, code),
        };

        match error {
            Ok(()) => {}
            Err(e) => panic!("failed to print diagnostics: {:?}", e),
        }
    }

    /// Renders the diagnostic with the caller-chosen span style; a
    /// rendering failure panics, as above.
    fn custom_emit(&mut self, cm: &codemap::CodeMap,
                   sp: RenderSpan, msg: &str, lvl: Level) {
        match emit(self, cm, sp, msg, None, lvl) {
            Ok(()) => {}
            Err(e) => panic!("failed to print diagnostics: {:?}", e),
        }
    }
}
/// Shared rendering path: prints the `file:line` header and message, then
/// the span-specific source snippet and macro-expansion backtrace, and
/// finally a `--explain` hint when `code` is found in the registry.
fn emit(dst: &mut EmitterWriter, cm: &codemap::CodeMap, rsp: RenderSpan,
        msg: &str, code: Option<&str>, lvl: Level) -> io::Result<()> {
    let sp = rsp.span();

    // We cannot check equality directly with COMMAND_LINE_SP
    // since PartialEq is manually implemented to ignore the ExpnId
    let ss = if sp.expn_id == COMMAND_LINE_EXPN {
        "<command line option>".to_string()
    } else if let EndSpan(_) = rsp {
        // For EndSpan we cite the end of the span: collapse it to an
        // empty span at `hi` before stringifying.
        let span_end = Span { lo: sp.hi, hi: sp.hi, expn_id: sp.expn_id};
        cm.span_to_string(span_end)
    } else {
        cm.span_to_string(sp)
    };

    try!(print_diagnostic(dst, &ss[..], lvl, msg, code));

    match rsp {
        FullSpan(_) => {
            try!(highlight_lines(dst, cm, sp, lvl, cm.span_to_lines(sp)));
            try!(print_macro_backtrace(dst, cm, sp));
        }
        EndSpan(_) => {
            try!(end_highlight_lines(dst, cm, sp, lvl, cm.span_to_lines(sp)));
            try!(print_macro_backtrace(dst, cm, sp));
        }
        Suggestion(_, ref suggestion) => {
            try!(highlight_suggestion(dst, cm, sp, suggestion));
            try!(print_macro_backtrace(dst, cm, sp));
        }
        FileLine(..) => {
            // no source text in this case!
        }
    }

    match code {
        Some(code) =>
            match dst.registry.as_ref().and_then(|registry| registry.find_description(code)) {
                Some(_) => {
                    try!(print_diagnostic(dst, &ss[..], Help,
                                          &format!("pass `--explain {}` to see a detailed \
                                                    explanation", code), None));
                }
                None => ()
            },
        None => (),
    }
    Ok(())
}
/// Renders a `Suggestion`: splices `suggestion` into the source lines in
/// place of the code covered by `sp`, printing up to `MAX_LINES` lines
/// (with an ellipsis if more were elided).
fn highlight_suggestion(err: &mut EmitterWriter,
                        cm: &codemap::CodeMap,
                        sp: Span,
                        suggestion: &str)
                        -> io::Result<()>
{
    let lines = cm.span_to_lines(sp);
    assert!(!lines.lines.is_empty());

    // To build up the result, we want to take the snippet from the first
    // line that precedes the span, prepend that with the suggestion, and
    // then append the snippet from the last line that trails the span.
    let fm = &lines.file;

    let first_line = &lines.lines[0];
    let prefix = fm.get_line(first_line.line_index)
                   .map(|l| &l[..first_line.start_col.0])
                   .unwrap_or("");

    let last_line = lines.lines.last().unwrap();
    let suffix = fm.get_line(last_line.line_index)
                   .map(|l| &l[last_line.end_col.0..])
                   .unwrap_or("");

    let complete = format!("{}{}{}", prefix, suggestion, suffix);

    // print the suggestion without any line numbers, but leave
    // space for them. This helps with lining up with previous
    // snippets from the actual error being reported.
    let fm = &*lines.file;
    let mut lines = complete.lines();
    for (line, line_index) in lines.by_ref().take(MAX_LINES).zip(first_line.line_index..) {
        let elided_line_num = format!("{}", line_index+1);
        try!(write!(&mut err.dst, "{0}:{1:2$} {3}\n",
                    fm.name, "", elided_line_num.len(), line));
    }

    // if we elided some lines, add an ellipsis
    if lines.next().is_some() {
        let elided_line_num = format!("{}", first_line.line_index + MAX_LINES + 1);
        try!(write!(&mut err.dst, "{0:1$} {0:2$} ...\n",
                    "", fm.name.len(), elided_line_num.len()));
    }
    Ok(())
}
fn highlight_lines(err: &mut EmitterWriter,
cm: &codemap::CodeMap,
sp: Span,
lvl: Level,
lines: codemap::FileLines)
-> io::Result<()>
{
let fm = &*lines.file;
let line_strings: Option<Vec<&str>> =
lines.lines.iter()
.map(|info| fm.get_line(info.line_index))
.collect();
let line_strings = match line_strings {
None => { return Ok(()); }
Some(line_strings) => line_strings
};
// Display only the first MAX_LINES lines.
let all_lines = lines.lines.len();
let display_lines = cmp::min(all_lines, MAX_LINES);
let display_line_infos = &lines.lines[..display_lines];
let display_line_strings = &line_strings[..display_lines];
// Print the offending lines
for (line_info, line) in display_line_infos.iter().zip(display_line_strings.iter()) {
try!(write!(&mut err.dst, "{}:{} {}\n",
fm.name,
line_info.line_index + 1,
line));
}
// If we elided something, put an ellipsis.
if display_lines < all_lines {
let last_line_index = display_line_infos.last().unwrap().line_index;
let s = format!("{}:{} ", fm.name, last_line_index + 1);
try!(write!(&mut err.dst, "{0:1$}...\n", "", s.len()));
}
// FIXME (#3260)
// If there's one line at fault we can easily point to the problem
if lines.lines.len() == 1 {
let lo = cm.lookup_char_pos(sp.lo);
let mut digits = 0;
let mut num = (lines.lines[0].line_index + 1) / 10;
// how many digits must be indent past?
while num > 0 { num /= 10; digits += 1; }
let mut s = String::new();
// Skip is the number of characters we need to skip because they are
// part of the 'filename:line ' part of the previous line.
let skip = fm.name.width(false) + digits + 3;
for _ in 0..skip {
s.push(' ');
}
if let Some(orig) = fm.get_line(lines.lines[0].line_index) {
let mut col = skip;
let mut lastc = ' ';
let mut iter = orig.chars().enumerate();
for (pos, ch) in iter.by_ref() {
lastc = ch;
if pos >= lo.col.to_usize() { break; }
// Whenever a tab occurs on the previous line, we insert one on
// the error-point-squiggly-line as well (instead of a space).
// That way the squiggly line will usually appear in the correct
// position.
match ch {
'\t' => {
col += 8 - col%8;
s.push('\t');
},
c => for _ in 0..c.width(false).unwrap_or(0) {
col += 1;
s.push(' ');
},
}
}
try!(write!(&mut err.dst, "{}", s));
let mut s = String::from_str("^");
let count = match lastc {
// Most terminals have a tab stop every eight columns by default
'\t' => 8 - col%8,
_ => lastc.width(false).unwrap_or(0),
};
col += count;
s.extend(::std::iter::repeat('~').take(count));
let hi = cm.lookup_char_pos(sp.hi);<|fim▁hole|> if pos >= hi.col.to_usize() { break; }
let count = match ch {
'\t' => 8 - col%8,
_ => ch.width(false).unwrap_or(0),
};
col += count;
s.extend(::std::iter::repeat('~').take(count));
}
}
if s.len() > 1 {
// One extra squiggly is replaced by a "^"
s.pop();
}
try!(print_maybe_styled(err,
&format!("{}\n", s),
term::attr::ForegroundColor(lvl.color())));
}
}
Ok(())
}
/// Here are the differences between this and the normal `highlight_lines`:
/// `end_highlight_lines` will always put arrow on the last byte of the
/// span (instead of the first byte). Also, when the span is too long (more
/// than 6 lines), `end_highlight_lines` will print the first line, then
/// dot dot dot, then last line, whereas `highlight_lines` prints the first
/// six lines.
fn end_highlight_lines(w: &mut EmitterWriter,
                       cm: &codemap::CodeMap,
                       sp: Span,
                       lvl: Level,
                       lines: codemap::FileLines)
                       -> io::Result<()> {
    let fm = &*lines.file;

    let lines = &lines.lines[..];
    if lines.len() > MAX_LINES {
        // Long span: first line, "...", last line.
        if let Some(line) = fm.get_line(lines[0].line_index) {
            try!(write!(&mut w.dst, "{}:{} {}\n", fm.name,
                        lines[0].line_index + 1, line));
        }
        try!(write!(&mut w.dst, "...\n"));
        let last_line_index = lines[lines.len() - 1].line_index;
        if let Some(last_line) = fm.get_line(last_line_index) {
            try!(write!(&mut w.dst, "{}:{} {}\n", fm.name,
                        last_line_index + 1, last_line));
        }
    } else {
        for line_info in lines {
            if let Some(line) = fm.get_line(line_info.line_index) {
                try!(write!(&mut w.dst, "{}:{} {}\n", fm.name,
                            line_info.line_index + 1, line));
            }
        }
    }
    // Build the arrow line: pad past the "file:line " prefix, then past
    // the source text up to the end of the span, and place a single '^'.
    let last_line_start = format!("{}:{} ", fm.name, lines[lines.len()-1].line_index + 1);
    let hi = cm.lookup_char_pos(sp.hi);
    let skip = last_line_start.width(false);
    let mut s = String::new();
    for _ in 0..skip {
        s.push(' ');
    }
    if let Some(orig) = fm.get_line(lines[0].line_index) {
        let iter = orig.chars().enumerate();
        for (pos, ch) in iter {
            // Span seems to use half-opened interval, so subtract 1
            if pos >= hi.col.to_usize() - 1 { break; }
            // Whenever a tab occurs on the previous line, we insert one on
            // the error-point-squiggly-line as well (instead of a space).
            // That way the squiggly line will usually appear in the correct
            // position.
            match ch {
                '\t' => s.push('\t'),
                c => for _ in 0..c.width(false).unwrap_or(0) {
                    s.push(' ');
                },
            }
        }
    }
    s.push('^');
    s.push('\n');
    print_maybe_styled(w,
                       &s[..],
                       term::attr::ForegroundColor(lvl.color()))
}
/// Walks the macro-expansion chain of `sp`, printing an "in expansion of
/// ..." / "expansion site" note pair for each level, then recursing on
/// the call site until no expansion info remains.
fn print_macro_backtrace(w: &mut EmitterWriter,
                         cm: &codemap::CodeMap,
                         sp: Span)
                         -> io::Result<()> {
    let cs = try!(cm.with_expn_info(sp.expn_id, |expn_info| -> io::Result<_> {
        match expn_info {
            Some(ei) => {
                let ss = ei.callee.span.map_or(String::new(),
                                               |span| cm.span_to_string(span));
                // Decorate the macro name to match how it was invoked:
                // attribute macros as `#[name]`, bang macros as `name!`.
                let (pre, post) = match ei.callee.format {
                    codemap::MacroAttribute => ("#[", "]"),
                    codemap::MacroBang => ("", "!")
                };
                try!(print_diagnostic(w, &ss, Note,
                                      &format!("in expansion of {}{}{}", pre,
                                               ei.callee.name,
                                               post), None));
                let ss = cm.span_to_string(ei.call_site);
                try!(print_diagnostic(w, &ss, Note, "expansion site", None));
                Ok(Some(ei.call_site))
            }
            None => Ok(None)
        }
    }));
    cs.map_or(Ok(()), |call_site| print_macro_backtrace(w, cm, call_site))
}
pub fn expect<T, M>(diag: &SpanHandler, opt: Option<T>, msg: M) -> T where
M: FnOnce() -> String,
{
match opt {
Some(t) => t,
None => diag.handler().bug(&msg()),
}
}<|fim▁end|> | if hi.col != lo.col {
for (pos, ch) in iter { |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.