prompt
large_stringlengths
70
991k
completion
large_stringlengths
0
1.02k
<|file_name|>textAngular.js<|end_file_name|><|fim▁begin|>/* @license textAngular Author : Austin Anderson License : 2013 MIT Version 1.3.0-17 See README.md or https://github.com/fraywing/textAngular/wiki for requirements and use. */ (function(){ // encapsulate all variables so they don't become global vars "Use Strict"; // IE version detection - http://stackoverflow.com/questions/4169160/javascript-ie-detection-why-not-use-simple-conditional-comments // We need this as IE sometimes plays funny tricks with the contenteditable. // ---------------------------------------------------------- // If you're not in IE (or IE version is less than 5) then: // ie === undefined // If you're in IE (>=5) then you can determine which version: // ie === 7; // IE7 // Thus, to detect IE: // if (ie) {} // And to detect the version: // ie === 6 // IE6 // ie > 7 // IE8, IE9, IE10 ... // ie < 9 // Anything less than IE9 // ---------------------------------------------------------- /* istanbul ignore next: untestable browser check */ var _browserDetect = { ie: (function(){ var undef, v = 3, div = document.createElement('div'), all = div.getElementsByTagName('i'); while ( div.innerHTML = '<!--[if gt IE ' + (++v) + ']><i></i><![endif]-->', all[0] ); return v > 4 ? 
v : undef; }()), webkit: /AppleWebKit\/([\d.]+)/i.test(navigator.userAgent) }; // fix a webkit bug, see: https://gist.github.com/shimondoodkin/1081133 // this is set true when a blur occurs as the blur of the ta-bind triggers before the click var globalContentEditableBlur = false; /* istanbul ignore next: Browser Un-Focus fix for webkit */ if(_browserDetect.webkit) { document.addEventListener("mousedown", function(_event){ var e = _event || window.event; var curelement = e.target; if(globalContentEditableBlur && curelement !== null){ var isEditable = false; var tempEl = curelement; while(tempEl !== null && tempEl.tagName.toLowerCase() !== 'html' && !isEditable){ isEditable = tempEl.contentEditable === 'true'; tempEl = tempEl.parentNode; } if(!isEditable){ document.getElementById('textAngular-editableFix-010203040506070809').setSelectionRange(0, 0); // set caret focus to an element that handles caret focus correctly. curelement.focus(); // focus the wanted element. if (curelement.select) { curelement.select(); // use select to place cursor for input elements. } } } globalContentEditableBlur = false; }, false); // add global click handler angular.element(document).ready(function () { angular.element(document.body).append(angular.element('<input id="textAngular-editableFix-010203040506070809" style="width:1px;height:1px;border:none;margin:0;padding:0;position:absolute; top: -10000px; left: -10000px;" unselectable="on" tabIndex="-1">')); }); } // Gloabl to textAngular REGEXP vars for block and list elements. 
var BLOCKELEMENTS = /^(address|article|aside|audio|blockquote|canvas|dd|div|dl|fieldset|figcaption|figure|footer|form|h1|h2|h3|h4|h5|h6|header|hgroup|hr|noscript|ol|output|p|pre|section|table|tfoot|ul|video)$/ig; var LISTELEMENTS = /^(ul|li|ol)$/ig; var VALIDELEMENTS = /^(address|article|aside|audio|blockquote|canvas|dd|div|dl|fieldset|figcaption|figure|footer|form|h1|h2|h3|h4|h5|h6|header|hgroup|hr|noscript|ol|output|p|pre|section|table|tfoot|ul|video|li)$/ig; // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/Trim#Compatibility /* istanbul ignore next: trim shim for older browsers */ if (!String.prototype.trim) { String.prototype.trim = function () { return this.replace(/^\s+|\s+$/g, ''); }; } // tests against the current jqLite/jquery implementation if this can be an element function validElementString(string){ try{ return angular.element(string).length !== 0; }catch(any){ return false; } } /* Custom stylesheet for the placeholders rules. Credit to: http://davidwalsh.name/add-rules-stylesheets */ var sheet, addCSSRule, removeCSSRule, _addCSSRule, _removeCSSRule; /* istanbul ignore else: IE <8 test*/ if(_browserDetect.ie > 8 || _browserDetect.ie === undefined){ var _sheets = document.styleSheets; /* istanbul ignore next: preference for stylesheet loaded externally */ for(var i = 0; i < _sheets.length; i++){ if(_sheets[i].media.length === 0 || _sheets[i].media.mediaText.match(/(all|screen)/ig)){ if(_sheets[i].href){ if(_sheets[i].href.match(/textangular\.(min\.|)css/ig)){ sheet = _sheets[i]; break; } } } } /* istanbul ignore next: preference for stylesheet loaded externally */ if(!sheet){ // this sheet is used for the placeholders later on. 
sheet = (function() { // Create the <style> tag var style = document.createElement("style"); /* istanbul ignore else : WebKit hack :( */ if(_browserDetect.webkit) style.appendChild(document.createTextNode("")); // Add the <style> element to the page, add as first so the styles can be overridden by custom stylesheets document.head.appendChild(style); return style.sheet; })(); } // use as: addCSSRule("header", "float: left"); addCSSRule = function(selector, rules) { return _addCSSRule(sheet, selector, rules); }; _addCSSRule = function(_sheet, selector, rules){ var insertIndex; // This order is important as IE 11 has both cssRules and rules but they have different lengths - cssRules is correct, rules gives an error in IE 11 /* istanbul ignore else: firefox catch */ if(_sheet.cssRules) insertIndex = Math.max(_sheet.cssRules.length - 1, 0); else if(_sheet.rules) insertIndex = Math.max(_sheet.rules.length - 1, 0); /* istanbul ignore else: untestable IE option */ if(_sheet.insertRule) { _sheet.insertRule(selector + "{" + rules + "}", insertIndex); } else { _sheet.addRule(selector, rules, insertIndex); } // return the index of the stylesheet rule return insertIndex; }; removeCSSRule = function(index){ _removeCSSRule(sheet, index); }; _removeCSSRule = function(sheet, index){ /* istanbul ignore else: untestable IE option */ if(sheet.removeRule){ sheet.removeRule(index); }else{ sheet.deleteRule(index); } }; } // recursive function that returns an array of angular.elements that have the passed attribute set on them function getByAttribute(element, attribute){ var resultingElements = []; var childNodes = element.children(); if(childNodes.length){ angular.forEach(childNodes, function(child){ resultingElements = resultingElements.concat(getByAttribute(angular.element(child), attribute)); }); } if(element.attr(attribute) !== undefined) resultingElements.push(element); return resultingElements; } angular.module('textAngular.factories', []) .factory('taBrowserTag', [function(){ 
return function(tag){ /* istanbul ignore next: ie specific test */ if(!tag) return (_browserDetect.ie <= 8)? 'P' : 'p'; else if(tag === '') return (_browserDetect.ie === undefined)? 'div' : (_browserDetect.ie <= 8)? 'P' : 'p'; else return (_browserDetect.ie <= 8)? tag.toUpperCase() : tag; }; }]).factory('taApplyCustomRenderers', ['taCustomRenderers', function(taCustomRenderers){ return function(val){ var element = angular.element('<div></div>'); element[0].innerHTML = val; angular.forEach(taCustomRenderers, function(renderer){ var elements = []; // get elements based on what is defined. If both defined do secondary filter in the forEach after using selector string if(renderer.selector && renderer.selector !== '') elements = element.find(renderer.selector); /* istanbul ignore else: shouldn't fire, if it does we're ignoring everything */ else if(renderer.customAttribute && renderer.customAttribute !== '') elements = getByAttribute(element, renderer.customAttribute); // process elements if any found angular.forEach(elements, function(_element){ _element = angular.element(_element); if(renderer.selector && renderer.selector !== '' && renderer.customAttribute && renderer.customAttribute !== ''){ if(_element.attr(renderer.customAttribute) !== undefined) renderer.renderLogic(_element); } else renderer.renderLogic(_element); }); }); return element[0].innerHTML; }; }]).factory('taFixChrome', function(){ // get whaterever rubbish is inserted in chrome // should be passed an html string, returns an html string var taFixChrome = function(html){ // default wrapper is a span so find all of them var $html = angular.element('<div>' + html + '</div>'); var spans = angular.element($html).find('span'); for(var s = 0; s < spans.length; s++){ var span = angular.element(spans[s]); // chrome specific string that gets inserted into the style attribute, other parts may vary. 
Second part is specific ONLY to hitting backspace in Headers if(span.attr('style') && span.attr('style').match(/line-height: 1.428571429;|color: inherit; line-height: 1.1;/i)){ span.attr('style', span.attr('style').replace(/( |)font-family: inherit;|( |)line-height: 1.428571429;|( |)line-height:1.1;|( |)color: inherit;/ig, '')); if(!span.attr('style') || span.attr('style') === ''){ if(span.next().length > 0 && span.next()[0].tagName === 'BR') span.next().remove(); span.replaceWith(span[0].innerHTML); } } } // regex to replace ONLY offending styles - these can be inserted into various other tags on delete var result = $html[0].innerHTML.replace(/style="[^"]*?(line-height: 1.428571429;|color: inherit; line-height: 1.1;)[^"]*"/ig, ''); // only replace when something has changed, else we get focus problems on inserting lists if(result !== $html[0].innerHTML) $html[0].innerHTML = result; return $html[0].innerHTML; }; return taFixChrome; }).factory('taSanitize', ['$sanitize', function taSanitizeFactory($sanitize){ var convert_infos = [ { property: 'font-weight', values: [ 'bold' ], tag: 'b' }, { property: 'font-style', values: [ 'italic' ], tag: 'i' } ]; function fixChildren( jq_elm ) { var children = jq_elm.children(); if ( !children.length ) { return; } angular.forEach( children, function( child ) { var jq_child = angular.element(child); fixElement( jq_child ); fixChildren( jq_child ); }); } function fixElement( jq_elm ) { var styleString = jq_elm.attr('style'); if ( !styleString ) { return; } angular.forEach( convert_infos, function( convert_info ) { var css_key = convert_info.property; var css_value = jq_elm.css(css_key); if ( convert_info.values.indexOf(css_value) >= 0 && styleString.toLowerCase().indexOf(css_key) >= 0 ) { jq_elm.css( css_key, '' ); var inner_html = jq_elm.html(); var tag = convert_info.tag; inner_html = '<'+tag+'>' + inner_html + '</'+tag+'>'; jq_elm.html( inner_html ); } }); } return function taSanitize(unsafe, oldsafe, ignore){ if ( !ignore ) { 
try { var jq_container = angular.element('<div>' + unsafe + '</div>'); fixElement( jq_container ); fixChildren( jq_container ); unsafe = jq_container.html(); } catch (e) { } } // unsafe and oldsafe should be valid HTML strings // any exceptions (lets say, color for example) should be made here but with great care // setup unsafe element for modification var unsafeElement = angular.element('<div>' + unsafe + '</div>'); // replace all align='...' tags with text-align attributes angular.forEach(getByAttribute(unsafeElement, 'align'), function(element){ element.css('text-align', element.attr('align')); element.removeAttr('align'); }); // get the html string back var safe; unsafe = unsafeElement[0].innerHTML; try { safe = $sanitize(unsafe); // do this afterwards, then the $sanitizer should still throw for bad markup if(ignore) safe = unsafe; } catch (e){ safe = oldsafe || ''; } return safe; }; }]).factory('taToolExecuteAction', ['$q', '$log', function($q, $log){ // this must be called on a toolScope or instance return function(editor){ if(editor !== undefined) this.$editor = function(){ return editor; }; var deferred = $q.defer(), promise = deferred.promise, _editor = this.$editor(); promise['finally'](function(){ _editor.endAction.call(_editor); }); // pass into the action the deferred function and also the function to reload the current selection if rangy available var result; try{ result = this.action(deferred, _editor.startAction()); }catch(exc){ $log.error(exc); } if(result || result === undefined){ // if true or undefined is returned then the action has finished. Otherwise the deferred action will be resolved manually. 
deferred.resolve(); } }; }]); angular.module('textAngular.DOM', ['textAngular.factories']) .factory('taExecCommand', ['taSelection', 'taBrowserTag', '$document', function(taSelection, taBrowserTag, $document){ var listToDefault = function(listElement, defaultWrap){ var $target, i; // if all selected then we should remove the list // grab all li elements and convert to taDefaultWrap tags var children = listElement.find('li'); for(i = children.length - 1; i >= 0; i--){ $target = angular.element('<' + defaultWrap + '>' + children[i].innerHTML + '</' + defaultWrap + '>'); listElement.after($target); } listElement.remove(); taSelection.setSelectionToElementEnd($target[0]); }; var selectLi = function(liElement){ if(/(<br(|\/)>)$/i.test(liElement.innerHTML.trim())) taSelection.setSelectionBeforeElement(angular.element(liElement).find("br")[0]); else taSelection.setSelectionToElementEnd(liElement); }; var listToList = function(listElement, newListTag){ var $target = angular.element('<' + newListTag + '>' + listElement[0].innerHTML + '</' + newListTag + '>'); listElement.after($target); listElement.remove(); selectLi($target.find('li')[0]); }; var childElementsToList = function(elements, listElement, newListTag){ var html = ''; for(var i = 0; i < elements.length; i++){ html += '<' + taBrowserTag('li') + '>' + elements[i].innerHTML + '</' + taBrowserTag('li') + '>'; } var $target = angular.element('<' + newListTag + '>' + html + '</' + newListTag + '>'); listElement.after($target); listElement.remove(); selectLi($target.find('li')[0]); }; return function(taDefaultWrap, topNode){ taDefaultWrap = taBrowserTag(taDefaultWrap); return function(command, showUI, options){ var i, $target, html, _nodes, next, optionsTagName, selectedElement; var defaultWrapper = angular.element('<' + taDefaultWrap + '>'); try{ selectedElement = taSelection.getSelectionElement(); }catch(e){} var $selected = angular.element(selectedElement); if(selectedElement !== undefined){ var tagName = 
selectedElement.tagName.toLowerCase(); if(command.toLowerCase() === 'insertorderedlist' || command.toLowerCase() === 'insertunorderedlist'){ var selfTag = taBrowserTag((command.toLowerCase() === 'insertorderedlist')? 'ol' : 'ul'); if(tagName === selfTag){ // if all selected then we should remove the list // grab all li elements and convert to taDefaultWrap tags return listToDefault($selected, taDefaultWrap); }else if(tagName === 'li' && $selected.parent()[0].tagName.toLowerCase() === selfTag && $selected.parent().children().length === 1){ // catch for the previous statement if only one li exists return listToDefault($selected.parent(), taDefaultWrap); }else if(tagName === 'li' && $selected.parent()[0].tagName.toLowerCase() !== selfTag && $selected.parent().children().length === 1){ // catch for the previous statement if only one li exists return listToList($selected.parent(), selfTag); }else if(tagName.match(BLOCKELEMENTS) && !$selected.hasClass('ta-bind')){ // if it's one of those block elements we have to change the contents // if it's a ol/ul we are changing from one to the other if(tagName === 'ol' || tagName === 'ul'){ return listToList($selected, selfTag); }else{ var childBlockElements = false; angular.forEach($selected.children(), function(elem){ if(elem.tagName.match(BLOCKELEMENTS)) { childBlockElements = true; } }); if(childBlockElements){ return childElementsToList($selected.children(), $selected, selfTag); }else{ return childElementsToList([angular.element('<div>' + selectedElement.innerHTML + '</div>')[0]], $selected, selfTag); } } }else if(tagName.match(BLOCKELEMENTS)){ // if we get here then all the contents of the ta-bind are selected _nodes = taSelection.getOnlySelectedElements(); if(_nodes.length === 0){ // here is if there is only text in ta-bind ie <div ta-bind>test content</div> $target = angular.element('<' + selfTag + '><li>' + selectedElement.innerHTML + '</li></' + selfTag + '>'); $selected.html(''); $selected.append($target); }else 
if(_nodes.length === 1 && (_nodes[0].tagName.toLowerCase() === 'ol' || _nodes[0].tagName.toLowerCase() === 'ul')){ if(_nodes[0].tagName.toLowerCase() === selfTag){ // remove return listToDefault(angular.element(_nodes[0]), taDefaultWrap); }else{ return listToList(angular.element(_nodes[0]), selfTag); } }else{ html = ''; var $nodes = []; for(i = 0; i < _nodes.length; i++){ /* istanbul ignore else: catch for real-world can't make it occur in testing */ if(_nodes[i].nodeType !== 3){ var $n = angular.element(_nodes[i]); /* istanbul ignore if: browser check only, phantomjs doesn't return children nodes but chrome at least does */ if(_nodes[i].tagName.toLowerCase() === 'li') continue; else if(_nodes[i].tagName.toLowerCase() === 'ol' || _nodes[i].tagName.toLowerCase() === 'ul'){ html += $n[0].innerHTML; // if it's a list, add all it's children }else if(_nodes[i].tagName.toLowerCase() === 'span' && (_nodes[i].childNodes[0].tagName.toLowerCase() === 'ol' || _nodes[i].childNodes[0].tagName.toLowerCase() === 'ul')){ html += $n[0].childNodes[0].innerHTML; // if it's a list, add all it's children }else{ html += '<' + taBrowserTag('li') + '>' + $n[0].innerHTML + '</' + taBrowserTag('li') + '>'; } $nodes.unshift($n); } } $target = angular.element('<' + selfTag + '>' + html + '</' + selfTag + '>'); $nodes.pop().replaceWith($target); angular.forEach($nodes, function($node){ $node.remove(); }); } taSelection.setSelectionToElementEnd($target[0]); return; } }else if(command.toLowerCase() === 'formatblock'){ optionsTagName = options.toLowerCase().replace(/[<>]/ig, ''); if(optionsTagName.trim() === 'default') { optionsTagName = taDefaultWrap; options = '<' + taDefaultWrap + '>'; } if(tagName === 'li') $target = $selected.parent(); else $target = $selected; // find the first blockElement while(!$target[0].tagName || !$target[0].tagName.match(BLOCKELEMENTS) && !$target.parent().attr('contenteditable')){ $target = $target.parent(); /* istanbul ignore next */ tagName = ($target[0].tagName 
|| '').toLowerCase(); } if(tagName === optionsTagName){ // $target is wrap element _nodes = $target.children(); var hasBlock = false; for(i = 0; i < _nodes.length; i++){ hasBlock = hasBlock || _nodes[i].tagName.match(BLOCKELEMENTS); } if(hasBlock){ $target.after(_nodes); next = $target.next(); $target.remove(); $target = next; }else{ defaultWrapper.append($target[0].childNodes); $target.after(defaultWrapper); $target.remove(); $target = defaultWrapper; } }else if($target.parent()[0].tagName.toLowerCase() === optionsTagName && !$target.parent().hasClass('ta-bind')){ //unwrap logic for parent var blockElement = $target.parent(); var contents = blockElement.contents(); for(i = 0; i < contents.length; i ++){ /* istanbul ignore next: can't test - some wierd thing with how phantomjs works */ if(blockElement.parent().hasClass('ta-bind') && contents[i].nodeType === 3){ defaultWrapper = angular.element('<' + taDefaultWrap + '>'); defaultWrapper[0].innerHTML = contents[i].outerHTML; contents[i] = defaultWrapper[0]; } blockElement.parent()[0].insertBefore(contents[i], blockElement[0]); } blockElement.remove(); }else if(tagName.match(LISTELEMENTS)){ // wrapping a list element $target.wrap(options); }else{ // default wrap behaviour _nodes = taSelection.getOnlySelectedElements(); if(_nodes.length === 0) _nodes = [$target[0]]; // find the parent block element if any of the nodes are inline or text for(i = 0; i < _nodes.length; i++){ if(_nodes[i].nodeType === 3 || !_nodes[i].tagName.match(BLOCKELEMENTS)){ while(_nodes[i].nodeType === 3 || !_nodes[i].tagName || !_nodes[i].tagName.match(BLOCKELEMENTS)){ _nodes[i] = _nodes[i].parentNode; } } } if(angular.element(_nodes[0]).hasClass('ta-bind')){ $target = angular.element(options); $target[0].innerHTML = _nodes[0].innerHTML; _nodes[0].innerHTML = $target[0].outerHTML; }else if(optionsTagName === 'blockquote'){ // blockquotes wrap other block elements html = ''; for(i = 0; i < _nodes.length; i++){ html += _nodes[i].outerHTML; } $target 
= angular.element(options); $target[0].innerHTML = html; _nodes[0].parentNode.insertBefore($target[0],_nodes[0]); for(i = _nodes.length - 1; i >= 0; i--){ /* istanbul ignore else: */ if(_nodes[i].parentNode) _nodes[i].parentNode.removeChild(_nodes[i]); } } else { // regular block elements replace other block elements for(i = 0; i < _nodes.length; i++){ $target = angular.element(options); $target[0].innerHTML = _nodes[i].innerHTML; _nodes[i].parentNode.insertBefore($target[0],_nodes[i]); _nodes[i].parentNode.removeChild(_nodes[i]); } } } taSelection.setSelectionToElementEnd($target[0]); return; }else if(command.toLowerCase() === 'createlink'){ var _selection = taSelection.getSelection(); if(_selection.collapsed){ // insert text at selection, then select then just let normal exec-command run taSelection.insertHtml('<a href="' + options + '">' + options + '</a>', topNode); return; } }else if(command.toLowerCase() === 'inserthtml'){ taSelection.insertHtml(options, topNode); return; } } try{ $document[0].execCommand(command, showUI, options); }catch(e){} }; }; }]).service('taSelection', ['$window', '$document', /* istanbul ignore next: all browser specifics and PhantomJS dosen't seem to support half of it */ function($window, $document){ // need to dereference the document else the calls don't work correctly var _document = $document[0]; var rangy = $window.rangy; var api = { getSelection: function(){ var range = rangy.getSelection().getRangeAt(0); var container = range.commonAncestorContainer; // Check if the container is a text node and return its parent if so container = container.nodeType === 3 ? 
container.parentNode : container; return { start: { element: range.startContainer, offset: range.startOffset }, end: { element: range.endContainer, offset: range.endOffset }, container: container, collapsed: range.collapsed }; }, getOnlySelectedElements: function(){ var range = rangy.getSelection().getRangeAt(0); var container = range.commonAncestorContainer; // Check if the container is a text node and return its parent if so container = container.nodeType === 3 ? container.parentNode : container; return range.getNodes([1], function(node){ return node.parentNode === container; }); }, // Some basic selection functions getSelectionElement: function () { return api.getSelection().container; }, setSelection: function(el, start, end){ var range = rangy.createRange(); range.setStart(el, start); range.setEnd(el, end); rangy.getSelection().setSingleRange(range); }, setSelectionBeforeElement: function (el){ var range = rangy.createRange(); range.selectNode(el); range.collapse(true); rangy.getSelection().setSingleRange(range); }, setSelectionAfterElement: function (el){ var range = rangy.createRange(); range.selectNode(el); range.collapse(false); rangy.getSelection().setSingleRange(range); }, setSelectionToElementStart: function (el){ var range = rangy.createRange(); range.selectNodeContents(el); range.collapse(true); rangy.getSelection().setSingleRange(range); }, setSelectionToElementEnd: function (el){ var range = rangy.createRange(); range.selectNodeContents(el); range.collapse(false); rangy.getSelection().setSingleRange(range); }, // from http://stackoverflow.com/questions/6690752/insert-html-at-caret-in-a-contenteditable-div // topNode is the contenteditable normally, all manipulation MUST be inside this. 
insertHtml: function(html, topNode){ var parent, secondParent, _childI, nodes, startIndex, startNodes, endNodes, i, lastNode; var element = angular.element("<div>" + html + "</div>"); var range = rangy.getSelection().getRangeAt(0); var frag = _document.createDocumentFragment(); var children = element[0].childNodes; var isInline = true; if(children.length > 0){ // NOTE!! We need to do the following: // check for blockelements - if they exist then we have to split the current element in half (and all others up to the closest block element) and insert all children in-between. // If there are no block elements, or there is a mixture we need to create textNodes for the non wrapped text (we don't want them spans messing up the picture). nodes = []; for(_childI = 0; _childI < children.length; _childI++){ if(!( (children[_childI].nodeName.toLowerCase() === 'p' && children[_childI].innerHTML.trim() === '') || // empty p element (children[_childI].nodeType === 3 && children[_childI].nodeValue.trim() === '') // empty text node )){ isInline = isInline && !BLOCKELEMENTS.test(children[_childI].nodeName); nodes.push(children[_childI]); } } for(var _n = 0; _n < nodes.length; _n++) lastNode = frag.appendChild(nodes[_n]); if(!isInline && range.collapsed && /^(|<br(|\/)>)$/i.test(range.startContainer.innerHTML)) range.selectNode(range.startContainer); }else{ isInline = true; // paste text of some sort lastNode = frag = _document.createTextNode(html); } // Other Edge case - selected data spans multiple blocks. if(isInline){ range.deleteContents(); }else{ // not inline insert if(range.collapsed && range.startContainer !== topNode && range.startContainer.parentNode !== topNode){ // split element into 2 and insert block element in middle if(range.startContainer.nodeType === 3){ // if text node parent = range.startContainer.parentNode; nodes = parent.childNodes; // split the nodes into two lists - before and after, splitting the node with the selection into 2 text nodes. 
startNodes = []; endNodes = []; for(startIndex = 0; startIndex < nodes.length; startIndex++){ startNodes.push(nodes[startIndex]); if(nodes[startIndex] === range.startContainer) break; } endNodes.push(_document.createTextNode(range.startContainer.nodeValue.substring(range.startOffset))); range.startContainer.nodeValue = range.startContainer.nodeValue.substring(0, range.startOffset); for(i = startIndex + 1; i < nodes.length; i++) endNodes.push(nodes[i]); secondParent = parent.cloneNode(); parent.childNodes = startNodes; secondParent.childNodes = endNodes; }else{ parent = range.startContainer; secondParent = parent.cloneNode(); secondParent.innerHTML = parent.innerHTML.substring(range.startOffset); parent.innerHTML = parent.innerHTML.substring(0, range.startOffset); } angular.element(parent).after(secondParent); // put cursor to end of inserted content range.setStartAfter(parent); range.setEndAfter(parent); if(/^(|<br(|\/)>)$/i.test(parent.innerHTML.trim())){ range.setStartBefore(parent); range.setEndBefore(parent); angular.element(parent).remove(); } if(/^(|<br(|\/)>)$/i.test(secondParent.innerHTML.trim())) angular.element(secondParent).remove(); }else{ range.deleteContents(); } } range.insertNode(frag); if(lastNode){ api.setSelectionToElementEnd(lastNode); } } }; return api; }]); angular.module('textAngular.validators', []) .directive('taMaxText', function(){ return { restrict: 'A', require: 'ngModel', link: function(scope, elem, attrs, ctrl){ var max = parseInt(scope.$eval(attrs.taMaxText)); if (isNaN(max)){ throw('Max text must be an integer'); } attrs.$observe('taMaxText', function(value){ max = parseInt(value); if (isNaN(max)){ throw('Max text must be an integer'); } if (ctrl.$dirty){ ctrl.$setViewValue(ctrl.$viewValue); } }); function validator (viewValue){ var source = angular.element('<div/>'); source.html(viewValue); var length = source.text().length; if (length <= max){ ctrl.$setValidity('taMaxText', true); return viewValue; } else{ 
ctrl.$setValidity('taMaxText', false); return undefined; } } ctrl.$parsers.unshift(validator); } }; }).directive('taMinText', function(){ return { restrict: 'A', require: 'ngModel', link: function(scope, elem, attrs, ctrl){ var min = parseInt(scope.$eval(attrs.taMinText)); if (isNaN(min)){ throw('Min text must be an integer'); } attrs.$observe('taMinText', function(value){ min = parseInt(value); if (isNaN(min)){ throw('Min text must be an integer'); } if (ctrl.$dirty){ ctrl.$setViewValue(ctrl.$viewValue); } }); function validator (viewValue){ var source = angular.element('<div/>'); source.html(viewValue); var length = source.text().length; if (!length || length >= min){ ctrl.$setValidity('taMinText', true); return viewValue; } else{ ctrl.$setValidity('taMinText', false); return undefined; } } ctrl.$parsers.unshift(validator); } }; }); angular.module('textAngular.taBind', ['textAngular.factories', 'textAngular.DOM']) .directive('taBind', ['taSanitize', '$timeout', '$window', '$document', 'taFixChrome', 'taBrowserTag', 'taSelection', 'taSelectableElements', 'taApplyCustomRenderers', 'taOptions', function(taSanitize, $timeout, $window, $document, taFixChrome, taBrowserTag, taSelection, taSelectableElements, taApplyCustomRenderers, taOptions){ // Uses for this are textarea or input with ng-model and ta-bind='text' // OR any non-form element with contenteditable="contenteditable" ta-bind="html|text" ng-model return { require: 'ngModel', link: function(scope, element, attrs, ngModel){ // the option to use taBind on an input or textarea is required as it will sanitize all input into it correctly. 
var _isContentEditable = element.attr('contenteditable') !== undefined && element.attr('contenteditable'); var _isInputFriendly = _isContentEditable || element[0].tagName.toLowerCase() === 'textarea' || element[0].tagName.toLowerCase() === 'input'; var _isReadonly = false; var _focussed = false; var _disableSanitizer = attrs.taUnsafeSanitizer || taOptions.disableSanitizer; var _lastKey; var BLOCKED_KEYS = /^(9|19|20|27|33|34|35|36|37|38|39|40|45|112|113|114|115|116|117|118|119|120|121|122|123|144|145)$/i; var UNDO_TRIGGER_KEYS = /^(8|13|32|46|59|61|107|109|186|187|188|189|190|191|192|219|220|221|222)$/i; // spaces, enter, delete, backspace, all punctuation var INLINETAGS_NONBLANK = /<(a|abbr|acronym|bdi|bdo|big|cite|code|del|dfn|img|ins|kbd|label|map|mark|q|ruby|rp|rt|s|samp|time|tt|var)[^>]*>/i; // defaults to the paragraph element, but we need the line-break or it doesn't allow you to type into the empty element // non IE is '<p><br/></p>', ie is '<p></p>' as for once IE gets it correct... var _defaultVal, _defaultTest; // set the default to be a paragraph value if(attrs.taDefaultWrap === undefined) attrs.taDefaultWrap = 'p'; /* istanbul ignore next: ie specific test */ if(attrs.taDefaultWrap === ''){ _defaultVal = ''; _defaultTest = (_browserDetect.ie === undefined)? '<div><br></div>' : (_browserDetect.ie >= 11)? '<p><br></p>' : (_browserDetect.ie <= 8)? '<P>&nbsp;</P>' : '<p>&nbsp;</p>'; }else{ _defaultVal = (_browserDetect.ie === undefined || _browserDetect.ie >= 11)? '<' + attrs.taDefaultWrap + '><br></' + attrs.taDefaultWrap + '>' : (_browserDetect.ie <= 8)? '<' + attrs.taDefaultWrap.toUpperCase() + '></' + attrs.taDefaultWrap.toUpperCase() + '>' : '<' + attrs.taDefaultWrap + '></' + attrs.taDefaultWrap + '>'; _defaultTest = (_browserDetect.ie === undefined || _browserDetect.ie >= 11)? '<' + attrs.taDefaultWrap + '><br></' + attrs.taDefaultWrap + '>' : (_browserDetect.ie <= 8)? 
'<' + attrs.taDefaultWrap.toUpperCase() + '>&nbsp;</' + attrs.taDefaultWrap.toUpperCase() + '>' : '<' + attrs.taDefaultWrap + '>&nbsp;</' + attrs.taDefaultWrap + '>'; } var _blankTest = function(_blankVal){ var _firstTagIndex = _blankVal.indexOf('>'); if(_firstTagIndex === -1) return _blankVal.trim().length === 0; _blankVal = _blankVal.trim().substring(_firstTagIndex, _firstTagIndex + 100); // this regex is to match any number of whitespace only between two tags if (_blankVal.length === 0 || _blankVal === _defaultTest || /^>(\s|&nbsp;)*<\/[^>]+>$/ig.test(_blankVal)) return true; // this regex tests if there is a tag followed by some optional whitespace and some text after that else if (/>\s*[^\s<]/i.test(_blankVal) || INLINETAGS_NONBLANK.test(_blankVal)) return false; else return true; }; element.addClass('ta-bind'); var _undoKeyupTimeout; scope['$undoManager' + (attrs.id || '')] = ngModel.$undoManager = { _stack: [], _index: 0, _max: 1000, push: function(value){ if((typeof value === "undefined" || value === null) || ((typeof this.current() !== "undefined" && this.current() !== null) && value === this.current())) return value; if(this._index < this._stack.length - 1){ this._stack = this._stack.slice(0,this._index+1); } this._stack.push(value); if(_undoKeyupTimeout) $timeout.cancel(_undoKeyupTimeout); if(this._stack.length > this._max) this._stack.shift(); this._index = this._stack.length - 1; return value; }, undo: function(){ return this.setToIndex(this._index-1); }, redo: function(){ return this.setToIndex(this._index+1); }, setToIndex: function(index){ if(index < 0 || index > this._stack.length - 1){ return undefined; } this._index = index; return this.current(); }, current: function(){ return this._stack[this._index]; } }; var _undo = scope['$undoTaBind' + (attrs.id || '')] = function(){ /* istanbul ignore else: can't really test it due to all changes being ignored as well in readonly */ if(!_isReadonly && _isContentEditable){ var content = 
ngModel.$undoManager.undo(); if(typeof content !== "undefined" && content !== null){ _setInnerHTML(content); _setViewValue(content, false); /* istanbul ignore else: browser catch */ if(element[0].childNodes.length) taSelection.setSelectionToElementEnd(element[0].childNodes[element[0].childNodes.length-1]); else taSelection.setSelectionToElementEnd(element[0]); } } }; var _redo = scope['$redoTaBind' + (attrs.id || '')] = function(){ /* istanbul ignore else: can't really test it due to all changes being ignored as well in readonly */ if(!_isReadonly && _isContentEditable){ var content = ngModel.$undoManager.redo(); if(typeof content !== "undefined" && content !== null){ _setInnerHTML(content); _setViewValue(content, false); /* istanbul ignore else: browser catch */ if(element[0].childNodes.length) taSelection.setSelectionToElementEnd(element[0].childNodes[element[0].childNodes.length-1]); else taSelection.setSelectionToElementEnd(element[0]); } } }; // in here we are undoing the converts used elsewhere to prevent the < > and & being displayed when they shouldn't in the code. 
var _compileHtml = function(){ if(_isContentEditable) return element[0].innerHTML; if(_isInputFriendly) return element.val(); throw ('textAngular Error: attempting to update non-editable taBind'); }; var _setViewValue = function(_val, triggerUndo){ if(typeof triggerUndo === "undefined" || triggerUndo === null) triggerUndo = true && _isContentEditable; // if not contentEditable then the native undo/redo is fine if(typeof _val === "undefined" || _val === null) _val = _compileHtml(); if(_blankTest(_val)){ // this avoids us from tripping the ng-pristine flag if we click in and out with out typing if(ngModel.$viewValue !== '') ngModel.$setViewValue(''); if(triggerUndo && ngModel.$undoManager.current() !== '') ngModel.$undoManager.push(''); }else{ _reApplyOnSelectorHandlers(); if(ngModel.$viewValue !== _val){ ngModel.$setViewValue(_val); if(triggerUndo) ngModel.$undoManager.push(_val); } } }; //used for updating when inserting wrapped elements scope['updateTaBind' + (attrs.id || '')] = function(){ if(!_isReadonly) _setViewValue(); }; //this code is used to update the models when data is entered/deleted if(_isInputFriendly){ if(!_isContentEditable){ // if a textarea or input just add in change and blur handlers, everything else is done by angulars input directive element.on('change blur', function(){ if(!_isReadonly) ngModel.$setViewValue(_compileHtml()); }); }else{ // all the code specific to contenteditable divs var waitforpastedata = function(savedcontent, _savedSelection, cb) { if (element[0].childNodes && element[0].childNodes.length > 0) { cb(savedcontent, _savedSelection); } else { that = { s: savedcontent, _: _savedSelection, cb: cb }; that.callself = function () { waitforpastedata(that.s, that._, that.cb); }; setTimeout(that.callself, 5); } }; var _processingPaste = false; /* istanbul ignore next: phantom js cannot test this for some reason */ var processpaste = function(savedcontent, _savedSelection) { text = element[0].innerHTML; element[0].innerHTML = 
savedcontent; // restore selection $window.rangy.restoreSelection(_savedSelection); /* istanbul ignore else: don't care if nothing pasted */ if(text.trim().length){ // test paste from word/microsoft product if(text.match(/class=["']*Mso(Normal|List)/i)){ var textFragment = text.match(/<!--StartFragment-->([\s\S]*?)<!--EndFragment-->/i); if(!textFragment) textFragment = text; else textFragment = textFragment[1]; textFragment = textFragment.replace(/<o:p>[\s\S]*?<\/o:p>/ig, '').replace(/class=(["']|)MsoNormal(["']|)/ig, ''); var dom = angular.element("<div>" + textFragment + "</div>"); var targetDom = angular.element("<div></div>"); var _list = { element: null, lastIndent: [], lastLi: null, isUl: false }; _list.lastIndent.peek = function(){ var n = this.length; if (n>0) return this[n-1]; }; var _resetList = function(isUl){ _list.isUl = isUl; _list.element = angular.element(isUl ? "<ul>" : "<ol>"); _list.lastIndent = []; _list.lastIndent.peek = function(){ var n = this.length; if (n>0) return this[n-1]; }; _list.lastLevelMatch = null; }; for(var i = 0; i <= dom[0].childNodes.length; i++){ if(!dom[0].childNodes[i] || dom[0].childNodes[i].nodeName === "#text" || dom[0].childNodes[i].tagName.toLowerCase() !== "p") continue; var el = angular.element(dom[0].childNodes[i]); var _listMatch = (el.attr('class') || '').match(/MsoList(Bullet|Number|Paragraph)(CxSp(First|Middle|Last)|)/i); if(_listMatch){ if(el[0].childNodes.length < 2 || el[0].childNodes[1].childNodes.length < 1){ continue; } var isUl = _listMatch[1].toLowerCase() === "bullet" || (_listMatch[1].toLowerCase() !== "number" && !(/^[^0-9a-z<]*[0-9a-z]+[^0-9a-z<>]</i.test(el[0].childNodes[1].innerHTML) || /^[^0-9a-z<]*[0-9a-z]+[^0-9a-z<>]</i.test(el[0].childNodes[1].childNodes[0].innerHTML))); var _indentMatch = (el.attr('style') || '').match(/margin-left:([\-\.0-9]*)/i); var indent = parseFloat((_indentMatch)?_indentMatch[1]:0); var _levelMatch = (el.attr('style') || '').match(/mso-list:l([0-9]+) level([0-9]+) 
lfo[0-9+]($|;)/i); // prefers the mso-list syntax if(_levelMatch && _levelMatch[2]) indent = parseInt(_levelMatch[2]); if ((_levelMatch && (!_list.lastLevelMatch || _levelMatch[1] !== _list.lastLevelMatch[1])) || !_listMatch[3] || _listMatch[3].toLowerCase() === "first" || (_list.lastIndent.peek() === null) || (_list.isUl !== isUl && _list.lastIndent.peek() === indent)) { _resetList(isUl); targetDom.append(_list.element); } else if (_list.lastIndent.peek() != null && _list.lastIndent.peek() < indent){ _list.element = angular.element(isUl ? "<ul>" : "<ol>"); _list.lastLi.append(_list.element); } else if (_list.lastIndent.peek() != null && _list.lastIndent.peek() > indent){ while(_list.lastIndent.peek() != null && _list.lastIndent.peek() > indent){ if(_list.element.parent()[0].tagName.toLowerCase() === 'li'){ _list.element = _list.element.parent(); continue; }else if(/[uo]l/i.test(_list.element.parent()[0].tagName.toLowerCase())){ _list.element = _list.element.parent(); }else{ // else it's it should be a sibling break; } _list.lastIndent.pop(); } _list.isUl = _list.element[0].tagName.toLowerCase() === "ul"; if (isUl !== _list.isUl) { _resetList(isUl); targetDom.append(_list.element); } } _list.lastLevelMatch = _levelMatch; if(indent !== _list.lastIndent.peek()) _list.lastIndent.push(indent); _list.lastLi = angular.element("<li>"); _list.element.append(_list.lastLi); _list.lastLi.html(el.html().replace(/<!(--|)\[if !supportLists\](--|)>[\s\S]*?<!(--|)\[endif\](--|)>/ig, '')); el.remove(); }else{ _resetList(false); targetDom.append(el); } } var _unwrapElement = function(node){ node = angular.element(node); for(var _n = node[0].childNodes.length - 1; _n >= 0; _n--) node.after(node[0].childNodes[_n]); node.remove(); }; angular.forEach(targetDom.find('span'), function(node){ node.removeAttribute('lang'); if(node.attributes.length <= 0) _unwrapElement(node); }); angular.forEach(targetDom.find('font'), _unwrapElement); text = targetDom.html(); }else{ // remove unnecessary 
chrome insert text = text.replace(/<(|\/)meta[^>]*?>/ig, ''); if(text.match(/<[^>]*?(ta-bind)[^>]*?>/)){ // entire text-angular or ta-bind has been pasted, REMOVE AT ONCE!! if(text.match(/<[^>]*?(text-angular)[^>]*?>/)){ var _el = angular.element("<div>" + text + "</div>"); _el.find('textarea').remove(); var binds = getByAttribute(_el, 'ta-bind'); for(var _b = 0; _b < binds.length; _b++){ var _target = binds[_b][0].parentNode.parentNode; for(var _c = 0; _c < binds[_b][0].childNodes.length; _c++){ _target.parentNode.insertBefore(binds[_b][0].childNodes[_c], _target); } _target.parentNode.removeChild(_target); } text = _el.html().replace('<br class="Apple-interchange-newline">', ''); } }else if(text.match(/^<span/)){ // in case of pasting only a span - chrome paste, remove them. THis is just some wierd formatting text = text.replace(/<(|\/)span[^>]*?>/ig, ''); } text = text.replace(/<br class="Apple-interchange-newline"[^>]*?>/ig, ''); } text = taSanitize(text, '', _disableSanitizer); taSelection.insertHtml(text, element[0]); $timeout(function(){ ngModel.$setViewValue(_compileHtml()); _processingPaste = false; element.removeClass('processing-paste'); }, 0); }else{ _processingPaste = false; element.removeClass('processing-paste'); } }; element.on('paste', function(e, eventData){ /* istanbul ignore else: this is for catching the jqLite testing*/ if(eventData) angular.extend(e, eventData); if(_isReadonly || _processingPaste){ e.stopPropagation(); e.preventDefault(); return false; } // Code adapted from http://stackoverflow.com/questions/2176861/javascript-get-clipboard-data-on-paste-event-cross-browser/6804718#6804718 var _savedSelection = $window.rangy.saveSelection(); _processingPaste = true; element.addClass('processing-paste'); var savedcontent = element[0].innerHTML; var clipboardData = (e.originalEvent || e).clipboardData; if (clipboardData && clipboardData.getData) {// Webkit - get data from clipboard, put into editdiv, cleanup, then cancel event var _types = ""; 
for(var _t = 0; _t < clipboardData.types.length; _t++){ _types += " " + clipboardData.types[_t]; } /* istanbul ignore next: browser tests */ if (/text\/html/i.test(_types)) { element[0].innerHTML = clipboardData.getData('text/html'); } else if (/text\/plain/i.test(_types)) { element[0].innerHTML = clipboardData.getData('text/plain'); } else { element[0].innerHTML = ""; } waitforpastedata(savedcontent, _savedSelection, processpaste); e.stopPropagation(); e.preventDefault(); return false; } else {// Everything else - empty editdiv and allow browser to paste content into it, then cleanup element[0].innerHTML = ""; waitforpastedata(savedcontent, _savedSelection, processpaste); return true; } }); element.on('cut', function(e){ // timeout to next is needed as otherwise the paste/cut event has not finished actually changing the display if(!_isReadonly) $timeout(function(){ ngModel.$setViewValue(_compileHtml()); }, 0); else e.preventDefault(); }); element.on('keydown', function(event, eventData){ /* istanbul ignore else: this is for catching the jqLite testing*/ if(eventData) angular.extend(event, eventData); /* istanbul ignore else: readonly check */ if(!_isReadonly){ if(event.metaKey || event.ctrlKey){ // covers ctrl/command + z if((event.keyCode === 90 && !event.shiftKey)){ _undo(); event.preventDefault(); // covers ctrl + y, command + shift + z }else if((event.keyCode === 90 && event.shiftKey) || (event.keyCode === 89 && !event.shiftKey)){ _redo(); event.preventDefault(); } /* istanbul ignore next: difficult to test as can't seem to select */ }else if(event.keyCode === 13 && !event.shiftKey){ var selection = taSelection.getSelectionElement(); if(!selection.tagName.match(VALIDELEMENTS)) return; var _new = angular.element(_defaultVal); if (/^<br(|\/)>$/i.test(selection.innerHTML.trim()) && selection.parentNode.tagName.toLowerCase() === 'blockquote' && !selection.nextSibling) { // if last element in blockquote and element is blank, pull element outside of blockquote. 
$selection = angular.element(selection); var _parent = $selection.parent(); _parent.after(_new); $selection.remove(); if(_parent.children().length === 0) _parent.remove(); taSelection.setSelectionToElementStart(_new[0]); event.preventDefault(); }else if (/^<[^>]+><br(|\/)><\/[^>]+>$/i.test(selection.innerHTML.trim()) && selection.tagName.toLowerCase() === 'blockquote'){ $selection = angular.element(selection); $selection.after(_new); $selection.remove(); taSelection.setSelectionToElementStart(_new[0]); event.preventDefault(); } } } }); element.on('keyup', function(event, eventData){ /* istanbul ignore else: this is for catching the jqLite testing*/ if(eventData) angular.extend(event, eventData); if(_undoKeyupTimeout) $timeout.cancel(_undoKeyupTimeout); if(!_isReadonly && !BLOCKED_KEYS.test(event.keyCode)){ // if enter - insert new taDefaultWrap, if shift+enter insert <br/> if(_defaultVal !== '' && event.keyCode === 13){ if(!event.shiftKey){ // new paragraph, br should be caught correctly var selection = taSelection.getSelectionElement(); while(!selection.tagName.match(VALIDELEMENTS) && selection !== element[0]){ selection = selection.parentNode; } if(selection.tagName.toLowerCase() !== attrs.taDefaultWrap && selection.tagName.toLowerCase() !== 'li' && (selection.innerHTML.trim() === '' || selection.innerHTML.trim() === '<br>')){ var _new = angular.element(_defaultVal); angular.element(selection).replaceWith(_new); taSelection.setSelectionToElementStart(_new[0]); } } } var val = _compileHtml(); if(_defaultVal !== '' && val.trim() === ''){ _setInnerHTML(_defaultVal); taSelection.setSelectionToElementStart(element.children()[0]); } var triggerUndo = _lastKey !== event.keyCode && UNDO_TRIGGER_KEYS.test(event.keyCode); _setViewValue(val, triggerUndo); if(!triggerUndo) _undoKeyupTimeout = $timeout(function(){ ngModel.$undoManager.push(val); }, 250); _lastKey = event.keyCode; } }); element.on('blur', function(){ _focussed = false; /* istanbul ignore else: if readonly 
don't update model */ if(!_isReadonly){ _setViewValue(); } ngModel.$render(); }); // Placeholders not supported on ie 8 and below if(attrs.placeholder && (_browserDetect.ie > 8 || _browserDetect.ie === undefined)){ var ruleIndex; if(attrs.id) ruleIndex = addCSSRule('#' + attrs.id + '.placeholder-text:before', 'content: "' + attrs.placeholder + '"'); else throw('textAngular Error: An unique ID is required for placeholders to work'); scope.$on('$destroy', function(){ removeCSSRule(ruleIndex); }); } element.on('focus', function(){ _focussed = true; ngModel.$render(); }); element.on('mouseup', function(){ var _selection = taSelection.getSelection(); if(_selection.start.element === element[0]) taSelection.setSelectionToElementStart(element.children()[0]); }); // prevent propagation on mousedown in editor, see #206 element.on('mousedown', function(event, eventData){ /* istanbul ignore else: this is for catching the jqLite testing*/ if(eventData) angular.extend(event, eventData); event.stopPropagation(); }); } } // catch DOM XSS via taSanitize // Sanitizing both ways is identical var _sanitize = function(unsafe){ return (ngModel.$oldViewValue = taSanitize(taFixChrome(unsafe), ngModel.$oldViewValue, _disableSanitizer)); }; // trigger the validation calls var _validity = function(value){ if(attrs.required) ngModel.$setValidity('required', !_blankTest(value)); return value; }; // parsers trigger from the above keyup function or any other time that the viewValue is updated and parses it for storage in the ngModel ngModel.$parsers.push(_sanitize); ngModel.$parsers.push(_validity); // because textAngular is bi-directional (which is awesome) we need to also sanitize values going in from the server ngModel.$formatters.push(_sanitize); ngModel.$formatters.push(function(value){ if(_blankTest(value)) return value; var domTest = angular.element("<div>" + value + "</div>"); if(domTest.children().length === 0){ value = "<" + attrs.taDefaultWrap + ">" + value + "</" + 
attrs.taDefaultWrap + ">"; } return value; }); ngModel.$formatters.push(_validity); ngModel.$formatters.push(function(value){ return ngModel.$undoManager.push(value || ''); }); var selectorClickHandler = function(event){ // emit the element-select event, pass the element scope.$emit('ta-element-select', this); event.preventDefault(); return false; }; var fileDropHandler = function(event, eventData){ /* istanbul ignore else: this is for catching the jqLite testing*/ if(eventData) angular.extend(event, eventData); // emit the drop event, pass the element, preventing should be done elsewhere if(!dropFired && !_isReadonly){ dropFired = true; var dataTransfer; if(event.originalEvent) dataTransfer = event.originalEvent.dataTransfer; else dataTransfer = event.dataTransfer; scope.$emit('ta-drop-event', this, event, dataTransfer); $timeout(function(){ dropFired = false; _setViewValue(); }, 100); } }; //used for updating when inserting wrapped elements var _reApplyOnSelectorHandlers = scope['reApplyOnSelectorHandlers' + (attrs.id || '')] = function(){ /* istanbul ignore else */ if(!_isReadonly) angular.forEach(taSelectableElements, function(selector){ // check we don't apply the handler twice element.find(selector) .off('click', selectorClickHandler) .on('click', selectorClickHandler); }); }; var _setInnerHTML = function(newval){ element[0].innerHTML = newval; }; // changes to the model variable from outside the html/text inputs ngModel.$render = function(){ // catch model being null or undefined var val = ngModel.$viewValue || ''; // if the editor isn't focused it needs to be updated, otherwise it's receiving user input if($document[0].activeElement !== element[0]){ // Not focussed if(_isContentEditable){ // WYSIWYG Mode if(attrs.placeholder){ if(val === ''){ // blank if(_focussed) element.removeClass('placeholder-text'); else element.addClass('placeholder-text'); _setInnerHTML(_defaultVal); }else{ // not-blank element.removeClass('placeholder-text'); _setInnerHTML(val); } 
}else{ _setInnerHTML((val === '') ? _defaultVal : val); } // if in WYSIWYG and readOnly we kill the use of links by clicking if(!_isReadonly){ _reApplyOnSelectorHandlers(); element.on('drop', fileDropHandler); }else{ element.off('drop', fileDropHandler); } }else if(element[0].tagName.toLowerCase() !== 'textarea' && element[0].tagName.toLowerCase() !== 'input'){ // make sure the end user can SEE the html code as a display. This is a read-only display element _setInnerHTML(taApplyCustomRenderers(val)); }else{ // only for input and textarea inputs element.val(val); } }else{ /* istanbul ignore else: in other cases we don't care */ if(_isContentEditable){ // element is focussed, test for placeholder element.removeClass('placeholder-text'); } } }; if(attrs.taReadonly){ //set initial value _isReadonly = scope.$eval(attrs.taReadonly); if(_isReadonly){ element.addClass('ta-readonly'); // we changed to readOnly mode (taReadonly='true') if(element[0].tagName.toLowerCase() === 'textarea' || element[0].tagName.toLowerCase() === 'input'){ element.attr('disabled', 'disabled'); } if(element.attr('contenteditable') !== undefined && element.attr('contenteditable')){ element.removeAttr('contenteditable'); } }else{ element.removeClass('ta-readonly'); // we changed to NOT readOnly mode (taReadonly='false') if(element[0].tagName.toLowerCase() === 'textarea' || element[0].tagName.toLowerCase() === 'input'){ element.removeAttr('disabled'); }else if(_isContentEditable){ element.attr('contenteditable', 'true'); } } // taReadonly only has an effect if the taBind element is an input or textarea or has contenteditable='true' on it. 
// Otherwise it is readonly by default scope.$watch(attrs.taReadonly, function(newVal, oldVal){ if(oldVal === newVal) return; if(newVal){ element.addClass('ta-readonly'); // we changed to readOnly mode (taReadonly='true') if(element[0].tagName.toLowerCase() === 'textarea' || element[0].tagName.toLowerCase() === 'input'){ element.attr('disabled', 'disabled'); } if(element.attr('contenteditable') !== undefined && element.attr('contenteditable')){ element.removeAttr('contenteditable'); } // turn ON selector click handlers angular.forEach(taSelectableElements, function(selector){ element.find(selector).on('click', selectorClickHandler); }); element.off('drop', fileDropHandler); }else{ element.removeClass('ta-readonly'); // we changed to NOT readOnly mode (taReadonly='false') if(element[0].tagName.toLowerCase() === 'textarea' || element[0].tagName.toLowerCase() === 'input'){ element.removeAttr('disabled'); }else if(_isContentEditable){ element.attr('contenteditable', 'true'); } // remove the selector click handlers angular.forEach(taSelectableElements, function(selector){ element.find(selector).off('click', selectorClickHandler); }); element.on('drop', fileDropHandler); } _isReadonly = newVal; }); } // Initialise the selectableElements // if in WYSIWYG and readOnly we kill the use of links by clicking if(_isContentEditable && !_isReadonly){ angular.forEach(taSelectableElements, function(selector){ element.find(selector).on('click', selectorClickHandler); }); element.on('drop', fileDropHandler); element.on('blur', function(){ /* istanbul ignore next: webkit fix */ if(_browserDetect.webkit) { // detect webkit globalContentEditableBlur = true; } }); } } }; }]); // this global var is used to prevent multiple fires of the drop event. Needs to be global to the textAngular file. 
var dropFired = false; var textAngular = angular.module("textAngular", ['ngSanitize', 'textAngularSetup', 'textAngular.factories', 'textAngular.DOM', 'textAngular.validators', 'textAngular.taBind']); //This makes ngSanitize required // setup the global contstant functions for setting up the toolbar // all tool definitions var taTools = {}; /* A tool definition is an object with the following key/value parameters: action: [function(deferred, restoreSelection)] a function that is executed on clicking on the button - this will allways be executed using ng-click and will overwrite any ng-click value in the display attribute. The function is passed a deferred object ($q.defer()), if this is wanted to be used `return false;` from the action and manually call `deferred.resolve();` elsewhere to notify the editor that the action has finished. restoreSelection is only defined if the rangy library is included and it can be called as `restoreSelection()` to restore the users selection in the WYSIWYG editor. display: [string]? Optional, an HTML element to be displayed as the button. The `scope` of the button is the tool definition object with some additional functions If set this will cause buttontext and iconclass to be ignored class: [string]? Optional, if set will override the taOptions.classes.toolbarButton class. buttontext: [string]? if this is defined it will replace the contents of the element contained in the `display` element iconclass: [string]? if this is defined an icon (<i>) will be appended to the `display` element with this string as it's class tooltiptext: [string]? Optional, a plain text description of the action, used for the title attribute of the action button in the toolbar by default. activestate: [function(commonElement)]? this function is called on every caret movement, if it returns true then the class taOptions.classes.toolbarButtonActive will be applied to the `display` element, else the class will be removed disabled: [function()]? 
if this function returns true then the tool will have the class taOptions.classes.disabled applied to it, else it will be removed Other functions available on the scope are: name: [string] the name of the tool, this is the first parameter passed into taRegisterTool isDisabled: [function()] returns true if the tool is disabled, false if it isn't displayActiveToolClass: [function(boolean)] returns true if the tool is 'active' in the currently focussed toolbar onElementSelect: [Object] This object contains the following key/value pairs and is used to trigger the ta-element-select event element: [String] an element name, will only trigger the onElementSelect action if the tagName of the element matches this string filter: [function(element)]? an optional filter that returns a boolean, if true it will trigger the onElementSelect. action: [function(event, element, editorScope)] the action that should be executed if the onElementSelect function runs */ // name and toolDefinition to add into the tools available to be added on the toolbar function registerTextAngularTool(name, toolDefinition){ if(!name || name === '' || taTools.hasOwnProperty(name)) throw('textAngular Error: A unique name is required for a Tool Definition'); if( (toolDefinition.display && (toolDefinition.display === '' || !validElementString(toolDefinition.display))) || (!toolDefinition.display && !toolDefinition.buttontext && !toolDefinition.iconclass) ) throw('textAngular Error: Tool Definition for "' + name + '" does not have a valid display/iconclass/buttontext value'); taTools[name] = toolDefinition; } textAngular.constant('taRegisterTool', registerTextAngularTool); textAngular.value('taTools', taTools); textAngular.config([function(){ // clear taTools variable. Just catches testing and any other time that this config may run multiple times... 
angular.forEach(taTools, function(value, key){ delete taTools[key]; }); }]); textAngular.run([function(){ /* istanbul ignore next: not sure how to test this */ // Require Rangy and rangy savedSelection module. if(!window.rangy){ throw("rangy-core.js and rangy-selectionsaverestore.js are required for textAngular to work correctly, rangy-core is not yet loaded."); }else{ window.rangy.init(); if(!window.rangy.saveSelection){ throw("rangy-selectionsaverestore.js is required for textAngular to work correctly."); } } }]); textAngular.directive("textAngular", [ '$compile', '$timeout', 'taOptions', 'taSelection', 'taExecCommand', 'textAngularManager', '$window', '$document', '$animate', '$log', '$q', function($compile, $timeout, taOptions, taSelection, taExecCommand, textAngularManager, $window, $document, $animate, $log, $q){ return { require: '?ngModel', scope: {}, restrict: "EA", link: function(scope, element, attrs, ngModel){ // all these vars should not be accessable outside this directive var _keydown, _keyup, _keypress, _mouseup, _focusin, _focusout, _originalContents, _toolbars, _serial = (attrs.serial) ? attrs.serial : Math.floor(Math.random() * 10000000000000000), _taExecCommand; scope._name = (attrs.name) ? attrs.name : 'textAngularEditor' + _serial; var oneEvent = function(_element, event, action){ $timeout(function(){ // shim the .one till fixed var _func = function(){ _element.off(event, _func); action.apply(this, arguments); }; _element.on(event, _func); }, 100); }; _taExecCommand = taExecCommand(attrs.taDefaultWrap); // get the settings from the defaults and add our specific functions that need to be on the scope angular.extend(scope, angular.copy(taOptions), { // wraps the selection in the provided tag / execCommand function. Should only be called in WYSIWYG mode. 
wrapSelection: function(command, opt, isSelectableElementTool){ if(command.toLowerCase() === "undo"){ scope['$undoTaBindtaTextElement' + _serial](); }else if(command.toLowerCase() === "redo"){ scope['$redoTaBindtaTextElement' + _serial](); }else{ // catch errors like FF erroring when you try to force an undo with nothing done _taExecCommand(command, false, opt);<|fim▁hole|> } // refocus on the shown display element, this fixes a display bug when using :focus styles to outline the box. // You still have focus on the text/html input it just doesn't show up scope.displayElements.text[0].focus(); } }, showHtml: scope.$eval(attrs.taShowHtml) || false }); // setup the options from the optional attributes if(attrs.taFocussedClass) scope.classes.focussed = attrs.taFocussedClass; if(attrs.taTextEditorClass) scope.classes.textEditor = attrs.taTextEditorClass; if(attrs.taHtmlEditorClass) scope.classes.htmlEditor = attrs.taHtmlEditorClass; // optional setup functions if(attrs.taTextEditorSetup) scope.setup.textEditorSetup = scope.$parent.$eval(attrs.taTextEditorSetup); if(attrs.taHtmlEditorSetup) scope.setup.htmlEditorSetup = scope.$parent.$eval(attrs.taHtmlEditorSetup); // optional fileDropHandler function if(attrs.taFileDrop) scope.fileDropHandler = scope.$parent.$eval(attrs.taFileDrop); else scope.fileDropHandler = scope.defaultFileDropHandler; _originalContents = element[0].innerHTML; // clear the original content element[0].innerHTML = ''; // Setup the HTML elements as variable references for use later scope.displayElements = { // we still need the hidden input even with a textarea as the textarea may have invalid/old input in it, // wheras the input will ALLWAYS have the correct value. 
forminput: angular.element("<input type='hidden' tabindex='-1' style='display: none;'>"), html: angular.element("<textarea></textarea>"), text: angular.element("<div></div>"), // other toolbased elements scrollWindow: angular.element("<div class='ta-scroll-window'></div>"), popover: angular.element('<div class="popover fade bottom" style="max-width: none; width: 305px;"></div>'), popoverArrow: angular.element('<div class="arrow"></div>'), popoverContainer: angular.element('<div class="popover-content"></div>'), resize: { overlay: angular.element('<div class="ta-resizer-handle-overlay"></div>'), background: angular.element('<div class="ta-resizer-handle-background"></div>'), anchors: [ angular.element('<div class="ta-resizer-handle-corner ta-resizer-handle-corner-tl"></div>'), angular.element('<div class="ta-resizer-handle-corner ta-resizer-handle-corner-tr"></div>'), angular.element('<div class="ta-resizer-handle-corner ta-resizer-handle-corner-bl"></div>'), angular.element('<div class="ta-resizer-handle-corner ta-resizer-handle-corner-br"></div>') ], info: angular.element('<div class="ta-resizer-handle-info"></div>') } }; // Setup the popover scope.displayElements.popover.append(scope.displayElements.popoverArrow); scope.displayElements.popover.append(scope.displayElements.popoverContainer); scope.displayElements.scrollWindow.append(scope.displayElements.popover); scope.displayElements.popover.on('mousedown', function(e, eventData){ /* istanbul ignore else: this is for catching the jqLite testing*/ if(eventData) angular.extend(e, eventData); // this prevents focusout from firing on the editor when clicking anything in the popover e.preventDefault(); return false; }); // define the popover show and hide functions scope.showPopover = function(_el){ scope.displayElements.popover.css('display', 'block'); scope.reflowPopover(_el); $animate.addClass(scope.displayElements.popover, 'in'); oneEvent($document.find('body'), 'click keyup', function(){scope.hidePopover();}); 
}; scope.reflowPopover = function(_el){ /* istanbul ignore if: catches only if near bottom of editor */ if(scope.displayElements.text[0].offsetHeight - 51 > _el[0].offsetTop){ scope.displayElements.popover.css('top', _el[0].offsetTop + _el[0].offsetHeight + 'px'); scope.displayElements.popover.removeClass('top').addClass('bottom'); }else{ scope.displayElements.popover.css('top', _el[0].offsetTop - 54 + 'px'); scope.displayElements.popover.removeClass('bottom').addClass('top'); } var _maxLeft = scope.displayElements.text[0].offsetWidth - scope.displayElements.popover[0].offsetWidth; var _targetLeft = _el[0].offsetLeft + (_el[0].offsetWidth / 2.0) - (scope.displayElements.popover[0].offsetWidth / 2.0); scope.displayElements.popover.css('left', Math.max(0, Math.min(_maxLeft, _targetLeft)) + 'px'); scope.displayElements.popoverArrow.css('margin-left', (Math.min(_targetLeft, (Math.max(0, _targetLeft - _maxLeft))) - 11) + 'px'); }; scope.hidePopover = function(){ /* istanbul ignore next: dosen't test with mocked animate */ var doneCb = function(){ scope.displayElements.popover.css('display', ''); scope.displayElements.popoverContainer.attr('style', ''); scope.displayElements.popoverContainer.attr('class', 'popover-content'); }; $q.when($animate.removeClass(scope.displayElements.popover, 'in', doneCb)).then(doneCb); }; // setup the resize overlay scope.displayElements.resize.overlay.append(scope.displayElements.resize.background); angular.forEach(scope.displayElements.resize.anchors, function(anchor){ scope.displayElements.resize.overlay.append(anchor);}); scope.displayElements.resize.overlay.append(scope.displayElements.resize.info); scope.displayElements.scrollWindow.append(scope.displayElements.resize.overlay); // define the show and hide events scope.reflowResizeOverlay = function(_el){ _el = angular.element(_el)[0]; scope.displayElements.resize.overlay.css({ 'display': 'block', 'left': _el.offsetLeft - 5 + 'px', 'top': _el.offsetTop - 5 + 'px', 'width': 
_el.offsetWidth + 10 + 'px', 'height': _el.offsetHeight + 10 + 'px' }); scope.displayElements.resize.info.text(_el.offsetWidth + ' x ' + _el.offsetHeight); }; /* istanbul ignore next: pretty sure phantomjs won't test this */ scope.showResizeOverlay = function(_el){ var resizeMouseDown = function(event){ var startPosition = { width: parseInt(_el.attr('width')), height: parseInt(_el.attr('height')), x: event.clientX, y: event.clientY }; if(startPosition.width === undefined) startPosition.width = _el[0].offsetWidth; if(startPosition.height === undefined) startPosition.height = _el[0].offsetHeight; scope.hidePopover(); var ratio = startPosition.height / startPosition.width; var mousemove = function(event){ // calculate new size var pos = { x: Math.max(0, startPosition.width + (event.clientX - startPosition.x)), y: Math.max(0, startPosition.height + (event.clientY - startPosition.y)) }; if(event.shiftKey){ // keep ratio var newRatio = pos.y / pos.x; pos.x = ratio > newRatio ? pos.x : pos.y / ratio; pos.y = ratio > newRatio ? 
pos.x * ratio : pos.y; } el = angular.element(_el); el.attr('height', Math.max(0, pos.y)); el.attr('width', Math.max(0, pos.x)); // reflow the popover tooltip scope.reflowResizeOverlay(_el); }; $document.find('body').on('mousemove', mousemove); oneEvent($document.find('body'), 'mouseup', function(event){ event.preventDefault(); event.stopPropagation(); $document.find('body').off('mousemove', mousemove); scope.showPopover(_el); }); event.stopPropagation(); event.preventDefault(); }; scope.displayElements.resize.anchors[3].on('mousedown', resizeMouseDown); scope.reflowResizeOverlay(_el); oneEvent($document.find('body'), 'click', function(){scope.hideResizeOverlay();}); }; /* istanbul ignore next: pretty sure phantomjs won't test this */ scope.hideResizeOverlay = function(){ scope.displayElements.resize.overlay.css('display', ''); }; // allow for insertion of custom directives on the textarea and div scope.setup.htmlEditorSetup(scope.displayElements.html); scope.setup.textEditorSetup(scope.displayElements.text); scope.displayElements.html.attr({ 'id': 'taHtmlElement' + _serial, 'ng-show': 'showHtml', 'ta-bind': 'ta-bind', 'ng-model': 'html' }); scope.displayElements.text.attr({ 'id': 'taTextElement' + _serial, 'contentEditable': 'true', 'ta-bind': 'ta-bind', 'ng-model': 'html' }); scope.displayElements.scrollWindow.attr({'ng-hide': 'showHtml'}); if(attrs.taDefaultWrap) scope.displayElements.text.attr('ta-default-wrap', attrs.taDefaultWrap); if(attrs.taUnsafeSanitizer){ scope.displayElements.text.attr('ta-unsafe-sanitizer', attrs.taUnsafeSanitizer); scope.displayElements.html.attr('ta-unsafe-sanitizer', attrs.taUnsafeSanitizer); } // add the main elements to the origional element scope.displayElements.scrollWindow.append(scope.displayElements.text); element.append(scope.displayElements.scrollWindow); element.append(scope.displayElements.html); scope.displayElements.forminput.attr('name', scope._name); element.append(scope.displayElements.forminput); if(attrs.tabindex){ 
element.removeAttr('tabindex'); scope.displayElements.text.attr('tabindex', attrs.tabindex); scope.displayElements.html.attr('tabindex', attrs.tabindex); } if (attrs.placeholder) { scope.displayElements.text.attr('placeholder', attrs.placeholder); scope.displayElements.html.attr('placeholder', attrs.placeholder); } if(attrs.taDisabled){ scope.displayElements.text.attr('ta-readonly', 'disabled'); scope.displayElements.html.attr('ta-readonly', 'disabled'); scope.disabled = scope.$parent.$eval(attrs.taDisabled); scope.$parent.$watch(attrs.taDisabled, function(newVal){ scope.disabled = newVal; if(scope.disabled){ element.addClass(scope.classes.disabled); }else{ element.removeClass(scope.classes.disabled); } }); } // compile the scope with the text and html elements only - if we do this with the main element it causes a compile loop $compile(scope.displayElements.scrollWindow)(scope); $compile(scope.displayElements.html)(scope); scope.updateTaBindtaTextElement = scope['updateTaBindtaTextElement' + _serial]; scope.updateTaBindtaHtmlElement = scope['updateTaBindtaHtmlElement' + _serial]; // add the classes manually last element.addClass("ta-root"); scope.displayElements.scrollWindow.addClass("ta-text ta-editor " + scope.classes.textEditor); scope.displayElements.html.addClass("ta-html ta-editor " + scope.classes.htmlEditor); // used in the toolbar actions scope._actionRunning = false; var _savedSelection = false; scope.startAction = function(){ scope._actionRunning = true; // if rangy library is loaded return a function to reload the current selection _savedSelection = $window.rangy.saveSelection(); return function(){ if(_savedSelection) $window.rangy.restoreSelection(_savedSelection); }; }; scope.endAction = function(){ scope._actionRunning = false; if(_savedSelection) $window.rangy.removeMarkers(_savedSelection); _savedSelection = false; scope.updateSelectedStyles(); // only update if in text or WYSIWYG mode if(!scope.showHtml) scope['updateTaBindtaTextElement' + 
_serial](); }; // note that focusout > focusin is called everytime we click a button - except bad support: http://www.quirksmode.org/dom/events/blurfocus.html // cascades to displayElements.text and displayElements.html automatically. _focusin = function(){ element.addClass(scope.classes.focussed); _toolbars.focus(); }; scope.displayElements.html.on('focus', _focusin); scope.displayElements.text.on('focus', _focusin); _focusout = function(e){ // if we are NOT runnig an action and have NOT focussed again on the text etc then fire the blur events if(!scope._actionRunning && $document[0].activeElement !== scope.displayElements.html[0] && $document[0].activeElement !== scope.displayElements.text[0]){ element.removeClass(scope.classes.focussed); _toolbars.unfocus(); // to prevent multiple apply error defer to next seems to work. $timeout(function(){ element.triggerHandler('blur'); }, 0); } e.preventDefault(); return false; }; scope.displayElements.html.on('blur', _focusout); scope.displayElements.text.on('blur', _focusout); // Setup the default toolbar tools, this way allows the user to add new tools like plugins. // This is on the editor for future proofing if we find a better way to do this. scope.queryFormatBlockState = function(command){ // $document[0].queryCommandValue('formatBlock') errors in Firefox if we call this when focussed on the textarea return !scope.showHtml && command.toLowerCase() === $document[0].queryCommandValue('formatBlock').toLowerCase(); }; scope.queryCommandState = function(command){ // $document[0].queryCommandValue('formatBlock') errors in Firefox if we call this when focussed on the textarea return (!scope.showHtml) ? 
$document[0].queryCommandState(command) : ''; }; scope.switchView = function(){ scope.showHtml = !scope.showHtml; $animate.enabled(false, scope.displayElements.html); $animate.enabled(false, scope.displayElements.text); //Show the HTML view if(scope.showHtml){ //defer until the element is visible $timeout(function(){ $animate.enabled(true, scope.displayElements.html); $animate.enabled(true, scope.displayElements.text); // [0] dereferences the DOM object from the angular.element return scope.displayElements.html[0].focus(); }, 100); }else{ //Show the WYSIWYG view //defer until the element is visible $timeout(function(){ $animate.enabled(true, scope.displayElements.html); $animate.enabled(true, scope.displayElements.text); // [0] dereferences the DOM object from the angular.element return scope.displayElements.text[0].focus(); }, 100); } }; // changes to the model variable from outside the html/text inputs // if no ngModel, then the only input is from inside text-angular if(attrs.ngModel){ var _firstRun = true; ngModel.$render = function(){ if(_firstRun){ // we need this firstRun to set the originalContents otherwise it gets overrided by the setting of ngModel to undefined from NaN _firstRun = false; // if view value is null or undefined initially and there was original content, set to the original content var _initialValue = scope.$parent.$eval(attrs.ngModel); if((_initialValue === undefined || _initialValue === null) && (_originalContents && _originalContents !== '')){ // on passing through to taBind it will be sanitised ngModel.$setViewValue(_originalContents); } } scope.displayElements.forminput.val(ngModel.$viewValue); // if the editors aren't focused they need to be updated, otherwise they are doing the updating /* istanbul ignore else: don't care */ if(!scope._elementSelectTriggered && $document[0].activeElement !== scope.displayElements.html[0] && $document[0].activeElement !== scope.displayElements.text[0]){ // catch model being null or undefined scope.html 
= ngModel.$viewValue || ''; } }; // trigger the validation calls var _validity = function(value){ if(attrs.required) ngModel.$setValidity('required', !(!value || value.trim() === '')); return value; }; ngModel.$parsers.push(_validity); ngModel.$formatters.push(_validity); }else{ // if no ngModel then update from the contents of the origional html. scope.displayElements.forminput.val(_originalContents); scope.html = _originalContents; } // changes from taBind back up to here scope.$watch('html', function(newValue, oldValue){ if(newValue !== oldValue){ if(attrs.ngModel && ngModel.$viewValue !== newValue) ngModel.$setViewValue(newValue); scope.displayElements.forminput.val(newValue); } }); if(attrs.taTargetToolbars) _toolbars = textAngularManager.registerEditor(scope._name, scope, attrs.taTargetToolbars.split(',')); else{ var _toolbar = angular.element('<div text-angular-toolbar name="textAngularToolbar' + _serial + '">'); // passthrough init of toolbar options if(attrs.taToolbar) _toolbar.attr('ta-toolbar', attrs.taToolbar); if(attrs.taToolbarClass) _toolbar.attr('ta-toolbar-class', attrs.taToolbarClass); if(attrs.taToolbarGroupClass) _toolbar.attr('ta-toolbar-group-class', attrs.taToolbarGroupClass); if(attrs.taToolbarButtonClass) _toolbar.attr('ta-toolbar-button-class', attrs.taToolbarButtonClass); if(attrs.taToolbarActiveButtonClass) _toolbar.attr('ta-toolbar-active-button-class', attrs.taToolbarActiveButtonClass); if(attrs.taFocussedClass) _toolbar.attr('ta-focussed-class', attrs.taFocussedClass); element.prepend(_toolbar); $compile(_toolbar)(scope.$parent); _toolbars = textAngularManager.registerEditor(scope._name, scope, ['textAngularToolbar' + _serial]); } scope.$on('$destroy', function(){ textAngularManager.unregisterEditor(scope._name); }); // catch element select event and pass to toolbar tools scope.$on('ta-element-select', function(event, element){ if(_toolbars.triggerElementSelect(event, element)){ scope['reApplyOnSelectorHandlerstaTextElement' + 
_serial](); } }); scope.$on('ta-drop-event', function(event, element, dropEvent, dataTransfer){ scope.displayElements.text[0].focus(); if(dataTransfer && dataTransfer.files && dataTransfer.files.length > 0){ angular.forEach(dataTransfer.files, function(file){ // taking advantage of boolean execution, if the fileDropHandler returns true, nothing else after it is executed // If it is false then execute the defaultFileDropHandler if the fileDropHandler is NOT the default one // Once one of these has been executed wrap the result as a promise, if undefined or variable update the taBind, else we should wait for the promise try{ $q.when(scope.fileDropHandler(file, scope.wrapSelection) || (scope.fileDropHandler !== scope.defaultFileDropHandler && $q.when(scope.defaultFileDropHandler(file, scope.wrapSelection)))).then(function(){ scope['updateTaBindtaTextElement' + _serial](); }); }catch(error){ $log.error(error); } }); dropEvent.preventDefault(); dropEvent.stopPropagation(); /* istanbul ignore else, the updates if moved text */ }else{ $timeout(function(){ scope['updateTaBindtaTextElement' + _serial](); }, 0); } }); // the following is for applying the active states to the tools that support it scope._bUpdateSelectedStyles = false; // loop through all the tools polling their activeState function if it exists scope.updateSelectedStyles = function(){ var _selection; // test if the common element ISN'T the root ta-text node if((_selection = taSelection.getSelectionElement()) !== undefined && _selection.parentNode !== scope.displayElements.text[0]){ _toolbars.updateSelectedStyles(angular.element(_selection)); }else _toolbars.updateSelectedStyles(); // used to update the active state when a key is held down, ie the left arrow /* istanbul ignore else: browser only check */ if(scope._bUpdateSelectedStyles && $document.hasFocus()) $timeout(scope.updateSelectedStyles, 200); else scope._bUpdateSelectedStyles = false; }; // start updating on keydown _keydown = function(){ /* istanbul 
ignore else: don't run if already running */ if(!scope._bUpdateSelectedStyles){ scope._bUpdateSelectedStyles = true; scope.$apply(function(){ scope.updateSelectedStyles(); }); } }; scope.displayElements.html.on('keydown', _keydown); scope.displayElements.text.on('keydown', _keydown); // stop updating on key up and update the display/model _keyup = function(){ scope._bUpdateSelectedStyles = false; }; scope.displayElements.html.on('keyup', _keyup); scope.displayElements.text.on('keyup', _keyup); // stop updating on key up and update the display/model _keypress = function(event, eventData){ /* istanbul ignore else: this is for catching the jqLite testing*/ if(eventData) angular.extend(event, eventData); scope.$apply(function(){ if(_toolbars.sendKeyCommand(event)){ /* istanbul ignore else: don't run if already running */ if(!scope._bUpdateSelectedStyles){ scope.updateSelectedStyles(); } event.preventDefault(); return false; } }); }; scope.displayElements.html.on('keypress', _keypress); scope.displayElements.text.on('keypress', _keypress); // update the toolbar active states when we click somewhere in the text/html boxed _mouseup = function(){ // ensure only one execution of updateSelectedStyles() scope._bUpdateSelectedStyles = false; scope.$apply(function(){ scope.updateSelectedStyles(); }); }; scope.displayElements.html.on('mouseup', _mouseup); scope.displayElements.text.on('mouseup', _mouseup); } }; } ]); textAngular.service('textAngularManager', ['taToolExecuteAction', 'taTools', 'taRegisterTool', function(taToolExecuteAction, taTools, taRegisterTool){ // this service is used to manage all textAngular editors and toolbars. // All publicly published functions that modify/need to access the toolbar or editor scopes should be in here // these contain references to all the editors and toolbars that have been initialised in this app var toolbars = {}, editors = {}; // when we focus into a toolbar, we need to set the TOOLBAR's $parent to be the toolbars it's linked to. 
// We also need to set the tools to be updated to be the toolbars... return { // register an editor and the toolbars that it is affected by registerEditor: function(name, scope, targetToolbars){ // targetToolbars are optional, we don't require a toolbar to function if(!name || name === '') throw('textAngular Error: An editor requires a name'); if(!scope) throw('textAngular Error: An editor requires a scope'); if(editors[name]) throw('textAngular Error: An Editor with name "' + name + '" already exists'); // _toolbars is an ARRAY of toolbar scopes var _toolbars = []; angular.forEach(targetToolbars, function(_name){ if(toolbars[_name]) _toolbars.push(toolbars[_name]); // if it doesn't exist it may not have been compiled yet and it will be added later }); editors[name] = { scope: scope, toolbars: targetToolbars, _registerToolbar: function(toolbarScope){ // add to the list late if(this.toolbars.indexOf(toolbarScope.name) >= 0) _toolbars.push(toolbarScope); }, // this is a suite of functions the editor should use to update all it's linked toolbars editorFunctions: { disable: function(){ // disable all linked toolbars angular.forEach(_toolbars, function(toolbarScope){ toolbarScope.disabled = true; }); }, enable: function(){ // enable all linked toolbars angular.forEach(_toolbars, function(toolbarScope){ toolbarScope.disabled = false; }); }, focus: function(){ // this should be called when the editor is focussed angular.forEach(_toolbars, function(toolbarScope){ toolbarScope._parent = scope; toolbarScope.disabled = false; toolbarScope.focussed = true; }); }, unfocus: function(){ // this should be called when the editor becomes unfocussed angular.forEach(_toolbars, function(toolbarScope){ toolbarScope.disabled = true; toolbarScope.focussed = false; }); }, updateSelectedStyles: function(selectedElement){ // update the active state of all buttons on liked toolbars angular.forEach(_toolbars, function(toolbarScope){ angular.forEach(toolbarScope.tools, function(toolScope){ 
if(toolScope.activeState){ toolbarScope._parent = scope; toolScope.active = toolScope.activeState(selectedElement); } }); }); }, sendKeyCommand: function(event){ // we return true if we applied an action, false otherwise var result = false; if(event.ctrlKey || event.metaKey) angular.forEach(taTools, function(tool, name){ if(tool.commandKeyCode && tool.commandKeyCode === event.which){ for(var _t = 0; _t < _toolbars.length; _t++){ if(_toolbars[_t].tools[name] !== undefined){ taToolExecuteAction.call(_toolbars[_t].tools[name], scope); result = true; break; } } } }); return result; }, triggerElementSelect: function(event, element){ // search through the taTools to see if a match for the tag is made. // if there is, see if the tool is on a registered toolbar and not disabled. // NOTE: This can trigger on MULTIPLE tools simultaneously. var elementHasAttrs = function(_element, attrs){ var result = true; for(var i = 0; i < attrs.length; i++) result = result && _element.attr(attrs[i]); return result; }; var workerTools = []; var unfilteredTools = {}; var result = false; element = angular.element(element); // get all valid tools by element name, keep track if one matches the var onlyWithAttrsFilter = false; angular.forEach(taTools, function(tool, name){ if( tool.onElementSelect && tool.onElementSelect.element && tool.onElementSelect.element.toLowerCase() === element[0].tagName.toLowerCase() && (!tool.onElementSelect.filter || tool.onElementSelect.filter(element)) ){ // this should only end up true if the element matches the only attributes onlyWithAttrsFilter = onlyWithAttrsFilter || (angular.isArray(tool.onElementSelect.onlyWithAttrs) && elementHasAttrs(element, tool.onElementSelect.onlyWithAttrs)); if(!tool.onElementSelect.onlyWithAttrs || elementHasAttrs(element, tool.onElementSelect.onlyWithAttrs)) unfilteredTools[name] = tool; } }); // if we matched attributes to filter on, then filter, else continue if(onlyWithAttrsFilter){ angular.forEach(unfilteredTools, 
function(tool, name){ if(tool.onElementSelect.onlyWithAttrs && elementHasAttrs(element, tool.onElementSelect.onlyWithAttrs)) workerTools.push({'name': name, 'tool': tool}); }); // sort most specific (most attrs to find) first workerTools.sort(function(a,b){ return b.tool.onElementSelect.onlyWithAttrs.length - a.tool.onElementSelect.onlyWithAttrs.length; }); }else{ angular.forEach(unfilteredTools, function(tool, name){ workerTools.push({'name': name, 'tool': tool}); }); } // Run the actions on the first visible filtered tool only if(workerTools.length > 0){ for(var _i = 0; _i < workerTools.length; _i++){ var tool = workerTools[_i].tool; var name = workerTools[_i].name; for(var _t = 0; _t < _toolbars.length; _t++){ if(_toolbars[_t].tools[name] !== undefined){ tool.onElementSelect.action.call(_toolbars[_t].tools[name], event, element, scope); result = true; break; } } if(result) break; } } return result; } } }; return editors[name].editorFunctions; }, // retrieve editor by name, largely used by testing suites only retrieveEditor: function(name){ return editors[name]; }, unregisterEditor: function(name){ delete editors[name]; }, // registers a toolbar such that it can be linked to editors registerToolbar: function(scope){ if(!scope) throw('textAngular Error: A toolbar requires a scope'); if(!scope.name || scope.name === '') throw('textAngular Error: A toolbar requires a name'); if(toolbars[scope.name]) throw('textAngular Error: A toolbar with name "' + scope.name + '" already exists'); toolbars[scope.name] = scope; angular.forEach(editors, function(_editor){ _editor._registerToolbar(scope); }); }, // retrieve toolbar by name, largely used by testing suites only retrieveToolbar: function(name){ return toolbars[name]; }, // retrieve toolbars by editor name, largely used by testing suites only retrieveToolbarsViaEditor: function(name){ var result = [], _this = this; angular.forEach(this.retrieveEditor(name).toolbars, function(name){ 
result.push(_this.retrieveToolbar(name)); }); return result; }, unregisterToolbar: function(name){ delete toolbars[name]; }, // functions for updating the toolbar buttons display updateToolsDisplay: function(newTaTools){ // pass a partial struct of the taTools, this allows us to update the tools on the fly, will not change the defaults. var _this = this; angular.forEach(newTaTools, function(_newTool, key){ _this.updateToolDisplay(key, _newTool); }); }, // this function resets all toolbars to their default tool definitions resetToolsDisplay: function(){ var _this = this; angular.forEach(taTools, function(_newTool, key){ _this.resetToolDisplay(key); }); }, // update a tool on all toolbars updateToolDisplay: function(toolKey, _newTool){ var _this = this; angular.forEach(toolbars, function(toolbarScope, toolbarKey){ _this.updateToolbarToolDisplay(toolbarKey, toolKey, _newTool); }); }, // resets a tool to the default/starting state on all toolbars resetToolDisplay: function(toolKey){ var _this = this; angular.forEach(toolbars, function(toolbarScope, toolbarKey){ _this.resetToolbarToolDisplay(toolbarKey, toolKey); }); }, // update a tool on a specific toolbar updateToolbarToolDisplay: function(toolbarKey, toolKey, _newTool){ if(toolbars[toolbarKey]) toolbars[toolbarKey].updateToolDisplay(toolKey, _newTool); else throw('textAngular Error: No Toolbar with name "' + toolbarKey + '" exists'); }, // reset a tool on a specific toolbar to it's default starting value resetToolbarToolDisplay: function(toolbarKey, toolKey){ if(toolbars[toolbarKey]) toolbars[toolbarKey].updateToolDisplay(toolKey, taTools[toolKey], true); else throw('textAngular Error: No Toolbar with name "' + toolbarKey + '" exists'); }, // removes a tool from all toolbars and it's definition removeTool: function(toolKey){ delete taTools[toolKey]; angular.forEach(toolbars, function(toolbarScope){ delete toolbarScope.tools[toolKey]; for(var i = 0; i < toolbarScope.toolbar.length; i++){ var toolbarIndex; for(var j = 
0; j < toolbarScope.toolbar[i].length; j++){ if(toolbarScope.toolbar[i][j] === toolKey){ toolbarIndex = { group: i, index: j }; break; } if(toolbarIndex !== undefined) break; } if(toolbarIndex !== undefined){ toolbarScope.toolbar[toolbarIndex.group].slice(toolbarIndex.index, 1); toolbarScope._$element.children().eq(toolbarIndex.group).children().eq(toolbarIndex.index).remove(); } } }); }, // toolkey, toolDefinition are required. If group is not specified will pick the last group, if index isnt defined will append to group addTool: function(toolKey, toolDefinition, group, index){ taRegisterTool(toolKey, toolDefinition); angular.forEach(toolbars, function(toolbarScope){ toolbarScope.addTool(toolKey, toolDefinition, group, index); }); }, // adds a Tool but only to one toolbar not all addToolToToolbar: function(toolKey, toolDefinition, toolbarKey, group, index){ taRegisterTool(toolKey, toolDefinition); toolbars[toolbarKey].addTool(toolKey, toolDefinition, group, index); }, // this is used when externally the html of an editor has been changed and textAngular needs to be notified to update the model. 
// this will call a $digest if not already happening refreshEditor: function(name){ if(editors[name]){ editors[name].scope.updateTaBindtaTextElement(); /* istanbul ignore else: phase catch */ if(!editors[name].scope.$$phase) editors[name].scope.$digest(); }else throw('textAngular Error: No Editor with name "' + name + '" exists'); } }; }]); textAngular.directive('textAngularToolbar', [ '$compile', 'textAngularManager', 'taOptions', 'taTools', 'taToolExecuteAction', '$window', function($compile, textAngularManager, taOptions, taTools, taToolExecuteAction, $window){ return { scope: { name: '@' // a name IS required }, restrict: "EA", link: function(scope, element, attrs){ if(!scope.name || scope.name === '') throw('textAngular Error: A toolbar requires a name'); angular.extend(scope, angular.copy(taOptions)); if(attrs.taToolbar) scope.toolbar = scope.$parent.$eval(attrs.taToolbar); if(attrs.taToolbarClass) scope.classes.toolbar = attrs.taToolbarClass; if(attrs.taToolbarGroupClass) scope.classes.toolbarGroup = attrs.taToolbarGroupClass; if(attrs.taToolbarButtonClass) scope.classes.toolbarButton = attrs.taToolbarButtonClass; if(attrs.taToolbarActiveButtonClass) scope.classes.toolbarButtonActive = attrs.taToolbarActiveButtonClass; if(attrs.taFocussedClass) scope.classes.focussed = attrs.taFocussedClass; scope.disabled = true; scope.focussed = false; scope._$element = element; element[0].innerHTML = ''; element.addClass("ta-toolbar " + scope.classes.toolbar); scope.$watch('focussed', function(){ if(scope.focussed) element.addClass(scope.classes.focussed); else element.removeClass(scope.classes.focussed); }); var setupToolElement = function(toolDefinition, toolScope){ var toolElement; if(toolDefinition && toolDefinition.display){ toolElement = angular.element(toolDefinition.display); } else toolElement = angular.element("<button type='button'>"); if(toolDefinition && toolDefinition["class"]) toolElement.addClass(toolDefinition["class"]); else 
toolElement.addClass(scope.classes.toolbarButton); toolElement.attr('name', toolScope.name); // important to not take focus from the main text/html entry toolElement.attr('unselectable', 'on'); toolElement.attr('ng-disabled', 'isDisabled()'); toolElement.attr('tabindex', '-1'); toolElement.attr('ng-click', 'executeAction()'); toolElement.attr('ng-class', 'displayActiveToolClass(active)'); if (toolDefinition && toolDefinition.tooltiptext) { toolElement.attr('title', toolDefinition.tooltiptext); } toolElement.on('mousedown', function(e, eventData){ /* istanbul ignore else: this is for catching the jqLite testing*/ if(eventData) angular.extend(e, eventData); // this prevents focusout from firing on the editor when clicking toolbar buttons e.preventDefault(); return false; }); if(toolDefinition && !toolDefinition.display && !toolScope._display){ // first clear out the current contents if any toolElement[0].innerHTML = ''; // add the buttonText if(toolDefinition.buttontext) toolElement[0].innerHTML = toolDefinition.buttontext; // add the icon to the front of the button if there is content if(toolDefinition.iconclass){ var icon = angular.element('<i>'), content = toolElement[0].innerHTML; icon.addClass(toolDefinition.iconclass); toolElement[0].innerHTML = ''; toolElement.append(icon); if(content && content !== '') toolElement.append('&nbsp;' + content); } } toolScope._lastToolDefinition = angular.copy(toolDefinition); return $compile(toolElement)(toolScope); }; // Keep a reference for updating the active states later scope.tools = {}; // create the tools in the toolbar // default functions and values to prevent errors in testing and on init scope._parent = { disabled: true, showHtml: false, queryFormatBlockState: function(){ return false; }, queryCommandState: function(){ return false; } }; var defaultChildScope = { $window: $window, $editor: function(){ // dynamically gets the editor as it is set return scope._parent; }, isDisabled: function(){ // to set your own 
disabled logic set a function or boolean on the tool called 'disabled' return ( // this bracket is important as without it it just returns the first bracket and ignores the rest // when the button's disabled function/value evaluates to true (typeof this.$eval('disabled') !== 'function' && this.$eval('disabled')) || this.$eval('disabled()') || // all buttons except the HTML Switch button should be disabled in the showHtml (RAW html) mode (this.name !== 'html' && this.$editor().showHtml) || // if the toolbar is disabled this.$parent.disabled || // if the current editor is disabled this.$editor().disabled ); }, displayActiveToolClass: function(active){ return (active)? scope.classes.toolbarButtonActive : ''; }, executeAction: taToolExecuteAction }; angular.forEach(scope.toolbar, function(group){ // setup the toolbar group var groupElement = angular.element("<div>"); groupElement.addClass(scope.classes.toolbarGroup); angular.forEach(group, function(tool){ // init and add the tools to the group // a tool name (key name from taTools struct) //creates a child scope of the main angularText scope and then extends the childScope with the functions of this particular tool // reference to the scope and element kept scope.tools[tool] = angular.extend(scope.$new(true), taTools[tool], defaultChildScope, {name: tool}); scope.tools[tool].$element = setupToolElement(taTools[tool], scope.tools[tool]); // append the tool compiled with the childScope to the group element groupElement.append(scope.tools[tool].$element); }); // append the group to the toolbar element.append(groupElement); }); // update a tool // if a value is set to null, remove from the display // when forceNew is set to true it will ignore all previous settings, used to reset to taTools definition // to reset to defaults pass in taTools[key] as _newTool and forceNew as true, ie `updateToolDisplay(key, taTools[key], true);` scope.updateToolDisplay = function(key, _newTool, forceNew){ var toolInstance = scope.tools[key]; 
if(toolInstance){ // get the last toolDefinition, then override with the new definition if(toolInstance._lastToolDefinition && !forceNew) _newTool = angular.extend({}, toolInstance._lastToolDefinition, _newTool); if(_newTool.buttontext === null && _newTool.iconclass === null && _newTool.display === null) throw('textAngular Error: Tool Definition for updating "' + key + '" does not have a valid display/iconclass/buttontext value'); // if tool is defined on this toolbar, update/redo the tool if(_newTool.buttontext === null){ delete _newTool.buttontext; } if(_newTool.iconclass === null){ delete _newTool.iconclass; } if(_newTool.display === null){ delete _newTool.display; } var toolElement = setupToolElement(_newTool, toolInstance); toolInstance.$element.replaceWith(toolElement); toolInstance.$element = toolElement; } }; // we assume here that all values passed are valid and correct scope.addTool = function(key, _newTool, groupIndex, index){ scope.tools[key] = angular.extend(scope.$new(true), taTools[key], defaultChildScope, {name: key}); scope.tools[key].$element = setupToolElement(taTools[key], scope.tools[key]); var group; if(groupIndex === undefined) groupIndex = scope.toolbar.length - 1; group = angular.element(element.children()[groupIndex]); if(index === undefined){ group.append(scope.tools[key].$element); scope.toolbar[groupIndex][scope.toolbar[groupIndex].length - 1] = key; }else{ group.children().eq(index).after(scope.tools[key].$element); scope.toolbar[groupIndex][index] = key; } }; textAngularManager.registerToolbar(scope); scope.$on('$destroy', function(){ textAngularManager.unregisterToolbar(scope.name); }); } }; } ]);})();<|fim▁end|>
if(isSelectableElementTool){ // re-apply the selectable tool events scope['reApplyOnSelectorHandlerstaTextElement' + _serial]();
<|file_name|>products.js<|end_file_name|><|fim▁begin|>define([], function() { Path.map("#!/products").to(function(){ }).enter(function() { require([ 'tpl!template/products.html', 'tpl!template/username.html', 'tpl!template/product-tpl.html', 'bootstrap', 'bootstrapHover', 'utils' ], function(tpl, userTpl, productTpl) { pageStart(tpl, userTpl);<|fim▁hole|> $('.js-products').empty(); if(products != null){ $.each(products, function(index, value){ $('.js-products').append($(productTpl.apply(value))); }); } else{ $('.js-products').append("<h3>No search results</h3>"); } } setTimeout(render, 100); var done = false; $(document).delegate('.js-form-search', 'submit', function(e){ setTimeout(render, 100); }) .delegate('.js-brands span', 'click', function(e){ var $target = $(e.target), search = $target.attr('data-search'), xhr; xhr = $.ajax({ url: 'api/index.php/products', type: 'POST', data: JSON.stringify({ search: search }) }); xhr .done(function(response){ var products = response.data; localStorage.setItem('products', JSON.stringify(products)); setTimeout(render, 100); }).fail(function(jqXHR, status, error){ var response = JSON.parse(jqXHR.responseText); localStorage.setItem('products', null); }) .always(function(response){ }); }) .delegate('.js-flavors a', 'click', function(e){ var $target = $(e.currentTarget), filter = $target[0].text.toLowerCase(), $ele = $('[data-flavor='+filter+']'); if(filter == 'clear'){ $('.js-product').parent().fadeIn().removeClass('hidden'); }else{ $('.js-product').parent().fadeOut(function(){ $(this).addClass('hidden'); $ele.parent().fadeIn().removeClass('hidden'); }); if(!done){ $ele.parent( ":hidden" ).fadeIn().removeClass('hidden'); done = true; } } }); }); }).exit(function() { // Exit from route $('#main').off().empty(); }); });<|fim▁end|>
function render(){ var products = JSON.parse(localStorage.getItem('products'));
<|file_name|>base.js<|end_file_name|><|fim▁begin|>'use strict';exports.__esModule = true;var _stringify = require('babel-runtime/core-js/json/stringify');var _stringify2 = _interopRequireDefault(_stringify);var _classCallCheck2 = require('babel-runtime/helpers/classCallCheck');var _classCallCheck3 = _interopRequireDefault(_classCallCheck2);var _possibleConstructorReturn2 = require('babel-runtime/helpers/possibleConstructorReturn');var _possibleConstructorReturn3 = _interopRequireDefault(_possibleConstructorReturn2);var _inherits2 = require('babel-runtime/helpers/inherits');var _inherits3 = _interopRequireDefault(_inherits2);function _interopRequireDefault(obj) {return obj && obj.__esModule ? obj : { default: obj };}var _class = function (_think$controller$bas) {(0, _inherits3.default)(_class, _think$controller$bas);function _class() {(0, _classCallCheck3.default)(this, _class);return (0, _possibleConstructorReturn3.default)(this, _think$controller$bas.apply(this, arguments));}<|fim▁hole|> * some base method in here */_class.prototype. get = function get(key) { if (key == undefined) { return this.http._get; } return this.http._get[key]; };_class.prototype. post = function post(key) { if (key == undefined) { return this.http._post; } return this.http._post[key]; };_class.prototype. getCookie = function getCookie(key) { if (key == undefined) { return ''; } return this.http._cookie; };_class.prototype. setCookie = function setCookie(key, val) { if (typeof val !== 'string') { val = (0, _stringify2.default)(val); } return this.http._cookie[key] = val; };_class.prototype. apiErrorHandle = function apiErrorHandle(errno) { var API_ERROR_MSG_TABLE = { // user '101': '用户未登录', '102': '用户密码错误', '111': '密码不一致', // category '3000': 'Category name is empty' }; var msg = API_ERROR_MSG_TABLE[errno] || 'system error'; console.log(msg); this.fail(errno, msg); };return _class;}(think.controller.base);exports.default = _class;<|fim▁end|>
/**
<|file_name|>client.go<|end_file_name|><|fim▁begin|>package docker_helpers import "github.com/fsouza/go-dockerclient" type Client interface { InspectImage(name string) (*docker.Image, error) PullImage(opts docker.PullImageOptions, auth docker.AuthConfiguration) error ImportImage(opts docker.ImportImageOptions) error CreateContainer(opts docker.CreateContainerOptions) (*docker.Container, error) StartContainer(id string, hostConfig *docker.HostConfig) error WaitContainer(id string) (int, error) KillContainer(opts docker.KillContainerOptions) error InspectContainer(id string) (*docker.Container, error) AttachToContainer(opts docker.AttachToContainerOptions) error<|fim▁hole|> Logs(opts docker.LogsOptions) error Info() (*docker.Env, error) }<|fim▁end|>
RemoveContainer(opts docker.RemoveContainerOptions) error
<|file_name|>boxed_value.hpp<|end_file_name|><|fim▁begin|>// This file is distributed under the BSD License. // See "license.txt" for details. // Copyright 2009-2012, Jonathan Turner ([email protected]) // Copyright 2009-2015, Jason Turner ([email protected]) // http://www.chaiscript.com #ifndef CHAISCRIPT_BOXED_VALUE_HPP_ #define CHAISCRIPT_BOXED_VALUE_HPP_ #include <functional> #include <map> #include <memory> #include <type_traits> #include "../chaiscript_threading.hpp" #include "../chaiscript_defines.hpp" #include "any.hpp" #include "type_info.hpp" namespace chaiscript { /// \brief A wrapper for holding any valid C++ type. All types in ChaiScript are Boxed_Value objects /// \sa chaiscript::boxed_cast class Boxed_Value { public: /// used for explicitly creating a "void" object struct Void_Type { }; private: /// structure which holds the internal state of a Boxed_Value /// \todo Get rid of Any and merge it with this, reducing an allocation in the process struct Data { Data(const Type_Info &ti, chaiscript::detail::Any to, bool tr, const void *t_void_ptr) : m_type_info(ti), m_obj(std::move(to)), m_data_ptr(ti.is_const()?nullptr:const_cast<void *>(t_void_ptr)), m_const_data_ptr(t_void_ptr), m_is_ref(tr) { } Data &operator=(const Data &rhs) { m_type_info = rhs.m_type_info;<|fim▁hole|> m_obj = rhs.m_obj; m_is_ref = rhs.m_is_ref; m_data_ptr = rhs.m_data_ptr; m_const_data_ptr = rhs.m_const_data_ptr; if (rhs.m_attrs) { m_attrs = std::unique_ptr<std::map<std::string, Boxed_Value>>(new std::map<std::string, Boxed_Value>(*rhs.m_attrs)); } return *this; } Data(const Data &) = delete; #if !defined(__APPLE__) && (!defined(_MSC_VER) || _MSC_VER != 1800) Data(Data &&) = default; Data &operator=(Data &&rhs) = default; #endif Type_Info m_type_info; chaiscript::detail::Any m_obj; void *m_data_ptr; const void *m_const_data_ptr; std::unique_ptr<std::map<std::string, Boxed_Value>> m_attrs; bool m_is_ref; }; struct Object_Data { static std::shared_ptr<Data> get(Boxed_Value::Void_Type) 
{ return std::make_shared<Data>( detail::Get_Type_Info<void>::get(), chaiscript::detail::Any(), false, nullptr) ; } template<typename T> static std::shared_ptr<Data> get(const std::shared_ptr<T> *obj) { return get(*obj); } template<typename T> static std::shared_ptr<Data> get(const std::shared_ptr<T> &obj) { return std::make_shared<Data>( detail::Get_Type_Info<T>::get(), chaiscript::detail::Any(obj), false, obj.get() ); } template<typename T> static std::shared_ptr<Data> get(std::shared_ptr<T> &&obj) { auto ptr = obj.get(); return std::make_shared<Data>( detail::Get_Type_Info<T>::get(), chaiscript::detail::Any(std::move(obj)), false, ptr ); } template<typename T> static std::shared_ptr<Data> get(T *t) { return get(std::ref(*t)); } template<typename T> static std::shared_ptr<Data> get(const T *t) { return get(std::cref(*t)); } template<typename T> static std::shared_ptr<Data> get(std::reference_wrapper<T> obj) { auto p = &obj.get(); return std::make_shared<Data>( detail::Get_Type_Info<T>::get(), chaiscript::detail::Any(std::move(obj)), true, p ); } template<typename T> static std::shared_ptr<Data> get(T t) { auto p = std::make_shared<T>(std::move(t)); auto ptr = p.get(); return std::make_shared<Data>( detail::Get_Type_Info<T>::get(), chaiscript::detail::Any(std::move(p)), false, ptr ); } static std::shared_ptr<Data> get() { return std::make_shared<Data>( Type_Info(), chaiscript::detail::Any(), false, nullptr ); } }; public: /// Basic Boxed_Value constructor template<typename T, typename = typename std::enable_if<!std::is_same<Boxed_Value, typename std::decay<T>::type>::value>::type> explicit Boxed_Value(T &&t) : m_data(Object_Data::get(std::forward<T>(t))) { } /// Unknown-type constructor Boxed_Value() : m_data(Object_Data::get()) { } #if !defined(_MSC_VER) || _MSC_VER != 1800 Boxed_Value(Boxed_Value&&) = default; Boxed_Value& operator=(Boxed_Value&&) = default; #endif Boxed_Value(const Boxed_Value&) = default; Boxed_Value& operator=(const Boxed_Value&) = default; 
void swap(Boxed_Value &rhs) { std::swap(m_data, rhs.m_data); } /// Copy the values stored in rhs.m_data to m_data. /// m_data pointers are not shared in this case Boxed_Value assign(const Boxed_Value &rhs) { (*m_data) = (*rhs.m_data); return *this; } const Type_Info &get_type_info() const CHAISCRIPT_NOEXCEPT { return m_data->m_type_info; } /// return true if the object is uninitialized bool is_undef() const CHAISCRIPT_NOEXCEPT { return m_data->m_type_info.is_undef(); } bool is_const() const CHAISCRIPT_NOEXCEPT { return m_data->m_type_info.is_const(); } bool is_type(const Type_Info &ti) const CHAISCRIPT_NOEXCEPT { return m_data->m_type_info.bare_equal(ti); } bool is_null() const CHAISCRIPT_NOEXCEPT { return (m_data->m_data_ptr == nullptr && m_data->m_const_data_ptr == nullptr); } const chaiscript::detail::Any & get() const CHAISCRIPT_NOEXCEPT { return m_data->m_obj; } bool is_ref() const CHAISCRIPT_NOEXCEPT { return m_data->m_is_ref; } bool is_pointer() const CHAISCRIPT_NOEXCEPT { return !is_ref(); } void *get_ptr() const CHAISCRIPT_NOEXCEPT { return m_data->m_data_ptr; } const void *get_const_ptr() const CHAISCRIPT_NOEXCEPT { return m_data->m_const_data_ptr; } Boxed_Value get_attr(const std::string &t_name) { if (!m_data->m_attrs) { m_data->m_attrs = std::unique_ptr<std::map<std::string, Boxed_Value>>(new std::map<std::string, Boxed_Value>()); } return (*m_data->m_attrs)[t_name]; } Boxed_Value &copy_attrs(const Boxed_Value &t_obj) { if (t_obj.m_data->m_attrs) { m_data->m_attrs = std::unique_ptr<std::map<std::string, Boxed_Value>>(new std::map<std::string, Boxed_Value>(*t_obj.m_data->m_attrs)); } return *this; } /// \returns true if the two Boxed_Values share the same internal type static bool type_match(const Boxed_Value &l, const Boxed_Value &r) CHAISCRIPT_NOEXCEPT { return l.get_type_info() == r.get_type_info(); } private: std::shared_ptr<Data> m_data; }; /// @brief Creates a Boxed_Value. If the object passed in is a value type, it is copied. 
If it is a pointer, std::shared_ptr, or std::reference_type /// a copy is not made. /// @param t The value to box /// /// Example: /// /// ~~~{.cpp} /// int i; /// chaiscript::ChaiScript chai; /// chai.add(chaiscript::var(i), "i"); /// chai.add(chaiscript::var(&i), "ip"); /// ~~~ /// /// @sa @ref adding_objects template<typename T> Boxed_Value var(T t) { return Boxed_Value(t); } namespace detail { /// \brief Takes a value, copies it and returns a Boxed_Value object that is immutable /// \param[in] t Value to copy and make const /// \returns Immutable Boxed_Value /// \sa Boxed_Value::is_const template<typename T> Boxed_Value const_var_impl(const T &t) { return Boxed_Value(std::make_shared<typename std::add_const<T>::type >(t)); } /// \brief Takes a pointer to a value, adds const to the pointed to type and returns an immutable Boxed_Value. /// Does not copy the pointed to value. /// \param[in] t Pointer to make immutable /// \returns Immutable Boxed_Value /// \sa Boxed_Value::is_const template<typename T> Boxed_Value const_var_impl(T *t) { return Boxed_Value( const_cast<typename std::add_const<T>::type *>(t) ); } /// \brief Takes a std::shared_ptr to a value, adds const to the pointed to type and returns an immutable Boxed_Value. /// Does not copy the pointed to value. /// \param[in] t Pointer to make immutable /// \returns Immutable Boxed_Value /// \sa Boxed_Value::is_const template<typename T> Boxed_Value const_var_impl(const std::shared_ptr<T> &t) { return Boxed_Value( std::const_pointer_cast<typename std::add_const<T>::type>(t) ); } /// \brief Takes a std::reference_wrapper value, adds const to the referenced type and returns an immutable Boxed_Value. /// Does not copy the referenced value. 
/// \param[in] t Reference object to make immutable /// \returns Immutable Boxed_Value /// \sa Boxed_Value::is_const template<typename T> Boxed_Value const_var_impl(const std::reference_wrapper<T> &t) { return Boxed_Value( std::cref(t.get()) ); } } /// \brief Takes an object and returns an immutable Boxed_Value. If the object is a std::reference or pointer type /// the value is not copied. If it is an object type, it is copied. /// \param[in] t Object to make immutable /// \returns Immutable Boxed_Value /// \sa chaiscript::Boxed_Value::is_const /// \sa chaiscript::var /// /// Example: /// \code /// enum Colors /// { /// Blue, /// Green, /// Red /// }; /// chaiscript::ChaiScript chai /// chai.add(chaiscript::const_var(Blue), "Blue"); // add immutable constant /// chai.add(chaiscript::const_var(Red), "Red"); /// chai.add(chaiscript::const_var(Green), "Green"); /// \endcode /// /// \todo support C++11 strongly typed enums /// \sa \ref adding_objects template<typename T> Boxed_Value const_var(const T &t) { return detail::const_var_impl(t); } } #endif<|fim▁end|>
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>//! //! Core library entry point. //! extern crate rustc_serialize; extern crate byteorder; extern crate bincode; extern crate flate2; extern crate rust_htslib as htslib; use std::fs; use std::io::ErrorKind; use std::process::exit; // generic functions<|fim▁hole|> let ref_filename = filename.as_ref(); match fs::metadata(ref_filename) { Ok(meta) => meta.is_file(), Err(err) => match err.kind() { ErrorKind::NotFound => false, _ => { println!("Failed to open file {}: {:}", ref_filename.to_string_lossy(), err); exit(-1) }, }, } } // private modules mod randfile; // public modules pub mod tallyrun; pub mod tallyread; pub mod seqtable; pub mod fasta; pub mod filter; pub mod counts; pub mod bigwig; pub mod scale; pub mod outputfile; // C API pub mod c_api; pub use c_api::*; use std::path::Path;<|fim▁end|>
pub fn file_exists<P: AsRef<Path>>(filename: P) -> bool {
<|file_name|>node-test.js<|end_file_name|><|fim▁begin|>/*eslint-env mocha*/ /* * mochify.js * * Copyright (c) 2014 Maximilian Antoni <[email protected]> * * @license MIT */ 'use strict'; var assert = require('assert'); var fs = require('fs'); var run = require('./fixture/run'); var sandbox = require('./fixture/sandbox'); describe('node', function () { it('passes', function (done) { run('passes', ['--node', '-R', 'tap'], function (code, stdout) { assert.equal(stdout, '# node:\n' + 'ok 1 test passes\n' + '# tests 1\n' + '# pass 1\n' + '# fail 0\n' + '1..1\n'); assert.equal(code, 0); done(); }); }); it('fails and continues if `it` throws', function (done) { run('fails', ['--node', '-R', 'tap'], function (code, stdout) { assert.equal(code, 1); var lines = stdout.trim().split(/\n+/); assert.equal(lines[0], '# node:'); assert.equal(lines[1], 'not ok 1 test fails synchronously'); assert.equal(lines[3], ' Error: Oh noes!'); var p = lines.indexOf('not ok 2 test fails asynchronously'); assert.notEqual(p, -1); assert.equal(lines[p + 2], ' Error: Oh noes!'); p = lines.indexOf('ok 3 test passes synchronously', p + 2); assert.notEqual(p, -1); assert.equal(lines[p + 1], 'ok 4 test passes asynchronously'); assert.equal(lines[p + 2], '# tests 4'); assert.equal(lines[p + 3], '# pass 2'); assert.equal(lines[p + 4], '# fail 2'); assert.equal(lines[p + 5], '1..4'); done(); }); }); it('fails and exits if `describe` throws', function (done) { run('describe-throws', ['--node', '-R', 'tap'], function (code, stdout) { assert.equal(stdout.indexOf('# node:'), 0); assert.equal(stdout.indexOf('i should not show up'), -1); assert.equal(code, 1); done(); }); }); it('coverage tap', function (done) { run('passes', ['--node', '--cover', '-R', 'tap'], function (code, stdout, stderr) { assert.equal(stdout, '# node:\n' + 'ok 1 test passes\n' + '# tests 1\n' + '# pass 1\n' + '# fail 0\n' + '1..1\n'); assert.equal(stderr, '# coverage: 8/8 (100.00 %)\n\n'); assert.equal(code, 0); done(); }); }); 
it('coverage dot', function (done) { run('passes', ['--node', '--cover', '--no-colors', '-R', 'dot'], function (code, stdout, stderr) { var lines = stdout.trim().split(/\n+/); assert.equal(lines[0], '# node:'); assert.equal(lines[1], ' .'); assert.equal(stderr, '# coverage: 8/8 (100.00 %)\n\n'); assert.equal(code, 0); done(); }); }); it('fails if test fails but coverage is fine', function (done) { run('fails', ['--node', '--cover', '-R', 'tap'], function (code) { assert.equal(code, 1); done(); }); }); it('fails cover', function (done) { run('fails-cover', ['--node', '--cover', '-R', 'tap'], function (code, stdout, stderr) { assert.equal(stdout, '# node:\n' + 'ok 1 test does not cover\n' + '# tests 1\n' + '# pass 1\n' + '# fail 0\n' + '1..1\n'); var coverOut = '\n# coverage: 9/10 (90.00 %)\n\nError: Exit 1\n\n'; assert.equal(stderr.substring(stderr.length - coverOut.length), coverOut); assert.equal(code, 1); done(); }); }); it('times out', function (done) { run('timeout', ['--node', '-R', 'tap', '--timeout', '10'], function (code, stdout) { assert.equal(stdout.indexOf('# node:\n' + 'not ok 1 test times out\n'), 0); assert.equal(code, 1); done(); }); }); it('uses tdd ui', function (done) { run('ui-tdd', ['--node', '-R', 'tap', '--ui', 'tdd'], function (code, stdout) { assert.equal(stdout, '# node:\n' + 'ok 1 test passes\n' + '# tests 1\n' + '# pass 1\n' + '# fail 0\n' + '1..1\n'); assert.equal(code, 0); done(); }); }); it('enables color', function (done) { run('passes', ['--node', '-R', 'dot', '--colors'], function (code, stdout) { assert.equal(stdout.trim().split('\n')[3], ' \u001b[90m.\u001b[0m'); assert.equal(code, 0); done(); }); }); it('passes transform to browserify', function (done) { run('passes', ['--node', '-R', 'tap', '--transform', '../transform.js'], function (code, stdout) { var lines = stdout.split('\n'); assert.equal(lines[0], 'passes/test/passes.js'); assert.equal(code, 0); done(); });<|fim▁hole|> it('passes transform with options to browserify', 
function (done) { run('passes', ['--node', '-R', 'tap', '--transform', '[', '../transform.js', '-x', ']'], function (code, stdout) { var lines = stdout.split('\n'); assert(JSON.parse(lines[1]).x); assert.equal(code, 0); done(); }); }); it('passes multiple transforms to browserify', function (done) { run('passes', ['--node', '-R', 'tap', '--transform', '../transform.js', '--transform', '../transform.js'], function (code, stdout) { var lines = stdout.split('\n'); assert.equal(lines[0], 'passes/test/passes.js'); assert.equal(lines[2], 'passes/test/passes.js'); assert.equal(code, 0); done(); }); }); it('passes plugin to browserify', function (done) { run('passes', ['--node', '-R', 'tap', '--plugin', '../plugin.js'], function (code, stdout) { var lines = stdout.split('\n'); assert.equal(lines[0], 'passes/test/passes.js'); assert.equal(code, 0); done(); }); }); it('passes plugin with options to browserify', function (done) { run('passes', ['--node', '-R', 'tap', '--plugin', '[', '../plugin.js', '-x', ']'], function (code, stdout) { var lines = stdout.split('\n'); assert(JSON.parse(lines[1]).x); assert.equal(code, 0); done(); }); }); it('passes multiple plugins to browserify', function (done) { run('passes', ['--node', '-R', 'tap', '--plugin', '../plugin.js', '--plugin', '../plugin.js'], function (code, stdout) { var lines = stdout.split('\n'); assert.equal(lines[0], 'passes/test/passes.js'); assert.equal(lines[2], 'passes/test/passes.js'); assert.equal(code, 0); done(); }); }); it('requires file', function (done) { run('require', ['--node', '-R', 'tap', '-r', '../required'], function (code, stdout) { var lines = stdout.split('\n'); assert.equal(lines[1], 'required'); assert.equal(code, 0); done(); }); }); it('passes extension to browserify', function (done) { run('extension', ['--node', '-R', 'tap', '--extension', '.coffee'], function (code, stdout) { var lines = stdout.split('\n'); assert.equal(lines[1], 'coffeescript'); assert.equal(code, 0); done(); }); }); it('passes 
multiple extensions to browserify', function (done) { run('extension-multiple', ['--node', '-R', 'tap', '--extension', '.coffee', '--extension', '.ts'], function (code, stdout) { var lines = stdout.split('\n'); assert.equal(lines[1], 'coffeescript'); assert.equal(lines[2], 'typescript'); assert.equal(code, 0); done(); }); }); it('passes recursive', function (done) { run('recursive', ['--node', '-R', 'tap', '--recursive'], function (code, stdout) { assert.equal(stdout, '# node:\n' + 'ok 1 recursive passes\n' + '# tests 1\n' + '# pass 1\n' + '# fail 0\n' + '1..1\n'); assert.equal(code, 0); done(); }); }); it('passes non-default recursive', function (done) { run('recursive', ['--node', '-R', 'tap', '--recursive', 'other'], function (code, stdout) { assert.equal(stdout, '# node:\n' + 'ok 1 other recursive passes\n' + '# tests 1\n' + '# pass 1\n' + '# fail 0\n' + '1..1\n'); assert.equal(code, 0); done(); }); }); it('passes non-default recursive with trailing /*.js', function (done) { run('recursive', ['--node', '-R', 'tap', '--recursive', 'other/*.js'], function (code, stdout) { assert.equal(stdout, '# node:\n' + 'ok 1 other recursive passes\n' + '# tests 1\n' + '# pass 1\n' + '# fail 0\n' + '1..1\n'); assert.equal(code, 0); done(); }); }); it('passes browser-field', function (done) { run('browser-field', ['--node', '-R', 'tap'], function (code, stdout) { assert.equal(stdout, '# node:\n' + 'ok 1 browser-field passes in browser\n' + '# tests 1\n' + '# pass 1\n' + '# fail 0\n' + '1..1\n'); assert.equal(code, 0); done(); }); }); it('fails browser-field with --browser-field disabled', function (done) { run('browser-field', ['--node', '-R', 'tap', '--no-browser-field'], function (code, stdout) { assert.equal(stdout.indexOf('# node:\n' + 'not ok 1 browser-field passes in browser\n' + ' Error'), 0); assert.equal(code, 1); done(); }); }); // This test case passes on node 6 but fails on node 8 and 10. The // corresponding chromium test also passes. 
it.skip('shows unicode diff', function (done) { run('unicode', ['--node', '-R', 'tap'], function (code, stdout) { assert.equal(stdout.indexOf('# node:\n' + 'not ok 1 unicode prints diff\n' + ' AssertionError: \'€\' == \'3\''), 0); assert.equal(code, 1); done(); }); }); it('fails external', function (done) { run('external', ['--node', '-R', 'tap'], function (code, stdout, stderr) { console.log(stderr); assert.notEqual( stderr.indexOf('Cannot find module \'unresolvable\''), -1); assert.equal(code, 1); done(); }); }); it('passes external with --external enabled', function (done) { run('external', ['--node', '-R', 'tap', '--external', 'unresolvable'], function (code, stdout) { assert.equal(stdout, '# node:\n' + 'ok 1 test external\n' + '# tests 1\n' + '# pass 1\n' + '# fail 0\n' + '1..1\n'); assert.equal(code, 0); done(); }); }); it('supports --outfile', sandbox(function (done, tmpdir) { var outfile = tmpdir + '/report.txt'; run('passes', ['--node', '-R', 'tap', '--outfile', outfile], function (code, stdout) { assert.equal(code, 0); assert.equal(stdout, ''); assert.equal(fs.readFileSync(outfile, 'utf8'), '# node:\n' + 'ok 1 test passes\n' + '# tests 1\n' + '# pass 1\n' + '# fail 0\n' + '1..1\n'); done(); }); })); it('supports --mocha-path', sandbox(function (done) { var mochaPath = './node_modules/mocha'; run('passes', ['--node', '-R', 'tap', '--mocha-path', mochaPath], function (code, stdout) { assert.equal(stdout, '# node:\n' + 'ok 1 test passes\n' + '# tests 1\n' + '# pass 1\n' + '# fail 0\n' + '1..1\n'); assert.equal(code, 0); done(); }); })); });<|fim▁end|>
});
<|file_name|>1_twoSum.py<|end_file_name|><|fim▁begin|>class Solution(object): def twoSum(self, nums, target): lookup = {} for i, num in enumerate(nums):<|fim▁hole|> if target - num in lookup: return [lookup[target - num], i] lookup[num] = i return [] if __name__ == '__main__': print Solution().twoSum((0, 2, 7, 11, 15), 9)<|fim▁end|>
<|file_name|>fake_service_builder.go<|end_file_name|><|fim▁begin|>// This file was generated by counterfeiter package fakes import ( . "github.com/cloudfoundry/cli/cf/actors/service_builder" "github.com/cloudfoundry/cli/cf/models" "sync" ) type FakeServiceBuilder struct { GetAllServicesStub func() ([]models.ServiceOffering, error) getAllServicesMutex sync.RWMutex getAllServicesArgsForCall []struct{} getAllServicesReturns struct { result1 []models.ServiceOffering result2 error } GetAllServicesWithPlansStub func() ([]models.ServiceOffering, error) getAllServicesWithPlansMutex sync.RWMutex getAllServicesWithPlansArgsForCall []struct{} getAllServicesWithPlansReturns struct { result1 []models.ServiceOffering result2 error } GetServiceByNameWithPlansStub func(string) (models.ServiceOffering, error) getServiceByNameWithPlansMutex sync.RWMutex getServiceByNameWithPlansArgsForCall []struct { arg1 string } getServiceByNameWithPlansReturns struct { result1 models.ServiceOffering result2 error } GetServiceByNameWithPlansWithOrgNamesStub func(string) (models.ServiceOffering, error) getServiceByNameWithPlansWithOrgNamesMutex sync.RWMutex getServiceByNameWithPlansWithOrgNamesArgsForCall []struct { arg1 string } getServiceByNameWithPlansWithOrgNamesReturns struct { result1 models.ServiceOffering result2 error } GetServiceByNameForSpaceStub func(string, string) (models.ServiceOffering, error) getServiceByNameForSpaceMutex sync.RWMutex getServiceByNameForSpaceArgsForCall []struct { arg1 string arg2 string } getServiceByNameForSpaceReturns struct { result1 models.ServiceOffering result2 error } GetServiceByNameForSpaceWithPlansStub func(string, string) (models.ServiceOffering, error) getServiceByNameForSpaceWithPlansMutex sync.RWMutex getServiceByNameForSpaceWithPlansArgsForCall []struct { arg1 string arg2 string } getServiceByNameForSpaceWithPlansReturns struct { result1 models.ServiceOffering result2 error } GetServicesByNameForSpaceWithPlansStub func(string, string) 
(models.ServiceOfferings, error) getServicesByNameForSpaceWithPlansMutex sync.RWMutex getServicesByNameForSpaceWithPlansArgsForCall []struct { arg1 string arg2 string } getServicesByNameForSpaceWithPlansReturns struct { result1 models.ServiceOfferings result2 error } GetServiceByNameForOrgStub func(string, string) (models.ServiceOffering, error) getServiceByNameForOrgMutex sync.RWMutex getServiceByNameForOrgArgsForCall []struct { arg1 string arg2 string } getServiceByNameForOrgReturns struct { result1 models.ServiceOffering result2 error } GetServicesForBrokerStub func(string) ([]models.ServiceOffering, error) getServicesForBrokerMutex sync.RWMutex getServicesForBrokerArgsForCall []struct { arg1 string } getServicesForBrokerReturns struct { result1 []models.ServiceOffering result2 error } GetServicesForSpaceStub func(string) ([]models.ServiceOffering, error) getServicesForSpaceMutex sync.RWMutex getServicesForSpaceArgsForCall []struct { arg1 string } getServicesForSpaceReturns struct { result1 []models.ServiceOffering result2 error } GetServicesForSpaceWithPlansStub func(string) ([]models.ServiceOffering, error) getServicesForSpaceWithPlansMutex sync.RWMutex getServicesForSpaceWithPlansArgsForCall []struct { arg1 string } getServicesForSpaceWithPlansReturns struct { result1 []models.ServiceOffering result2 error } GetServiceVisibleToOrgStub func(string, string) (models.ServiceOffering, error) getServiceVisibleToOrgMutex sync.RWMutex getServiceVisibleToOrgArgsForCall []struct { arg1 string arg2 string } getServiceVisibleToOrgReturns struct { result1 models.ServiceOffering result2 error } GetServicesVisibleToOrgStub func(string) ([]models.ServiceOffering, error) getServicesVisibleToOrgMutex sync.RWMutex getServicesVisibleToOrgArgsForCall []struct { arg1 string } getServicesVisibleToOrgReturns struct { result1 []models.ServiceOffering result2 error } } func (fake *FakeServiceBuilder) GetAllServices() ([]models.ServiceOffering, error) { fake.getAllServicesMutex.Lock() 
defer fake.getAllServicesMutex.Unlock() fake.getAllServicesArgsForCall = append(fake.getAllServicesArgsForCall, struct{}{}) if fake.GetAllServicesStub != nil { return fake.GetAllServicesStub() } else { return fake.getAllServicesReturns.result1, fake.getAllServicesReturns.result2 } } func (fake *FakeServiceBuilder) GetAllServicesCallCount() int { fake.getAllServicesMutex.RLock() defer fake.getAllServicesMutex.RUnlock() return len(fake.getAllServicesArgsForCall) } func (fake *FakeServiceBuilder) GetAllServicesReturns(result1 []models.ServiceOffering, result2 error) { fake.getAllServicesReturns = struct { result1 []models.ServiceOffering result2 error }{result1, result2} } func (fake *FakeServiceBuilder) GetAllServicesWithPlans() ([]models.ServiceOffering, error) { fake.getAllServicesWithPlansMutex.Lock() defer fake.getAllServicesWithPlansMutex.Unlock() fake.getAllServicesWithPlansArgsForCall = append(fake.getAllServicesWithPlansArgsForCall, struct{}{}) if fake.GetAllServicesWithPlansStub != nil { return fake.GetAllServicesWithPlansStub() } else { return fake.getAllServicesWithPlansReturns.result1, fake.getAllServicesWithPlansReturns.result2 } } func (fake *FakeServiceBuilder) GetAllServicesWithPlansCallCount() int { fake.getAllServicesWithPlansMutex.RLock() defer fake.getAllServicesWithPlansMutex.RUnlock() return len(fake.getAllServicesWithPlansArgsForCall) } func (fake *FakeServiceBuilder) GetAllServicesWithPlansReturns(result1 []models.ServiceOffering, result2 error) { fake.getAllServicesWithPlansReturns = struct { result1 []models.ServiceOffering result2 error }{result1, result2} } func (fake *FakeServiceBuilder) GetServiceByNameWithPlans(arg1 string) (models.ServiceOffering, error) { fake.getServiceByNameWithPlansMutex.Lock() defer fake.getServiceByNameWithPlansMutex.Unlock() fake.getServiceByNameWithPlansArgsForCall = append(fake.getServiceByNameWithPlansArgsForCall, struct { arg1 string }{arg1}) if fake.GetServiceByNameWithPlansStub != nil { return 
fake.GetServiceByNameWithPlansStub(arg1) } else { return fake.getServiceByNameWithPlansReturns.result1, fake.getServiceByNameWithPlansReturns.result2 } } func (fake *FakeServiceBuilder) GetServiceByNameWithPlansCallCount() int { fake.getServiceByNameWithPlansMutex.RLock() defer fake.getServiceByNameWithPlansMutex.RUnlock() return len(fake.getServiceByNameWithPlansArgsForCall) } func (fake *FakeServiceBuilder) GetServiceByNameWithPlansArgsForCall(i int) string { fake.getServiceByNameWithPlansMutex.RLock() defer fake.getServiceByNameWithPlansMutex.RUnlock() return fake.getServiceByNameWithPlansArgsForCall[i].arg1 } func (fake *FakeServiceBuilder) GetServiceByNameWithPlansReturns(result1 models.ServiceOffering, result2 error) { fake.getServiceByNameWithPlansReturns = struct { result1 models.ServiceOffering result2 error }{result1, result2} } func (fake *FakeServiceBuilder) GetServiceByNameWithPlansWithOrgNames(arg1 string) (models.ServiceOffering, error) { fake.getServiceByNameWithPlansWithOrgNamesMutex.Lock() defer fake.getServiceByNameWithPlansWithOrgNamesMutex.Unlock() fake.getServiceByNameWithPlansWithOrgNamesArgsForCall = append(fake.getServiceByNameWithPlansWithOrgNamesArgsForCall, struct { arg1 string }{arg1}) if fake.GetServiceByNameWithPlansWithOrgNamesStub != nil { return fake.GetServiceByNameWithPlansWithOrgNamesStub(arg1) } else { return fake.getServiceByNameWithPlansWithOrgNamesReturns.result1, fake.getServiceByNameWithPlansWithOrgNamesReturns.result2 } } func (fake *FakeServiceBuilder) GetServiceByNameWithPlansWithOrgNamesCallCount() int { fake.getServiceByNameWithPlansWithOrgNamesMutex.RLock() defer fake.getServiceByNameWithPlansWithOrgNamesMutex.RUnlock() return len(fake.getServiceByNameWithPlansWithOrgNamesArgsForCall) } func (fake *FakeServiceBuilder) GetServiceByNameWithPlansWithOrgNamesArgsForCall(i int) string { fake.getServiceByNameWithPlansWithOrgNamesMutex.RLock() defer fake.getServiceByNameWithPlansWithOrgNamesMutex.RUnlock() return 
fake.getServiceByNameWithPlansWithOrgNamesArgsForCall[i].arg1 } func (fake *FakeServiceBuilder) GetServiceByNameWithPlansWithOrgNamesReturns(result1 models.ServiceOffering, result2 error) { fake.getServiceByNameWithPlansWithOrgNamesReturns = struct { result1 models.ServiceOffering result2 error }{result1, result2} } func (fake *FakeServiceBuilder) GetServiceByNameForSpace(arg1 string, arg2 string) (models.ServiceOffering, error) { fake.getServiceByNameForSpaceMutex.Lock() defer fake.getServiceByNameForSpaceMutex.Unlock() fake.getServiceByNameForSpaceArgsForCall = append(fake.getServiceByNameForSpaceArgsForCall, struct { arg1 string arg2 string }{arg1, arg2}) if fake.GetServiceByNameForSpaceStub != nil { return fake.GetServiceByNameForSpaceStub(arg1, arg2) } else { return fake.getServiceByNameForSpaceReturns.result1, fake.getServiceByNameForSpaceReturns.result2 } } func (fake *FakeServiceBuilder) GetServiceByNameForSpaceCallCount() int { fake.getServiceByNameForSpaceMutex.RLock() defer fake.getServiceByNameForSpaceMutex.RUnlock() return len(fake.getServiceByNameForSpaceArgsForCall) } func (fake *FakeServiceBuilder) GetServiceByNameForSpaceArgsForCall(i int) (string, string) { fake.getServiceByNameForSpaceMutex.RLock() defer fake.getServiceByNameForSpaceMutex.RUnlock() return fake.getServiceByNameForSpaceArgsForCall[i].arg1, fake.getServiceByNameForSpaceArgsForCall[i].arg2 } func (fake *FakeServiceBuilder) GetServiceByNameForSpaceReturns(result1 models.ServiceOffering, result2 error) { fake.getServiceByNameForSpaceReturns = struct { result1 models.ServiceOffering result2 error }{result1, result2} } func (fake *FakeServiceBuilder) GetServiceByNameForSpaceWithPlans(arg1 string, arg2 string) (models.ServiceOffering, error) { fake.getServiceByNameForSpaceWithPlansMutex.Lock() defer fake.getServiceByNameForSpaceWithPlansMutex.Unlock() fake.getServiceByNameForSpaceWithPlansArgsForCall = append(fake.getServiceByNameForSpaceWithPlansArgsForCall, struct { arg1 string arg2 
string }{arg1, arg2}) if fake.GetServiceByNameForSpaceWithPlansStub != nil { return fake.GetServiceByNameForSpaceWithPlansStub(arg1, arg2) } else { return fake.getServiceByNameForSpaceWithPlansReturns.result1, fake.getServiceByNameForSpaceWithPlansReturns.result2 } } func (fake *FakeServiceBuilder) GetServiceByNameForSpaceWithPlansCallCount() int { fake.getServiceByNameForSpaceWithPlansMutex.RLock() defer fake.getServiceByNameForSpaceWithPlansMutex.RUnlock() return len(fake.getServiceByNameForSpaceWithPlansArgsForCall) } func (fake *FakeServiceBuilder) GetServiceByNameForSpaceWithPlansArgsForCall(i int) (string, string) { fake.getServiceByNameForSpaceWithPlansMutex.RLock() defer fake.getServiceByNameForSpaceWithPlansMutex.RUnlock() return fake.getServiceByNameForSpaceWithPlansArgsForCall[i].arg1, fake.getServiceByNameForSpaceWithPlansArgsForCall[i].arg2 } func (fake *FakeServiceBuilder) GetServiceByNameForSpaceWithPlansReturns(result1 models.ServiceOffering, result2 error) { fake.getServiceByNameForSpaceWithPlansReturns = struct { result1 models.ServiceOffering result2 error }{result1, result2} } func (fake *FakeServiceBuilder) GetServicesByNameForSpaceWithPlans(arg1 string, arg2 string) (models.ServiceOfferings, error) { fake.getServicesByNameForSpaceWithPlansMutex.Lock() defer fake.getServicesByNameForSpaceWithPlansMutex.Unlock() fake.getServicesByNameForSpaceWithPlansArgsForCall = append(fake.getServicesByNameForSpaceWithPlansArgsForCall, struct { arg1 string arg2 string }{arg1, arg2}) if fake.GetServicesByNameForSpaceWithPlansStub != nil { return fake.GetServicesByNameForSpaceWithPlansStub(arg1, arg2) } else { return fake.getServicesByNameForSpaceWithPlansReturns.result1, fake.getServicesByNameForSpaceWithPlansReturns.result2 } } func (fake *FakeServiceBuilder) GetServicesByNameForSpaceWithPlansCallCount() int { fake.getServicesByNameForSpaceWithPlansMutex.RLock() defer fake.getServicesByNameForSpaceWithPlansMutex.RUnlock() return 
len(fake.getServicesByNameForSpaceWithPlansArgsForCall) } func (fake *FakeServiceBuilder) GetServicesByNameForSpaceWithPlansArgsForCall(i int) (string, string) { fake.getServicesByNameForSpaceWithPlansMutex.RLock() defer fake.getServicesByNameForSpaceWithPlansMutex.RUnlock() return fake.getServicesByNameForSpaceWithPlansArgsForCall[i].arg1, fake.getServicesByNameForSpaceWithPlansArgsForCall[i].arg2 } func (fake *FakeServiceBuilder) GetServicesByNameForSpaceWithPlansReturns(result1 models.ServiceOfferings, result2 error) { fake.getServicesByNameForSpaceWithPlansReturns = struct { result1 models.ServiceOfferings result2 error }{result1, result2} } func (fake *FakeServiceBuilder) GetServiceByNameForOrg(arg1 string, arg2 string) (models.ServiceOffering, error) { fake.getServiceByNameForOrgMutex.Lock() defer fake.getServiceByNameForOrgMutex.Unlock() fake.getServiceByNameForOrgArgsForCall = append(fake.getServiceByNameForOrgArgsForCall, struct { arg1 string arg2 string }{arg1, arg2}) if fake.GetServiceByNameForOrgStub != nil { return fake.GetServiceByNameForOrgStub(arg1, arg2) } else { return fake.getServiceByNameForOrgReturns.result1, fake.getServiceByNameForOrgReturns.result2 } } func (fake *FakeServiceBuilder) GetServiceByNameForOrgCallCount() int { fake.getServiceByNameForOrgMutex.RLock() defer fake.getServiceByNameForOrgMutex.RUnlock() return len(fake.getServiceByNameForOrgArgsForCall) } func (fake *FakeServiceBuilder) GetServiceByNameForOrgArgsForCall(i int) (string, string) { fake.getServiceByNameForOrgMutex.RLock() defer fake.getServiceByNameForOrgMutex.RUnlock() return fake.getServiceByNameForOrgArgsForCall[i].arg1, fake.getServiceByNameForOrgArgsForCall[i].arg2 } func (fake *FakeServiceBuilder) GetServiceByNameForOrgReturns(result1 models.ServiceOffering, result2 error) { fake.getServiceByNameForOrgReturns = struct { result1 models.ServiceOffering result2 error }{result1, result2} } func (fake *FakeServiceBuilder) GetServicesForBroker(arg1 string) 
([]models.ServiceOffering, error) { fake.getServicesForBrokerMutex.Lock() defer fake.getServicesForBrokerMutex.Unlock() fake.getServicesForBrokerArgsForCall = append(fake.getServicesForBrokerArgsForCall, struct { arg1 string }{arg1}) if fake.GetServicesForBrokerStub != nil { return fake.GetServicesForBrokerStub(arg1) } else { return fake.getServicesForBrokerReturns.result1, fake.getServicesForBrokerReturns.result2 } } func (fake *FakeServiceBuilder) GetServicesForBrokerCallCount() int { fake.getServicesForBrokerMutex.RLock() defer fake.getServicesForBrokerMutex.RUnlock() return len(fake.getServicesForBrokerArgsForCall) } func (fake *FakeServiceBuilder) GetServicesForBrokerArgsForCall(i int) string { fake.getServicesForBrokerMutex.RLock() defer fake.getServicesForBrokerMutex.RUnlock() return fake.getServicesForBrokerArgsForCall[i].arg1 } func (fake *FakeServiceBuilder) GetServicesForBrokerReturns(result1 []models.ServiceOffering, result2 error) { fake.getServicesForBrokerReturns = struct { result1 []models.ServiceOffering result2 error }{result1, result2} }<|fim▁hole|> func (fake *FakeServiceBuilder) GetServicesForSpace(arg1 string) ([]models.ServiceOffering, error) { fake.getServicesForSpaceMutex.Lock() defer fake.getServicesForSpaceMutex.Unlock() fake.getServicesForSpaceArgsForCall = append(fake.getServicesForSpaceArgsForCall, struct { arg1 string }{arg1}) if fake.GetServicesForSpaceStub != nil { return fake.GetServicesForSpaceStub(arg1) } else { return fake.getServicesForSpaceReturns.result1, fake.getServicesForSpaceReturns.result2 } } func (fake *FakeServiceBuilder) GetServicesForSpaceCallCount() int { fake.getServicesForSpaceMutex.RLock() defer fake.getServicesForSpaceMutex.RUnlock() return len(fake.getServicesForSpaceArgsForCall) } func (fake *FakeServiceBuilder) GetServicesForSpaceArgsForCall(i int) string { fake.getServicesForSpaceMutex.RLock() defer fake.getServicesForSpaceMutex.RUnlock() return fake.getServicesForSpaceArgsForCall[i].arg1 } func (fake 
*FakeServiceBuilder) GetServicesForSpaceReturns(result1 []models.ServiceOffering, result2 error) { fake.getServicesForSpaceReturns = struct { result1 []models.ServiceOffering result2 error }{result1, result2} } func (fake *FakeServiceBuilder) GetServicesForSpaceWithPlans(arg1 string) ([]models.ServiceOffering, error) { fake.getServicesForSpaceWithPlansMutex.Lock() defer fake.getServicesForSpaceWithPlansMutex.Unlock() fake.getServicesForSpaceWithPlansArgsForCall = append(fake.getServicesForSpaceWithPlansArgsForCall, struct { arg1 string }{arg1}) if fake.GetServicesForSpaceWithPlansStub != nil { return fake.GetServicesForSpaceWithPlansStub(arg1) } else { return fake.getServicesForSpaceWithPlansReturns.result1, fake.getServicesForSpaceWithPlansReturns.result2 } } func (fake *FakeServiceBuilder) GetServicesForSpaceWithPlansCallCount() int { fake.getServicesForSpaceWithPlansMutex.RLock() defer fake.getServicesForSpaceWithPlansMutex.RUnlock() return len(fake.getServicesForSpaceWithPlansArgsForCall) } func (fake *FakeServiceBuilder) GetServicesForSpaceWithPlansArgsForCall(i int) string { fake.getServicesForSpaceWithPlansMutex.RLock() defer fake.getServicesForSpaceWithPlansMutex.RUnlock() return fake.getServicesForSpaceWithPlansArgsForCall[i].arg1 } func (fake *FakeServiceBuilder) GetServicesForSpaceWithPlansReturns(result1 []models.ServiceOffering, result2 error) { fake.getServicesForSpaceWithPlansReturns = struct { result1 []models.ServiceOffering result2 error }{result1, result2} } func (fake *FakeServiceBuilder) GetServiceVisibleToOrg(arg1 string, arg2 string) (models.ServiceOffering, error) { fake.getServiceVisibleToOrgMutex.Lock() defer fake.getServiceVisibleToOrgMutex.Unlock() fake.getServiceVisibleToOrgArgsForCall = append(fake.getServiceVisibleToOrgArgsForCall, struct { arg1 string arg2 string }{arg1, arg2}) if fake.GetServiceVisibleToOrgStub != nil { return fake.GetServiceVisibleToOrgStub(arg1, arg2) } else { return fake.getServiceVisibleToOrgReturns.result1, 
fake.getServiceVisibleToOrgReturns.result2 } } func (fake *FakeServiceBuilder) GetServiceVisibleToOrgCallCount() int { fake.getServiceVisibleToOrgMutex.RLock() defer fake.getServiceVisibleToOrgMutex.RUnlock() return len(fake.getServiceVisibleToOrgArgsForCall) } func (fake *FakeServiceBuilder) GetServiceVisibleToOrgArgsForCall(i int) (string, string) { fake.getServiceVisibleToOrgMutex.RLock() defer fake.getServiceVisibleToOrgMutex.RUnlock() return fake.getServiceVisibleToOrgArgsForCall[i].arg1, fake.getServiceVisibleToOrgArgsForCall[i].arg2 } func (fake *FakeServiceBuilder) GetServiceVisibleToOrgReturns(result1 models.ServiceOffering, result2 error) { fake.getServiceVisibleToOrgReturns = struct { result1 models.ServiceOffering result2 error }{result1, result2} } func (fake *FakeServiceBuilder) GetServicesVisibleToOrg(arg1 string) ([]models.ServiceOffering, error) { fake.getServicesVisibleToOrgMutex.Lock() defer fake.getServicesVisibleToOrgMutex.Unlock() fake.getServicesVisibleToOrgArgsForCall = append(fake.getServicesVisibleToOrgArgsForCall, struct { arg1 string }{arg1}) if fake.GetServicesVisibleToOrgStub != nil { return fake.GetServicesVisibleToOrgStub(arg1) } else { return fake.getServicesVisibleToOrgReturns.result1, fake.getServicesVisibleToOrgReturns.result2 } } func (fake *FakeServiceBuilder) GetServicesVisibleToOrgCallCount() int { fake.getServicesVisibleToOrgMutex.RLock() defer fake.getServicesVisibleToOrgMutex.RUnlock() return len(fake.getServicesVisibleToOrgArgsForCall) } func (fake *FakeServiceBuilder) GetServicesVisibleToOrgArgsForCall(i int) string { fake.getServicesVisibleToOrgMutex.RLock() defer fake.getServicesVisibleToOrgMutex.RUnlock() return fake.getServicesVisibleToOrgArgsForCall[i].arg1 } func (fake *FakeServiceBuilder) GetServicesVisibleToOrgReturns(result1 []models.ServiceOffering, result2 error) { fake.getServicesVisibleToOrgReturns = struct { result1 []models.ServiceOffering result2 error }{result1, result2} } var _ ServiceBuilder = 
new(FakeServiceBuilder)<|fim▁end|>
<|file_name|>valid.go<|end_file_name|><|fim▁begin|>// chris 071615 Generation of validator code. package main import ( "fmt" "strings" "unicode" "go/ast" "go/token" "unicode/utf8" ) // XXX There is too much duplication of generated code among the // validators. // validateString writes validator code for a string. func validateString(ctx *generationContext, fieldname string, meta *fieldMetadata) { ctx.addVariable(fmt.Sprintf("field_%s", fieldname), "string") ctx.addVariable("ok", "bool") ctx.write("\tfield_%s, ok = data[\"%s\"]\n", fieldname, fieldname) ctx.write("\tif ok {\n") if meta.max != "" { ctx.addImport("errors") ctx.write("\t\tif len(field_%s) > %s {\n", fieldname, meta.max) ctx.write("\t\t\treturn nil, errors.New(\"%s can have a length of at most %s\")\n", fieldname, meta.max) ctx.write("\t\t}\n") } if meta.min != "" { ctx.addImport("errors") ctx.write("\t\tif len(field_%s) < %s {\n", fieldname, meta.min) ctx.write("\t\t\treturn nil, errors.New(\"%s must have a length of at least %s\")\n", fieldname, meta.min) ctx.write("\t\t}\n") } ctx.write("\t\tret.%s = field_%s\n", fieldname, fieldname) ctx.write("\t} else {\n") if meta.def != nil { ctx.write("\t\t// %s is optional.\n", fieldname) if *meta.def == "" { ctx.write("\t\t// Zero value already set.\n") } else { ctx.write("\t\tret.%s = %s\n", fieldname, *meta.def) } } else { ctx.addImport("errors") ctx.write("\t\treturn nil, errors.New(\"%s is required\")\n", fieldname) } ctx.write("\t}\n") } // validateBool writes validator code for a bool. 
func validateBool(ctx *generationContext, fieldname string, meta *fieldMetadata) { ctx.addVariable(fmt.Sprintf("field_%s_s", fieldname), "string") ctx.addVariable("ok", "bool") ctx.addVariable("err", "error") ctx.write("\tfield_%s_s, ok = data[\"%s\"]\n", fieldname, fieldname) ctx.write("\tif ok {\n") ctx.write("\t\tret.%s, err = strconv.ParseBool(field_%s_s)\n", fieldname, fieldname) ctx.write("\t\tif err != nil {\n") ctx.write("\t\t\treturn nil, err\n") ctx.write("\t\t}\n") ctx.write("\t} else {\n") if meta.def != nil { ctx.write("\t\t// %s is optional.\n", fieldname) if *meta.def == "" { ctx.write("\t\t// Zero value already set.\n") } else { ctx.write("\t\tret.%s = %s\n", fieldname, *meta.def) } } else { ctx.addImport("errors") ctx.write("\t\treturn nil, errors.New(\"%s is required\")\n", fieldname) } ctx.write("\t}\n") } // validateUint writes validator code for a uint of the given bitSize. func validateUint(ctx *generationContext, fieldname string, meta *fieldMetadata, bitSize int) { ctx.addVariable(fmt.Sprintf("field_%s_s", fieldname), "string") ctx.addVariable("ok", "bool") ctx.addVariable(fmt.Sprintf("field_%s", fieldname), "uint64") ctx.addVariable("err", "error") ctx.write("\tfield_%s_s, ok = data[\"%s\"]\n", fieldname, fieldname) ctx.write("\tif ok {\n") ctx.write("\t\tfield_%s, err = strconv.ParseUint(field_%s_s, 0, %d)\n", fieldname, fieldname, bitSize) ctx.write("\t\tif err != nil {\n") ctx.write("\t\t\treturn nil, err\n") ctx.write("\t\t}\n") if meta.max != "" { ctx.addImport("errors") ctx.write("\t\tif field_%s > %s {\n", fieldname, meta.max) ctx.write("\t\t\treturn nil, errors.New(\"%s can be at most %s\")\n", fieldname, meta.max) ctx.write("\t\t}\n") } if meta.min != "" { ctx.addImport("errors") ctx.write("\t\tif field_%s < %s {\n", fieldname, meta.min) ctx.write("\t\t\treturn nil, errors.New(\"%s must be at least %s\")\n", fieldname, meta.min) ctx.write("\t\t}\n") } // Have to cast since ParseUint returns a uint64. 
if bitSize == 0 { ctx.write("\t\tret.%s = uint(field_%s)\n", fieldname, fieldname) } else if bitSize != 64 { ctx.write("\t\tret.%s = uint%d(field_%s)\n", fieldname, bitSize, fieldname) } else { ctx.write("\t\tret.%s = field_%s\n", fieldname, fieldname) } ctx.write("\t} else {\n") if meta.def != nil { ctx.write("\t\t// %s is optional.\n", fieldname) if *meta.def == "" { ctx.write("\t\t// Zero value already set.\n") } else { ctx.write("\t\tret.%s = %s\n", fieldname, *meta.def) } } else { ctx.addImport("errors") ctx.write("\t\treturn nil, errors.New(\"%s is required\")\n", fieldname) } ctx.write("\t}\n") } // validateInt writes validator code for an int of the given bitSize. func validateInt(ctx *generationContext, fieldname string, meta *fieldMetadata, bitSize int) { ctx.addVariable(fmt.Sprintf("field_%s_s", fieldname), "string") ctx.addVariable("ok", "bool") ctx.addVariable(fmt.Sprintf("field_%s", fieldname), "int64") ctx.addVariable("err", "error") ctx.write("\tfield_%s_s, ok = data[\"%s\"]\n", fieldname, fieldname) ctx.write("\tif ok {\n") ctx.write("\t\tfield_%s, err = strconv.ParseInt(field_%s_s, 0, %d)\n", fieldname, fieldname, bitSize) ctx.write("\t\tif err != nil {\n") ctx.write("\t\t\treturn nil, err\n") ctx.write("\t\t}\n") if meta.max != "" { ctx.addImport("errors") ctx.write("\t\tif field_%s > %s {\n", fieldname, meta.max) ctx.write("\t\t\treturn nil, errors.New(\"%s can be at most %s\")\n", fieldname, meta.max) ctx.write("\t\t}\n") } if meta.min != "" { ctx.addImport("errors") ctx.write("\t\tif field_%s < %s {\n", fieldname, meta.min) ctx.write("\t\t\treturn nil, errors.New(\"%s must be at least %s\")\n", fieldname, meta.min) ctx.write("\t\t}\n") } // Have to cast since ParseInt returns an int64. 
if bitSize == 0 { ctx.write("\t\tret.%s = int(field_%s)\n", fieldname, fieldname) } else if bitSize != 64 { ctx.write("\t\tret.%s = int%d(field_%s)\n", fieldname, bitSize, fieldname) } else { ctx.write("\t\tret.%s = field_%s\n", fieldname, fieldname) } ctx.write("\t} else {\n") if meta.def != nil { ctx.write("\t\t// %s is optional.\n", fieldname) if *meta.def == "" { ctx.write("\t\t// Zero value already set.\n") } else { ctx.write("\t\tret.%s = %s\n", fieldname, *meta.def) } } else { ctx.addImport("errors") ctx.write("\t\treturn nil, errors.New(\"%s is required\")\n", fieldname) } ctx.write("\t}\n") } // validateFloat writes validator code for a float of the given bitSize. func validateFloat(ctx *generationContext, fieldname string, meta *fieldMetadata, bitSize int) { ctx.addVariable(fmt.Sprintf("field_%s_s", fieldname), "string") ctx.addVariable("ok", "bool") ctx.addVariable(fmt.Sprintf("field_%s", fieldname), "float64") ctx.addVariable("err", "error") ctx.write("\tfield_%s_s, ok = data[\"%s\"]\n", fieldname, fieldname) ctx.write("\tif ok {\n") ctx.write("\t\tfield_%s, err = strconv.ParseFloat(field_%s_s, %d)\n", fieldname, fieldname, bitSize) ctx.write("\t\tif err != nil {\n") ctx.write("\t\t\treturn nil, err\n") ctx.write("\t\t}\n") if meta.max != "" { ctx.addImport("errors") ctx.write("\t\tif field_%s > %s {\n", fieldname, meta.max) ctx.write("\t\t\treturn nil, errors.New(\"%s can be at most %s\")\n", fieldname, meta.max) ctx.write("\t\t}\n") } if meta.min != "" { ctx.addImport("errors") ctx.write("\t\tif field_%s < %s {\n", fieldname, meta.min) ctx.write("\t\t\treturn nil, errors.New(\"%s must be at least %s\")\n", fieldname, meta.min) ctx.write("\t\t}\n") } // Have to cast since ParseFloat returns a float64. 
if bitSize == 32 { ctx.write("\t\tret.%s = float32(field_%s)\n", fieldname, fieldname) } else { // 64 ctx.write("\t\tret.%s = field_%s\n", fieldname, fieldname) } ctx.write("\t} else {\n") if meta.def != nil { ctx.write("\t\t// %s is optional.\n", fieldname) if *meta.def == "" { ctx.write("\t\t// Zero value already set.\n") } else { ctx.write("\t\tret.%s = %s\n", fieldname, *meta.def) } } else { ctx.addImport("errors") ctx.write("\t\treturn nil, errors.New(\"%s is required\")\n", fieldname) } ctx.write("\t}\n") } // validateSimpleType delegates validator code generation given the name // of the type. func validateSimpleType(ctx *generationContext, fieldname string, typename string, meta *fieldMetadata) { switch typename { case "string": validateString(ctx, fieldname, meta) case "bool": ctx.addImport("strconv") validateBool(ctx, fieldname, meta) case "uint": ctx.addImport("strconv") validateUint(ctx, fieldname, meta, 0) case "uint8": ctx.addImport("strconv") validateUint(ctx, fieldname, meta, 8) case "uint16": ctx.addImport("strconv") validateUint(ctx, fieldname, meta, 16) case "uint32": ctx.addImport("strconv") validateUint(ctx, fieldname, meta, 32) case "uint64": ctx.addImport("strconv") validateUint(ctx, fieldname, meta, 64) case "int": ctx.addImport("strconv") validateInt(ctx, fieldname, meta, 0) case "int8": ctx.addImport("strconv") validateInt(ctx, fieldname, meta, 8) case "int16": ctx.addImport("strconv") validateInt(ctx, fieldname, meta, 16) case "int32": ctx.addImport("strconv") validateInt(ctx, fieldname, meta, 32) case "int64": ctx.addImport("strconv") validateInt(ctx, fieldname, meta, 64) case "float32": ctx.addImport("strconv")<|fim▁hole|> } } // validateUrl writes validator code for a *mail.Address. 
func validateMailAddress(ctx *generationContext, fieldname string, meta *fieldMetadata) { ctx.addVariable(fmt.Sprintf("field_%s_s", fieldname), "string") ctx.addVariable("ok", "bool") ctx.addVariable("err", "error") ctx.write("\tfield_%s_s, ok = data[\"%s\"]\n", fieldname, fieldname) ctx.write("\tif ok {\n") if meta.max != "" { ctx.addImport("errors") ctx.write("\t\tif len(field_%s_s) > %s {\n", fieldname, meta.max) ctx.write("\t\t\treturn nil, errors.New(\"%s can have a length of at most %s\")\n", fieldname, meta.max) ctx.write("\t\t}\n") } if meta.min != "" { ctx.addImport("errors") ctx.write("\t\tif len(field_%s_s) < %s {\n", fieldname, meta.min) ctx.write("\t\t\treturn nil, errors.New(\"%s must have a length of at least %s\")\n", fieldname, meta.min) ctx.write("\t\t}\n") } ctx.write("\t\tret.%s, err = mail.ParseAddress(field_%s_s)\n", fieldname, fieldname) ctx.write("\t\tif err != nil {\n") ctx.write("\t\t\treturn nil, err\n") ctx.write("\t\t}\n") ctx.write("\t} else {\n") if meta.def != nil { ctx.write("\t\t// %s is optional.\n", fieldname) if *meta.def == "" { ctx.write("\t\t// Zero value already set.\n") } else { ctx.write("\t\tret.%s = %s\n", fieldname, *meta.def) } } else { ctx.addImport("errors") ctx.write("\t\treturn nil, errors.New(\"%s is required\")\n", fieldname) } ctx.write("\t}\n") } // validateUrl writes validator code for a *url.URL. 
func validateUrl(ctx *generationContext, fieldname string, meta *fieldMetadata) { ctx.addVariable(fmt.Sprintf("field_%s_s", fieldname), "string") ctx.addVariable("ok", "bool") ctx.addVariable("err", "error") ctx.write("\tfield_%s_s, ok = data[\"%s\"]\n", fieldname, fieldname) ctx.write("\tif ok {\n") if meta.max != "" { ctx.addImport("errors") ctx.write("\t\tif len(field_%s_s) > %s {\n", fieldname, meta.max) ctx.write("\t\t\treturn nil, errors.New(\"%s can have a length of at most %s\")\n", fieldname, meta.max) ctx.write("\t\t}\n") } if meta.min != "" { ctx.addImport("errors") ctx.write("\t\tif len(field_%s_s) < %s {\n", fieldname, meta.min) ctx.write("\t\t\treturn nil, errors.New(\"%s must have a length of at least %s\")\n", fieldname, meta.min) ctx.write("\t\t}\n") } ctx.write("\t\tret.%s, err = url.Parse(field_%s_s)\n", fieldname, fieldname) ctx.write("\t\tif err != nil {\n") ctx.write("\t\t\treturn nil, err\n") ctx.write("\t\t}\n") ctx.write("\t} else {\n") if meta.def != nil { ctx.write("\t\t// %s is optional.\n", fieldname) if *meta.def == "" { ctx.write("\t\t// Zero value already set.\n") } else { ctx.write("\t\tret.%s = %s\n", fieldname, *meta.def) } } else { ctx.addImport("errors") ctx.write("\t\treturn nil, errors.New(\"%s is required\")\n", fieldname) } ctx.write("\t}\n") } // validateSimpleSlice writes validator code for slices of simple types. // It currently supports only slices of bytes. 
func validateSimpleSlice(ctx *generationContext, fieldname, typename string, meta *fieldMetadata) { if typename != "byte" { return } ctx.addVariable(fmt.Sprintf("field_%s_s", fieldname), "string") ctx.addVariable(fmt.Sprintf("field_%s_sl", fieldname), "[]byte") ctx.addVariable("ok", "bool") ctx.addVariable("err", "error") ctx.write("\tfield_%s_s, ok = data[\"%s\"]\n", fieldname, fieldname) ctx.write("\tif ok {\n") ctx.write("\t\tfield_%s_sl = []byte(field_%s_s)\n", fieldname, fieldname) if meta.max != "" { ctx.addImport("errors") ctx.write("\t\tif len(field_%s_sl) > %s {\n", fieldname, meta.max) ctx.write("\t\t\treturn nil, errors.New(\"%s can have a length of at most %s\")\n", fieldname, meta.max) ctx.write("\t\t}\n") } if meta.min != "" { ctx.addImport("errors") ctx.write("\t\tif len(field_%s_sl) < %s {\n", fieldname, meta.min) ctx.write("\t\t\treturn nil, errors.New(\"%s must have a length of at least %s\")\n", fieldname, meta.min) ctx.write("\t\t}\n") } ctx.write("\t\tret.%s = field_%s_sl\n", fieldname, fieldname) ctx.write("\t} else {\n") if meta.def != nil { ctx.write("\t\t// %s is optional.\n", fieldname) if *meta.def == "" { ctx.write("\t\t// Zero value already set.\n") } else { ctx.write("\t\tret.%s = %s\n", fieldname, *meta.def) } } else { ctx.addImport("errors") ctx.write("\t\treturn nil, errors.New(\"%s is required\")\n", fieldname) } ctx.write("\t}\n") } func makeFunctionName(structname string) string { first, _ := utf8.DecodeRune([]byte(structname)) isPublic := unicode.IsUpper(first) if isPublic { return fmt.Sprintf("Validate%s", structname) } return fmt.Sprintf("validate%s", strings.Title(structname)) } func validatorImpl(ctx *generationContext, structtype *ast.StructType) { for _, field := range structtype.Fields.List { fieldname := field.Names[0].Name var tagstring string if field.Tag != nil && field.Tag.Kind == token.STRING { tagstring = field.Tag.Value } else { tagstring = "" } meta := parseFieldMetadata(tagstring) switch field.Type.(type) { // 
We'll look for a simple type. case *ast.Ident: ident := field.Type.(*ast.Ident) typename := ident.Name ctx.write("\n\t// %s %s\n", fieldname, typename) validateSimpleType(ctx, fieldname, typename, meta) // We'll look for a pointer type. case *ast.StarExpr: star := field.Type.(*ast.StarExpr) sel, ok := star.X.(*ast.SelectorExpr) if !ok { continue } pkg, ok2 := sel.X.(*ast.Ident) if !ok2 { continue } pkgname := pkg.Name typename := sel.Sel.Name ctx.write("\n\t// %s *%s.%s\n", fieldname, pkgname, typename) if pkgname == "mail" && typename == "Address" { validateMailAddress(ctx, fieldname, meta) } else if pkgname == "url" && typename == "URL" { validateUrl(ctx, fieldname, meta) } // We'll look for a "simple" slice type. case *ast.ArrayType: array := field.Type.(*ast.ArrayType) if array.Len != nil { // This is actually an _array_ type, not // a slice type. continue } ident, ok := array.Elt.(*ast.Ident) if !ok { // We're interested only in "simple" slices. continue } typename := ident.Name ctx.write("\n\t// %s []%s\n", fieldname, typename) validateSimpleSlice(ctx, fieldname, typename, meta) } } } func declareVariables(ctx *generationContext, vars []variableType) { if len(vars) == 0 { return } // Compute maximum length of variable names so we can do // gofmt-compatible alignment. max := 0 for _, x := range vars { if len(x.name) > max { max = len(x.name) } } ctx.write("\tvar (\n") for _, x := range vars { nspaces := max - len(x.name) + 1 spaces := strings.Repeat(" ", nspaces) ctx.write("\t\t%s%s%s\n", x.name, spaces, x.typeexpr) } ctx.write("\t)\n") } // validator writes validator code for the given struct. It iterates // through the struct fields, and for those for which it can generate // validator code, it does so. It returns whether or not the strconv // package is needed by the generated code. func validator(ctx *generationContext, structname string, structtype *ast.StructType) { // First, buffer inner contents of the function into a secondary // context. 
This is so we can know what variables we'll need to // declare at the top of the function. ctx2 := newContext() // Generate the inner implementation of the validator function. validatorImpl(ctx2, structtype) // Now that that's succeeded, we can actually output all of the // code. funcname := makeFunctionName(structname) ctx.write("\n") // Newline to separate from prior content. // Add descriptive comment, GoDoc/golint compatible. ctx.write("// %s reads data from the given map of strings to\n", funcname) ctx.write("// strings and validates the data into a new *%s.\n", structname) ctx.write("// Fields named in a %s will be recognized as keys.\n", structname) ctx.write("// Keys in the input data that are not fields in the\n") ctx.write("// %s will be ignored. If there is an error\n", structname) ctx.write("// validating any fields, an appropriate error will\n") ctx.write("// be returned.\n") ctx.write("func %s(data map[string]string) (*%s, error) {\n", funcname, structname) ctx.write("\tret := new(%s)\n", structname) // Delcare variables needed by the implementation. declareVariables(ctx, ctx2.getVariables()) // Copy over the inner implementation body itself. Because // we're reading from a buffer, there's no actual error to // handle here. ctx.Buffer.ReadFrom(ctx2.Buffer) ctx.write("\n") ctx.write("\treturn ret, nil\n") ctx.write("}\n") // Migrate needed imports to parent context so caller can get // ahold of them. for _, importName := range ctx2.getImports() { ctx.addImport(importName) } }<|fim▁end|>
validateFloat(ctx, fieldname, meta, 32) case "float64": ctx.addImport("strconv") validateFloat(ctx, fieldname, meta, 64)
<|file_name|>generic_layer_metadata.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ InaSAFE Disaster risk assessment tool developed by AusAid - **metadata module.** Contact : [email protected] .. note:: This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. """ __author__ = '[email protected]' __revision__ = '$Format:%H$' __date__ = '27/05/2015' __copyright__ = ('Copyright 2012, Australia Indonesia Facility for ' 'Disaster Reduction') from xml.etree import ElementTree from safe.metadata import BaseMetadata from safe.metadata.utilities import reading_ancillary_files, prettify_xml class GenericLayerMetadata(BaseMetadata): """ Base class for generic layers such as hazard, exposure and aggregation. This class can be subclassed so you can create only a minimal concrete class that implements only _standard_properties to add specific properties. You can also add a standard XML property that applies to all subclasses here. In both cases do it as explained below. @property and @propname.setter will be generated automatically _standard_properties = { 'TESTprop': ( 'gmd:identificationInfo/' 'gmd:MD_DataIdentification/' 'gmd:supplementalInformation/' 'gco:CharacterString') } from safe.metadata.utils import merge_dictionaries _standard_properties = merge_dictionaries( # change BaseMetadata to GenericLayerMetadata in subclasses BaseMetadata._standard_properties, _standard_properties) .. 
versionadded:: 3.2 """ def __init__(self, layer_uri, xml_uri=None, json_uri=None): """ Constructor :param layer_uri: uri of the layer for which the metadata ae :type layer_uri: str :param xml_uri: uri of an xml file to use :type xml_uri: str :param json_uri: uri of a json file to use :type json_uri: str """ # initialize base class super(GenericLayerMetadata, self).__init__( layer_uri, xml_uri, json_uri) @property def dict(self): """ calls the overridden method :return: dictionary representation of the metadata :rtype: dict """ return super(GenericLayerMetadata, self).dict @property def json(self): """ calls the overridden method :return: json representation of the metadata :rtype: str<|fim▁hole|> @property def xml(self): """ calls the overridden method :return: xml representation of the metadata :rtype: str """ root = super(GenericLayerMetadata, self).xml return prettify_xml(ElementTree.tostring(root)) def read_json(self): """ calls the overridden method :return: the read metadata :rtype: dict """ with reading_ancillary_files(self): metadata = super(GenericLayerMetadata, self).read_json() return metadata def read_xml(self): """ calls the overridden method :return: the read metadata :rtype: ElementTree.Element """ with reading_ancillary_files(self): root = super(GenericLayerMetadata, self).read_xml() return root def update_report(self): """ update the report. """ # TODO (MB): implement this by reading the kw and definitions self.report = self.report raise NotImplementedError()<|fim▁end|>
""" return super(GenericLayerMetadata, self).json
<|file_name|>layout.ts<|end_file_name|><|fim▁begin|>import { Inject, Service } from 'libs/typedi'; import { computed, observable } from 'mobx'; import { Dimensions } from 'core/interfaces'; import { NotesEditorScroll } from 'features/NotesEditor/core'; @Service() export default class NotesEditorLayout { @Inject(_ => NotesEditorScroll) scroll: NotesEditorScroll; @observable<|fim▁hole|> width: 1000, }; @observable rowHeight = 20; @observable pianoRollWidth = 20; @computed get pianoRollDimensions() { return { height: this.dimensions.height, width: this.pianoRollWidth, }; } }<|fim▁end|>
dimensions: Dimensions = { height: 500,
<|file_name|>MemConfig.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 ARM Limited # All rights reserved. # # The license below extends only to copyright in the software and shall # not be construed as granting a license to any other intellectual # property including but not limited to intellectual property relating # to a hardware implementation of the functionality of the software # licensed hereunder. You may use the software subject to the license # terms below provided that you ensure that this notice is replicated # unmodified and in its entirety in all distributions of the software, # modified or unmodified, in source code or in binary form. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer; # redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution; # neither the name of the copyright holders nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # # Authors: Andreas Sandberg # Andreas Hansson import m5.objects import inspect import sys from textwrap import TextWrapper # Dictionary of mapping names of real memory controller models to # classes. _mem_classes = {} # Memory aliases. We make sure they exist before we add them to the # fina; list. A target may be specified as a tuple, in which case the # first available memory controller model in the tuple will be used. _mem_aliases_all = [ ("simple_mem", "SimpleMemory"), ("ddr3_1600_x64", "DDR3_1600_x64"), ("lpddr2_s4_1066_x32", "LPDDR2_S4_1066_x32"), ("lpddr3_1600_x32", "LPDDR3_1600_x32"), ("wio_200_x128", "WideIO_200_x128"), ("dramsim2", "DRAMSim2") ] <|fim▁hole|># Filtered list of aliases. Only aliases for existing memory # controllers exist in this list. _mem_aliases = {} def is_mem_class(cls): """Determine if a class is a memory controller that can be instantiated""" # We can't use the normal inspect.isclass because the ParamFactory # and ProxyFactory classes have a tendency to confuse it. try: return issubclass(cls, m5.objects.AbstractMemory) and \ not cls.abstract except TypeError: return False def get(name): """Get a memory class from a user provided class name or alias.""" real_name = _mem_aliases.get(name, name) try: mem_class = _mem_classes[real_name] return mem_class except KeyError: print "%s is not a valid memory controller." 
% (name,) sys.exit(1) def print_mem_list(): """Print a list of available memory classes including their aliases.""" print "Available memory classes:" doc_wrapper = TextWrapper(initial_indent="\t\t", subsequent_indent="\t\t") for name, cls in _mem_classes.items(): print "\t%s" % name # Try to extract the class documentation from the class help # string. doc = inspect.getdoc(cls) if doc: for line in doc_wrapper.wrap(doc): print line if _mem_aliases: print "\nMemory aliases:" for alias, target in _mem_aliases.items(): print "\t%s => %s" % (alias, target) def mem_names(): """Return a list of valid memory names.""" return _mem_classes.keys() + _mem_aliases.keys() # Add all memory controllers in the object hierarchy. for name, cls in inspect.getmembers(m5.objects, is_mem_class): _mem_classes[name] = cls for alias, target in _mem_aliases_all: if isinstance(target, tuple): # Some aliases contain a list of memory controller models # sorted in priority order. Use the first target that's # available. for t in target: if t in _mem_classes: _mem_aliases[alias] = t break elif target in _mem_classes: # Normal alias _mem_aliases[alias] = target def create_mem_ctrl(cls, r, i, nbr_mem_ctrls, intlv_bits, cache_line_size): """ Helper function for creating a single memoy controller from the given options. This function is invoked multiple times in config_mem function to create an array of controllers. 
""" import math # The default behaviour is to interleave on cache line granularity cache_line_bit = int(math.log(cache_line_size, 2)) - 1 intlv_low_bit = cache_line_bit # Create an instance so we can figure out the address # mapping and row-buffer size ctrl = cls() # Only do this for DRAMs if issubclass(cls, m5.objects.DRAMCtrl): # Inform each controller how many channels to account # for ctrl.channels = nbr_mem_ctrls # If the channel bits are appearing after the column # bits, we need to add the appropriate number of bits # for the row buffer size if ctrl.addr_mapping.value == 'RoRaBaChCo': # This computation only really needs to happen # once, but as we rely on having an instance we # end up having to repeat it for each and every # one rowbuffer_size = ctrl.device_rowbuffer_size.value * \ ctrl.devices_per_rank.value intlv_low_bit = int(math.log(rowbuffer_size, 2)) - 1 # We got all we need to configure the appropriate address # range ctrl.range = m5.objects.AddrRange(r.start, size = r.size(), intlvHighBit = \ intlv_low_bit + intlv_bits, intlvBits = intlv_bits, intlvMatch = i) return ctrl def config_mem(options, system): """ Create the memory controllers based on the options and attach them. If requested, we make a multi-channel configuration of the selected memory controller class by creating multiple instances of the specific class. The individual controllers have their parameters set such that the address range is interleaved between them. 
""" nbr_mem_ctrls = options.mem_channels import math from m5.util import fatal intlv_bits = int(math.log(nbr_mem_ctrls, 2)) if 2 ** intlv_bits != nbr_mem_ctrls: fatal("Number of memory channels must be a power of 2") cls = get(options.mem_type) mem_ctrls = [] # For every range (most systems will only have one), create an # array of controllers and set their parameters to match their # address mapping in the case of a DRAM for r in system.mem_ranges: for i in xrange(nbr_mem_ctrls): mem_ctrls.append(create_mem_ctrl(cls, r, i, nbr_mem_ctrls, intlv_bits, system.cache_line_size.value)) system.mem_ctrls = mem_ctrls # Connect the controllers to the membus for i in xrange(len(system.mem_ctrls)): system.mem_ctrls[i].port = system.membus.master<|fim▁end|>
<|file_name|>MessageExchangesAdapter.java<|end_file_name|><|fim▁begin|>/** * <copyright> * Copyright (c) 2008 IBM Corporation and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * IBM Corporation - initial API and implementation * </copyright> */ package org.eclipse.bpel.ui.adapters; import java.util.List; import org.eclipse.bpel.model.BPELPackage; import org.eclipse.bpel.model.MessageExchanges; import org.eclipse.bpel.ui.BPELUIPlugin; import org.eclipse.bpel.ui.IBPELUIConstants; import org.eclipse.bpel.ui.adapters.delegates.ReferenceContainer; import org.eclipse.bpel.ui.editparts.MessageExchangesEditPart; import org.eclipse.bpel.ui.editparts.OutlineTreeEditPart; import org.eclipse.bpel.ui.properties.PropertiesLabelProvider; import org.eclipse.gef.EditPart; import org.eclipse.gef.EditPartFactory; import org.eclipse.swt.graphics.Image; import org.eclipse.bpel.ui.Messages; /** * * @author Miriam Grundig ([email protected]) */ public class MessageExchangesAdapter extends ContainerAdapter implements EditPartFactory, ILabeledElement, IOutlineEditPartFactory, ITrayEditPartFactory { public MessageExchangesAdapter() { super(); } /* IContainer delegate */ public IContainer createContainerDelegate() { return new ReferenceContainer(BPELPackage.eINSTANCE.getMessageExchanges_Children()); } /* EditPartFactory */ public EditPart createEditPart(EditPart context, Object model) {<|fim▁hole|> return result; } /* ITrayEditPartFactory */ public EditPart createTrayEditPart(EditPart context, Object model) { return createEditPart(context, model); } /* ILabeledElement */ public Image getSmallImage(Object object) { return BPELUIPlugin.INSTANCE.getImage(IBPELUIConstants.ICON_MESSAGEEXCHANGE_16); } public Image getLargeImage(Object object) { return 
BPELUIPlugin.INSTANCE.getImage(IBPELUIConstants.ICON_MESSAGEEXCHANGE_32); } public String getTypeLabel(Object object) { return Messages.MessageExchangesAdapter_TypeLabel; } public String getLabel(Object object) { return Messages.MessageExchangesAdapter_Label; } /* IOutlineEditPartFactory */ public EditPart createOutlineEditPart(EditPart context, final Object model) { EditPart result = new OutlineTreeEditPart(){ protected List getModelChildren() { MessageExchanges messageExchanges = (MessageExchanges) model; List list = messageExchanges.getChildren(); return list; } }; result.setModel(model); return result; } }<|fim▁end|>
MessageExchangesEditPart result = new MessageExchangesEditPart(); result.setLabelProvider(PropertiesLabelProvider.getInstance()); result.setModel(model);
<|file_name|>udp_dispatcher.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- # Author: Tamas Gal <[email protected]> # License: MIT #!/usr/bin/env python # vim: ts=4 sw=4 et """ ============================= UDP Forwarder for ControlHost ============================= A simple UDP forwarder for ControlHost messages. This application is used to forward monitoring channel data from Ligier to a given UDP address. """ import socket import sys<|fim▁hole|> __author__ = "Tamas Gal" __email__ = "[email protected]" class UDPForwarder(kp.Module): def configure(self): self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) self.counter = 0 def process(self, blob): if str(blob["CHPrefix"].tag) == "IO_MONIT": self.sock.sendto(blob["CHData"], ("127.0.0.1", 56017)) if self.counter % 100 == 0: sys.stdout.write(".") sys.stdout.flush() self.counter += 1 return blob pipe = kp.Pipeline() pipe.attach( kp.io.CHPump, host="localhost", port=5553, tags="IO_MONIT", timeout=60 * 60 * 24 * 7, max_queue=1000, timeit=True, ) pipe.attach(UDPForwarder) pipe.drain()<|fim▁end|>
import km3pipe as kp
<|file_name|>25.py<|end_file_name|><|fim▁begin|>instr = [x.strip().split(' ') for x in open("input/dec25").readlines()] skip = {} modified = {} #instr[1] = ['add', 'a', '2572'] #skip[2] = skip[3] = skip[4] = skip[5] = skip[6] = skip[7] = skip[8] = skip[9] = True<|fim▁hole|>#modified[6] = modified[7] = modified[8] = True #instr[9] = ['mul', 'a', 'd'] # multiplies a with d #skip[10] = True #modified[9] = modified[10] = True """instr[10] = ['add', 'a', 'b'] # adds b to a, sets b to 0 skip[11] = True skip[12] = True""" #instr[14] = ['mul', 'a', 'd'] # multiplies a with d #skip[15] = True def print_program(inss): i = 0 for inst in inss: prefix = ' # ' if i in skip else ' ' print(prefix, i, inst) i += 1 print_program(instr) # evaluated a couple of numbers, found that it found the binary representation of a number, found # first number above 2572 (which instr 1 - 9 adds to the number) that repeats itself (ends with 0 and is 101010 etc.) # and subtracted 2572 for x in [158]: pc = 0 reg = {'a': x, 'b': 0, 'c': 0, 'd': 0} output = '' while pc < len(instr): if pc in skip: pc += 1 continue inst = instr[pc] if inst[0] == 'add': v = reg[inst[2]] if inst[2] in reg else int(inst[2]) reg[inst[1]] += v reg[inst[2]] = 0 pc += 1 elif inst[0] == 'mul': reg[inst[1]] *= reg[inst[2]] reg[inst[2]] = 0 pc += 1 elif inst[0] == 'cpy': if inst[2] in reg: if inst[1] in reg: reg[inst[2]] = reg[inst[1]] else: reg[inst[2]] = int(inst[1]) pc += 1 elif inst[0] == 'inc': reg[inst[1]] += 1 pc += 1 elif inst[0] == 'dec': reg[inst[1]] -= 1 pc += 1 elif inst[0] == 'jnz': if (inst[1] in reg and reg[inst[1]] != 0) or (inst[1] not in reg and int(inst[1]) != 0): if inst[2] in reg: pc += reg[inst[2]] else: pc += int(inst[2]) else: pc += 1 elif inst[0] == 'tgl': if inst[1] in reg: d = pc + reg[inst[1]] # valid if d < len(instr) and d >= 0: if d in modified: print("modified instruction tggled") if len(instr[d]) == 2: if instr[d][0] == 'inc': instr[d][0] = 'dec' else: instr[d][0] = 'inc' elif len(instr[d]) == 3: 
if instr[d][0] == 'jnz': instr[d][0] = 'cpy' else: instr[d][0] = 'jnz' else: print(" invalid register", inst[1]) pc += 1 elif inst[0] == 'out': v = reg[inst[1]] if inst[1] in reg else inst[1] output += str(v) print(output) #if len(output) > 1 and output != '01': # break #elif len(output) > 1: # print("THIS IS IT", x) pc += 1 else: print("INVALID INSTRUCTION", inst) if pc == 8: print(reg) if pc == 28: print('loop', reg) if pc == 29: print(x, bin(x), bin(x+2572), output) break print(reg['a'])<|fim▁end|>
#instr[6] = ['add', 'a', 'c'] # adds c to d, sets c to 0 #skip[7] = True #skip[8] = True
<|file_name|>path.rs<|end_file_name|><|fim▁begin|>//! This module contains functions for file system path manipulation. use std::{ ffi::{OsStr, OsString}, path::{Path, PathBuf}, }; /// This traits extends the available methods on [`Path`]. pub trait PathExt { /// Iterator over all file extensions of a [`Path`]. /// /// This iterator provides access to all file extensions from starting with the last extension. /// File extensions are separated by a `.`-character. This supplements the [`Path::extension`] method, /// which only allows you to access the last file extension. /// /// Accessing multiple extension can be useful, if extensions are chained to provide hints how the /// file is structured, e.g., `archive.tar.xz`. ///<|fim▁hole|> /// /// ```rust /// # use misc_utils::path::PathExt; /// # use std::ffi::OsStr; /// # use std::path::Path; /// # /// let p = &Path::new("/home/user/projects/misc_utils/This.File.has.many.extensions"); /// assert_eq!( /// p.extensions().collect::<Vec<_>>(), /// vec![ /// OsStr::new("extensions"), /// OsStr::new("many"), /// OsStr::new("has"), /// OsStr::new("File") /// ] /// ); /// ``` fn extensions(&'_ self) -> PathExtensions<'_>; } impl PathExt for Path { fn extensions(&'_ self) -> PathExtensions<'_> { PathExtensions(self) } } /// This traits extends the available methods on [`PathBuf`]. pub trait PathBufExt { /// Appends `extension` to [`self.file_name`](Path::file_name). /// /// Returns false and does nothing if [`self.file_name`](Path::file_name) is [`None`], returns `true` and appends the extension otherwise. /// /// The API and documentation should fully mirror [`PathBuf::set_extension`]. 
fn add_extension<S: AsRef<OsStr>>(&mut self, extension: S) -> bool; } impl PathBufExt for PathBuf { fn add_extension<S: AsRef<OsStr>>(&mut self, extension: S) -> bool { if self.file_name().is_none() { return false; } let mut stem = match self.file_name() { Some(stem) => stem.to_os_string(), None => OsString::new(), }; if !extension.as_ref().is_empty() { stem.push("."); stem.push(extension.as_ref()); } self.set_file_name(&stem); true } } /// Iterator over all file extensions of a [`Path`]. /// /// This iterator provides access to all file extensions from starting with the last extension. /// File extensions are separated by a `.`-character. This supplements the [`Path::extension`] method, /// which only allows you to access the last file extension. /// /// Accessing multiple extension can be useful, if extensions are chained to provide hints how the /// file is structured, e.g., `archive.tar.xz`. /// /// # Example /// /// ```rust /// # use misc_utils::path::PathExt; /// # use std::ffi::OsStr; /// # use std::path::Path; /// # /// let p = &Path::new("/home/user/projects/misc_utils/This.File.has.many.extensions"); /// assert_eq!( /// p.extensions().collect::<Vec<_>>(), /// vec![ /// OsStr::new("extensions"), /// OsStr::new("many"), /// OsStr::new("has"), /// OsStr::new("File") /// ] /// ); /// ``` #[derive(Copy, Clone, Debug)] pub struct PathExtensions<'a>(&'a Path); impl<'a> Iterator for PathExtensions<'a> { type Item = &'a OsStr; fn next(&mut self) -> Option<&'a OsStr> { let (new_filestem, new_extension) = (self.0.file_stem(), self.0.extension()); if new_extension.is_none() { self.0 = Path::new(""); None } else { if let Some(new_filestem) = new_filestem { self.0 = Path::new(new_filestem); } else { self.0 = Path::new("") }; new_extension } } } #[test] fn test_path_extensions() { let p = &Path::new("/home/user/projects/misc_utils/Cargo.toml"); assert_eq!(p.extensions().collect::<Vec<_>>(), vec![OsStr::new("toml")]); let p = 
&Path::new("/home/user/projects/misc_utils/This.File.has.many.extensions"); assert_eq!( p.extensions().collect::<Vec<_>>(), vec![ OsStr::new("extensions"), OsStr::new("many"), OsStr::new("has"), OsStr::new("File") ] ); let p = &Path::new("/home/user/projects/misc_utils/.hidden"); assert_eq!(p.extensions().collect::<Vec<_>>(), Vec::<&OsStr>::new()); let p = &Path::new("Just-A.file"); assert_eq!(p.extensions().collect::<Vec<_>>(), vec![OsStr::new("file")]); } #[test] fn test_pathbuf_extensions() { let p = PathBuf::from("/home/user/projects/misc_utils/Cargo.toml"); assert_eq!(p.extensions().collect::<Vec<_>>(), vec![OsStr::new("toml")]); let p = PathBuf::from("/home/user/projects/misc_utils/This.File.has.many.extensions"); assert_eq!( p.extensions().collect::<Vec<_>>(), vec![ OsStr::new("extensions"), OsStr::new("many"), OsStr::new("has"), OsStr::new("File") ] ); let p = PathBuf::from("/home/user/projects/misc_utils/.hidden"); assert_eq!(p.extensions().collect::<Vec<_>>(), Vec::<&OsStr>::new()); let p = PathBuf::from("Just-A.file"); assert_eq!(p.extensions().collect::<Vec<_>>(), vec![OsStr::new("file")]); } #[test] fn test_add_extension() { let mut pb = PathBuf::from("some.file"); assert_eq!(pb, Path::new("some.file")); assert!(pb.add_extension("a")); assert_eq!(pb, Path::new("some.file.a")); assert!(pb.add_extension("b")); assert_eq!(pb, Path::new("some.file.a.b")); assert!(pb.add_extension("c")); assert_eq!(pb, Path::new("some.file.a.b.c")); let mut pb = PathBuf::from("/"); assert!(!pb.add_extension("ext")); }<|fim▁end|>
/// # Example
<|file_name|>activate.service.js<|end_file_name|><|fim▁begin|>(function() { 'use strict'; <|fim▁hole|> Activate.$inject = ['$resource']; function Activate ($resource) { var service = $resource('api/activate', {}, { 'get': { method: 'GET', params: {}, isArray: false} }); return service; } })();<|fim▁end|>
angular .module('echarliApp') .factory('Activate', Activate);
<|file_name|>0003_change_type_va_first_use.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from south.utils import datetime_utils as datetime from south.db import db from south.v2 import DataMigration from django.db import models class Migration(DataMigration): def forwards(self, orm): for u in orm.Upfront.objects.all(): if u.va_first_use == 'Y': u.new_va_first_use = True elif u.va_first_use == 'N': u.new_va_first_use = False u.save() def backwards(self, orm): raise RuntimeError("Cannot reverse this migration.") models = { u'mortgageinsurance.monthly': { 'Meta': {'object_name': 'Monthly'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'insurer': ('django.db.models.fields.CharField', [], {'max_length': '200'}), 'loan_term': ('django.db.models.fields.IntegerField', [], {}), 'max_fico': ('django.db.models.fields.IntegerField', [], {}), 'max_loan_amt': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}), 'max_ltv': ('django.db.models.fields.DecimalField', [], {'max_digits': '6', 'decimal_places': '3'}),<|fim▁hole|> 'min_fico': ('django.db.models.fields.IntegerField', [], {}), 'min_loan_amt': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}), 'min_ltv': ('django.db.models.fields.DecimalField', [], {'max_digits': '6', 'decimal_places': '3'}), 'pmt_type': ('django.db.models.fields.CharField', [], {'max_length': '12'}), 'premium': ('django.db.models.fields.DecimalField', [], {'max_digits': '6', 'decimal_places': '3'}) }, u'mortgageinsurance.upfront': { 'Meta': {'object_name': 'Upfront'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'loan_type': ('django.db.models.fields.CharField', [], {'max_length': '12'}), 'max_ltv': ('django.db.models.fields.DecimalField', [], {'max_digits': '6', 'decimal_places': '3'}), 'min_ltv': ('django.db.models.fields.DecimalField', [], {'max_digits': '6', 'decimal_places': '3'}), 'new_va_first_use': 
('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}), 'premium': ('django.db.models.fields.DecimalField', [], {'max_digits': '6', 'decimal_places': '3'}), 'va_first_use': ('django.db.models.fields.CharField', [], {'max_length': '3', 'blank': 'True'}), 'va_status': ('django.db.models.fields.CharField', [], {'max_length': '12', 'blank': 'True'}) } } complete_apps = ['mortgageinsurance'] symmetrical = True<|fim▁end|>
<|file_name|>failure.py<|end_file_name|><|fim▁begin|>""" SleekXMPP: The Sleek XMPP Library Copyright (C) 2011 Nathanael C. Fritz This file is part of SleekXMPP. See the file LICENSE for copying permission. """ from sleekxmpp.stanza import StreamFeatures from sleekxmpp.xmlstream import ElementBase, StanzaBase, ET from sleekxmpp.xmlstream import register_stanza_plugin <|fim▁hole|> """ name = 'failure' namespace = 'urn:ietf:params:xml:ns:xmpp-sasl' interfaces = set(('condition', 'text')) plugin_attrib = name sub_interfaces = set(('text',)) conditions = set(('aborted', 'account-disabled', 'credentials-expired', 'encryption-required', 'incorrect-encoding', 'invalid-authzid', 'invalid-mechanism', 'malformed-request', 'mechansism-too-weak', 'not-authorized', 'temporary-auth-failure')) def setup(self, xml=None): """ Populate the stanza object using an optional XML object. Overrides ElementBase.setup. Sets a default error type and condition, and changes the parent stanza's type to 'error'. Arguments: xml -- Use an existing XML object for the stanza's values. """ # StanzaBase overrides self.namespace self.namespace = Failure.namespace if StanzaBase.setup(self, xml): #If we had to generate XML then set default values. self['condition'] = 'not-authorized' self.xml.tag = self.tag_name() def get_condition(self): """Return the condition element's name.""" for child in self.xml.getchildren(): if "{%s}" % self.namespace in child.tag: cond = child.tag.split('}', 1)[-1] if cond in self.conditions: return cond return 'not-authorized' def set_condition(self, value): """ Set the tag name of the condition element. Arguments: value -- The tag name of the condition element. 
""" if value in self.conditions: del self['condition'] self.xml.append(ET.Element("{%s}%s" % (self.namespace, value))) return self def del_condition(self): """Remove the condition element.""" for child in self.xml.getchildren(): if "{%s}" % self.condition_ns in child.tag: tag = child.tag.split('}', 1)[-1] if tag in self.conditions: self.xml.remove(child) return self<|fim▁end|>
class Failure(StanzaBase): """
<|file_name|>tracelog.py<|end_file_name|><|fim▁begin|># # A PyGtk-based Python Trace Collector window # # Copyright (C) 2007 TK Soh <[email protected]> # import pygtk pygtk.require("2.0") import gtk import gobject import pango import threading import Queue import win32trace try: from gitgtk.gitlib import toutf except ImportError: import locale _encoding = locale.getpreferredencoding() def toutf(s): return s.decode(_encoding, 'replace').encode('utf-8') class TraceLog(): def __init__(self): self.window = gtk.Window(gtk.WINDOW_TOPLEVEL) self.window.set_title("Python Trace Collector") # construct window self.window.set_default_size(700, 400) self.main_area = gtk.VBox() self.window.add(self.main_area) # mimic standard dialog widgets self.action_area = gtk.HBox() self.main_area.pack_end(self.action_area, False, False, 5) sep = gtk.HSeparator() self.main_area.pack_end(sep, False, False, 0) self.vbox = gtk.VBox() self.main_area.pack_end(self.vbox) # add python trace ouput window<|fim▁hole|> self.textview.set_editable(False) self.textview.modify_font(pango.FontDescription("Monospace")) scrolledwindow.add(self.textview) self.textview.set_editable(False) self.textbuffer = self.textview.get_buffer() self.vbox.pack_start(scrolledwindow, True, True) self.vbox.show_all() # add buttons self._button_quit = gtk.Button("Quit") self._button_quit.connect('clicked', self._on_ok_clicked) self.action_area.pack_end(self._button_quit, False, False, 5) self._button_clear = gtk.Button("Clear") self._button_clear.connect('clicked', self._on_clear_clicked) self.action_area.pack_end(self._button_clear, False, False, 5) # add assorted window event handlers self.window.connect('map_event', self._on_window_map_event) self.window.connect('delete_event', self._on_window_close_clicked) def _on_ok_clicked(self, button): self._stop_read_thread() gtk.main_quit() def _on_clear_clicked(self, button): self.write("", False) def _on_window_close_clicked(self, event, param): self._stop_read_thread() 
gtk.main_quit() def _on_window_map_event(self, event, param): self._begin_trace() def _begin_trace(self): self.queue = Queue.Queue() win32trace.InitRead() self.write("Collecting Python Trace Output...\n") gobject.timeout_add(10, self._process_queue) self._start_read_thread() def _start_read_thread(self): self._read_trace = True self.thread1 = threading.Thread(target=self._do_read_trace) self.thread1.start() def _stop_read_thread(self): self._read_trace = False # wait for worker thread to to fix Unhandled exception in thread self.thread1.join() def _process_queue(self): """ Handle all the messages currently in the queue (if any). """ while self.queue.qsize(): try: msg = self.queue.get(0) self.write(msg) except Queue.Empty: pass return True def _do_read_trace(self): """ print buffer collected in win32trace """ while self._read_trace: msg = win32trace.read() if msg: self.queue.put(msg) def write(self, msg, append=True): msg = toutf(msg) if append: enditer = self.textbuffer.get_end_iter() self.textbuffer.insert(enditer, msg) else: self.textbuffer.set_text(msg) def main(self): self.window.show_all() gtk.main() def run(): dlg = TraceLog() dlg.main() if __name__ == "__main__": run()<|fim▁end|>
scrolledwindow = gtk.ScrolledWindow() scrolledwindow.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC) self.textview = gtk.TextView(buffer=None)
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>import numpy as np def weighted_pick(weights): t = np.cumsum(weights) s = np.sum(weights) return(int(np.searchsorted(t, np.random.rand(1)*s))) def list_to_string(ascii_list): res = u"" for a in ascii_list: if a >= 0 and a < 256: res += unichr(a)<|fim▁hole|><|fim▁end|>
return res
<|file_name|>22-cli-productivity-illustration.js<|end_file_name|><|fim▁begin|>import * as React from "react"; import Slide from "../../components/slide"; const bgImage = require("../../images/hulk-feriggno-bixby.jpg"); class ImageSlide extends React.Component { render() { return ( <span/> ); }<|fim▁hole|> <Slide key="intro-cli-productivity-illustration" bgImage={bgImage}> <ImageSlide/> </Slide> );<|fim▁end|>
} export default (
<|file_name|>methodData.hpp<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2000, 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. * */ #ifndef SHARE_VM_OOPS_METHODDATAOOP_HPP #define SHARE_VM_OOPS_METHODDATAOOP_HPP #include "interpreter/bytecodes.hpp" #include "memory/universe.hpp" #include "oops/method.hpp" #include "oops/oop.hpp" #include "runtime/orderAccess.hpp" class BytecodeStream; class KlassSizeStats; // The MethodData object collects counts and other profile information // during zeroth-tier (interpretive) and first-tier execution. // The profile is used later by compilation heuristics. Some heuristics // enable use of aggressive (or "heroic") optimizations. An aggressive // optimization often has a down-side, a corner case that it handles // poorly, but which is thought to be rare. The profile provides // evidence of this rarity for a given method or even BCI. It allows // the compiler to back out of the optimization at places where it // has historically been a poor choice. 
Other heuristics try to use // specific information gathered about types observed at a given site. // // All data in the profile is approximate. It is expected to be accurate // on the whole, but the system expects occasional inaccuraces, due to // counter overflow, multiprocessor races during data collection, space // limitations, missing MDO blocks, etc. Bad or missing data will degrade // optimization quality but will not affect correctness. Also, each MDO // is marked with its birth-date ("creation_mileage") which can be used // to assess the quality ("maturity") of its data. // // Short (<32-bit) counters are designed to overflow to a known "saturated" // state. Also, certain recorded per-BCI events are given one-bit counters // which overflow to a saturated state which applied to all counters at // that BCI. In other words, there is a small lattice which approximates // the ideal of an infinite-precision counter for each event at each BCI, // and the lattice quickly "bottoms out" in a state where all counters // are taken to be indefinitely large. // // The reader will find many data races in profile gathering code, starting // with invocation counter incrementation. None of these races harm correct // execution of the compiled code. // forward decl class ProfileData; // DataLayout // // Overlay for generic profiling data. class DataLayout VALUE_OBJ_CLASS_SPEC { private: // Every data layout begins with a header. This header // contains a tag, which is used to indicate the size/layout // of the data, 4 bits of flags, which can be used in any way, // 4 bits of trap history (none/one reason/many reasons), // and a bci, which is used to tie this piece of data to a // specific bci in the bytecodes. union { intptr_t _bits; struct { u1 _tag; u1 _flags; u2 _bci; } _struct; } _header; // The data layout has an arbitrary number of cells, each sized // to accomodate a pointer or an integer. intptr_t _cells[1]; // Some types of data layouts need a length field. 
static bool needs_array_len(u1 tag); public: enum { counter_increment = 1 }; enum { cell_size = sizeof(intptr_t) }; // Tag values enum { no_tag, bit_data_tag, counter_data_tag, jump_data_tag, receiver_type_data_tag, virtual_call_data_tag, ret_data_tag, branch_data_tag, multi_branch_data_tag, arg_info_data_tag }; enum { // The _struct._flags word is formatted as [trap_state:4 | flags:4]. // The trap state breaks down further as [recompile:1 | reason:3]. // This further breakdown is defined in deoptimization.cpp. // See Deoptimization::trap_state_reason for an assert that // trap_bits is big enough to hold reasons < Reason_RECORDED_LIMIT. // // The trap_state is collected only if ProfileTraps is true. trap_bits = 1+3, // 3: enough to distinguish [0..Reason_RECORDED_LIMIT]. trap_shift = BitsPerByte - trap_bits, trap_mask = right_n_bits(trap_bits), trap_mask_in_place = (trap_mask << trap_shift), flag_limit = trap_shift, flag_mask = right_n_bits(flag_limit), first_flag = 0 }; // Size computation static int header_size_in_bytes() { return cell_size; } static int header_size_in_cells() { return 1; } static int compute_size_in_bytes(int cell_count) { return header_size_in_bytes() + cell_count * cell_size; } // Initialization void initialize(u1 tag, u2 bci, int cell_count); // Accessors u1 tag() { return _header._struct._tag; } // Return a few bits of trap state. Range is [0..trap_mask]. // The state tells if traps with zero, one, or many reasons have occurred. // It also tells whether zero or many recompilations have occurred. // The associated trap histogram in the MDO itself tells whether // traps are common or not. If a BCI shows that a trap X has // occurred, and the MDO shows N occurrences of X, we make the // simplifying assumption that all N occurrences can be blamed // on that BCI. 
int trap_state() { return ((_header._struct._flags >> trap_shift) & trap_mask); } void set_trap_state(int new_state) { assert(ProfileTraps, "used only under +ProfileTraps"); uint old_flags = (_header._struct._flags & flag_mask); _header._struct._flags = (new_state << trap_shift) | old_flags; } u1 flags() { return _header._struct._flags; } u2 bci() { return _header._struct._bci; } void set_header(intptr_t value) { _header._bits = value; } void release_set_header(intptr_t value) { OrderAccess::release_store_ptr(&_header._bits, value); } intptr_t header() { return _header._bits; } void set_cell_at(int index, intptr_t value) { _cells[index] = value; } void release_set_cell_at(int index, intptr_t value) { OrderAccess::release_store_ptr(&_cells[index], value); } intptr_t cell_at(int index) { return _cells[index]; } void set_flag_at(int flag_number) { assert(flag_number < flag_limit, "oob"); _header._struct._flags |= (0x1 << flag_number); } bool flag_at(int flag_number) { assert(flag_number < flag_limit, "oob"); return (_header._struct._flags & (0x1 << flag_number)) != 0; } // Low-level support for code generation. static ByteSize header_offset() { return byte_offset_of(DataLayout, _header); } static ByteSize tag_offset() { return byte_offset_of(DataLayout, _header._struct._tag); } static ByteSize flags_offset() { return byte_offset_of(DataLayout, _header._struct._flags); } static ByteSize bci_offset() { return byte_offset_of(DataLayout, _header._struct._bci); } static ByteSize cell_offset(int index) { return byte_offset_of(DataLayout, _cells) + in_ByteSize(index * cell_size); } // Return a value which, when or-ed as a byte into _flags, sets the flag. static int flag_number_to_byte_constant(int flag_number) { assert(0 <= flag_number && flag_number < flag_limit, "oob"); DataLayout temp; temp.set_header(0); temp.set_flag_at(flag_number); return temp._header._struct._flags; } // Return a value which, when or-ed as a word into _header, sets the flag. 
static intptr_t flag_mask_to_header_mask(int byte_constant) { DataLayout temp; temp.set_header(0); temp._header._struct._flags = byte_constant; return temp._header._bits; } ProfileData* data_in(); // GC support void clean_weak_klass_links(BoolObjectClosure* cl); }; // ProfileData class hierarchy class ProfileData; class BitData; class CounterData; class ReceiverTypeData; class VirtualCallData; class RetData; class JumpData; class BranchData; class ArrayData; class MultiBranchData; class ArgInfoData; // ProfileData // // A ProfileData object is created to refer to a section of profiling // data in a structured way. class ProfileData : public ResourceObj { private: #ifndef PRODUCT enum { tab_width_one = 16, tab_width_two = 36 }; #endif // !PRODUCT // This is a pointer to a section of profiling data. DataLayout* _data; protected: DataLayout* data() { return _data; } enum { cell_size = DataLayout::cell_size }; public: // How many cells are in this? virtual int cell_count() { ShouldNotReachHere(); return -1; } // Return the size of this data. 
int size_in_bytes() { return DataLayout::compute_size_in_bytes(cell_count()); } protected: // Low-level accessors for underlying data void set_intptr_at(int index, intptr_t value) { assert(0 <= index && index < cell_count(), "oob"); data()->set_cell_at(index, value); } void release_set_intptr_at(int index, intptr_t value) { assert(0 <= index && index < cell_count(), "oob"); data()->release_set_cell_at(index, value); } intptr_t intptr_at(int index) { assert(0 <= index && index < cell_count(), "oob"); return data()->cell_at(index); } void set_uint_at(int index, uint value) { set_intptr_at(index, (intptr_t) value); } void release_set_uint_at(int index, uint value) { release_set_intptr_at(index, (intptr_t) value); } uint uint_at(int index) { return (uint)intptr_at(index); } void set_int_at(int index, int value) { set_intptr_at(index, (intptr_t) value); } void release_set_int_at(int index, int value) { release_set_intptr_at(index, (intptr_t) value); } int int_at(int index) { return (int)intptr_at(index); } int int_at_unchecked(int index) { return (int)data()->cell_at(index); } void set_oop_at(int index, oop value) { set_intptr_at(index, (intptr_t) value); } oop oop_at(int index) { return (oop)intptr_at(index); } void set_flag_at(int flag_number) { data()->set_flag_at(flag_number); } bool flag_at(int flag_number) { return data()->flag_at(flag_number); } // two convenient imports for use by subclasses: static ByteSize cell_offset(int index) { return DataLayout::cell_offset(index); } static int flag_number_to_byte_constant(int flag_number) { return DataLayout::flag_number_to_byte_constant(flag_number); } ProfileData(DataLayout* data) { _data = data; } public: // Constructor for invalid ProfileData. 
ProfileData(); u2 bci() { return data()->bci(); } address dp() { return (address)_data; } int trap_state() { return data()->trap_state(); } void set_trap_state(int new_state) { data()->set_trap_state(new_state); } // Type checking virtual bool is_BitData() { return false; } virtual bool is_CounterData() { return false; } virtual bool is_JumpData() { return false; } virtual bool is_ReceiverTypeData(){ return false; } virtual bool is_VirtualCallData() { return false; } virtual bool is_RetData() { return false; } virtual bool is_BranchData() { return false; } virtual bool is_ArrayData() { return false; } virtual bool is_MultiBranchData() { return false; } virtual bool is_ArgInfoData() { return false; } BitData* as_BitData() { assert(is_BitData(), "wrong type"); return is_BitData() ? (BitData*) this : NULL; } CounterData* as_CounterData() { assert(is_CounterData(), "wrong type"); return is_CounterData() ? (CounterData*) this : NULL; } JumpData* as_JumpData() { assert(is_JumpData(), "wrong type"); return is_JumpData() ? (JumpData*) this : NULL; } ReceiverTypeData* as_ReceiverTypeData() { assert(is_ReceiverTypeData(), "wrong type"); return is_ReceiverTypeData() ? (ReceiverTypeData*)this : NULL; } VirtualCallData* as_VirtualCallData() { assert(is_VirtualCallData(), "wrong type"); return is_VirtualCallData() ? (VirtualCallData*)this : NULL; } RetData* as_RetData() { assert(is_RetData(), "wrong type"); return is_RetData() ? (RetData*) this : NULL; } BranchData* as_BranchData() { assert(is_BranchData(), "wrong type"); return is_BranchData() ? (BranchData*) this : NULL; } ArrayData* as_ArrayData() { assert(is_ArrayData(), "wrong type"); return is_ArrayData() ? (ArrayData*) this : NULL; } MultiBranchData* as_MultiBranchData() { assert(is_MultiBranchData(), "wrong type"); return is_MultiBranchData() ? (MultiBranchData*)this : NULL; } ArgInfoData* as_ArgInfoData() { assert(is_ArgInfoData(), "wrong type"); return is_ArgInfoData() ? 
(ArgInfoData*)this : NULL; } // Subclass specific initialization virtual void post_initialize(BytecodeStream* stream, MethodData* mdo) {} // GC support virtual void clean_weak_klass_links(BoolObjectClosure* is_alive_closure) {} // CI translation: ProfileData can represent both MethodDataOop data // as well as CIMethodData data. This function is provided for translating // an oop in a ProfileData to the ci equivalent. Generally speaking, // most ProfileData don't require any translation, so we provide the null // translation here, and the required translators are in the ci subclasses. virtual void translate_from(ProfileData* data) {} virtual void print_data_on(outputStream* st) { ShouldNotReachHere(); } #ifndef PRODUCT void print_shared(outputStream* st, const char* name); void tab(outputStream* st); #endif }; // BitData // // A BitData holds a flag or two in its header. class BitData : public ProfileData { protected: enum { // null_seen: // saw a null operand (cast/aastore/instanceof) null_seen_flag = DataLayout::first_flag + 0 }; enum { bit_cell_count = 0 }; // no additional data fields needed. public: BitData(DataLayout* layout) : ProfileData(layout) { } virtual bool is_BitData() { return true; } static int static_cell_count() { return bit_cell_count; } virtual int cell_count() { return static_cell_count(); } // Accessor // The null_seen flag bit is specially known to the interpreter. // Consulting it allows the compiler to avoid setting up null_check traps. bool null_seen() { return flag_at(null_seen_flag); } void set_null_seen() { set_flag_at(null_seen_flag); } // Code generation support static int null_seen_byte_constant() { return flag_number_to_byte_constant(null_seen_flag); } static ByteSize bit_data_size() { return cell_offset(bit_cell_count); } #ifndef PRODUCT void print_data_on(outputStream* st); #endif }; // CounterData // // A CounterData corresponds to a simple counter. 
class CounterData : public BitData { protected: enum { count_off, counter_cell_count }; public: CounterData(DataLayout* layout) : BitData(layout) {} virtual bool is_CounterData() { return true; } static int static_cell_count() { return counter_cell_count; } virtual int cell_count() { return static_cell_count(); } // Direct accessor uint count() { return uint_at(count_off); } // Code generation support static ByteSize count_offset() { return cell_offset(count_off); } static ByteSize counter_data_size() { return cell_offset(counter_cell_count); } void set_count(uint count) { set_uint_at(count_off, count); } #ifndef PRODUCT void print_data_on(outputStream* st); #endif }; // JumpData // // A JumpData is used to access profiling information for a direct // branch. It is a counter, used for counting the number of branches, // plus a data displacement, used for realigning the data pointer to // the corresponding target bci. class JumpData : public ProfileData { protected: enum { taken_off_set, displacement_off_set, jump_cell_count }; void set_displacement(int displacement) { set_int_at(displacement_off_set, displacement); }<|fim▁hole|> assert(layout->tag() == DataLayout::jump_data_tag || layout->tag() == DataLayout::branch_data_tag, "wrong type"); } virtual bool is_JumpData() { return true; } static int static_cell_count() { return jump_cell_count; } virtual int cell_count() { return static_cell_count(); } // Direct accessor uint taken() { return uint_at(taken_off_set); } void set_taken(uint cnt) { set_uint_at(taken_off_set, cnt); } // Saturating counter uint inc_taken() { uint cnt = taken() + 1; // Did we wrap? Will compiler screw us?? if (cnt == 0) cnt--; set_uint_at(taken_off_set, cnt); return cnt; } int displacement() { return int_at(displacement_off_set); } // Code generation support static ByteSize taken_offset() { return cell_offset(taken_off_set); } static ByteSize displacement_offset() { return cell_offset(displacement_off_set); } // Specific initialization. 
void post_initialize(BytecodeStream* stream, MethodData* mdo); #ifndef PRODUCT void print_data_on(outputStream* st); #endif }; // ReceiverTypeData // // A ReceiverTypeData is used to access profiling information about a // dynamic type check. It consists of a counter which counts the total times // that the check is reached, and a series of (Klass*, count) pairs // which are used to store a type profile for the receiver of the check. class ReceiverTypeData : public CounterData { protected: enum { receiver0_offset = counter_cell_count, count0_offset, receiver_type_row_cell_count = (count0_offset + 1) - receiver0_offset }; public: ReceiverTypeData(DataLayout* layout) : CounterData(layout) { assert(layout->tag() == DataLayout::receiver_type_data_tag || layout->tag() == DataLayout::virtual_call_data_tag, "wrong type"); } virtual bool is_ReceiverTypeData() { return true; } static int static_cell_count() { return counter_cell_count + (uint) TypeProfileWidth * receiver_type_row_cell_count; } virtual int cell_count() { return static_cell_count(); } // Direct accessors static uint row_limit() { return TypeProfileWidth; } static int receiver_cell_index(uint row) { return receiver0_offset + row * receiver_type_row_cell_count; } static int receiver_count_cell_index(uint row) { return count0_offset + row * receiver_type_row_cell_count; } Klass* receiver(uint row) { assert(row < row_limit(), "oob"); Klass* recv = (Klass*)intptr_at(receiver_cell_index(row)); assert(recv == NULL || recv->is_klass(), "wrong type"); return recv; } void set_receiver(uint row, Klass* k) { assert((uint)row < row_limit(), "oob"); set_intptr_at(receiver_cell_index(row), (uintptr_t)k); } uint receiver_count(uint row) { assert(row < row_limit(), "oob"); return uint_at(receiver_count_cell_index(row)); } void set_receiver_count(uint row, uint count) { assert(row < row_limit(), "oob"); set_uint_at(receiver_count_cell_index(row), count); } void clear_row(uint row) { assert(row < row_limit(), "oob"); // Clear 
total count - indicator of polymorphic call site. // The site may look like as monomorphic after that but // it allow to have more accurate profiling information because // there was execution phase change since klasses were unloaded. // If the site is still polymorphic then MDO will be updated // to reflect it. But it could be the case that the site becomes // only bimorphic. Then keeping total count not 0 will be wrong. // Even if we use monomorphic (when it is not) for compilation // we will only have trap, deoptimization and recompile again // with updated MDO after executing method in Interpreter. // An additional receiver will be recorded in the cleaned row // during next call execution. // // Note: our profiling logic works with empty rows in any slot. // We do sorting a profiling info (ciCallProfile) for compilation. // set_count(0); set_receiver(row, NULL); set_receiver_count(row, 0); } // Code generation support static ByteSize receiver_offset(uint row) { return cell_offset(receiver_cell_index(row)); } static ByteSize receiver_count_offset(uint row) { return cell_offset(receiver_count_cell_index(row)); } static ByteSize receiver_type_data_size() { return cell_offset(static_cell_count()); } // GC support virtual void clean_weak_klass_links(BoolObjectClosure* is_alive_closure); #ifndef PRODUCT void print_receiver_data_on(outputStream* st); void print_data_on(outputStream* st); #endif }; // VirtualCallData // // A VirtualCallData is used to access profiling information about a // virtual call. For now, it has nothing more than a ReceiverTypeData. class VirtualCallData : public ReceiverTypeData { public: VirtualCallData(DataLayout* layout) : ReceiverTypeData(layout) { assert(layout->tag() == DataLayout::virtual_call_data_tag, "wrong type"); } virtual bool is_VirtualCallData() { return true; } static int static_cell_count() { // At this point we could add more profile state, e.g., for arguments. // But for now it's the same size as the base record type. 
return ReceiverTypeData::static_cell_count(); } virtual int cell_count() { return static_cell_count(); } // Direct accessors static ByteSize virtual_call_data_size() { return cell_offset(static_cell_count()); } #ifndef PRODUCT void print_data_on(outputStream* st); #endif }; // RetData // // A RetData is used to access profiling information for a ret bytecode. // It is composed of a count of the number of times that the ret has // been executed, followed by a series of triples of the form // (bci, count, di) which count the number of times that some bci was the // target of the ret and cache a corresponding data displacement. class RetData : public CounterData { protected: enum { bci0_offset = counter_cell_count, count0_offset, displacement0_offset, ret_row_cell_count = (displacement0_offset + 1) - bci0_offset }; void set_bci(uint row, int bci) { assert((uint)row < row_limit(), "oob"); set_int_at(bci0_offset + row * ret_row_cell_count, bci); } void release_set_bci(uint row, int bci) { assert((uint)row < row_limit(), "oob"); // 'release' when setting the bci acts as a valid flag for other // threads wrt bci_count and bci_displacement. release_set_int_at(bci0_offset + row * ret_row_cell_count, bci); } void set_bci_count(uint row, uint count) { assert((uint)row < row_limit(), "oob"); set_uint_at(count0_offset + row * ret_row_cell_count, count); } void set_bci_displacement(uint row, int disp) { set_int_at(displacement0_offset + row * ret_row_cell_count, disp); } public: RetData(DataLayout* layout) : CounterData(layout) { assert(layout->tag() == DataLayout::ret_data_tag, "wrong type"); } virtual bool is_RetData() { return true; } enum { no_bci = -1 // value of bci when bci1/2 are not in use. 
}; static int static_cell_count() { return counter_cell_count + (uint) BciProfileWidth * ret_row_cell_count; } virtual int cell_count() { return static_cell_count(); } static uint row_limit() { return BciProfileWidth; } static int bci_cell_index(uint row) { return bci0_offset + row * ret_row_cell_count; } static int bci_count_cell_index(uint row) { return count0_offset + row * ret_row_cell_count; } static int bci_displacement_cell_index(uint row) { return displacement0_offset + row * ret_row_cell_count; } // Direct accessors int bci(uint row) { return int_at(bci_cell_index(row)); } uint bci_count(uint row) { return uint_at(bci_count_cell_index(row)); } int bci_displacement(uint row) { return int_at(bci_displacement_cell_index(row)); } // Interpreter Runtime support address fixup_ret(int return_bci, MethodData* mdo); // Code generation support static ByteSize bci_offset(uint row) { return cell_offset(bci_cell_index(row)); } static ByteSize bci_count_offset(uint row) { return cell_offset(bci_count_cell_index(row)); } static ByteSize bci_displacement_offset(uint row) { return cell_offset(bci_displacement_cell_index(row)); } // Specific initialization. void post_initialize(BytecodeStream* stream, MethodData* mdo); #ifndef PRODUCT void print_data_on(outputStream* st); #endif }; // BranchData // // A BranchData is used to access profiling data for a two-way branch. // It consists of taken and not_taken counts as well as a data displacement // for the taken case. 
class BranchData : public JumpData {
protected:
  enum {
    // not-taken counter lives in the cell after JumpData's cells
    not_taken_off_set = jump_cell_count,
    branch_cell_count
  };

  void set_displacement(int displacement) {
    set_int_at(displacement_off_set, displacement);
  }

public:
  BranchData(DataLayout* layout) : JumpData(layout) {
    assert(layout->tag() == DataLayout::branch_data_tag, "wrong type");
  }

  virtual bool is_BranchData() { return true; }

  static int static_cell_count() {
    return branch_cell_count;
  }

  virtual int cell_count() {
    return static_cell_count();
  }

  // Direct accessor
  uint not_taken() {
    return uint_at(not_taken_off_set);
  }

  void set_not_taken(uint cnt) {
    set_uint_at(not_taken_off_set, cnt);
  }

  // Saturating increment, mirroring JumpData::inc_taken().
  uint inc_not_taken() {
    uint cnt = not_taken() + 1;
    // Did we wrap? Will compiler screw us??
    if (cnt == 0) cnt--;
    set_uint_at(not_taken_off_set, cnt);
    return cnt;
  }

  // Code generation support
  static ByteSize not_taken_offset() {
    return cell_offset(not_taken_off_set);
  }
  static ByteSize branch_data_size() {
    return cell_offset(branch_cell_count);
  }

  // Specific initialization.
  void post_initialize(BytecodeStream* stream, MethodData* mdo);

#ifndef PRODUCT
  void print_data_on(outputStream* st);
#endif
};

// ArrayData
//
// An ArrayData is a base class for accessing profiling data which does
// not have a statically known size.  It consists of an array length
// and an array start.
class ArrayData : public ProfileData {
protected:
  friend class DataLayout;

  enum {
    array_len_off_set,
    array_start_off_set
  };

  // All array_* accessors index relative to the start of the
  // variable-length portion (i.e. past the length cell).
  uint array_uint_at(int index) {
    int aindex = index + array_start_off_set;
    return uint_at(aindex);
  }
  int array_int_at(int index) {
    int aindex = index + array_start_off_set;
    return int_at(aindex);
  }
  oop array_oop_at(int index) {
    int aindex = index + array_start_off_set;
    return oop_at(aindex);
  }
  void array_set_int_at(int index, int value) {
    int aindex = index + array_start_off_set;
    set_int_at(aindex, value);
  }

  // Code generation support for subclasses.
  static ByteSize array_element_offset(int index) {
    return cell_offset(array_start_off_set + index);
  }

public:
  ArrayData(DataLayout* layout) : ProfileData(layout) {}

  virtual bool is_ArrayData() { return true; }

  // Variable-sized records have no static cell count.
  static int static_cell_count() {
    return -1;
  }

  int array_len() {
    return int_at_unchecked(array_len_off_set);
  }

  // Total cells: the length cell itself plus array_len() payload cells.
  virtual int cell_count() {
    return array_len() + 1;
  }

  // Code generation support
  static ByteSize array_len_offset() {
    return cell_offset(array_len_off_set);
  }
  static ByteSize array_start_offset() {
    return cell_offset(array_start_off_set);
  }
};

// MultiBranchData
//
// A MultiBranchData is used to access profiling information for
// a multi-way branch (*switch bytecodes).  It consists of a series
// of (count, displacement) pairs, which count the number of times each
// case was taken and specify the data displacment for each branch target.
class MultiBranchData : public ArrayData {
protected:
  // NOTE(review): "disaplacement" is a long-standing typo in this
  // identifier; it is kept as-is for compatibility with existing code.
  enum {
    default_count_off_set,
    default_disaplacement_off_set,
    case_array_start
  };
  // Per-case layout within the case array.
  enum {
    relative_count_off_set,
    relative_displacement_off_set,
    per_case_cell_count
  };

  void set_default_displacement(int displacement) {
    array_set_int_at(default_disaplacement_off_set, displacement);
  }
  void set_displacement_at(int index, int displacement) {
    array_set_int_at(case_array_start +
                     index * per_case_cell_count +
                     relative_displacement_off_set,
                     displacement);
  }

public:
  MultiBranchData(DataLayout* layout) : ArrayData(layout) {
    assert(layout->tag() == DataLayout::multi_branch_data_tag, "wrong type");
  }

  virtual bool is_MultiBranchData() { return true; }

  static int compute_cell_count(BytecodeStream* stream);

  int number_of_cases() {
    int alen = array_len() - 2; // get rid of default case here.
    assert(alen % per_case_cell_count == 0, "must be even");
    return (alen / per_case_cell_count);
  }

  uint default_count() {
    return array_uint_at(default_count_off_set);
  }
  int default_displacement() {
    return array_int_at(default_disaplacement_off_set);
  }

  uint count_at(int index) {
    return array_uint_at(case_array_start +
                         index * per_case_cell_count +
                         relative_count_off_set);
  }
  int displacement_at(int index) {
    return array_int_at(case_array_start +
                        index * per_case_cell_count +
                        relative_displacement_off_set);
  }

  // Code generation support
  static ByteSize default_count_offset() {
    return array_element_offset(default_count_off_set);
  }
  static ByteSize default_displacement_offset() {
    return array_element_offset(default_disaplacement_off_set);
  }
  static ByteSize case_count_offset(int index) {
    return case_array_offset() +
           (per_case_size() * index) +
           relative_count_offset();
  }
  static ByteSize case_array_offset() {
    return array_element_offset(case_array_start);
  }
  static ByteSize per_case_size() {
    return in_ByteSize(per_case_cell_count) * cell_size;
  }
  static ByteSize relative_count_offset() {
    return in_ByteSize(relative_count_off_set) * cell_size;
  }
  static ByteSize relative_displacement_offset() {
    return in_ByteSize(relative_displacement_off_set) * cell_size;
  }

  // Specific initialization.
  void post_initialize(BytecodeStream* stream, MethodData* mdo);

#ifndef PRODUCT
  void print_data_on(outputStream* st);
#endif
};

// ArgInfoData
//
// An ArgInfoData holds one "modified" value per method argument,
// stored in a variable-length array (see arg_modified/set_arg_modified).
class ArgInfoData : public ArrayData {
public:
  ArgInfoData(DataLayout* layout) : ArrayData(layout) {
    assert(layout->tag() == DataLayout::arg_info_data_tag, "wrong type");
  }

  virtual bool is_ArgInfoData() { return true; }

  int number_of_args() {
    return array_len();
  }

  uint arg_modified(int arg) {
    return array_uint_at(arg);
  }

  void set_arg_modified(int arg, uint val) {
    array_set_int_at(arg, val);
  }

#ifndef PRODUCT
  void print_data_on(outputStream* st);
#endif
};

// MethodData*
//
// A MethodData* holds information which has been collected about
// a method.  Its layout looks like this:
//
// -----------------------------
// | header                    |
// | klass                     |
// -----------------------------
// | method                    |
// | size of the MethodData*   |
// -----------------------------
// | Data entries...           |
// |   (variable size)         |
// |                           |
// .                           .
// .                           .
// .                           .
// |                           |
// -----------------------------
//
// The data entry area is a heterogeneous array of DataLayouts. Each
// DataLayout in the array corresponds to a specific bytecode in the
// method.  The entries in the array are sorted by the corresponding
// bytecode.  Access to the data is via resource-allocated ProfileData,
// which point to the underlying blocks of DataLayout structures.
//
// During interpretation, if profiling in enabled, the interpreter
// maintains a method data pointer (mdp), which points at the entry
// in the array corresponding to the current bci.  In the course of
// intepretation, when a bytecode is encountered that has profile data
// associated with it, the entry pointed to by mdp is updated, then the
// mdp is adjusted to point to the next appropriate DataLayout.  If mdp
// is NULL to begin with, the interpreter assumes that the current method
// is not (yet) being profiled.
//
// In MethodData* parlance, "dp" is a "data pointer", the actual address
// of a DataLayout element.  A "di" is a "data index", the offset in bytes
// from the base of the data entry array.  A "displacement" is the byte offset
// in certain ProfileData objects that indicate the amount the mdp must be
// adjusted in the event of a change in control flow.
// class MethodData : public Metadata { friend class VMStructs; private: friend class ProfileData; // Back pointer to the Method* Method* _method; // Size of this oop in bytes int _size; // Cached hint for bci_to_dp and bci_to_data int _hint_di; MethodData(methodHandle method, int size, TRAPS); public: static MethodData* allocate(ClassLoaderData* loader_data, methodHandle method, TRAPS); MethodData() {}; // For ciMethodData bool is_methodData() const volatile { return true; } // Whole-method sticky bits and flags enum { _trap_hist_limit = 17, // decoupled from Deoptimization::Reason_LIMIT _trap_hist_mask = max_jubyte, _extra_data_count = 4 // extra DataLayout headers, for trap history }; // Public flag values private: uint _nof_decompiles; // count of all nmethod removals uint _nof_overflow_recompiles; // recompile count, excluding recomp. bits uint _nof_overflow_traps; // trap count, excluding _trap_hist union { intptr_t _align; u1 _array[_trap_hist_limit]; } _trap_hist; // Support for interprocedural escape analysis, from Thomas Kotzmann. intx _eflags; // flags on escape information intx _arg_local; // bit set of non-escaping arguments intx _arg_stack; // bit set of stack-allocatable arguments intx _arg_returned; // bit set of returned arguments int _creation_mileage; // method mileage at MDO creation // How many invocations has this MDO seen? // These counters are used to determine the exact age of MDO. // We need those because in tiered a method can be concurrently // executed at different levels. InvocationCounter _invocation_counter; // Same for backedges. InvocationCounter _backedge_counter; // Counter values at the time profiling started. int _invocation_counter_start; int _backedge_counter_start; // Number of loops and blocks is computed when compiling the first // time with C1. It is used to determine if method is trivial. short _num_loops; short _num_blocks; // Highest compile level this method has ever seen. 
u1 _highest_comp_level; // Same for OSR level u1 _highest_osr_comp_level; // Does this method contain anything worth profiling? bool _would_profile; // Size of _data array in bytes. (Excludes header and extra_data fields.) int _data_size; // Beginning of the data entries intptr_t _data[1]; // Helper for size computation static int compute_data_size(BytecodeStream* stream); static int bytecode_cell_count(Bytecodes::Code code); enum { no_profile_data = -1, variable_cell_count = -2 }; // Helper for initialization DataLayout* data_layout_at(int data_index) const { assert(data_index % sizeof(intptr_t) == 0, "unaligned"); return (DataLayout*) (((address)_data) + data_index); } // Initialize an individual data segment. Returns the size of // the segment in bytes. int initialize_data(BytecodeStream* stream, int data_index); // Helper for data_at DataLayout* limit_data_position() const { return (DataLayout*)((address)data_base() + _data_size); } bool out_of_bounds(int data_index) const { return data_index >= data_size(); } // Give each of the data entries a chance to perform specific // data initialization. void post_initialize(BytecodeStream* stream); // hint accessors int hint_di() const { return _hint_di; } void set_hint_di(int di) { assert(!out_of_bounds(di), "hint_di out of bounds"); _hint_di = di; } ProfileData* data_before(int bci) { // avoid SEGV on this edge case if (data_size() == 0) return NULL; int hint = hint_di(); if (data_layout_at(hint)->bci() <= bci) return data_at(hint); return first_data(); } // What is the index of the first data entry? int first_di() const { return 0; } // Find or create an extra ProfileData: ProfileData* bci_to_extra_data(int bci, bool create_if_missing); // return the argument info cell ArgInfoData *arg_info(); public: static int header_size() { return sizeof(MethodData)/wordSize; } // Compute the size of a MethodData* before it is created. 
static int compute_allocation_size_in_bytes(methodHandle method); static int compute_allocation_size_in_words(methodHandle method); static int compute_extra_data_count(int data_size, int empty_bc_count); // Determine if a given bytecode can have profile information. static bool bytecode_has_profile(Bytecodes::Code code) { return bytecode_cell_count(code) != no_profile_data; } // Perform initialization of a new MethodData* void initialize(methodHandle method); // My size int size_in_bytes() const { return _size; } int size() const { return align_object_size(align_size_up(_size, BytesPerWord)/BytesPerWord); } #if INCLUDE_SERVICES void collect_statistics(KlassSizeStats *sz) const; #endif int creation_mileage() const { return _creation_mileage; } void set_creation_mileage(int x) { _creation_mileage = x; } int invocation_count() { if (invocation_counter()->carry()) { return InvocationCounter::count_limit; } return invocation_counter()->count(); } int backedge_count() { if (backedge_counter()->carry()) { return InvocationCounter::count_limit; } return backedge_counter()->count(); } int invocation_count_start() { if (invocation_counter()->carry()) { return 0; } return _invocation_counter_start; } int backedge_count_start() { if (backedge_counter()->carry()) { return 0; } return _backedge_counter_start; } int invocation_count_delta() { return invocation_count() - invocation_count_start(); } int backedge_count_delta() { return backedge_count() - backedge_count_start(); } void reset_start_counters() { _invocation_counter_start = invocation_count(); _backedge_counter_start = backedge_count(); } InvocationCounter* invocation_counter() { return &_invocation_counter; } InvocationCounter* backedge_counter() { return &_backedge_counter; } void set_would_profile(bool p) { _would_profile = p; } bool would_profile() const { return _would_profile; } int highest_comp_level() { return _highest_comp_level; } void set_highest_comp_level(int level) { _highest_comp_level = level; } int 
highest_osr_comp_level() { return _highest_osr_comp_level; } void set_highest_osr_comp_level(int level) { _highest_osr_comp_level = level; } int num_loops() const { return _num_loops; } void set_num_loops(int n) { _num_loops = n; } int num_blocks() const { return _num_blocks; } void set_num_blocks(int n) { _num_blocks = n; } bool is_mature() const; // consult mileage and ProfileMaturityPercentage static int mileage_of(Method* m); // Support for interprocedural escape analysis, from Thomas Kotzmann. enum EscapeFlag { estimated = 1 << 0, return_local = 1 << 1, return_allocated = 1 << 2, allocated_escapes = 1 << 3, unknown_modified = 1 << 4 }; intx eflags() { return _eflags; } intx arg_local() { return _arg_local; } intx arg_stack() { return _arg_stack; } intx arg_returned() { return _arg_returned; } uint arg_modified(int a) { ArgInfoData *aid = arg_info(); assert(a >= 0 && a < aid->number_of_args(), "valid argument number"); return aid->arg_modified(a); } void set_eflags(intx v) { _eflags = v; } void set_arg_local(intx v) { _arg_local = v; } void set_arg_stack(intx v) { _arg_stack = v; } void set_arg_returned(intx v) { _arg_returned = v; } void set_arg_modified(int a, uint v) { ArgInfoData *aid = arg_info(); assert(a >= 0 && a < aid->number_of_args(), "valid argument number"); aid->set_arg_modified(a, v); } void clear_escape_info() { _eflags = _arg_local = _arg_stack = _arg_returned = 0; } // Location and size of data area address data_base() const { return (address) _data; } int data_size() const { return _data_size; } // Accessors Method* method() const { return _method; } // Get the data at an arbitrary (sort of) data index. ProfileData* data_at(int data_index) const; // Walk through the data in order. ProfileData* first_data() const { return data_at(first_di()); } ProfileData* next_data(ProfileData* current) const; bool is_valid(ProfileData* current) const { return current != NULL; } // Convert a dp (data pointer) to a di (data index). 
int dp_to_di(address dp) const { return dp - ((address)_data); } address di_to_dp(int di) { return (address)data_layout_at(di); } // bci to di/dp conversion. address bci_to_dp(int bci); int bci_to_di(int bci) { return dp_to_di(bci_to_dp(bci)); } // Get the data at an arbitrary bci, or NULL if there is none. ProfileData* bci_to_data(int bci); // Same, but try to create an extra_data record if one is needed: ProfileData* allocate_bci_to_data(int bci) { ProfileData* data = bci_to_data(bci); return (data != NULL) ? data : bci_to_extra_data(bci, true); } // Add a handful of extra data records, for trap tracking. DataLayout* extra_data_base() const { return limit_data_position(); } DataLayout* extra_data_limit() const { return (DataLayout*)((address)this + size_in_bytes()); } int extra_data_size() const { return (address)extra_data_limit() - (address)extra_data_base(); } static DataLayout* next_extra(DataLayout* dp) { return (DataLayout*)((address)dp + in_bytes(DataLayout::cell_offset(0))); } // Return (uint)-1 for overflow. uint trap_count(int reason) const { assert((uint)reason < _trap_hist_limit, "oob"); return (int)((_trap_hist._array[reason]+1) & _trap_hist_mask) - 1; } // For loops: static uint trap_reason_limit() { return _trap_hist_limit; } static uint trap_count_limit() { return _trap_hist_mask; } uint inc_trap_count(int reason) { // Count another trap, anywhere in this method. assert(reason >= 0, "must be single trap"); if ((uint)reason < _trap_hist_limit) { uint cnt1 = 1 + _trap_hist._array[reason]; if ((cnt1 & _trap_hist_mask) != 0) { // if no counter overflow... _trap_hist._array[reason] = cnt1; return cnt1; } else { return _trap_hist_mask + (++_nof_overflow_traps); } } else { // Could not represent the count in the histogram. 
return (++_nof_overflow_traps); } } uint overflow_trap_count() const { return _nof_overflow_traps; } uint overflow_recompile_count() const { return _nof_overflow_recompiles; } void inc_overflow_recompile_count() { _nof_overflow_recompiles += 1; } uint decompile_count() const { return _nof_decompiles; } void inc_decompile_count() { _nof_decompiles += 1; if (decompile_count() > (uint)PerMethodRecompilationCutoff) { method()->set_not_compilable(CompLevel_full_optimization, true, "decompile_count > PerMethodRecompilationCutoff"); } } // Support for code generation static ByteSize data_offset() { return byte_offset_of(MethodData, _data[0]); } static ByteSize invocation_counter_offset() { return byte_offset_of(MethodData, _invocation_counter); } static ByteSize backedge_counter_offset() { return byte_offset_of(MethodData, _backedge_counter); } // Deallocation support - no pointer fields to deallocate void deallocate_contents(ClassLoaderData* loader_data) {} // GC support void set_size(int object_size_in_bytes) { _size = object_size_in_bytes; } // Printing #ifndef PRODUCT void print_on (outputStream* st) const; #endif void print_value_on(outputStream* st) const; #ifndef PRODUCT // printing support for method data void print_data_on(outputStream* st) const; #endif const char* internal_name() const { return "{method data}"; } // verification void verify_on(outputStream* st); void verify_data_on(outputStream* st); }; #endif // SHARE_VM_OOPS_METHODDATAOOP_HPP<|fim▁end|>
public: JumpData(DataLayout* layout) : ProfileData(layout) {
<|file_name|>with_primitives.rs<|end_file_name|><|fim▁begin|>// @has with_primitives.json "$.index[*][?(@.name=='WithPrimitives')].visibility" \"public\" // @has - "$.index[*][?(@.name=='WithPrimitives')].kind" \"struct\" // @has - "$.index[*][?(@.name=='WithPrimitives')].inner.generics.params[0].name" \"\'a\" // @has - "$.index[*][?(@.name=='WithPrimitives')].inner.generics.params[0].kind.lifetime.outlives" [] // @has - "$.index[*][?(@.name=='WithPrimitives')].inner.struct_type" \"plain\" // @has - "$.index[*][?(@.name=='WithPrimitives')].inner.fields_stripped" true<|fim▁hole|> s: &'a str, }<|fim▁end|>
pub struct WithPrimitives<'a> { num: u32,
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>__version__ = '1.0.1'<|fim▁end|>
from __future__ import unicode_literals from .service import Service # noqa:flake8
<|file_name|>if.py<|end_file_name|><|fim▁begin|>x = int(input()) y = int(input()) print('In this test case x =', x, 'and y =', y) if x >= y: print('(The maximum is x)') theMax = x<|fim▁hole|>else: print('(The maximum is y)') theMax = y print('The maximum is', theMax)<|fim▁end|>
<|file_name|>SDODataObjectGetDataObjectConversionTest.java<|end_file_name|><|fim▁begin|>/******************************************************************************* * Copyright (c) 1998, 2012 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * Oracle - initial API and implementation from Oracle TopLink ******************************************************************************/ package org.eclipse.persistence.testing.sdo.model.dataobject; import commonj.sdo.Property; import junit.textui.TestRunner; import org.eclipse.persistence.sdo.SDOConstants; import org.eclipse.persistence.sdo.SDODataObject; import org.eclipse.persistence.sdo.SDOProperty; import org.eclipse.persistence.sdo.SDOType; import org.eclipse.persistence.exceptions.SDOException; public class SDODataObjectGetDataObjectConversionTest extends SDODataObjectConversionTestCases { public SDODataObjectGetDataObjectConversionTest(String name) { super(name); } public static void main(String[] args) { String[] arguments = { "-c", "org.eclipse.persistence.testing.sdo.model.dataobject.SDODataObjectGetDataObjectConversionTest" }; TestRunner.main(arguments); } public void testGetDataObjectConversionFromDefinedProperty() { SDOType dataObjectType = (SDOType) typeHelper.getType(SDOConstants.SDO_URL, SDOConstants.DATAOBJECT); SDOProperty property = ((SDOProperty)dataObject.getInstanceProperty(PROPERTY_NAME)); property.setType(dataObjectType); SDODataObject b = new SDODataObject(); <|fim▁hole|> dataObject.setDataObject(property, b);// add it to instance list this.assertEquals(b, 
dataObject.getDataObject(property)); } public void testGetDataObjectConversionFromDefinedPropertyWithPath() { SDOType dataObjectType = (SDOType) typeHelper.getType(SDOConstants.SDO_URL, SDOConstants.DATAOBJECT); // dataObject's type add boolean property SDOProperty property = ((SDOProperty)dataObject.getInstanceProperty(PROPERTY_NAME)); property.setType(dataObjectType); SDODataObject b = new SDODataObject(); dataObject.setDataObject(PROPERTY_NAME, b);// add it to instance list this.assertEquals(b, dataObject.getDataObject(property)); } //2. purpose: getDataObject with Undefined Boolean Property public void testGetDataObjectConversionFromUndefinedProperty() { SDOType dataObjectType = (SDOType) typeHelper.getType(SDOConstants.SDO_URL, SDOConstants.DATAOBJECT); SDOProperty property = new SDOProperty(aHelperContext); property.setName(PROPERTY_NAME); property.setType(dataObjectType); try { dataObject.getDataObject(property); fail("IllegalArgumentException should be thrown."); } catch (IllegalArgumentException e) { } } //3. 
purpose: getDataObject with property set to boolean value public void testGetDataObjectConversionFromProperty() { SDOType dataObjectType = (SDOType) typeHelper.getType(SDOConstants.SDO_URL, SDOConstants.DATAOBJECT); // dataObject's type add boolean property SDOProperty property = ((SDOProperty)dataObject.getInstanceProperty(PROPERTY_NAME)); property.setType(dataObjectType); type.setOpen(true); boolean b = true; dataObject.set(property, b);// add it to instance list try { dataObject.getDataObject(property); fail("ClassCastException should be thrown."); } catch (ClassCastException e) { } } //purpose: getDataObject with nul value public void testGetDataObjectConversionWithNullArgument() { try { Property p = null; dataObject.getDataObject(p); fail("IllegalArgumentException should be thrown."); } catch (IllegalArgumentException e) { } } //purpose: getBoolean with Defined Boolean Property public void testGetDataObjectConversionFromPropertyIndex() { SDOType dataObjectType = (SDOType) typeHelper.getType(SDOConstants.SDO_URL, SDOConstants.DATAOBJECT); // dataObject's type add boolean property SDOProperty property = ((SDOProperty)dataObject.getInstanceProperty(PROPERTY_NAME)); property.setType(dataObjectType); type.addDeclaredProperty(property); type.setOpen(true); SDODataObject b = new SDODataObject(); dataObject.setDataObject(PROPERTY_INDEX, b);// add it to instance list this.assertEquals(b, dataObject.getDataObject(PROPERTY_INDEX)); } //purpose: getDouble with nul value public void testGetDataObjectWithInvalidIndex() { try { int p = -1; dataObject.getDataObject(p); } catch (SDOException e) { assertEquals(SDOException.PROPERTY_NOT_FOUND_AT_INDEX ,e.getErrorCode()); return; } fail("an SDOException should have occurred."); } }<|fim▁end|>
<|file_name|>auth.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors # MIT License. See license.txt from __future__ import unicode_literals import datetime from frappe import _ import frappe import frappe.database import frappe.utils from frappe.utils import cint, flt, get_datetime, datetime, date_diff, today import frappe.utils.user from frappe import conf from frappe.sessions import Session, clear_sessions, delete_session from frappe.modules.patch_handler import check_session_stopped from frappe.translate import get_lang_code from frappe.utils.password import check_password, delete_login_failed_cache from frappe.core.doctype.activity_log.activity_log import add_authentication_log from frappe.twofactor import (should_run_2fa, authenticate_for_2factor, confirm_otp_token, get_cached_user_pass) from frappe.website.utils import get_home_page from six.moves.urllib.parse import quote class HTTPRequest: def __init__(self): # Get Environment variables self.domain = frappe.request.host if self.domain and self.domain.startswith('www.'): self.domain = self.domain[4:] if frappe.get_request_header('X-Forwarded-For'): frappe.local.request_ip = (frappe.get_request_header('X-Forwarded-For').split(",")[0]).strip() elif frappe.get_request_header('REMOTE_ADDR'): frappe.local.request_ip = frappe.get_request_header('REMOTE_ADDR') else: frappe.local.request_ip = '127.0.0.1' # language self.set_lang() # load cookies frappe.local.cookie_manager = CookieManager() # set db self.connect() # login frappe.local.login_manager = LoginManager() if frappe.form_dict._lang: lang = get_lang_code(frappe.form_dict._lang) if lang: frappe.local.lang = lang self.validate_csrf_token() # write out latest cookies frappe.local.cookie_manager.init_cookies() # check status check_session_stopped() def validate_csrf_token(self): if frappe.local.request and frappe.local.request.method in ("POST", "PUT", "DELETE"): if not frappe.local.session: return if not 
frappe.local.session.data.csrf_token \ or frappe.local.session.data.device=="mobile" \ or frappe.conf.get('ignore_csrf', None): # not via boot return csrf_token = frappe.get_request_header("X-Frappe-CSRF-Token") if not csrf_token and "csrf_token" in frappe.local.form_dict: csrf_token = frappe.local.form_dict.csrf_token del frappe.local.form_dict["csrf_token"] if frappe.local.session.data.csrf_token != csrf_token: frappe.local.flags.disable_traceback = True frappe.throw(_("Invalid Request"), frappe.CSRFTokenError)<|fim▁hole|> def get_db_name(self): """get database name from conf""" return conf.db_name def connect(self, ac_name = None): """connect to db, from ac_name or db_name""" frappe.local.db = frappe.database.get_db(user = self.get_db_name(), \ password = getattr(conf, 'db_password', '')) class LoginManager: def __init__(self): self.user = None self.info = None self.full_name = None self.user_type = None if frappe.local.form_dict.get('cmd')=='login' or frappe.local.request.path=="/api/method/login": if self.login()==False: return self.resume = False # run login triggers self.run_trigger('on_session_creation') else: try: self.resume = True self.make_session(resume=True) self.get_user_info() self.set_user_info(resume=True) except AttributeError: self.user = "Guest" self.get_user_info() self.make_session() self.set_user_info() def login(self): # clear cache frappe.clear_cache(user = frappe.form_dict.get('usr')) user, pwd = get_cached_user_pass() self.authenticate(user=user, pwd=pwd) if self.force_user_to_reset_password(): doc = frappe.get_doc("User", self.user) frappe.local.response["redirect_to"] = doc.reset_password(send_email=False, password_expired=True) frappe.local.response["message"] = "Password Reset" return False if should_run_2fa(self.user): authenticate_for_2factor(self.user) if not confirm_otp_token(self): return False self.post_login() def post_login(self): self.run_trigger('on_login') validate_ip_address(self.user) self.validate_hour() 
self.get_user_info() self.make_session() self.setup_boot_cache() self.set_user_info() def get_user_info(self, resume=False): self.info = frappe.db.get_value("User", self.user, ["user_type", "first_name", "last_name", "user_image"], as_dict=1) self.user_type = self.info.user_type def setup_boot_cache(self): frappe.cache_manager.build_table_count_cache() frappe.cache_manager.build_domain_restriced_doctype_cache() frappe.cache_manager.build_domain_restriced_page_cache() def set_user_info(self, resume=False): # set sid again frappe.local.cookie_manager.init_cookies() self.full_name = " ".join(filter(None, [self.info.first_name, self.info.last_name])) if self.info.user_type=="Website User": frappe.local.cookie_manager.set_cookie("system_user", "no") if not resume: frappe.local.response["message"] = "No App" frappe.local.response["home_page"] = '/' + get_home_page() else: frappe.local.cookie_manager.set_cookie("system_user", "yes") if not resume: frappe.local.response['message'] = 'Logged In' frappe.local.response["home_page"] = "/desk" if not resume: frappe.response["full_name"] = self.full_name # redirect information redirect_to = frappe.cache().hget('redirect_after_login', self.user) if redirect_to: frappe.local.response["redirect_to"] = redirect_to frappe.cache().hdel('redirect_after_login', self.user) frappe.local.cookie_manager.set_cookie("full_name", self.full_name) frappe.local.cookie_manager.set_cookie("user_id", self.user) frappe.local.cookie_manager.set_cookie("user_image", self.info.user_image or "") def make_session(self, resume=False): # start session frappe.local.session_obj = Session(user=self.user, resume=resume, full_name=self.full_name, user_type=self.user_type) # reset user if changed to Guest self.user = frappe.local.session_obj.user frappe.local.session = frappe.local.session_obj.data self.clear_active_sessions() def clear_active_sessions(self): """Clear other sessions of the current user if `deny_multiple_sessions` is not set""" if not 
(cint(frappe.conf.get("deny_multiple_sessions")) or cint(frappe.db.get_system_setting('deny_multiple_sessions'))): return if frappe.session.user != "Guest": clear_sessions(frappe.session.user, keep_current=True) def authenticate(self, user=None, pwd=None): if not (user and pwd): user, pwd = frappe.form_dict.get('usr'), frappe.form_dict.get('pwd') if not (user and pwd): self.fail(_('Incomplete login details'), user=user) if cint(frappe.db.get_value("System Settings", "System Settings", "allow_login_using_mobile_number")): user = frappe.db.get_value("User", filters={"mobile_no": user}, fieldname="name") or user if cint(frappe.db.get_value("System Settings", "System Settings", "allow_login_using_user_name")): user = frappe.db.get_value("User", filters={"username": user}, fieldname="name") or user self.check_if_enabled(user) if not frappe.form_dict.get('tmp_id'): self.user = self.check_password(user, pwd) else: self.user = user def force_user_to_reset_password(self): if not self.user: return from frappe.core.doctype.user.user import STANDARD_USERS if self.user in STANDARD_USERS: return False reset_pwd_after_days = cint(frappe.db.get_single_value("System Settings", "force_user_to_reset_password")) if reset_pwd_after_days: last_password_reset_date = frappe.db.get_value("User", self.user, "last_password_reset_date") or today() last_pwd_reset_days = date_diff(today(), last_password_reset_date) if last_pwd_reset_days > reset_pwd_after_days: return True def check_if_enabled(self, user): """raise exception if user not enabled""" doc = frappe.get_doc("System Settings") if cint(doc.allow_consecutive_login_attempts) > 0: check_consecutive_login_attempts(user, doc) if user=='Administrator': return if not cint(frappe.db.get_value('User', user, 'enabled')): self.fail('User disabled or missing', user=user) def check_password(self, user, pwd): """check password""" try: # returns user in correct case return check_password(user, pwd) except frappe.AuthenticationError: 
self.update_invalid_login(user) self.fail('Incorrect password', user=user) def fail(self, message, user=None): if not user: user = _('Unknown User') frappe.local.response['message'] = message add_authentication_log(message, user, status="Failed") frappe.db.commit() raise frappe.AuthenticationError def update_invalid_login(self, user): last_login_tried = get_last_tried_login_data(user) failed_count = 0 if last_login_tried > get_datetime(): failed_count = get_login_failed_count(user) frappe.cache().hset('login_failed_count', user, failed_count + 1) def run_trigger(self, event='on_login'): for method in frappe.get_hooks().get(event, []): frappe.call(frappe.get_attr(method), login_manager=self) def validate_hour(self): """check if user is logging in during restricted hours""" login_before = int(frappe.db.get_value('User', self.user, 'login_before', ignore=True) or 0) login_after = int(frappe.db.get_value('User', self.user, 'login_after', ignore=True) or 0) if not (login_before or login_after): return from frappe.utils import now_datetime current_hour = int(now_datetime().strftime('%H')) if login_before and current_hour > login_before: frappe.throw(_("Login not allowed at this time"), frappe.AuthenticationError) if login_after and current_hour < login_after: frappe.throw(_("Login not allowed at this time"), frappe.AuthenticationError) def login_as_guest(self): """login as guest""" self.login_as("Guest") def login_as(self, user): self.user = user self.post_login() def logout(self, arg='', user=None): if not user: user = frappe.session.user self.run_trigger('on_logout') if user == frappe.session.user: delete_session(frappe.session.sid, user=user, reason="User Manually Logged Out") self.clear_cookies() else: clear_sessions(user) def clear_cookies(self): clear_cookies() class CookieManager: def __init__(self): self.cookies = {} self.to_delete = [] def init_cookies(self): if not frappe.local.session.get('sid'): return # sid expires in 3 days expires = datetime.datetime.now() 
+ datetime.timedelta(days=3) if frappe.session.sid: self.set_cookie("sid", frappe.session.sid, expires=expires, httponly=True) if frappe.session.session_country: self.set_cookie("country", frappe.session.session_country) def set_cookie(self, key, value, expires=None, secure=False, httponly=False, samesite="Lax"): if not secure and hasattr(frappe.local, 'request'): secure = frappe.local.request.scheme == "https" # Cordova does not work with Lax if frappe.local.session.data.device == "mobile": samesite = None self.cookies[key] = { "value": value, "expires": expires, "secure": secure, "httponly": httponly, "samesite": samesite } def delete_cookie(self, to_delete): if not isinstance(to_delete, (list, tuple)): to_delete = [to_delete] self.to_delete.extend(to_delete) def flush_cookies(self, response): for key, opts in self.cookies.items(): response.set_cookie(key, quote((opts.get("value") or "").encode('utf-8')), expires=opts.get("expires"), secure=opts.get("secure"), httponly=opts.get("httponly"), samesite=opts.get("samesite")) # expires yesterday! 
expires = datetime.datetime.now() + datetime.timedelta(days=-1) for key in set(self.to_delete): response.set_cookie(key, "", expires=expires) @frappe.whitelist() def get_logged_user(): return frappe.session.user def clear_cookies(): if hasattr(frappe.local, "session"): frappe.session.sid = "" frappe.local.cookie_manager.delete_cookie(["full_name", "user_id", "sid", "user_image", "system_user"]) def get_last_tried_login_data(user, get_last_login=False): locked_account_time = frappe.cache().hget('locked_account_time', user) if get_last_login and locked_account_time: return locked_account_time last_login_tried = frappe.cache().hget('last_login_tried', user) if not last_login_tried or last_login_tried < get_datetime(): last_login_tried = get_datetime() + datetime.timedelta(seconds=60) frappe.cache().hset('last_login_tried', user, last_login_tried) return last_login_tried def get_login_failed_count(user): return cint(frappe.cache().hget('login_failed_count', user)) or 0 def check_consecutive_login_attempts(user, doc): login_failed_count = get_login_failed_count(user) last_login_tried = (get_last_tried_login_data(user, True) + datetime.timedelta(seconds=doc.allow_login_after_fail)) if login_failed_count >= cint(doc.allow_consecutive_login_attempts): locked_account_time = frappe.cache().hget('locked_account_time', user) if not locked_account_time: frappe.cache().hset('locked_account_time', user, get_datetime()) if last_login_tried > get_datetime(): frappe.throw(_("Your account has been locked and will resume after {0} seconds") .format(doc.allow_login_after_fail), frappe.SecurityException) else: delete_login_failed_cache(user) def validate_ip_address(user): """check if IP Address is valid""" user = frappe.get_cached_doc("User", user) if not frappe.flags.in_test else frappe.get_doc("User", user) ip_list = user.get_restricted_ip_list() if not ip_list: return system_settings = frappe.get_cached_doc("System Settings") if not frappe.flags.in_test else frappe.get_single("System 
Settings") # check if bypass restrict ip is enabled for all users bypass_restrict_ip_check = system_settings.bypass_restrict_ip_check_if_2fa_enabled # check if two factor auth is enabled if system_settings.enable_two_factor_auth and not bypass_restrict_ip_check: # check if bypass restrict ip is enabled for login user bypass_restrict_ip_check = user.bypass_restrict_ip_check_if_2fa_enabled for ip in ip_list: if frappe.local.request_ip.startswith(ip) or bypass_restrict_ip_check: return frappe.throw(_("Access not allowed from this IP Address"), frappe.AuthenticationError)<|fim▁end|>
def set_lang(self): from frappe.translate import guess_language frappe.local.lang = guess_language()
<|file_name|>test_client.py<|end_file_name|><|fim▁begin|>from time import time import unittest from unittest.mock import patch, Mock from urllib.error import URLError from suds import WebFault from ..exceptions import ( ConnectError, ServiceError, ApiLimitError, AccountFault, TableFault, ListFault) from .. import client class InteractClientTests(unittest.TestCase): """ Test InteractClient """ def setUp(self): self.client = Mock() self.configuration = { 'username': 'username', 'password': 'password', 'pod': 'pod', 'client': self.client, } self.interact = client.InteractClient(**self.configuration) def test_starts_disconnected(self): self.assertFalse(self.interact.connected) @patch.object(client, 'time') def test_connected_property_returns_time_of_connection_after_successful_connect(self, mtime): mtime.return_value = connection_time = time() self.interact.connect() self.assertEqual(self.interact.connected, connection_time) @patch.object(client, 'time') @patch.object(client.InteractClient, 'login') def test_session_property_returns_session_id_and_start_after_successful_connect( self, login, mtime): mtime.return_value = session_start = time() session_id = "session_id" login.return_value = Mock(session_id=session_id) self.interact.connect() self.assertEqual(self.interact.session, (session_id, session_start)) @patch.object(client.InteractClient, 'login') def test_connect_reuses_session_if_possible_and_does_not_login( self, login): self.interact.session = "session_id" self.interact.connect() self.assertFalse(login.called) @patch.object(client.InteractClient, 'login') def test_connect_gets_new_session_if_session_is_expired(self, login): self.interact.connect() self.interact.disconnect() self.interact.session_lifetime = -1 self.interact.connect() self.assertEqual(login.call_count, 2) def test_connected_property_returns_false_after_disconnect(self): self.interact.disconnect() self.assertFalse(self.interact.connected) def test_client_property_returns_configured_client(self): 
self.assertEqual(self.interact.client, self.client) def test_call_method_calls_soap_method_with_passed_arguments(self): self.interact.call('somemethod', 'arg') self.client.service.somemethod.assert_called_with('arg') def test_call_method_returns_soap_method_return_value(self): self.client.service.bananas.return_value = 1 self.assertEqual(self.interact.call('bananas'), 1) def test_call_method_raises_ConnectError_for_url_timeout(self): self.client.service.rm_rf.side_effect = URLError('Timeout') with self.assertRaises(ConnectError): self.interact.call('rm_rf', '/.') def test_call_method_raises_ServiceError_for_unhandled_webfault(self): self.client.service.rm_rf.side_effect = WebFault(Mock(), Mock()) with self.assertRaises(ServiceError): self.interact.call('rm_rf', '/.') def test_call_method_raises_ListFault_for_list_fault_exception_from_service(self): self.client.service.list_thing.side_effect = WebFault( Mock(faultstring='ListFault'), Mock()) with self.assertRaises(ListFault): self.interact.call('list_thing') def test_call_method_raises_ApiLimitError_for_rate_limit_exception_from_service(self): self.client.service.rm_rf.side_effect = WebFault( Mock(faultstring='API_LIMIT_EXCEEDED'), Mock()) with self.assertRaises(ApiLimitError): self.interact.call('rm_rf', '/.') def test_call_method_raises_TableFault_for_table_fault_exception_from_service(self): self.client.service.give_me_a_table.side_effect = WebFault( Mock(faultstring='TableFault'), Mock()) with self.assertRaises(TableFault): self.interact.call('give_me_a_table', 'awesome_table') @patch.object(client.InteractClient, 'WSDLS', {'pod': 'pod_wsdl'}) def test_wsdl_property_returns_correct_value(self): self.assertEqual(self.interact.wsdl, 'pod_wsdl') @patch.object(client.InteractClient, 'ENDPOINTS', {'pod': 'pod_endpoint'}) def test_endpoint_property_returns_correct_value(self): self.assertEqual(self.interact.endpoint, 'pod_endpoint') @patch.object(client.InteractClient, 'connect', Mock()) def 
test_entering_context_calls_connect(self): self.assertFalse(self.interact.connect.called) with self.interact: self.assertTrue(self.interact.connect.called) @patch.object(client.InteractClient, 'disconnect', Mock()) def test_leaving_context_calls_disconnect(self): with self.interact: self.assertFalse(self.interact.disconnect.called) self.assertTrue(self.interact.disconnect.called)<|fim▁hole|> @patch.object(client.InteractClient, 'login') def test_connect_method_raises_account_fault_on_credential_failure(self, login): login.side_effect = AccountFault with self.assertRaises(AccountFault): self.interact.connect() @patch.object(client.InteractClient, 'login', Mock(return_value=Mock(sessionId=1))) def test_connect_method_returns_true_on_success(self): self.assertTrue(self.interact.connect()) def test_connect_method_sets_soapheaders(self): soapheaders = Mock() self.interact.client.factory.create.return_value = soapheaders self.interact.connect() self.interact.client.set_options.assert_called_once_with(soapheaders=soapheaders) @patch.object(client.InteractClient, 'login') @patch.object(client.InteractClient, 'logout') def test_connect_abandons_session_if_session_is_expired(self, logout, login): self.interact.session_lifetime = -1 self.interact.session = session_id = '1234' self.interact.connect() logout.assert_called_once_with() self.assertNotEqual(self.interact.session[0], session_id) @patch.object(client.InteractClient, 'logout') def test_disconnect_does_not_logout_if_session_is_available(self, logout): self.session = 'session_id' self.interact.disconnect() self.assertEqual(logout.call_count, 0) @patch.object(client.InteractClient, 'logout') def test_disconnect_calls_logout_if_session_is_expired(self, logout): self.interact.session = 'session_id' self.interact.session_lifetime = -1 self.interact.disconnect() self.assertEqual(logout.call_count, 1) self.assertIsNone(self.interact.session) @patch.object(client.InteractClient, 'logout') def 
test_disconnect_calls_logout_if_abandon_session_is_passed(self, logout): self.interact.connect() self.interact.disconnect(abandon_session=True) self.assertEqual(logout.call_count, 1) self.assertIsNone(self.interact.session)<|fim▁end|>
<|file_name|>client.py<|end_file_name|><|fim▁begin|>try: from urllib.parse import quote, urljoin except ImportError: from urllib import quote from urlparse import urljoin import requests class BandsintownError(Exception): def __init__(self, message, response=None): self.message = message self.response = response def __str__(self): return self.message class BandsintownInvalidAppIdError(BandsintownError): pass class BandsintownInvalidDateFormatError(BandsintownError): pass class Client(object): api_base_url = 'https://rest.bandsintown.com' def __init__(self, app_id): """ Args: app_id: Required app id, can be any string """ self.app_id = app_id self.default_params = {'app_id': self.app_id} def request(self, path, params={}): """ Executes a request to the Bandsintown API and returns the response object from `requests` Args: path: The API path to append to the base API URL for the request params: Optional dict to tack on query string parameters to request Returns: Response object from `requests` """ url = urljoin(self.api_base_url, path) request_params = self.default_params.copy() request_params.update(params) response = requests.get( url, headers={'Accept': 'application/json'}, params=request_params ) data = response.json() if 'message' in data and data['message'] == 'Missing required request parameters: [app_id]': message = 'Missing required API key, which must be a single string argument to Client instantiation, e.g.: client = Client("my-app-id")' raise BandsintownInvalidAppIdError(message, response) else: return data def artists(self, artistname): """ Searches for a single artist using this endpoint: https://app.swaggerhub.com/apis/Bandsintown/PublicAPI/3.0.0#/single_artist_information/artist Args: artistname: Artist name to search for Returns: A dict of artist data when the artist is found, and returns None when not found Usage: client = Client(app_id='my-app-id') client.artists('Bad Religion') """ try: return self.request('artists/%s' % quote(artistname)) except 
ValueError: # Currently the API's response when the artist doesn't exist is # badly formed JSON. In such a case, we're catching the exception # and returning None return None def artists_events(self, artistname, date=None): """ Searches for events for a single artist, with an optional date range, using this endpoint: https://app.swaggerhub.com/apis/Bandsintown/PublicAPI/3.0.0#/upcoming_artist_events/artistEvents Args: artistname: Artist name to search for date: Optional date string filter, can be a specific date in the format: "yyyy-mm-dd", a range "yyyy-mm-dd,yyyy-mm-dd", or can be a few keyword values like "upcoming" or "all" Returns: A list of event data, which could be empty, None if artist not found, raises `BandsintownInvalidDateFormatError` for bad `date` param, or raises `BandsintownError` for other unknown error Usage: client = Client(app_id=1234) client.artists_events('Bad Religion') client.artists_events('Bad Religion', date='2018-02-01,2018-02-28')<|fim▁hole|> params = {} if date: params['date'] = date data = self.request('artists/%s/events' % quote(artistname), params) if 'errors' in data: if data['errors'][0] == 'Invalid date format': raise BandsintownInvalidDateFormatError( 'Invalid date parameter: "%s", must be in the format: "yyyy-mm-dd", or "yyyy-mm-dd,yyyy-mm-dd" for a range, or keywords "upcoming" or "all"' % date ) elif data['errors'][0] == 'Unknown Artist': return None else: raise BandsintownError('Unknown error with request', data) return data<|fim▁end|>
"""
<|file_name|>x-tag-no-polyfills.js<|end_file_name|><|fim▁begin|>(function () { /*** Variables ***/ var win = window, doc = document, attrProto = { setAttribute: Element.prototype.setAttribute, removeAttribute: Element.prototype.removeAttribute }, hasShadow = Element.prototype.createShadowRoot, container = doc.createElement('div'), noop = function(){}, trueop = function(){ return true; }, regexReplaceCommas = /,/g, regexCamelToDash = /([a-z])([A-Z])/g, regexPseudoParens = /\(|\)/g, regexPseudoCapture = /:(\w+)\u276A(.+?(?=\u276B))|:(\w+)/g, regexDigits = /(\d+)/g, keypseudo = { action: function (pseudo, event) { return pseudo.value.match(regexDigits).indexOf(String(event.keyCode)) > -1 == (pseudo.name == 'keypass') || null; } }, /* - The prefix object generated here is added to the xtag object as xtag.prefix later in the code - Prefix provides a variety of prefix variations for the browser in which your code is running - The 4 variations of prefix are as follows: * prefix.dom: the correct prefix case and form when used on DOM elements/style properties * prefix.lowercase: a lowercase version of the prefix for use in various user-code situations * prefix.css: the lowercase, dashed version of the prefix * prefix.js: addresses prefixed APIs present in global and non-Element contexts */ prefix = (function () { var styles = win.getComputedStyle(doc.documentElement, ''), pre = (Array.prototype.slice .call(styles) .join('') .match(/-(moz|webkit|ms)-/) || (styles.OLink === '' && ['', 'o']) )[1]; return { dom: pre == 'ms' ? 'MS' : pre, lowercase: pre, css: '-' + pre + '-', js: pre == 'ms' ? pre : pre[0].toUpperCase() + pre.substr(1) }; })(), matchSelector = Element.prototype.matches || Element.prototype.matchesSelector || Element.prototype[prefix.lowercase + 'MatchesSelector']; /*** Functions ***/ // Utilities /* This is an enhanced typeof check for all types of objects. 
Where typeof would normaly return 'object' for many common DOM objects (like NodeLists and HTMLCollections). - For example: typeOf(document.children) will correctly return 'htmlcollection' */ var typeCache = {}, typeString = typeCache.toString, typeRegexp = /\s([a-zA-Z]+)/; function typeOf(obj) { var type = typeString.call(obj); return typeCache[type] || (typeCache[type] = type.match(typeRegexp)[1].toLowerCase()); } function clone(item, type){ var fn = clone[type || typeOf(item)]; return fn ? fn(item) : item; } clone.object = function(src){ var obj = {}; for (var key in src) obj[key] = clone(src[key]); return obj; }; clone.array = function(src){ var i = src.length, array = new Array(i); while (i--) array[i] = clone(src[i]); return array; }; /* The toArray() method allows for conversion of any object to a true array. For types that cannot be converted to an array, the method returns a 1 item array containing the passed-in object. */ var unsliceable = { 'undefined': 1, 'null': 1, 'number': 1, 'boolean': 1, 'string': 1, 'function': 1 }; function toArray(obj){ return unsliceable[typeOf(obj)] ? [obj] : Array.prototype.slice.call(obj, 0); } // DOM var str = ''; function query(element, selector){ return (selector || str).length ? toArray(element.querySelectorAll(selector)) : []; } // Pseudos function parsePseudo(fn){fn();} // Mixins function mergeOne(source, key, current){ var type = typeOf(current); if (type == 'object' && typeOf(source[key]) == 'object') xtag.merge(source[key], current); else source[key] = clone(current, type); return source; } function mergeMixin(tag, original, mixin, name) { var key, keys = {}; for (var z in original) keys[z.split(':')[0]] = z; for (z in mixin) { key = keys[z.split(':')[0]]; if (typeof original[key] == 'function') { if (!key.match(':mixins')) { original[key + ':mixins'] = original[key]; delete original[key]; key = key + ':mixins'; } original[key].__mixin__ = xtag.applyPseudos(z + (z.match(':mixins') ? 
'' : ':mixins'), mixin[z], tag.pseudos, original[key].__mixin__); } else { original[z] = mixin[z]; delete original[key]; } } } var uniqueMixinCount = 0; function addMixin(tag, original, mixin){ for (var z in mixin){ original[z + ':__mixin__(' + (uniqueMixinCount++) + ')'] = xtag.applyPseudos(z, mixin[z], tag.pseudos); } } function resolveMixins(mixins, output){ var index = mixins.length; while (index--){ output.unshift(mixins[index]); if (xtag.mixins[mixins[index]].mixins) resolveMixins(xtag.mixins[mixins[index]].mixins, output); } return output; } function applyMixins(tag) { resolveMixins(tag.mixins, []).forEach(function(name){ var mixin = xtag.mixins[name]; for (var type in mixin) { var item = mixin[type], original = tag[type]; if (!original) tag[type] = item; else { switch (type){ case 'mixins': break; case 'events': addMixin(tag, original, item); break; case 'accessors': case 'prototype': for (var z in item) { if (!original[z]) original[z] = item[z]; else mergeMixin(tag, original[z], item[z], name); } break; default: mergeMixin(tag, original, item, name); } } } }); return tag; } // Events function delegateAction(pseudo, event) { var match, target = event.target, root = event.currentTarget; while (!match && target && target != root) { if (target.tagName && matchSelector.call(target, pseudo.value)) match = target; target = target.parentNode; } if (!match && root.tagName && matchSelector.call(root, pseudo.value)) match = root; return match ? 
pseudo.listener = pseudo.listener.bind(match) : null; } function touchFilter(event){ return event.button === 0; } function writeProperty(key, event, base, desc){ if (desc) event[key] = base[key]; else Object.defineProperty(event, key, { writable: true, enumerable: true, value: base[key] }); } var skipProps = {}; for (var z in doc.createEvent('CustomEvent')) skipProps[z] = 1; function inheritEvent(event, base){ var desc = Object.getOwnPropertyDescriptor(event, 'target'); for (var z in base) { if (!skipProps[z]) writeProperty(z, event, base, desc); } event.baseEvent = base; } // Accessors function modAttr(element, attr, name, value, method){ attrProto[method].call(element, name, attr && attr.boolean ? '' : value); } function syncAttr(element, attr, name, value, method){ if (attr && (attr.property || attr.selector)) { var nodes = attr.property ? [element.xtag[attr.property]] : attr.selector ? xtag.query(element, attr.selector) : [], index = nodes.length; while (index--) nodes[index][method](name, value); } } function attachProperties(tag, prop, z, accessor, attr, name){ var key = z.split(':'), type = key[0]; if (type == 'get') { key[0] = prop; tag.prototype[prop].get = xtag.applyPseudos(key.join(':'), accessor[z], tag.pseudos, accessor[z]); } else if (type == 'set') { key[0] = prop; var setter = tag.prototype[prop].set = xtag.applyPseudos(key.join(':'), attr ? function(value){ var old, method = 'setAttribute'; if (attr.boolean){ value = !!value; old = this.hasAttribute(name); if (!value) method = 'removeAttribute'; } else { value = attr.validate ? attr.validate.call(this, value) : value; old = this.getAttribute(name); } modAttr(this, attr, name, value, method); accessor[z].call(this, value, old); syncAttr(this, attr, name, value, method); } : accessor[z] ? 
function(value){ accessor[z].call(this, value); } : null, tag.pseudos, accessor[z]); if (attr) attr.setter = accessor[z]; } else tag.prototype[prop][z] = accessor[z]; } function parseAccessor(tag, prop){ tag.prototype[prop] = {}; var accessor = tag.accessors[prop], attr = accessor.attribute, name; if (attr) { name = attr.name = (attr ? (attr.name || prop.replace(regexCamelToDash, '$1-$2')) : prop).toLowerCase(); attr.key = prop; tag.attributes[name] = attr; } for (var z in accessor) attachProperties(tag, prop, z, accessor, attr, name); if (attr) { if (!tag.prototype[prop].get) { var method = (attr.boolean ? 'has' : 'get') + 'Attribute'; tag.prototype[prop].get = function(){ return this[method](name); }; } if (!tag.prototype[prop].set) tag.prototype[prop].set = function(value){ value = attr.boolean ? !!value : attr.validate ? attr.validate.call(this, value) : value; var method = attr.boolean ? (value ? 'setAttribute' : 'removeAttribute') : 'setAttribute'; modAttr(this, attr, name, value, method); syncAttr(this, attr, name, value, method); }; } } var unwrapComment = /\/\*!?(?:\@preserve)?[ \t]*(?:\r\n|\n)([\s\S]*?)(?:\r\n|\n)\s*\*\//; function parseMultiline(fn){ return typeof fn == 'function' ? unwrapComment.exec(fn.toString())[1] : fn; } /*** X-Tag Object Definition ***/ var xtag = { tags: {}, defaultOptions: { pseudos: [], mixins: [], events: {}, methods: {}, accessors: {}, lifecycle: {}, attributes: {}, 'prototype': { xtag: { get: function(){ return this.__xtag__ ? 
this.__xtag__ : (this.__xtag__ = { data: {} }); } } } }, register: function (name, options) { var _name; if (typeof name == 'string') _name = name.toLowerCase(); else throw 'First argument must be a Custom Element string name'; xtag.tags[_name] = options || {}; var basePrototype = options.prototype; delete options.prototype; var tag = xtag.tags[_name].compiled = applyMixins(xtag.merge({}, xtag.defaultOptions, options)); var proto = tag.prototype; var lifecycle = tag.lifecycle; for (var z in tag.events) tag.events[z] = xtag.parseEvent(z, tag.events[z]); for (z in lifecycle) lifecycle[z.split(':')[0]] = xtag.applyPseudos(z, lifecycle[z], tag.pseudos, lifecycle[z]); for (z in tag.methods) proto[z.split(':')[0]] = { value: xtag.applyPseudos(z, tag.methods[z], tag.pseudos, tag.methods[z]), enumerable: true }; for (z in tag.accessors) parseAccessor(tag, z); if (tag.shadow) tag.shadow = tag.shadow.nodeName ? tag.shadow : xtag.createFragment(tag.shadow); if (tag.content) tag.content = tag.content.nodeName ? tag.content.innerHTML : parseMultiline(tag.content); var created = lifecycle.created; var finalized = lifecycle.finalized; proto.createdCallback = { enumerable: true, value: function(){ var element = this; if (tag.shadow && hasShadow) this.createShadowRoot().appendChild(tag.shadow.cloneNode(true)); if (tag.content) this.appendChild(document.createElement('div')).outerHTML = tag.content; var output = created ? created.apply(this, arguments) : null; xtag.addEvents(this, tag.events); for (var name in tag.attributes) { var attr = tag.attributes[name], hasAttr = this.hasAttribute(name), hasDefault = attr.def !== undefined; if (hasAttr || attr.boolean || hasDefault) { this[attr.key] = attr.boolean ? hasAttr : !hasAttr && hasDefault ? 
attr.def : this.getAttribute(name); } } tag.pseudos.forEach(function(obj){ obj.onAdd.call(element, obj); }); this.xtagComponentReady = true; if (finalized) finalized.apply(this, arguments); return output; } }; var inserted = lifecycle.inserted; var removed = lifecycle.removed; if (inserted || removed) { proto.attachedCallback = { value: function(){ if (removed) this.xtag.__parentNode__ = this.parentNode; if (inserted) return inserted.apply(this, arguments); }, enumerable: true }; } if (removed) { proto.detachedCallback = { value: function(){ var args = toArray(arguments); args.unshift(this.xtag.__parentNode__); var output = removed.apply(this, args); delete this.xtag.__parentNode__; return output; }, enumerable: true }; } if (lifecycle.attributeChanged) proto.attributeChangedCallback = { value: lifecycle.attributeChanged, enumerable: true }; proto.setAttribute = { writable: true, enumerable: true, value: function (name, value){ var old; var _name = name.toLowerCase(); var attr = tag.attributes[_name]; if (attr) { old = this.getAttribute(_name); value = attr.boolean ? '' : attr.validate ? attr.validate.call(this, value) : value; } modAttr(this, attr, _name, value, 'setAttribute'); if (attr) { if (attr.setter) attr.setter.call(this, attr.boolean ? true : value, old); syncAttr(this, attr, _name, value, 'setAttribute'); } } }; proto.removeAttribute = { writable: true, enumerable: true, value: function (name){ var _name = name.toLowerCase(); var attr = tag.attributes[_name]; var old = this.hasAttribute(_name); modAttr(this, attr, _name, '', 'removeAttribute'); if (attr) { if (attr.setter) attr.setter.call(this, attr.boolean ? 
false : undefined, old); syncAttr(this, attr, _name, '', 'removeAttribute'); } } }; var definition = {}; var instance = basePrototype instanceof win.HTMLElement; var extended = tag['extends'] && (definition['extends'] = tag['extends']); if (basePrototype) Object.getOwnPropertyNames(basePrototype).forEach(function(z){ var prop = proto[z]; var desc = instance ? Object.getOwnPropertyDescriptor(basePrototype, z) : basePrototype[z]; if (prop) { for (var y in desc) { if (typeof desc[y] == 'function' && prop[y]) prop[y] = xtag.wrap(desc[y], prop[y]); else prop[y] = desc[y]; } } proto[z] = prop || desc; }); definition['prototype'] = Object.create( extended ? Object.create(doc.createElement(extended).constructor).prototype : win.HTMLElement.prototype, proto ); return doc.registerElement(_name, definition); }, /* Exposed Variables */ mixins: {}, prefix: prefix, captureEvents: { focus: 1, blur: 1, scroll: 1, DOMMouseScroll: 1 }, customEvents: { animationstart: { attach: [prefix.dom + 'AnimationStart'] }, animationend: { attach: [prefix.dom + 'AnimationEnd'] }, transitionend: { attach: [prefix.dom + 'TransitionEnd'] }, move: { attach: ['pointermove'] }, enter: { attach: ['pointerenter'] }, leave: { attach: ['pointerleave'] }, scrollwheel: { attach: ['DOMMouseScroll', 'mousewheel'], condition: function(event){ event.delta = event.wheelDelta ? 
event.wheelDelta / 40 : Math.round(event.detail / 3.5 * -1); return true; } }, tap: { attach: ['pointerdown', 'pointerup'], condition: function(event, custom){ if (event.type == 'pointerdown') { custom.startX = event.clientX; custom.startY = event.clientY; } else if (event.button === 0 && Math.abs(custom.startX - event.clientX) < 10 && Math.abs(custom.startY - event.clientY) < 10) return true; } }, tapstart: { attach: ['pointerdown'], condition: touchFilter }, tapend: { attach: ['pointerup'], condition: touchFilter }, tapmove: { attach: ['pointerdown'], condition: function(event, custom){ if (event.type == 'pointerdown') { var listener = custom.listener.bind(this); if (!custom.tapmoveListeners) custom.tapmoveListeners = xtag.addEvents(document, { pointermove: listener, pointerup: listener, pointercancel: listener }); } else if (event.type == 'pointerup' || event.type == 'pointercancel') { xtag.removeEvents(document, custom.tapmoveListeners); custom.tapmoveListeners = null; } return true; } }, taphold: { attach: ['pointerdown', 'pointerup'], condition: function(event, custom){ if (event.type == 'pointerdown') { (custom.pointers = custom.pointers || {})[event.pointerId] = setTimeout( xtag.fireEvent.bind(null, this, 'taphold'), custom.duration || 1000 ); } else if (event.type == 'pointerup') { if (custom.pointers) { clearTimeout(custom.pointers[event.pointerId]); delete custom.pointers[event.pointerId]; } } else return true; } } }, pseudos: { __mixin__: {}, mixins: { onCompiled: function(fn, pseudo){ var mixin = pseudo.source && pseudo.source.__mixin__ || pseudo.source; if (mixin) switch (pseudo.value) { case null: case '': case 'before': return function(){ mixin.apply(this, arguments); return fn.apply(this, arguments); }; case 'after': return function(){ var returns = fn.apply(this, arguments); mixin.apply(this, arguments); return returns; }; case 'none': return fn; } else return fn; } }, keypass: keypseudo, keyfail: keypseudo, delegate: { action: delegateAction }, 
preventable: { action: function (pseudo, event) { return !event.defaultPrevented; } }, duration: { onAdd: function(pseudo){ pseudo.source.duration = Number(pseudo.value); } }, capture: { onCompiled: function(fn, pseudo){ if (pseudo.source) pseudo.source.capture = true; } } }, /* UTILITIES */ clone: clone, typeOf: typeOf, toArray: toArray, wrap: function (original, fn) { return function(){ var output = original.apply(this, arguments); fn.apply(this, arguments); return output; }; }, /* Recursively merges one object with another. The first argument is the destination object, all other objects passed in as arguments are merged from right to left, conflicts are overwritten */ merge: function(source, k, v){ if (typeOf(k) == 'string') return mergeOne(source, k, v); for (var i = 1, l = arguments.length; i < l; i++){ var object = arguments[i]; for (var key in object) mergeOne(source, key, object[key]); } return source; }, /* ----- This should be simplified! ----- Generates a random ID string */ uid: function(){ return Math.random().toString(36).substr(2,10); }, /* DOM */ query: query, skipTransition: function(element, fn, bind){ var prop = prefix.js + 'TransitionProperty'; element.style[prop] = element.style.transitionProperty = 'none'; var callback = fn ? 
fn.call(bind || element) : null; return xtag.skipFrame(function(){ element.style[prop] = element.style.transitionProperty = ''; if (callback) callback.call(bind || element); }); }, requestFrame: (function(){ var raf = win.requestAnimationFrame || win[prefix.lowercase + 'RequestAnimationFrame'] || function(fn){ return win.setTimeout(fn, 20); }; return function(fn){ return raf(fn); }; })(), cancelFrame: (function(){ var cancel = win.cancelAnimationFrame || win[prefix.lowercase + 'CancelAnimationFrame'] || win.clearTimeout; return function(id){ return cancel(id); }; })(), skipFrame: function(fn){ var id = xtag.requestFrame(function(){ id = xtag.requestFrame(fn); }); return id; }, matchSelector: function (element, selector) { return matchSelector.call(element, selector); }, set: function (element, method, value) { element[method] = value; if (window.CustomElements) CustomElements.upgradeAll(element); }, innerHTML: function(el, html){ xtag.set(el, 'innerHTML', html); }, hasClass: function (element, klass) { return element.className.split(' ').indexOf(klass.trim())>-1; }, addClass: function (element, klass) { var list = element.className.trim().split(' '); klass.trim().split(' ').forEach(function (name) { if (!~list.indexOf(name)) list.push(name); }); element.className = list.join(' ').trim(); return element; }, removeClass: function (element, klass) { var classes = klass.trim().split(' '); element.className = element.className.trim().split(' ').filter(function (name) { return name && !~classes.indexOf(name); }).join(' '); return element; }, toggleClass: function (element, klass) { return xtag[xtag.hasClass(element, klass) ? 
'removeClass' : 'addClass'].call(null, element, klass); }, /* Runs a query on only the children of an element */ queryChildren: function (element, selector) { var id = element.id, attr = '#' + (element.id = id || 'x_' + xtag.uid()) + ' > ', parent = element.parentNode || !container.appendChild(element); selector = attr + (selector + '').replace(regexReplaceCommas, ',' + attr); var result = element.parentNode.querySelectorAll(selector); if (!id) element.removeAttribute('id'); if (!parent) container.removeChild(element); return toArray(result); }, /* Creates a document fragment with the content passed in - content can be a string of HTML, an element, or an array/collection of elements */ createFragment: function(content) { var template = document.createElement('template'); if (content) { if (content.nodeName) toArray(arguments).forEach(function(e){ template.content.appendChild(e); }); else template.innerHTML = parseMultiline(content); } return document.importNode(template.content, true); }, /* Removes an element from the DOM for more performant node manipulation. The element is placed back into the DOM at the place it was taken from. */ manipulate: function(element, fn){ var next = element.nextSibling, parent = element.parentNode, returned = fn.call(element) || element; if (next) parent.insertBefore(returned, next); else parent.appendChild(returned); }, /* PSEUDOS */ applyPseudos: function(key, fn, target, source) { var listener = fn, pseudos = {}; if (key.match(':')) { var matches = [], valueFlag = 0; key.replace(regexPseudoParens, function(match){ if (match == '(') return ++valueFlag == 1 ? '\u276A' : '('; return !--valueFlag ? 
'\u276B' : ')'; }).replace(regexPseudoCapture, function(z, name, value, solo){ matches.push([name || solo, value]); }); var i = matches.length; while (i--) parsePseudo(function(){ var name = matches[i][0], value = matches[i][1]; if (!xtag.pseudos[name]) throw "pseudo not found: " + name + " " + value; value = (value === '' || typeof value == 'undefined') ? null : value; var pseudo = pseudos[i] = Object.create(xtag.pseudos[name]); pseudo.key = key; pseudo.name = name; pseudo.value = value; pseudo['arguments'] = (value || '').split(','); pseudo.action = pseudo.action || trueop; pseudo.source = source; pseudo.onAdd = pseudo.onAdd || noop; pseudo.onRemove = pseudo.onRemove || noop; var original = pseudo.listener = listener; listener = function(){ var output = pseudo.action.apply(this, [pseudo].concat(toArray(arguments))); if (output === null || output === false) return output; output = pseudo.listener.apply(this, arguments); pseudo.listener = original; return output; }; if (!target) pseudo.onAdd.call(fn, pseudo); else target.push(pseudo); }); } for (var z in pseudos) { if (pseudos[z].onCompiled) listener = pseudos[z].onCompiled(listener, pseudos[z]) || listener; } <|fim▁hole|> pseudos.forEach(function(obj){ obj.onRemove.call(target, obj); }); }, /*** Events ***/ parseEvent: function(type, fn) { var pseudos = type.split(':'), key = pseudos.shift(), custom = xtag.customEvents[key], event = xtag.merge({ type: key, stack: noop, condition: trueop, capture: xtag.captureEvents[key], attach: [], _attach: [], pseudos: '', _pseudos: [], onAdd: noop, onRemove: noop }, custom || {}); event.attach = toArray(event.base || event.attach); event.chain = key + (event.pseudos.length ? ':' + event.pseudos : '') + (pseudos.length ? 
':' + pseudos.join(':') : ''); var stack = xtag.applyPseudos(event.chain, fn, event._pseudos, event); event.stack = function(e){ e.currentTarget = e.currentTarget || this; var detail = e.detail || {}; if (!detail.__stack__) return stack.apply(this, arguments); else if (detail.__stack__ == stack) { e.stopPropagation(); e.cancelBubble = true; return stack.apply(this, arguments); } }; event.listener = function(e){ var args = toArray(arguments), output = event.condition.apply(this, args.concat([event])); if (!output) return output; // The second condition in this IF is to address the following Blink regression: https://code.google.com/p/chromium/issues/detail?id=367537 // Remove this when affected browser builds with this regression fall below 5% marketshare if (e.type != key && (e.baseEvent && e.type != e.baseEvent.type)) { xtag.fireEvent(e.target, key, { baseEvent: e, detail: output !== true && (output.__stack__ = stack) ? output : { __stack__: stack } }); } else return event.stack.apply(this, args); }; event.attach.forEach(function(name) { event._attach.push(xtag.parseEvent(name, event.listener)); }); return event; }, addEvent: function (element, type, fn, capture) { var event = typeof fn == 'function' ? 
xtag.parseEvent(type, fn) : fn; event._pseudos.forEach(function(obj){ obj.onAdd.call(element, obj); }); event._attach.forEach(function(obj) { xtag.addEvent(element, obj.type, obj); }); event.onAdd.call(element, event, event.listener); element.addEventListener(event.type, event.stack, capture || event.capture); return event; }, addEvents: function (element, obj) { var events = {}; for (var z in obj) { events[z] = xtag.addEvent(element, z, obj[z]); } return events; }, removeEvent: function (element, type, event) { event = event || type; event.onRemove.call(element, event, event.listener); xtag.removePseudos(element, event._pseudos); event._attach.forEach(function(obj) { xtag.removeEvent(element, obj); }); element.removeEventListener(event.type, event.stack); }, removeEvents: function(element, obj){ for (var z in obj) xtag.removeEvent(element, obj[z]); }, fireEvent: function(element, type, options){ var event = doc.createEvent('CustomEvent'); options = options || {}; event.initCustomEvent(type, options.bubbles !== false, options.cancelable !== false, options.detail ); if (options.baseEvent) inheritEvent(event, options.baseEvent); element.dispatchEvent(event); } }; if (typeof define === 'function' && define.amd) define(xtag); else if (typeof module !== 'undefined' && module.exports) module.exports = xtag; else win.xtag = xtag; doc.addEventListener('WebComponentsReady', function(){ xtag.fireEvent(doc.body, 'DOMComponentsLoaded'); }); })();<|fim▁end|>
return listener; }, removePseudos: function(target, pseudos){
<|file_name|>let-else-non-diverging.rs<|end_file_name|><|fim▁begin|>#![feature(let_else)] fn main() { let Some(x) = Some(1) else { //~ ERROR does not diverge Some(2) }; let Some(x) = Some(1) else { //~ ERROR does not diverge if 1 == 1 { panic!(); } };<|fim▁hole|> let Some(x) = Some(1) else { Some(2) }; //~ ERROR does not diverge }<|fim▁end|>
<|file_name|>update_creative_set.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # # Copyright 2015 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """This code example updates a creative set by adding a companion creative. To determine which creative sets exist, run get_all_creative_sets.py.<|fim▁hole|>The LoadFromStorage method is pulling credentials and properties from a "googleads.yaml" file. By default, it looks for this file in your home directory. For more information, see the "Caching authentication information" section of our README. """ # Import appropriate modules from the client library. from googleads import ad_manager # Set the ID of the creative set to update. CREATIVE_SET_ID = 'INSERT_CREATIVE_SET_ID_HERE' COMPANION_CREATIVE_ID = 'INSERT_COMPANION_CREATIVE_ID_HERE' def main(client, creative_set_id, companion_creative_id): # Initialize appropriate service. creative_set_service = client.GetService('CreativeSetService', version='v202108') # Create statement to select a single creative set by ID. statement = (ad_manager.StatementBuilder(version='v202108') .Where('id = :creativeSetId') .WithBindVariable('creativeSetId', int(creative_set_id))) # Get creative set. 
response = creative_set_service.getCreativeSetsByStatement( statement.ToStatement()) if 'results' in response and len(response['results']): updated_created_sets = [] for creative_set in response['results']: creative_set['companionCreativeIds'].append(companion_creative_id) updated_created_sets.append(creative_set) # Update the creative sets on the server. creative_sets = creative_set_service.updateCreativeSet(updated_created_sets) # Display results. for creative_set in creative_sets: print(('Creative set with ID "%s", master creative ID "%s", and ' 'companion creative IDs {%s} was updated.') % (creative_set['id'], creative_set['masterCreativeId'], ','.join(creative_set['companionCreativeIds']))) else: print('No creative sets found to update.') if __name__ == '__main__': # Initialize client object. ad_manager_client = ad_manager.AdManagerClient.LoadFromStorage() main(ad_manager_client, CREATIVE_SET_ID, COMPANION_CREATIVE_ID)<|fim▁end|>
<|file_name|>tokens.py<|end_file_name|><|fim▁begin|>from contrib import * import re def tokenize(text): tokens = re.findall('(?u)[\w.-]+',text) tokens = [t for t in tokens if not re.match('[\d.-]+$',t)] #tokens = [t for t in tokens if len(t)>2] # TODO remove stopwords return u' '.join(tokens) ## text = KV('data/text.db',5)<|fim▁hole|> print(k) tokens[k] = tokenize(v.decode('utf8')) tokens.sync()<|fim▁end|>
## tokens = KV('data/tokens.db',5) text = KO('data/text') tokens = KO('data/tokens') for k,v in text.items():
<|file_name|>async_pipe.d.ts<|end_file_name|><|fim▁begin|>/** * @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be<|fim▁hole|> */ import { ChangeDetectorRef, OnDestroy } from '@angular/core'; import { EventEmitter, Observable } from '../facade/async'; /** * @ngModule CommonModule * @whatItDoes Unwraps a value from an asynchronous primitive. * @howToUse `observable_or_promise_expression | async` * @description * The `async` pipe subscribes to an `Observable` or `Promise` and returns the latest value it has * emitted. When a new value is emitted, the `async` pipe marks the component to be checked for * changes. When the component gets destroyed, the `async` pipe unsubscribes automatically to avoid * potential memory leaks. * * * ## Examples * * This example binds a `Promise` to the view. Clicking the `Resolve` button resolves the * promise. * * {@example common/pipes/ts/async_pipe.ts region='AsyncPipePromise'} * * It's also possible to use `async` with Observables. The example below binds the `time` Observable * to the view. The Observable continuesly updates the view with the current time. * * {@example common/pipes/ts/async_pipe.ts region='AsyncPipeObservable'} * * @stable */ export declare class AsyncPipe implements OnDestroy { private _strategy; constructor(_ref: ChangeDetectorRef); ngOnDestroy(): void; transform(obj: Observable<any> | Promise<any> | EventEmitter<any>): any; }<|fim▁end|>
* found in the LICENSE file at https://angular.io/license
<|file_name|>jquery.waypoints.min.js<|end_file_name|><|fim▁begin|>/*! Waypoints - 4.0.0 Copyright © 2011-2015 Caleb Troughton Licensed under the MIT license. https://github.com/imakewebthings/waypoints/blog/master/licenses.txt */ ! function () { "use strict"; function t(o) { if (!o) throw new Error("No options passed to Waypoint constructor"); if (!o.element) throw new Error("No element option passed to Waypoint constructor"); if (!o.handler) throw new Error("No handler option passed to Waypoint constructor"); this.key = "waypoint-" + e, this.options = t.Adapter.extend({}, t.defaults, o), this.element = this.options.element, this.adapter = new t.Adapter(this.element), this.callback = o.handler, this.axis = this.options.horizontal ? "horizontal" : "vertical", this.enabled = this.options.enabled, this.triggerPoint = null, this.group = t.Group.findOrCreate({ name: this.options.group , axis: this.axis }), this.context = t.Context.findOrCreateByElement(this.options.context), t.offsetAliases[this.options.offset] && (this.options.offset = t.offsetAliases[this.options.offset]), this.group.add(this), this.context.add(this), i[this.key] = this, e += 1 } var e = 0 , i = {}; t.prototype.queueTrigger = function (t) { this.group.queueTrigger(this, t) }, t.prototype.trigger = function (t) { this.enabled && this.callback && this.callback.apply(this, t) }, t.prototype.destroy = function () { this.context.remove(this), this.group.remove(this), delete i[this.key] }, t.prototype.disable = function () { return this.enabled = !1, this }, t.prototype.enable = function () { return this.context.refresh(), this.enabled = !0, this }, t.prototype.next = function () { return this.group.next(this) }, t.prototype.previous = function () { return this.group.previous(this) }, t.invokeAll = function (t) { var e = []; for (var o in i) e.push(i[o]); for (var n = 0, r = e.length; r > n; n++) e[n][t]() }, t.destroyAll = function () { t.invokeAll("destroy") }, t.disableAll = function () { 
t.invokeAll("disable") }, t.enableAll = function () { t.invokeAll("enable") }, t.refreshAll = function () { t.Context.refreshAll() }, t.viewportHeight = function () { return window.innerHeight || document.documentElement.clientHeight }, t.viewportWidth = function () { return document.documentElement.clientWidth }, t.adapters = [], t.defaults = { context: window , continuous: !0 , enabled: !0 , group: "default" , horizontal: !1 , offset: 0 }, t.offsetAliases = { "bottom-in-view": function () { return this.context.innerHeight() - this.adapter.outerHeight() } , "right-in-view": function () { return this.context.innerWidth() - this.adapter.outerWidth() } }, window.Waypoint = t }() , function () { "use strict"; function t(t) { window.setTimeout(t, 1e3 / 60) } function e(t) { this.element = t, this.Adapter = n.Adapter, this.adapter = new this.Adapter(t), this.key = "waypoint-context-" + i, this.didScroll = !1, this.didResize = !1, this.oldScroll = { x: this.adapter.scrollLeft() , y: this.adapter.scrollTop() }, this.waypoints = { vertical: {} , horizontal: {} }, t.waypointContextKey = this.key, o[t.waypointContextKey] = this, i += 1, this.createThrottledScrollHandler(), this.createThrottledResizeHandler() } var i = 0 , o = {} , n = window.Waypoint , r = window.onload; e.prototype.add = function (t) { var e = t.options.horizontal ? 
"horizontal" : "vertical"; this.waypoints[e][t.key] = t, this.refresh() }, e.prototype.checkEmpty = function () { var t = this.Adapter.isEmptyObject(this.waypoints.horizontal) , e = this.Adapter.isEmptyObject(this.waypoints.vertical); t && e && (this.adapter.off(".waypoints"), delete o[this.key]) }, e.prototype.createThrottledResizeHandler = function () { function t() { e.handleResize(), e.didResize = !1 } var e = this; this.adapter.on("resize.waypoints", function () { e.didResize || (e.didResize = !0, n.requestAnimationFrame(t)) }) }, e.prototype.createThrottledScrollHandler = function () { function t() { e.handleScroll(), e.didScroll = !1 } var e = this; this.adapter.on("scroll.waypoints", function () { (!e.didScroll || n.isTouch) && (e.didScroll = !0, n.requestAnimationFrame(t)) }) }, e.prototype.handleResize = function () { n.Context.refreshAll() }, e.prototype.handleScroll = function () { var t = {} , e = { horizontal: { newScroll: this.adapter.scrollLeft() , oldScroll: this.oldScroll.x , forward: "right" , backward: "left" } , vertical: { newScroll: this.adapter.scrollTop() , oldScroll: this.oldScroll.y , forward: "down" , backward: "up" } }; for (var i in e) { var o = e[i] , n = o.newScroll > o.oldScroll , r = n ? o.forward : o.backward; for (var s in this.waypoints[i]) { var a = this.waypoints[i][s] , l = o.oldScroll < a.triggerPoint , h = o.newScroll >= a.triggerPoint , p = l && h , u = !l && !h; (p || u) && (a.queueTrigger(r), t[a.group.id] = a.group) } } for (var c in t) t[c].flushTriggers(); this.oldScroll = { x: e.horizontal.newScroll , y: e.vertical.newScroll } }, e.prototype.innerHeight = function () { return this.element == this.element.window ? n.viewportHeight() : this.adapter.innerHeight() }, e.prototype.remove = function (t) { delete this.waypoints[t.axis][t.key], this.checkEmpty() }, e.prototype.innerWidth = function () { return this.element == this.element.window ? 
n.viewportWidth() : this.adapter.innerWidth() }, e.prototype.destroy = function () { var t = []; for (var e in this.waypoints) for (var i in this.waypoints[e]) t.push(this.waypoints[e][i]); for (var o = 0, n = t.length; n > o; o++) t[o].destroy() }, e.prototype.refresh = function () { var t, e = this.element == this.element.window , i = e ? void 0 : this.adapter.offset() , o = {}; this.handleScroll(), t = { horizontal: { contextOffset: e ? 0 : i.left , contextScroll: e ? 0 : this.oldScroll.x , contextDimension: this.innerWidth() , oldScroll: this.oldScroll.x , forward: "right" , backward: "left" , offsetProp: "left" } , vertical: { contextOffset: e ? 0 : i.top , contextScroll: e ? 0 : this.oldScroll.y , contextDimension: this.innerHeight() , oldScroll: this.oldScroll.y , forward: "down" , backward: "up" , offsetProp: "top" } }; for (var r in t) { var s = t[r]; for (var a in this.waypoints[r]) { var l, h, p, u, c, d = this.waypoints[r][a] , f = d.options.offset , w = d.triggerPoint , y = 0 , g = null == w; d.element !== d.element.window && (y = d.adapter.offset()[s.offsetProp]), "function" == typeof f ? f = f.apply(d) : "string" == typeof f && (f = parseFloat(f), d.options.offset.indexOf("%") > -1 && (f = Math.ceil(s.contextDimension * f / 100))), l = s.contextScroll - s.contextOffset, d.triggerPoint = y + l - f, h = w < s.oldScroll, p = d.triggerPoint >= s.oldScroll, u = h && p, c = !h && !p, !g && u ? (d.queueTrigger(s.backward), o[d.group.id] = d.group) : !g && c ? 
(d.queueTrigger(s.forward), o[d.group.id] = d.group) : g && s.oldScroll >= d.triggerPoint && (d.queueTrigger(s.forward), o[d.group.id] = d.group) } } return n.requestAnimationFrame(function () { for (var t in o) o[t].flushTriggers() }), this }, e.findOrCreateByElement = function (t) { return e.findByElement(t) || new e(t) }, e.refreshAll = function () { for (var t in o) o[t].refresh() }, e.findByElement = function (t) { return o[t.waypointContextKey] }, window.onload = function () { r && r(), e.refreshAll() }, n.requestAnimationFrame = function (e) { var i = window.requestAnimationFrame || window.mozRequestAnimationFrame || window.webkitRequestAnimationFrame || t; i.call(window, e) }, n.Context = e }() , function () { "use strict"; function t(t, e) { return t.triggerPoint - e.triggerPoint } function e(t, e) { return e.triggerPoint - t.triggerPoint } function i(t) { this.name = t.name, this.axis = t.axis, this.id = this.name + "-" + this.axis, this.waypoints = [], this.clearTriggerQueues(), o[this.axis][this.name] = this } var o = { vertical: {} , horizontal: {} } , n = window.Waypoint; i.prototype.add = function (t) { this.waypoints.push(t) }, i.prototype.clearTriggerQueues = function () { this.triggerQueues = { up: [] , down: [] , left: [] , right: [] } }, i.prototype.flushTriggers = function () { for (var i in this.triggerQueues) { var o = this.triggerQueues[i] , n = "up" === i || "left" === i; o.sort(n ? e : t); for (var r = 0, s = o.length; s > r; r += 1) { var a = o[r]; (a.options.continuous || r === o.length - 1) && a.trigger([i]) } } this.clearTriggerQueues() }, i.prototype.next = function (e) {<|fim▁hole|> , o = i === this.waypoints.length - 1; return o ? null : this.waypoints[i + 1] }, i.prototype.previous = function (e) { this.waypoints.sort(t); var i = n.Adapter.inArray(e, this.waypoints); return i ? 
this.waypoints[i - 1] : null }, i.prototype.queueTrigger = function (t, e) { this.triggerQueues[e].push(t) }, i.prototype.remove = function (t) { var e = n.Adapter.inArray(t, this.waypoints); e > -1 && this.waypoints.splice(e, 1) }, i.prototype.first = function () { return this.waypoints[0] }, i.prototype.last = function () { return this.waypoints[this.waypoints.length - 1] }, i.findOrCreate = function (t) { return o[t.axis][t.name] || new i(t) }, n.Group = i }() , function () { "use strict"; function t(t) { this.$element = e(t) } var e = window.jQuery , i = window.Waypoint; e.each(["innerHeight", "innerWidth", "off", "offset", "on", "outerHeight", "outerWidth", "scrollLeft", "scrollTop"], function (e, i) { t.prototype[i] = function () { var t = Array.prototype.slice.call(arguments); return this.$element[i].apply(this.$element, t) } }), e.each(["extend", "inArray", "isEmptyObject"], function (i, o) { t[o] = e[o] }), i.adapters.push({ name: "jquery" , Adapter: t }), i.Adapter = t }() , function () { "use strict"; function t(t) { return function () { var i = [] , o = arguments[0]; return t.isFunction(arguments[0]) && (o = t.extend({}, arguments[1]), o.handler = arguments[0]), this.each(function () { var n = t.extend({}, o, { element: this }); "string" == typeof n.context && (n.context = t(this).closest(n.context)[0]), i.push(new e(n)) }), i } } var e = window.Waypoint; window.jQuery && (window.jQuery.fn.waypoint = t(window.jQuery)), window.Zepto && (window.Zepto.fn.waypoint = t(window.Zepto)) }();<|fim▁end|>
this.waypoints.sort(t); var i = n.Adapter.inArray(e, this.waypoints)
<|file_name|>test_connect_combo_selection.py<|end_file_name|><|fim▁begin|>import pytest import numpy as np from qtpy import QtWidgets from echo.core import CallbackProperty from echo.selection import SelectionCallbackProperty, ChoiceSeparator from echo.qt.connect import connect_combo_selection class Example(object): a = SelectionCallbackProperty(default_index=1) b = CallbackProperty() def test_connect_combo_selection(): t = Example() a_prop = getattr(type(t), 'a') a_prop.set_choices(t, [4, 3.5]) a_prop.set_display_func(t, lambda x: 'value: {0}'.format(x)) combo = QtWidgets.QComboBox() c1 = connect_combo_selection(t, 'a', combo) # noqa assert combo.itemText(0) == 'value: 4' assert combo.itemText(1) == 'value: 3.5' assert combo.itemData(0).data == 4 assert combo.itemData(1).data == 3.5 combo.setCurrentIndex(1) assert t.a == 3.5 combo.setCurrentIndex(0) assert t.a == 4 combo.setCurrentIndex(-1) assert t.a is None t.a = 3.5 assert combo.currentIndex() == 1 t.a = 4 assert combo.currentIndex() == 0 with pytest.raises(ValueError) as exc: t.a = 2 assert exc.value.args[0] == 'value 2 is not in valid choices: [4, 3.5]' t.a = None assert combo.currentIndex() == -1 # Changing choices should change Qt combo box. 
Let's first try with a case # in which there is a matching data value in the new combo box t.a = 3.5 assert combo.currentIndex() == 1 a_prop.set_choices(t, (4, 5, 3.5)) assert combo.count() == 3 assert t.a == 3.5 assert combo.currentIndex() == 2 assert combo.itemText(0) == 'value: 4' assert combo.itemText(1) == 'value: 5' assert combo.itemText(2) == 'value: 3.5' assert combo.itemData(0).data == 4 assert combo.itemData(1).data == 5 assert combo.itemData(2).data == 3.5 # Now we change the choices so that there is no matching data - in this case # the index should change to that given by default_index a_prop.set_choices(t, (4, 5, 6)) assert t.a == 5 assert combo.currentIndex() == 1 assert combo.count() == 3 assert combo.itemText(0) == 'value: 4' assert combo.itemText(1) == 'value: 5' assert combo.itemText(2) == 'value: 6' assert combo.itemData(0).data == 4 assert combo.itemData(1).data == 5 assert combo.itemData(2).data == 6 # Finally, if there are too few choices for the default_index to be valid, # pick the last item in the combo a_prop.set_choices(t, (9,)) assert t.a == 9 assert combo.currentIndex() == 0 assert combo.count() == 1 assert combo.itemText(0) == 'value: 9'<|fim▁hole|> # Now just make sure that ChoiceSeparator works separator = ChoiceSeparator('header') a_prop.set_choices(t, (separator, 1, 2)) assert combo.count() == 3 assert combo.itemText(0) == 'header' assert combo.itemData(0).data is separator # And setting choices to an empty iterable shouldn't cause issues a_prop.set_choices(t, ()) assert combo.count() == 0 # Try including an array in the choices a_prop.set_choices(t, (4, 5, np.array([1, 2, 3]))) def test_connect_combo_selection_invalid(): t = Example() combo = QtWidgets.QComboBox() with pytest.raises(TypeError) as exc: connect_combo_selection(t, 'b', combo) assert exc.value.args[0] == 'connect_combo_selection requires a SelectionCallbackProperty'<|fim▁end|>
assert combo.itemData(0).data == 9
<|file_name|>FWebReportPage.java<|end_file_name|><|fim▁begin|>package org.mo.game.editor.face.apl.logic.report; import org.mo.jfa.common.page.FAbstractFormPage; public class FWebReportPage<|fim▁hole|> extends FAbstractFormPage{ private static final long serialVersionUID = 1L; private String _tempName; public String getTempName(){ return _tempName; } public void setTempName(String tempName){ _tempName = tempName; } }<|fim▁end|>
<|file_name|>test_bayesian_linear_regression.py<|end_file_name|><|fim▁begin|>import unittest import numpy as np from robo.models.bayesian_linear_regression import BayesianLinearRegression class TestBayesianLinearRegression(unittest.TestCase): def setUp(self): self.X = np.random.rand(10, 1) y = self.X * 2 self.y = y[:, 0]<|fim▁hole|> X_test = np.random.rand(10, 1) m, v = self.model.predict(X_test) assert len(m.shape) == 1 assert m.shape[0] == X_test.shape[0] assert len(v.shape) == 1 assert v.shape[0] == X_test.shape[0] np.testing.assert_almost_equal(m, X_test[:, 0] * 2, decimal=2) np.testing.assert_almost_equal(v, np.ones([v.shape[0]]) / 1000., decimal=3) def test_marginal_log_likelihood(self): theta = np.array([np.log(1), np.log(1000)]) mll = self.model.marginal_log_likelihood(theta) def test_negative_mll(self): theta = np.array([np.log(1), np.log(1000)]) mll = self.model.negative_mll(theta) def test_get_incumbent(self): inc, inc_val = self.model.get_incumbent() b = np.argmin(self.y) assert np.all(inc == self.X[b]) assert inc_val == self.y[b] if __name__ == "__main__": unittest.main()<|fim▁end|>
self.model = BayesianLinearRegression(alpha=1, beta=1000) self.model.train(self.X, self.y, do_optimize=False) def test_predict(self):
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use serialize::json; use std::collections::HashMap; use std::f32; use std::f64; use time; use time::Timespec; use postgres::{PostgresConnection, NoSsl}; use postgres::types::array::ArrayBase; use postgres::types::{ToSql, FromSql}; mod array; mod range; fn test_type<T: PartialEq+FromSql+ToSql, S: Str>(sql_type: &str, checks: &[(T, S)]) { let conn = or_fail!(PostgresConnection::connect("postgres://postgres@localhost", &NoSsl)); for &(ref val, ref repr) in checks.iter() { let stmt = or_fail!(conn.prepare(format!("SELECT {:s}::{}", *repr, sql_type)[])); let result = or_fail!(stmt.query([])).next().unwrap().get(0u); assert!(val == &result); let stmt = or_fail!(conn.prepare(format!("SELECT $1::{}", sql_type)[])); let result = or_fail!(stmt.query(&[val])).next().unwrap().get(0u); assert!(val == &result); } } #[test] fn test_bool_params() { test_type("BOOL", [(Some(true), "'t'"), (Some(false), "'f'"), (None, "NULL")]); } #[test] fn test_i8_params() { test_type("\"char\"", [(Some('a' as i8), "'a'"), (None, "NULL")]); } #[test] fn test_name_params() { test_type("NAME", [(Some("hello world".to_string()), "'hello world'"), (Some("イロハニホヘト チリヌルヲ".to_string()), "'イロハニホヘト チリヌルヲ'"), (None, "NULL")]); } #[test] fn test_i16_params() { test_type("SMALLINT", [(Some(15001i16), "15001"), (Some(-15001i16), "-15001"), (None, "NULL")]); } #[test] fn test_i32_params() { test_type("INT", [(Some(2147483548i32), "2147483548"), (Some(-2147483548i32), "-2147483548"), (None, "NULL")]); } #[test] fn test_i64_params() { test_type("BIGINT", [(Some(9223372036854775708i64), "9223372036854775708"), (Some(-9223372036854775708i64), "-9223372036854775708"), (None, "NULL")]); } #[test] fn test_f32_params() { test_type("REAL", [(Some(f32::INFINITY), "'infinity'"), (Some(f32::NEG_INFINITY), "'-infinity'"), (Some(1000.55), "1000.55"), (None, "NULL")]); } #[test] fn test_f64_params() { test_type("DOUBLE PRECISION", [(Some(f64::INFINITY), "'infinity'"), 
(Some(f64::NEG_INFINITY), "'-infinity'"), (Some(10000.55), "10000.55"), (None, "NULL")]); } #[test] fn test_varchar_params() { test_type("VARCHAR", [(Some("hello world".to_string()), "'hello world'"), (Some("イロハニホヘト チリヌルヲ".to_string()), "'イロハニホヘト チリヌルヲ'"), (None, "NULL")]); } #[test] fn test_text_params() { test_type("TEXT", [(Some("hello world".to_string()), "'hello world'"), (Some("イロハニホヘト チリヌルヲ".to_string()), "'イロハニホヘト チリヌルヲ'"), (None, "NULL")]); } #[test] fn test_bpchar_params() { let conn = or_fail!(PostgresConnection::connect("postgres://postgres@localhost", &NoSsl)); or_fail!(conn.execute("CREATE TEMPORARY TABLE foo ( id SERIAL PRIMARY KEY, b CHAR(5) )", [])); or_fail!(conn.execute("INSERT INTO foo (b) VALUES ($1), ($2), ($3)", &[&Some("12345"), &Some("123"), &None::<&'static str>])); let stmt = or_fail!(conn.prepare("SELECT b FROM foo ORDER BY id")); let res = or_fail!(stmt.query([])); assert_eq!(vec!(Some("12345".to_string()), Some("123 ".to_string()), None), res.map(|row| row.get(0u)).collect()); } #[test] fn test_bytea_params() { test_type("BYTEA", [(Some(vec!(0u8, 1, 2, 3, 254, 255)), "'\\x00010203feff'"), (None, "NULL")]); } #[test] fn test_json_params() { test_type("JSON", [(Some(json::from_str("[10, 11, 12]").unwrap()), "'[10, 11, 12]'"), (Some(json::from_str("{\"f\": \"asd\"}").unwrap()), "'{\"f\": \"asd\"}'"), (None, "NULL")]) } #[test] fn test_tm_params() { fn make_check<'a>(time: &'a str) -> (Option<Timespec>, &'a str) { (Some(time::strptime(time, "'%Y-%m-%d %H:%M:%S.%f'").unwrap().to_timespec()), time) } test_type("TIMESTAMP", [make_check("'1970-01-01 00:00:00.01'"), make_check("'1965-09-25 11:19:33.100314'"), make_check("'2010-02-09 23:11:45.1202'"), (None, "NULL")]); test_type("TIMESTAMP WITH TIME ZONE", [make_check("'1970-01-01 00:00:00.01'"), make_check("'1965-09-25 11:19:33.100314'"), make_check("'2010-02-09 23:11:45.1202'"), (None, "NULL")]); } macro_rules! 
test_range( ($name:expr, $t:ty, $low:expr, $low_str:expr, $high:expr, $high_str:expr) => ({ let tests = [(Some(range!('(', ')')), "'(,)'".to_string()), (Some(range!('[' $low, ')')), format!("'[{},)'", $low_str)), (Some(range!('(' $low, ')')), format!("'({},)'", $low_str)), (Some(range!('(', $high ']')), format!("'(,{}]'", $high_str)), (Some(range!('(', $high ')')), format!("'(,{})'", $high_str)), (Some(range!('[' $low, $high ']')), format!("'[{},{}]'", $low_str, $high_str)), (Some(range!('[' $low, $high ')')), format!("'[{},{})'", $low_str, $high_str)), (Some(range!('(' $low, $high ']')), format!("'({},{}]'", $low_str, $high_str)), (Some(range!('(' $low, $high ')')), format!("'({},{})'", $low_str, $high_str)), (Some(range!(empty)), "'empty'".to_string()), (None, "NULL".to_string())]; test_type($name, tests); }) ) #[test] fn test_int4range_params() { test_range!("INT4RANGE", i32, 100i32, "100", 200i32, "200") } #[test] fn test_int8range_params() { test_range!("INT8RANGE", i64, 100i64, "100", 200i64, "200") } fn test_timespec_range_params(sql_type: &str) { fn t(time: &str) -> Timespec { time::strptime(time, "%Y-%m-%d").unwrap().to_timespec() } let low = "1970-01-01"; let high = "1980-01-01"; test_range!(sql_type, Timespec, t(low), low, t(high), high); } #[test] fn test_tsrange_params() { test_timespec_range_params("TSRANGE"); } #[test] fn test_tstzrange_params() { test_timespec_range_params("TSTZRANGE"); } macro_rules! 
test_array_params( ($name:expr, $v1:expr, $s1:expr, $v2:expr, $s2:expr, $v3:expr, $s3:expr) => ({ let tests = [(Some(ArrayBase::from_vec(vec!(Some($v1), Some($v2), None), 1)), format!("'{{{},{},NULL}}'", $s1, $s2).into_string()), (None, "NULL".to_string())]; test_type(format!("{}[]", $name)[], tests); let mut a = ArrayBase::from_vec(vec!(Some($v1), Some($v2)), 0); a.wrap(-1); a.push_move(ArrayBase::from_vec(vec!(None, Some($v3)), 0)); let tests = [(Some(a), format!("'[-1:0][0:1]={{{{{},{}}},{{NULL,{}}}}}'", $s1, $s2, $s3).into_string())]; test_type(format!("{}[][]", $name)[], tests); }) ) #[test] fn test_boolarray_params() { test_array_params!("BOOL", false, "f", true, "t", true, "t"); } #[test] fn test_byteaarray_params() { test_array_params!("BYTEA", vec!(0u8, 1), r#""\\x0001""#, vec!(254u8, 255u8), r#""\\xfeff""#, vec!(10u8, 11u8), r#""\\x0a0b""#); } #[test] fn test_chararray_params() { test_array_params!("\"char\"", 'a' as i8, "a", 'z' as i8, "z", '0' as i8, "0"); } #[test] fn test_namearray_params() { test_array_params!("NAME", "hello".to_string(), "hello", "world".to_string(), "world", "!".to_string(), "!"); } #[test] fn test_int2array_params() { test_array_params!("INT2", 0i16, "0", 1i16, "1", 2i16, "2"); } #[test] fn test_int4array_params() { test_array_params!("INT4", 0i32, "0", 1i32, "1", 2i32, "2"); } #[test] fn test_textarray_params() { test_array_params!("TEXT", "hello".to_string(), "hello", "world".to_string(), "world", "!".to_string(), "!");<|fim▁hole|>#[test] fn test_charnarray_params() { test_array_params!("CHAR(5)", "hello".to_string(), "hello", "world".to_string(), "world", "! 
".to_string(), "!"); } #[test] fn test_varchararray_params() { test_array_params!("VARCHAR", "hello".to_string(), "hello", "world".to_string(), "world", "!".to_string(), "!"); } #[test] fn test_int8array_params() { test_array_params!("INT8", 0i64, "0", 1i64, "1", 2i64, "2"); } #[test] fn test_timestamparray_params() { fn make_check<'a>(time: &'a str) -> (Timespec, &'a str) { (time::strptime(time, "%Y-%m-%d %H:%M:%S.%f").unwrap().to_timespec(), time) } let (v1, s1) = make_check("1970-01-01 00:00:00.01"); let (v2, s2) = make_check("1965-09-25 11:19:33.100314"); let (v3, s3) = make_check("2010-02-09 23:11:45.1202"); test_array_params!("TIMESTAMP", v1, s1, v2, s2, v3, s3); test_array_params!("TIMESTAMPTZ", v1, s1, v2, s2, v3, s3); } #[test] fn test_float4array_params() { test_array_params!("FLOAT4", 0f32, "0", 1.5f32, "1.5", 0.009f32, ".009"); } #[test] fn test_float8array_params() { test_array_params!("FLOAT8", 0f64, "0", 1.5f64, "1.5", 0.009f64, ".009"); } #[test] fn test_int4rangearray_params() { test_array_params!("INT4RANGE", range!('(', ')'), "\"(,)\"", range!('[' 10i32, ')'), "\"[10,)\"", range!('(', 10i32 ')'), "\"(,10)\""); } #[test] fn test_tsrangearray_params() { fn make_check<'a>(time: &'a str) -> (Timespec, &'a str) { (time::strptime(time, "%Y-%m-%d").unwrap().to_timespec(), time) } let (v1, s1) = make_check("1970-10-11"); let (v2, s2) = make_check("1990-01-01"); let r1 = range!('(', ')'); let rs1 = "\"(,)\""; let r2 = range!('[' v1, ')'); let rs2 = format!("\"[{},)\"", s1); let r3 = range!('(', v2 ')'); let rs3 = format!("\"(,{})\"", s2); test_array_params!("TSRANGE", r1, rs1, r2, rs2, r3, rs3); test_array_params!("TSTZRANGE", r1, rs1, r2, rs2, r3, rs3); } #[test] fn test_int8rangearray_params() { test_array_params!("INT8RANGE", range!('(', ')'), "\"(,)\"", range!('[' 10i64, ')'), "\"[10,)\"", range!('(', 10i64 ')'), "\"(,10)\""); } #[test] fn test_hstore_params() { macro_rules! 
make_map( ($($k:expr => $v:expr),+) => ({ let mut map = HashMap::new(); $(map.insert($k, $v);)+ map }) ) test_type("hstore", [(Some(make_map!("a".to_string() => Some("1".to_string()))), "'a=>1'"), (Some(make_map!("hello".to_string() => Some("world!".to_string()), "hola".to_string() => Some("mundo!".to_string()), "what".to_string() => None)), "'hello=>world!,hola=>mundo!,what=>NULL'"), (None, "NULL")]); } fn test_nan_param<T: Float+ToSql+FromSql>(sql_type: &str) { let conn = or_fail!(PostgresConnection::connect("postgres://postgres@localhost", &NoSsl)); let stmt = or_fail!(conn.prepare(format!("SELECT 'NaN'::{}", sql_type)[])); let mut result = or_fail!(stmt.query([])); let val: T = result.next().unwrap().get(0u); assert!(val.is_nan()); let nan: T = Float::nan(); let stmt = or_fail!(conn.prepare(format!("SELECT $1::{}", sql_type)[])); let mut result = or_fail!(stmt.query(&[&nan])); let val: T = result.next().unwrap().get(0u); assert!(val.is_nan()) } #[test] fn test_f32_nan_param() { test_nan_param::<f32>("REAL"); } #[test] fn test_f64_nan_param() { test_nan_param::<f64>("DOUBLE PRECISION"); } #[test] fn test_jsonarray_params() { test_array_params!("JSON", json::from_str("[10, 11, 12]").unwrap(), "\"[10,11,12]\"", json::from_str(r#"{"a": 10, "b": null}"#).unwrap(), r#""{\"a\": 10, \"b\": null}""#, json::from_str(r#"{"a": [10], "b": true}"#).unwrap(), r#""{\"a\": [10], \"b\": true}""#); } #[test] fn test_pg_database_datname() { let conn = or_fail!(PostgresConnection::connect("postgres://postgres@localhost", &NoSsl)); let stmt = or_fail!(conn.prepare("SELECT datname FROM pg_database")); let mut result = or_fail!(stmt.query([])); let next = result.next().unwrap(); or_fail!(next.get_opt::<uint, String>(0)); or_fail!(next.get_opt::<&str, String>("datname")); }<|fim▁end|>
}
<|file_name|>list.js<|end_file_name|><|fim▁begin|>import Ember from 'ember'; const { Controller } = Ember; export default Controller.extend({ columns: [ { propertyName : 'name', template : 'components/ui-table/cell/cell-event', title : 'Name' }, { propertyName : 'starts-at', template : 'components/ui-table/cell/cell-date', title : 'Date' }, { propertyName : 'roles', template : 'components/ui-table/cell/cell-roles', title : 'Roles', disableSorting : true, disableFiltering : true }, { propertyName : 'sessionsByState', template : 'components/ui-table/cell/cell-sessions', title : 'Sessions',<|fim▁hole|> disableFiltering : true }, { propertyName : 'speakers.length', title : 'Speakers', disableSorting : true, disableFiltering : true }, { propertyName : 'tickets', template : 'components/ui-table/cell/cell-tickets', title : 'Tickets', disableSorting : true, disableFiltering : true }, { propertyName : 'url', template : 'components/ui-table/cell/cell-link', title : 'Public URL', disableSorting : true, disableFiltering : true }, { template : 'components/ui-table/cell/cell-buttons', title : 'Action', disableSorting : true, disableFiltering : true } ], actions: { moveToDetails(id) { this.transitionToRoute('events.view', id); }, editEvent(id) { this.transitionToRoute('events.view.edit.basic-details', id); }, openDeleteEventModal(id, name) { this.set('isEventDeleteModalOpen', true); this.set('confirmName', ''); this.set('eventName', name); this.set('eventId', id); }, deleteEvent() { this.set('isLoading', true); this.store.findRecord('event', this.get('eventId'), { backgroundReload: false }).then(function(event) { event.destroyRecord(); }) .then(() => { this.notify.success(this.l10n.t('Event has been deleted successfully.')); }) .catch(()=> { this.notify.error(this.l10n.t('An unexpected error has occurred.')); }) .finally(() => { this.set('isLoading', false); }); this.set('isEventDeleteModalOpen', false); } } });<|fim▁end|>
disableSorting : true,
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from django.utils.translation import ugettext_lazy as _ from appconf import AppConf trans_app_label = _('Core') class OppsCoreConf(AppConf): DEFAULT_URLS = ('127.0.0.1', 'localhost',) SHORT = 'googl' SHORT_URL = 'googl.short.GooglUrlShort' CHANNEL_CONF = {} VIEWS_LIMIT = None PAGINATE_BY = 10 PAGINATE_SUFFIX = u'' PAGINATE_NOT_APP = [] CHECK_MOBILE = False DOMAIN_MOBILE = u'' PROTOCOL_MOBILE = u'http' ADMIN_RULES = {} RELATED_POSTS_PLACEHOLDER = "---related---" CACHE_PREFIX = 'opps' CACHE_EXPIRE = 300 CACHE_EXPIRE_LIST = 300 CACHE_EXPIRE_DETAIL = 300 RSS_LINK_TEMPLATE = '<a href="{}" class="ir ico ico-rss">RSS</a>' LIST_MODELS = ('Post',) RECOMMENDATION_RANGE_DAYS = 180 SMART_SLUG_ENABLED = True MENU = True MIRROR_CHANNEL = False CONTAINERS_BLACKLIST = ['Entry'] CONTAINERS_SITE_ID = None # default settings for tinymce EDITOR = { 'editor': 'tinymce', 'height': 400, 'js': ('/static/tinymce/tinymce.min.js',), "theme": "modern", "plugins": [ """advlist autolink lists link image charmap print preview hr anchor pagebreak """, "searchreplace wordcount visualblocks visualchars code fullscreen", """insertdatetime media nonbreaking save table contextmenu directionality""", "template paste textcolor opps" ], "toolbar1": """insertfile undo redo | styleselect | bold italic | alignleft aligncenter alignright alignjustify | bullist numlist outdent indent | link image media | print preview | forecolor backcolor | opps""", "image_advtab": True, "templates": [ {"title": 'Related', "content": RELATED_POSTS_PLACEHOLDER}, ], "file_browser_callback": 'CustomFileBrowser', } class Meta: prefix = 'opps' class GrapelliConf(AppConf): ADMIN_TITLE = "Opps CMS Admin" INDEX_DASHBOARD = 'opps.contrib.admin.dashboard.CustomIndexDashboard' class Meta: prefix = 'GRAPPELLI' class AdminConf(AppConf): SHORTCUTS = [ { 'shortcuts': [ { 'url_name': 'admin:articles_post_add', 'title': '+ Notícia', 'class': 'file3', 'help': 
'Clique para adicionar uma nova notícia' }, { 'url_name': 'admin:articles_post_changelist', 'title': 'Notícias', 'count': 'opps.contrib.admin.shortcuts.count_posts', 'class': 'file2', 'help': 'Clique para visualisar todas as notícias' }, { 'url_name': 'admin:images_image_add', 'title': '+ Imagem', 'class': 'picture', 'help': 'Clique para adicionar uma nova imagem' }, { 'url_name': 'admin:articles_album_changelist', 'title': 'Álbum', 'count': 'opps.contrib.admin.shortcuts.count_albums', 'class': 'camera', 'help': 'Clique para visualisar todos os álbuns' }, { 'url': '/', 'open_new_window': True, 'help': 'Clique para visualizar a home page do site' }, ] } ] SHORTCUTS_SETTINGS = { 'hide_app_list': True, 'open_new_window': False, } SHORTCUTS_CLASS_MAPPINGS_EXTRA = [ ('blogs_blogpost', 'blog') ]<|fim▁hole|> prefix = 'ADMIN' class StaticSiteMapsConf(AppConf): ROOT_SITEMAP = 'opps.sitemaps.feed.sitemaps' class Meta: prefix = 'staticsitemaps' class HaystackConf(AppConf): CONNECTIONS = { 'default': { 'ENGINE': 'haystack.backends.simple_backend.SimpleEngine', } } class Meta: prefix = 'haystack' class ThumborConf(AppConf): SERVER = 'http://localhost:8888' MEDIA_URL = 'http://localhost:8000/media' SECURITY_KEY = '' ARGUMENTS = {} ENABLED = False class Meta: prefix = 'thumbor' class DjangoConf(AppConf): CACHES = {'default': { 'BACKEND': 'django.core.cache.backends.dummy.DummyCache'}}<|fim▁end|>
class Meta:
<|file_name|>dbquery.py<|end_file_name|><|fim▁begin|>## This file is part of Invenio. ## Copyright (C) 2008, 2009, 2010, 2011, 2012, 2013 CERN. ## ## Invenio is free software; you can redistribute it and/or ## modify it under the terms of the GNU General Public License as ## published by the Free Software Foundation; either version 2 of the ## License, or (at your option) any later version. ## ## Invenio is distributed in the hope that it will be useful, but ## WITHOUT ANY WARRANTY; without even the implied warranty of ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ## General Public License for more details. ## ## You should have received a copy of the GNU General Public License ## along with Invenio; if not, write to the Free Software Foundation, Inc., ## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. """ Invenio utilities to run SQL queries. The main API functions are: - run_sql() - run_sql_many() - run_sql_with_limit() but see the others as well. """ __revision__ = "$Id$" # dbquery clients can import these from here: # pylint: disable=W0611 from MySQLdb import Warning, Error, InterfaceError, DataError, \ DatabaseError, OperationalError, IntegrityError, \ InternalError, NotSupportedError, \ ProgrammingError import gc import os import string import time import re from thread import get_ident from flask import current_app from werkzeug.utils import cached_property from invenio.base.globals import cfg from invenio.utils.datastructures import LazyDict from invenio.utils.serializers import serialize_via_marshal, \ deserialize_via_marshal class DBConnect(object): def __call__(self, *args, **kwargs): return self._connect(*args, **kwargs) @cached_property def _connect(self): if cfg['CFG_MISCUTIL_SQL_USE_SQLALCHEMY']: try: import sqlalchemy.pool as pool import MySQLdb as mysqldb mysqldb = pool.manage(mysqldb, use_threadlocal=True) connect = mysqldb.connect except ImportError: cfg['CFG_MISCUTIL_SQL_USE_SQLALCHEMY'] = False from MySQLdb import 
connect else: from MySQLdb import connect return connect def unlock_all(app): for dbhost in _DB_CONN.keys(): for db in _DB_CONN[dbhost].values(): try: cur = db.cur() cur.execute("UNLOCK TABLES") except: pass return app def _db_conn(): current_app.teardown_appcontext_funcs.append(unlock_all) out = {} out[cfg['CFG_DATABASE_HOST']] = {} out[cfg['CFG_DATABASE_SLAVE']] = {} return out connect = DBConnect() _DB_CONN = LazyDict(_db_conn) class InvenioDbQueryWildcardLimitError(Exception):<|fim▁hole|> def __init__(self, res): """Initialization.""" self.res = res def _db_login(dbhost=None, relogin=0): """Login to the database.""" ## Note: we are using "use_unicode=False", because we want to ## receive strings from MySQL as Python UTF-8 binary string ## objects, not as Python Unicode string objects, as of yet. ## Note: "charset='utf8'" is needed for recent MySQLdb versions ## (such as 1.2.1_p2 and above). For older MySQLdb versions such ## as 1.2.0, an explicit "init_command='SET NAMES utf8'" parameter ## would constitute an equivalent. But we are not bothering with ## older MySQLdb versions here, since we are recommending to ## upgrade to more recent versions anyway. 
if dbhost is None: dbhost = cfg['CFG_DATABASE_HOST'] if cfg['CFG_MISCUTIL_SQL_USE_SQLALCHEMY']: return connect(host=dbhost, port=int(cfg['CFG_DATABASE_PORT']), db=cfg['CFG_DATABASE_NAME'], user=cfg['CFG_DATABASE_USER'], passwd=cfg['CFG_DATABASE_PASS'], use_unicode=False, charset='utf8') else: thread_ident = (os.getpid(), get_ident()) if relogin: connection = _DB_CONN[dbhost][thread_ident] = connect(host=dbhost, port=int(cfg['CFG_DATABASE_PORT']), db=cfg['CFG_DATABASE_NAME'], user=cfg['CFG_DATABASE_USER'], passwd=cfg['CFG_DATABASE_PASS'], use_unicode=False, charset='utf8') connection.autocommit(True) return connection else: if thread_ident in _DB_CONN[dbhost]: return _DB_CONN[dbhost][thread_ident] else: connection = _DB_CONN[dbhost][thread_ident] = connect(host=dbhost, port=int(cfg['CFG_DATABASE_PORT']), db=cfg['CFG_DATABASE_NAME'], user=cfg['CFG_DATABASE_USER'], passwd=cfg['CFG_DATABASE_PASS'], use_unicode=False, charset='utf8') connection.autocommit(True) return connection def _db_logout(dbhost=None): """Close a connection.""" if dbhost is None: dbhost = cfg['CFG_DATABASE_HOST'] try: del _DB_CONN[dbhost][(os.getpid(), get_ident())] except KeyError: pass def close_connection(dbhost=None): """ Enforce the closing of a connection Highly relevant in multi-processing and multi-threaded modules """ if dbhost is None: dbhost = cfg['CFG_DATABASE_HOST'] try: db = _DB_CONN[dbhost][(os.getpid(), get_ident())] cur = db.cursor() cur.execute("UNLOCK TABLES") db.close() del _DB_CONN[dbhost][(os.getpid(), get_ident())] except KeyError: pass def run_sql(sql, param=None, n=0, with_desc=False, with_dict=False, run_on_slave=False): """Run SQL on the server with PARAM and return result. @param param: tuple of string params to insert in the query (see notes below) @param n: number of tuples in result (0 for unbounded) @param with_desc: if True, will return a DB API 7-tuple describing columns in query. 
@param with_dict: if True, will return a list of dictionaries composed of column-value pairs @return: If SELECT, SHOW, DESCRIBE statements, return tuples of data, followed by description if parameter with_desc is provided. If SELECT and with_dict=True, return a list of dictionaries composed of column-value pairs, followed by description if parameter with_desc is provided. If INSERT, return last row id. Otherwise return SQL result as provided by database. @note: When the site is closed for maintenance (as governed by the config variable CFG_ACCESS_CONTROL_LEVEL_SITE), do not attempt to run any SQL queries but return empty list immediately. Useful to be able to have the website up while MySQL database is down for maintenance, hot copies, table repairs, etc. @note: In case of problems, exceptions are returned according to the Python DB API 2.0. The client code can import them from this file and catch them. """ if cfg['CFG_ACCESS_CONTROL_LEVEL_SITE'] == 3: # do not connect to the database as the site is closed for maintenance: return [] if param: param = tuple(param) dbhost = cfg['CFG_DATABASE_HOST'] if run_on_slave and cfg['CFG_DATABASE_SLAVE']: dbhost = cfg['CFG_DATABASE_SLAVE'] if 'sql-logger' in cfg.get('CFG_DEVEL_TOOLS', []): log_sql_query(dbhost, sql, param) try: db = _db_login(dbhost) cur = db.cursor() gc.disable() rc = cur.execute(sql, param) gc.enable() except (OperationalError, InterfaceError): # unexpected disconnect, bad malloc error, etc # FIXME: now reconnect is always forced, we may perhaps want to ping() first? 
try: db = _db_login(dbhost, relogin=1) cur = db.cursor() gc.disable() rc = cur.execute(sql, param) gc.enable() except (OperationalError, InterfaceError): # unexpected disconnect, bad malloc error, etc raise if string.upper(string.split(sql)[0]) in ("SELECT", "SHOW", "DESC", "DESCRIBE"): if n: recset = cur.fetchmany(n) else: recset = cur.fetchall() if with_dict: # return list of dictionaries # let's extract column names keys = [row[0] for row in cur.description] # let's construct a list of dictionaries list_dict_results = [dict(zip(*[keys, values])) for values in recset] if with_desc: return list_dict_results, cur.description else: return list_dict_results else: if with_desc: return recset, cur.description else: return recset else: if string.upper(string.split(sql)[0]) == "INSERT": rc = cur.lastrowid return rc def run_sql_many(query, params, limit=None, run_on_slave=False): """Run SQL on the server with PARAM. This method does executemany and is therefore more efficient than execute but it has sense only with queries that affect state of a database (INSERT, UPDATE). 
That is why the results just count number of affected rows @param params: tuple of tuple of string params to insert in the query @param limit: query will be executed in parts when number of parameters is greater than limit (each iteration runs at most `limit' parameters) @return: SQL result as provided by database """ if limit is None: limit = cfg['CFG_MISCUTIL_SQL_RUN_SQL_MANY_LIMIT'] dbhost = cfg['CFG_DATABASE_HOST'] if run_on_slave and cfg['CFG_DATABASE_SLAVE']: dbhost = cfg['CFG_DATABASE_SLAVE'] i = 0 r = None while i < len(params): ## make partial query safely (mimicking procedure from run_sql()) try: db = _db_login(dbhost) cur = db.cursor() gc.disable() rc = cur.executemany(query, params[i:i + limit]) gc.enable() except (OperationalError, InterfaceError): try: db = _db_login(dbhost, relogin=1) cur = db.cursor() gc.disable() rc = cur.executemany(query, params[i:i + limit]) gc.enable() except (OperationalError, InterfaceError): raise ## collect its result: if r is None: r = rc else: r += rc i += limit return r def run_sql_with_limit(query, param=None, n=0, with_desc=False, wildcard_limit=0, run_on_slave=False): """This function should be used in some cases, instead of run_sql function, in order to protect the db from queries that might take a log time to respond Ex: search queries like [a-z]+ ; cern*; a->z; The parameters are exactly the ones for run_sql function. In case the query limit is reached, an InvenioDbQueryWildcardLimitError will be raised. """ try: dummy = int(wildcard_limit) except ValueError: raise if wildcard_limit < 1:#no limit on the wildcard queries return run_sql(query, param, n, with_desc, run_on_slave=run_on_slave) safe_query = query + " limit %s" %wildcard_limit res = run_sql(safe_query, param, n, with_desc, run_on_slave=run_on_slave) if len(res) == wildcard_limit: raise InvenioDbQueryWildcardLimitError(res) return res def blob_to_string(ablob): """Return string representation of ABLOB. 
Useful to treat MySQL BLOBs in the same way for both recent and old MySQLdb versions. """ if ablob: if type(ablob) is str: # BLOB is already a string in MySQLdb 0.9.2 return ablob else: # BLOB is array.array in MySQLdb 1.0.0 and later return ablob.tostring() else: return ablob def log_sql_query(dbhost, sql, param=None): """Log SQL query into prefix/var/log/dbquery.log log file. In order to enable logging of all SQL queries, please uncomment one line in run_sql() above. Useful for fine-level debugging only! """ from flask import current_app from invenio.utils.date import convert_datestruct_to_datetext from invenio.utils.text import indent_text date_of_log = convert_datestruct_to_datetext(time.localtime()) message = date_of_log + '-->\n' message += indent_text('Host:\n' + indent_text(str(dbhost), 2, wrap=True), 2) message += indent_text('Query:\n' + indent_text(str(sql), 2, wrap=True), 2) message += indent_text('Params:\n' + indent_text(str(param), 2, wrap=True), 2) message += '-----------------------------\n\n' try: current_app.logger.info(message) except: pass def get_table_update_time(tablename, run_on_slave=False): """Return update time of TABLENAME. TABLENAME can contain wildcard `%' in which case we return the maximum update time value. """ # Note: in order to work with all of MySQL 4.0, 4.1, 5.0, this # function uses SHOW TABLE STATUS technique with a dirty column # position lookup to return the correct value. (Making use of # Index_Length column that is either of type long (when there are # some indexes defined) or of type None (when there are no indexes # defined, e.g. table is empty). When we shall use solely # MySQL-5.0, we can employ a much cleaner technique of using # SELECT UPDATE_TIME FROM INFORMATION_SCHEMA.TABLES WHERE # table_name='collection'. 
res = run_sql("SHOW TABLE STATUS LIKE %s", (tablename,), run_on_slave=run_on_slave) update_times = [] # store all update times for row in res: if type(row[10]) is long or \ row[10] is None: # MySQL-4.1 and 5.0 have creation_time in 11th position, # so return next column: update_times.append(str(row[12])) else: # MySQL-4.0 has creation_time in 10th position, which is # of type datetime.datetime or str (depending on the # version of MySQLdb), so return next column: update_times.append(str(row[11])) return max(update_times) def get_table_status_info(tablename, run_on_slave=False): """Return table status information on TABLENAME. Returned is a dict with keys like Name, Rows, Data_length, Max_data_length, etc. If TABLENAME does not exist, return empty dict. """ # Note: again a hack so that it works on all MySQL 4.0, 4.1, 5.0 res = run_sql("SHOW TABLE STATUS LIKE %s", (tablename,), run_on_slave=run_on_slave) table_status_info = {} # store all update times for row in res: if type(row[10]) is long or \ row[10] is None: # MySQL-4.1 and 5.0 have creation time in 11th position: table_status_info['Name'] = row[0] table_status_info['Rows'] = row[4] table_status_info['Data_length'] = row[6] table_status_info['Max_data_length'] = row[8] table_status_info['Create_time'] = row[11] table_status_info['Update_time'] = row[12] else: # MySQL-4.0 has creation_time in 10th position, which is # of type datetime.datetime or str (depending on the # version of MySQLdb): table_status_info['Name'] = row[0] table_status_info['Rows'] = row[3] table_status_info['Data_length'] = row[5] table_status_info['Max_data_length'] = row[7] table_status_info['Create_time'] = row[10] table_status_info['Update_time'] = row[11] return table_status_info def wash_table_column_name(colname): """ Evaluate table-column name to see if it is clean. This function accepts only names containing [a-zA-Z0-9_]. 
@param colname: The string to be checked @type colname: str @return: colname if test passed @rtype: str @raise Exception: Raises an exception if colname is invalid. """ if re.search('[^\w]', colname): raise Exception('The table column %s is not valid.' % repr(colname)) return colname def real_escape_string(unescaped_string, run_on_slave=False): """ Escapes special characters in the unescaped string for use in a DB query. @param unescaped_string: The string to be escaped @type unescaped_string: str @return: Returns the escaped string @rtype: str """ dbhost = cfg['CFG_DATABASE_HOST'] if run_on_slave and cfg['CFG_DATABASE_SLAVE']: dbhost = cfg['CFG_DATABASE_SLAVE'] connection_object = _db_login(dbhost) escaped_string = connection_object.escape_string(unescaped_string) return escaped_string<|fim▁end|>
"""Exception raised when query limit reached."""
<|file_name|>macros.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2017, All Contributors (see CONTRIBUTORS file) // // This Source Code Form is subject to the terms of the Mozilla Public // License, v. 2.0. If a copy of the MPL was not distributed with this // file, You can obtain one at http://mozilla.org/MPL/2.0/. #[macro_export] macro_rules! builtins { ($file: expr) => { lazy_static! { static ref BUILTIN_FILE: &'static [u8] = include_bytes!($file); static ref BUILTIN_DEFS: Vec<Vec<u8>> = ::pumpkinscript::textparser::programs(*BUILTIN_FILE).unwrap().1; static ref BUILTINS: ::std::collections::BTreeMap<&'static [u8], &'static [u8]> = { let mut map = ::std::collections::BTreeMap::new(); let ref defs : Vec<Vec<u8>> = *BUILTIN_DEFS; for definition in defs { match ::pumpkinscript::binparser::instruction(&definition) { ::pumpkinscript::ParseResult::Done(&[0x81, b':', ref rest..], _) => { let instruction = &definition[0..definition.len() - rest.len() - 2]; map.insert(instruction, rest); }, other => panic!("builtin definition parse error {:?}", other) } } map }; }}; } #[macro_export] macro_rules! handle_builtins { () => { #[inline] fn handle_builtins(&mut self, env: &mut Env<'a>, instruction: &'a [u8], _: EnvId) -> PassResult<'a> { match BUILTINS.get(instruction) { Some(val) => { env.program.push(val); Ok(()) }, None => Err(Error::UnknownInstruction), } } }; } #[macro_export] macro_rules! handle_error { ($env: expr, $err: expr) => { handle_error!($env, $err, Ok(())) }; ($env: expr, $err: expr, $body: expr) => {{ if $env.tracking_errors > 0 { $env.aborting_try.push($err); $body } else { return Err($err) } }}; } #[macro_export] macro_rules! return_unless_instructions_equal { ($instruction: expr, $exp: expr) => { if $instruction != $exp { return Err(Error::UnknownInstruction) } }; } #[macro_export] macro_rules! 
error_program { ($desc: expr, $details: expr, $code: expr) => {{ let mut error = Vec::new(); write_size_header!($desc, error); error.extend_from_slice($desc); if $details.len() > 0 { write_size!($details.len() + offset_by_size($details.len()), error); } write_size_header!($details, error); error.extend_from_slice($details); error.extend_from_slice($code); Error::ProgramError(error) }} } #[macro_export] macro_rules! error_database { ($err: expr) => {{ let vec = Vec::new(); error_program!( $err.description().as_bytes(), &vec, ERROR_DATABASE ) }} } #[macro_export] macro_rules! error_no_transaction { () => {{ let vec = Vec::new(); error_program!( "No transaction".as_bytes(), &vec, ERROR_NO_TX ) }} } #[macro_export] macro_rules! error_unknown_key { ($key: expr) => {{ error_program!( "Unknown key".as_bytes(), $key, ERROR_UNKNOWN_KEY ) }} } #[macro_export] macro_rules! error_duplicate_key { ($key: expr) => {{ error_program!( "Duplicate key".as_bytes(), $key, ERROR_DUPLICATE_KEY ) }} } #[macro_export] macro_rules! error_decoding { () => {{ let vec = Vec::new(); error_program!( "Decoding error".as_bytes(), &vec, ERROR_DECODING ) }} } #[macro_export] macro_rules! error_empty_stack { () => {{ let vec = Vec::new(); error_program!( "Empty stack".as_bytes(), &vec, ERROR_EMPTY_STACK ) }} } #[macro_export] macro_rules! error_invalid_value { ($value: expr) => {{ error_program!( "Invalid value".as_bytes(), &$value, ERROR_INVALID_VALUE ) }} } #[macro_export] macro_rules! error_no_value { () => {{ let vec = Vec::new(); error_program!( "No value".as_bytes(), &vec, ERROR_NO_VALUE ) }} } #[macro_export] macro_rules! 
error_unknown_instruction { ($instruction: expr) => { { let (_, w) = binparser::instruction_or_internal_instruction($instruction).unwrap(); let instruction = match str::from_utf8(&w[1..]) { Ok(instruction) => instruction, Err(_) => "Error parsing instruction" }; let desc = format!("Unknown instruction: {}", instruction); let desc_bytes = desc.as_bytes(); error_program!( desc_bytes, $instruction, ERROR_UNKNOWN_INSTRUCTION ) } } } #[macro_export] macro_rules! alloc_slice { ($size: expr, $env: expr) => {{ let slice = $env.alloc($size); if slice.is_err() { return Err(slice.unwrap_err()); } slice.unwrap() }}; } #[macro_export] macro_rules! alloc_and_write { ($bytes: expr, $env: expr) => {{ let slice = alloc_slice!($bytes.len(), $env); slice.copy_from_slice($bytes); slice }}; } // TODO: use or remove? #[allow(unused_macros)] #[cfg(test)] macro_rules! eval { ($script: expr, $env: ident, $expr: expr) => { eval!($script, $env, _result, $expr); }; ($script: expr, $env: ident, $result: ident, $expr: expr) => {{ let (sender, receiver) = mpsc::channel(); eval!($script, $env, $result, sender, receiver, $expr); }}; ($script: expr, $env: ident, $result: ident, $sender: expr, $receiver: ident, $expr: expr) => { { use $crate::script::SchedulerHandle; let dir = TempDir::new("pumpkindb").unwrap(); let path = dir.path().to_str().unwrap(); fs::create_dir_all(path).expect("can't create directory"); let env = unsafe { lmdb::EnvBuilder::new() .expect("can't create env builder") .open(path, lmdb::open::NOTLS, 0o600) .expect("can't open env") }; let db = Arc::new(storage::Storage::new(&env)); crossbeam::scope(|scope| { let mut nvmem = MmapedFile::new_anonymous(20).unwrap(); let region = nvmem.claim(20).unwrap(); let timestamp = Arc::new(timestamp::Timestamp::new(region)); let mut simple = messaging::Simple::new(); let messaging_accessor = simple.accessor(); let publisher_thread = scope.spawn(move || simple.run()); let publisher_clone = messaging_accessor.clone(); let subscriber_clone = 
messaging_accessor.clone(); let timestamp_clone = timestamp.clone(); let (mut scheduler, sender) = Scheduler::new( dispatcher::StandardDispatcher::new(db.clone(), publisher_clone.clone(), subscriber_clone.clone(), timestamp_clone)); let handle = scope.spawn(move || scheduler.run()); let script = parse($script).unwrap(); let (callback, receiver) = mpsc::channel::<ResponseMessage>(); sender.schedule_env(EnvId::new(), script.clone(), callback, Box::new($sender)); match receiver.recv() { Ok(ResponseMessage::EnvTerminated(_, stack, stack_size)) => { sender.shutdown(); messaging_accessor.shutdown(); let $result = Ok::<(), Error>(()); let mut stack_ = Vec::with_capacity(stack.len()); for i in 0..(&stack).len() { stack_.push((&stack[i]).as_slice()); } let mut $env = Env::new_with_stack(stack_).unwrap(); $expr; } Ok(ResponseMessage::EnvFailed(_, err, stack, stack_size)) => { sender.shutdown(); messaging_accessor.shutdown(); let $result = Err::<(), Error>(err); let stack = stack.unwrap(); let mut stack_ = Vec::with_capacity(stack.len()); for i in 0..(&stack).len() { stack_.push((&stack)[i].as_slice()); } let mut $env = Env::new_with_stack(stack_).unwrap(); $expr; } Err(err) => { sender.shutdown(); messaging_accessor.shutdown(); panic!("recv error: {:?}", err); } } let _ = handle.join(); let _ = publisher_thread.join(); }); }; } } // TODO: use or remove? #[allow(unused_macros)] #[cfg(test)] macro_rules! 
bench_eval { ($script: expr, $b: expr) => { { use $crate::script::SchedulerHandle; let dir = TempDir::new("pumpkindb").unwrap(); let path = dir.path().to_str().unwrap(); fs::create_dir_all(path).expect("can't create directory"); let env = unsafe { let mut builder = lmdb::EnvBuilder::new().expect("can't create env builder"); builder.set_mapsize(1024 * 1024 * 1024).expect("can't set mapsize"); builder.open(path, lmdb::open::NOTLS, 0o600).expect("can't open env") }; let db = Arc::new(storage::Storage::new(&env)); let cpus = ::num_cpus::get(); crossbeam::scope(|scope| { let mut simple = messaging::Simple::new(); let messaging_accessor = simple.accessor(); let messaging_accessor_ = simple.accessor(); let simple_thread = scope.spawn(move || simple.run()); let mut nvmem = MmapedFile::new_anonymous(20).unwrap(); let region = nvmem.claim(20).unwrap(); let timestamp = Arc::new(timestamp::Timestamp::new(region)); let mut handles = vec![]; let mut senders = vec![]; for i in 0..cpus { let publisher_clone = messaging_accessor.clone(); let subscriber_clone = messaging_accessor.clone(); let timestamp_clone = timestamp.clone(); let (mut scheduler, sender) = Scheduler::new( dispatcher::StandardDispatcher::new(db.clone(), publisher_clone.clone(), subscriber_clone.clone(), timestamp_clone)); let storage = db.clone(); let handle = scope.spawn(move || scheduler.run()); handles.push(handle); senders.push(sender.clone()); } let original_senders = senders.clone(); let script = parse($script).unwrap(); $b.iter(move || { let (callback, receiver) = mpsc::channel::<ResponseMessage>(); let (sender0, _) = mpsc::channel(); let _ = senders.clone().schedule_env(EnvId::new(), script.clone(), callback, Box::new(sender0)); match receiver.recv() { Ok(ResponseMessage::EnvTerminated(_, stack, stack_size)) => (), Ok(ResponseMessage::EnvFailed(_, err, stack, stack_size)) => { senders.shutdown(); messaging_accessor.shutdown(); panic!("error: {:?}", err); } Err(err) => { senders.shutdown(); 
messaging_accessor.shutdown(); panic!("recv error: {:?}", err); } } }); original_senders.shutdown(); messaging_accessor_.shutdown(); for handle in handles { handle.join(); } simple_thread.join(); }); }; } } // TODO: use or remove? #[allow(unused_macros)] #[cfg(test)] macro_rules! data { ($ptr:expr) => { { let (_, size) = binparser::data_size($ptr).unwrap(); &$ptr[offset_by_size(size)..$ptr.len()] }<|fim▁hole|>// TODO: use or remove? #[allow(unused_macros)] #[cfg(test)] macro_rules! parsed_data { ($s: expr) => { data!(parse($s).unwrap().as_slice()) }; } // TODO: use or remove? #[allow(unused_macros)] #[cfg(test)] macro_rules! assert_error { ($result: expr, $expected: expr) => {{ assert!($result.is_err()); let error = $result.err().unwrap(); assert!(matches!(error, Error::ProgramError(_))); if let Error::ProgramError(inner) = error { assert_eq!(inner, parsed_data!($expected)); } else { } }}; }<|fim▁end|>
}; }
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from admin_views import *<|fim▁end|>
<|file_name|>models.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # This file is part of PrawoKultury, licensed under GNU Affero GPLv3 or later. # Copyright © Fundacja Nowoczesna Polska. See NOTICE for more information. # from datetime import datetime from django.core.mail import send_mail, mail_managers from django.conf import settings from django.contrib.sites.models import Site from django.db import models from django.template.loader import render_to_string from django.utils.translation import ugettext_lazy as _, override import getpaid from migdal.models import Entry from . import app_settings class Offer(models.Model): """ A fundraiser for a particular book. """<|fim▁hole|> entry = models.OneToOneField(Entry, models.CASCADE) # filter publications! price = models.DecimalField(_('price'), decimal_places=2, max_digits=6) cost_const = models.DecimalField(decimal_places=2, max_digits=6) cost_per_item = models.DecimalField(decimal_places=2, max_digits=6, default=0) class Meta: verbose_name = _('offer') verbose_name_plural = _('offers') ordering = ['entry'] def __unicode__(self): return unicode(self.entry) def get_absolute_url(self): return self.entry.get_absolute_url() def total_per_item(self): return self.price + self.cost_per_item def price_per_items(self, items): return self.cost_const + items * self.total_per_item() class Order(models.Model): """ A person paying for a book. The payment was completed if and only if payed_at is set. 
""" offer = models.ForeignKey(Offer, models.CASCADE, verbose_name=_('offer')) items = models.IntegerField(verbose_name=_('items'), default=1) name = models.CharField(_('name'), max_length=127, blank=True) email = models.EmailField(_('email'), db_index=True) address = models.TextField(_('address'), db_index=True) payed_at = models.DateTimeField(_('payed at'), null=True, blank=True, db_index=True) language_code = models.CharField(max_length = 2, null = True, blank = True) class Meta: verbose_name = _('order') verbose_name_plural = _('orders') ordering = ['-payed_at'] def __unicode__(self): return "%s (%d egz.)" % (unicode(self.offer), self.items) def get_absolute_url(self): return self.offer.get_absolute_url() def amount(self): return self.offer.price_per_items(self.items) def notify(self, subject, template_name, extra_context=None): context = { 'order': self, 'site': Site.objects.get_current(), } if extra_context: context.update(extra_context) with override(self.language_code or app_settings.DEFAULT_LANGUAGE): send_mail(subject, render_to_string(template_name, context), getattr(settings, 'CONTACT_EMAIL', '[email protected]'), [self.email], fail_silently=False ) def notify_managers(self, subject, template_name, extra_context=None): context = { 'order': self, 'site': Site.objects.get_current(), } if extra_context: context.update(extra_context) with override(app_settings.DEFAULT_LANGUAGE): mail_managers(subject, render_to_string(template_name, context)) # Register the Order model with django-getpaid for payments. getpaid.register_to_payment(Order, unique=False, related_name='payment') def new_payment_query_listener(sender, order=None, payment=None, **kwargs): """ Set payment details for getpaid. """ payment.amount = order.amount() payment.currency = 'PLN' getpaid.signals.new_payment_query.connect(new_payment_query_listener) def user_data_query_listener(sender, order, user_data, **kwargs): """ Set user data for payment. 
""" user_data['email'] = order.email getpaid.signals.user_data_query.connect(user_data_query_listener) def payment_status_changed_listener(sender, instance, old_status, new_status, **kwargs): """ React to status changes from getpaid. """ if old_status != 'paid' and new_status == 'paid': instance.order.payed_at = datetime.now() instance.order.save() instance.order.notify( _('Your payment has been completed.'), 'shop/email/payed.txt' ) instance.order.notify_managers( _('New order has been placed.'), 'shop/email/payed_managers.txt' ) getpaid.signals.payment_status_changed.connect(payment_status_changed_listener, dispatch_uid='shop.models.payment_status_changed_listener')<|fim▁end|>
<|file_name|>test_config.py<|end_file_name|><|fim▁begin|>import codecs import mock import os import tempfile import unittest from time import strftime import six from kinto import config from kinto import __version__ class ConfigTest(unittest.TestCase): def test_transpose_parameters_into_template(self):<|fim▁hole|> config.render_template(template, dest, secret='secret', storage_backend='storage_backend', cache_backend='cache_backend', permission_backend='permission_backend', storage_url='storage_url', cache_url='cache_url', permission_url='permission_url', kinto_version='kinto_version', config_file_timestamp='config_file_timestamp') with codecs.open(dest, 'r', encoding='utf-8') as d: destination_temp = d.read() sample_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), "test_configuration/test.ini") with codecs.open(sample_path, 'r', encoding='utf-8') as c: sample = c.read() self.assertEqual(destination_temp, sample) def test_create_destination_directory(self): dest = os.path.join(tempfile.mkdtemp(), 'config', 'kinto.ini') config.render_template("kinto.tpl", dest, secret='secret', storage_backend='storage_backend', cache_backend='cache_backend', permission_backend='permission_backend', storage_url='storage_url', cache_url='cache_url', permission_url='permission_url', kinto_version='kinto_version', config_file_timestamp='config_file_timestamp') self.assertTrue(os.path.exists(dest)) @mock.patch('kinto.config.render_template') def test_hmac_secret_is_text(self, mocked_render_template): config.init('kinto.ini', 'postgresql') args, kwargs = list(mocked_render_template.call_args) self.assertEquals(type(kwargs['secret']), six.text_type) @mock.patch('kinto.config.render_template') def test_init_postgresql_values(self, mocked_render_template): config.init('kinto.ini', 'postgresql') args, kwargs = list(mocked_render_template.call_args) self.assertEquals(args, ('kinto.tpl', 'kinto.ini')) postgresql_url = "postgres://postgres:postgres@localhost/postgres" 
self.assertDictEqual(kwargs, { 'secret': kwargs['secret'], 'storage_backend': 'kinto.core.storage.postgresql', 'cache_backend': 'kinto.core.cache.postgresql', 'permission_backend': 'kinto.core.permission.postgresql', 'storage_url': postgresql_url, 'cache_url': postgresql_url, 'permission_url': postgresql_url, 'kinto_version': __version__, 'config_file_timestamp': strftime('%a, %d %b %Y %H:%M:%S %z') }) @mock.patch('kinto.config.render_template') def test_init_redis_values(self, mocked_render_template): config.init('kinto.ini', 'redis') args, kwargs = list(mocked_render_template.call_args) self.assertEquals(args, ('kinto.tpl', 'kinto.ini')) redis_url = "redis://localhost:6379" self.maxDiff = None # See the full diff in case of error self.assertDictEqual(kwargs, { 'secret': kwargs['secret'], 'storage_backend': 'kinto_redis.storage', 'cache_backend': 'kinto_redis.cache', 'permission_backend': 'kinto_redis.permission', 'storage_url': redis_url + '/1', 'cache_url': redis_url + '/2', 'permission_url': redis_url + '/3', 'kinto_version': __version__, 'config_file_timestamp': strftime('%a, %d %b %Y %H:%M:%S %z') }) @mock.patch('kinto.config.render_template') def test_init_memory_values(self, mocked_render_template): config.init('kinto.ini', 'memory') args, kwargs = list(mocked_render_template.call_args) self.assertEquals(args, ('kinto.tpl', 'kinto.ini')) self.assertDictEqual(kwargs, { 'secret': kwargs['secret'], 'storage_backend': 'kinto.core.storage.memory', 'cache_backend': 'kinto.core.cache.memory', 'permission_backend': 'kinto.core.permission.memory', 'storage_url': '', 'cache_url': '', 'permission_url': '', 'kinto_version': __version__, 'config_file_timestamp': strftime('%a, %d %b %Y %H:%M:%S %z') }) def test_render_template_creates_directory_if_necessary(self): temp_path = tempfile.mkdtemp() destination = os.path.join(temp_path, 'config/kinto.ini') config.render_template('kinto.tpl', destination, **{ 'secret': "abcd-ceci-est-un-secret", 'storage_backend': 
'kinto.core.storage.memory', 'cache_backend': 'kinto.core.cache.memory', 'permission_backend': 'kinto.core.permission.memory', 'storage_url': '', 'cache_url': '', 'permission_url': '', 'kinto_version': '', 'config_file_timestamp': '' }) self.assertTrue(os.path.exists(destination)) def test_render_template_works_with_file_in_cwd(self): temp_path = tempfile.mkdtemp() os.chdir(temp_path) config.render_template('kinto.tpl', 'kinto.ini', **{ 'secret': "abcd-ceci-est-un-secret", 'storage_backend': 'kinto.core.storage.memory', 'cache_backend': 'kinto.core.cache.memory', 'permission_backend': 'kinto.core.permission.memory', 'storage_url': '', 'cache_url': '', 'permission_url': '', 'kinto_version': '', 'config_file_timestamp': '' }) self.assertTrue(os.path.exists( os.path.join(temp_path, 'kinto.ini') ))<|fim▁end|>
self.maxDiff = None template = "kinto.tpl" dest = tempfile.mktemp()
<|file_name|>PositionedTooltips.tsx<|end_file_name|><|fim▁begin|>import * as React from 'react'; import { makeStyles } from '@material-ui/core/styles'; import Grid from '@material-ui/core/Grid'; import Button from '@material-ui/core/Button'; import Tooltip from '@material-ui/core/Tooltip'; const useStyles = makeStyles({ root: { width: 500, }, }); export default function PositionedTooltips() { const classes = useStyles(); return ( <div className={classes.root}> <Grid container justifyContent="center"> <Grid item><|fim▁hole|> <Button>top</Button> </Tooltip> <Tooltip title="Add" placement="top-end"> <Button>top-end</Button> </Tooltip> </Grid> </Grid> <Grid container justifyContent="center"> <Grid item xs={6}> <Tooltip title="Add" placement="left-start"> <Button>left-start</Button> </Tooltip> <br /> <Tooltip title="Add" placement="left"> <Button>left</Button> </Tooltip> <br /> <Tooltip title="Add" placement="left-end"> <Button>left-end</Button> </Tooltip> </Grid> <Grid item container xs={6} alignItems="flex-end" direction="column"> <Grid item> <Tooltip title="Add" placement="right-start"> <Button>right-start</Button> </Tooltip> </Grid> <Grid item> <Tooltip title="Add" placement="right"> <Button>right</Button> </Tooltip> </Grid> <Grid item> <Tooltip title="Add" placement="right-end"> <Button>right-end</Button> </Tooltip> </Grid> </Grid> </Grid> <Grid container justifyContent="center"> <Grid item> <Tooltip title="Add" placement="bottom-start"> <Button>bottom-start</Button> </Tooltip> <Tooltip title="Add" placement="bottom"> <Button>bottom</Button> </Tooltip> <Tooltip title="Add" placement="bottom-end"> <Button>bottom-end</Button> </Tooltip> </Grid> </Grid> </div> ); }<|fim▁end|>
<Tooltip title="Add" placement="top-start"> <Button>top-start</Button> </Tooltip> <Tooltip title="Add" placement="top">
<|file_name|>GraphicFunctions.cpp<|end_file_name|><|fim▁begin|>/******************************************************************* This file is part of iContact. iContact is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. iContact is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with iContact. If not, see <http://www.gnu.org/licenses/>. *******************************************************************/ #include "stdafx.h" #include "GraphicFunctions.h" #include "macros.h" HFONT BuildFont(int iFontSize, BOOL bBold, BOOL bItalic) { LOGFONT lf; memset(&lf, 0, sizeof(LOGFONT)); lf.lfHeight = iFontSize; lf.lfWidth = 0; lf.lfEscapement = 0; lf.lfOrientation = 0; lf.lfWeight = bBold ? 
600 : 500; lf.lfItalic = bItalic; lf.lfUnderline = false; lf.lfStrikeOut = false; lf.lfCharSet = EASTEUROPE_CHARSET; lf.lfOutPrecision = OUT_RASTER_PRECIS; lf.lfClipPrecision = CLIP_DEFAULT_PRECIS; lf.lfQuality = CLEARTYPE_QUALITY; lf.lfPitchAndFamily = DEFAULT_PITCH | FF_DONTCARE; _tcsncpy (lf.lfFaceName, TEXT("Tahoma"), LF_FACESIZE); //lf.lfFaceName[LF_FACESIZE-1] = L'\0'; // Ensure null termination return CreateFontIndirect(&lf); } // ************************************************************************** // Function Name: DrawRect // // Purpose: Draws a rectangle with the coordinates and the color passed in // // Arguments: // IN HDC hdc - DC for drawing // IN LPRECT prc - Area to draw the rectangle // IN COLORREF clr - color to draw the rectangle // // Return Values: // NONE // void DrawRect(HDC hdc, LPRECT prc, COLORREF clr) { COLORREF clrSave = SetBkColor(hdc, clr); ExtTextOut(hdc,0,0,ETO_OPAQUE,prc,NULL,0,NULL); SetBkColor(hdc, clrSave); } void DrawGradientGDI(HDC tdc, RECT iRect, COLORREF StartRGB, COLORREF EndRGB) { unsigned int Shift = 8; TRIVERTEX vert[2] ; GRADIENT_RECT gRect; vert [0] .x = iRect.left; vert [0] .y = iRect.top; vert [0] .Red = GetRValue(StartRGB) << Shift; vert [0] .Green = GetGValue(StartRGB) << Shift; vert [0] .Blue = GetBValue(StartRGB) << Shift; vert [0] .Alpha = 0x0000; vert [1] .x = iRect.right; vert [1] .y = iRect.bottom; vert [1] .Red = GetRValue(EndRGB) << Shift; vert [1] .Green = GetGValue(EndRGB) << Shift; vert [1] .Blue = GetBValue(EndRGB) << Shift; vert [1] .Alpha = 0x0000; gRect.UpperLeft = 0; gRect.LowerRight = 1; GradientFill(tdc, vert, 2, &gRect, 1, GRADIENT_FILL_RECT_V); } void BltAlpha(HDC hdcDest, int nXOriginDest, int nYOriginDest, int nWidthDest, int nHeightDest, HDC hdcSrc, int nXOriginSrc, int nYoriginSrc, int nWidthSrc, int nHeightSrc, BYTE alpha) { BLENDFUNCTION bf; bf.BlendOp = AC_SRC_OVER; bf.BlendFlags = 0; bf.SourceConstantAlpha = alpha; bf.AlphaFormat = 0; AlphaBlend(hdcDest, nXOriginDest, nYOriginDest, 
nWidthDest, nHeightDest, hdcSrc, nXOriginSrc, nYoriginSrc, nWidthSrc, nHeightSrc, bf); } void BltAlpha(HDC hdcDest, int nLeft, int nTop, int nWidth, int nHeight, HDC hdcSrc, BYTE alpha) { BltAlpha(hdcDest, nLeft, nTop, nWidth, nHeight, hdcSrc, 0, 0, nWidth, nHeight, alpha); } // ************************************************************************** // Function Name: GetStreamSize // // Purpose: Given an IStream, returns the size of the stream. This is needed // for streams that do not support the Stat method // // Arguments: // IN IStream* pStream - stream to determine size for // OUT ULONG* pulSize - size of stream // // Return Values: // HRESULT - S_OK if success, failure code if not // // Side Effects: // The stream pointer always resets to the beginning // HRESULT GetStreamSize(IStream* pStream, ULONG* pulSize) { HRESULT hr; LARGE_INTEGER li = {0}; ULARGE_INTEGER uliZero = {0}; ULARGE_INTEGER uli; <|fim▁hole|> hr = pStream->Seek(li, STREAM_SEEK_END, &uli); CHR(hr); *pulSize = uli.LowPart; hr = S_OK; Error: if (SUCCEEDED(hr)) { // Move the stream back to the beginning of the file hr = pStream->Seek(li, STREAM_SEEK_SET, &uliZero); } return hr; } // ************************************************************************** // Function Name: ScaleProportional // // Purpose: Scale the width and height to fit the given width and height // but maintain the proportion // // Arguments: // IN UINT uFitToWidth - width of source image // IN UINT uFitToHeight - height of source image // IN/OUT UINT* puWidthToScale - width of image to scale to // IN/OUT UINT* puHeightToScale - height of image to scale to // // Return Values: // HRESULT - S_OK if success, failure code if not // void ScaleProportional(UINT uFitToWidth, UINT uFitToHeight, UINT *puWidthToScale, UINT *puHeightToScale) { HRESULT hr; CBR(puWidthToScale != NULL && puHeightToScale != NULL); // Scale (*puWidthToScale, *puHeightToScale) to fit within (uFitToWidth, uFitToHeight), while // maintaining the aspect 
ratio int nScaledWidth = MulDiv(*puWidthToScale, uFitToHeight, *puHeightToScale); // If we didn't overflow and the scaled width does not exceed bounds if (nScaledWidth >= 0 && nScaledWidth <= (int)uFitToWidth) { *puWidthToScale = nScaledWidth; *puHeightToScale = uFitToHeight; } else { *puHeightToScale = MulDiv(*puHeightToScale, uFitToWidth, *puWidthToScale); // The height *must* be within the bounds [0, uFitToHeight] since we overflowed // while fitting to height ASSERT(*puHeightToScale >= 0 && *puHeightToScale <= uFitToHeight); *puWidthToScale = uFitToWidth; } Error: return; } // ************************************************************************** // Function Name: HBITMAPFromImage // // Purpose: Convert IImage to HBITMAP. If bitmap has transparency, the // background will be filled with the color passed in // // Arguments: // IN IImage* pImage - pointer to the IImage // IN COLORREF crBackColor - color of the background // // Return Values: // HRESULT - S_OK if success, failure code if not // HBITMAP HBITMAPFromImage (IN IImage * pImage, IN COLORREF crBackColor) { HRESULT hr; HBITMAP hbmResult = NULL; ImageInfo ii; HDC hDC = NULL; HBITMAP hbmNew = NULL; void * pv; BITMAPINFO bmi = { 0 }; HBITMAP hbmOld = NULL; RECT rc = { 0 }; CBR(pImage != NULL); // Get image width/height hr = pImage->GetImageInfo(&ii); CHR(hr); // Create HDC hDC = CreateCompatibleDC(NULL); CBR(hDC != NULL); // Create DIB section bmi.bmiHeader.biSize = sizeof(BITMAPINFOHEADER); bmi.bmiHeader.biWidth = ii.Width; bmi.bmiHeader.biHeight = ii.Height; bmi.bmiHeader.biPlanes = 1; bmi.bmiHeader.biBitCount = 24; bmi.bmiHeader.biCompression = BI_RGB; hbmNew = CreateDIBSection(hDC, &bmi, DIB_RGB_COLORS, &pv, NULL, 0); CBR(hbmNew != NULL); // Select DIB into DC hbmOld = (HBITMAP)SelectObject(hDC, hbmNew); rc.right = ii.Width; rc.bottom = ii.Height; // Clear the bitmap using the background color DrawRect(hDC, &rc, crBackColor); // Draw into DC/DIB hr = pImage->Draw(hDC, &rc, NULL); CHR(hr); hbmResult = 
hbmNew; hbmNew = NULL; Error: if (hbmNew) { DeleteObject(hbmNew); } if (hDC) { if (hbmOld) { SelectObject(hDC, hbmOld); } DeleteDC(hDC); } return hbmResult; } // ************************************************************************** // Function Name: GetBitmapFromStream // // Purpose: Convert an IStream to an HBITMAP and return the new dimensions // // Arguments: // IN UINT uFitToWidth - width of source image // IN UINT uFitToHeight - height of source image // OUT UINT* puWidth - width of image to scale to // OUT UINT* puHeight - height of image to scale to // // Return Values: // HRESULT - S_OK if success, failure code if not // HRESULT GetBitmapFromStream(IStream* pStream, HBITMAP* phBitmap, UINT* puWidth, UINT* puHeight) { HRESULT hr; HBITMAP hBitmap = NULL; IImagingFactory* pFactory = NULL; IImage* pImage = NULL; IImage* pThumbnail = NULL; ImageInfo imgInfo = {0}; CBR(pStream != NULL && phBitmap != NULL && puWidth != NULL && puHeight != NULL); // Use a little imaging help hr = CoCreateInstance(CLSID_ImagingFactory, NULL, CLSCTX_INPROC_SERVER, IID_IImagingFactory, (void**) &pFactory); CHR(hr); hr = pFactory->CreateImageFromStream(pStream, &pImage); CHR(hr); hr = pImage->GetImageInfo(&imgInfo); CHR(hr); CBR(imgInfo.Width > 0 && imgInfo.Height > 0); // Scale to the new size ScaleProportional(*puWidth, *puHeight, &imgInfo.Width, &imgInfo.Height); // Get the new image hr = pImage->GetThumbnail(imgInfo.Width, imgInfo.Height, &pThumbnail); CHR(hr); // Convert this to HBITMAP, our target format hBitmap = HBITMAPFromImage(pThumbnail, RGB(255,255,255)); CBR(hBitmap != NULL); *puWidth = imgInfo.Width; *puHeight = imgInfo.Height; *phBitmap = hBitmap; hBitmap = NULL; Error: RELEASE_OBJ(pFactory); RELEASE_OBJ(pImage); RELEASE_OBJ(pThumbnail); if (hBitmap) { DeleteObject((HGDIOBJ)(HBITMAP)(hBitmap)); } return hr; } // http://www.koders.com/cpp/fid743B2B2FCDBFE91584C329A50766C1155709DA1B.aspx /***************************************************************************** * 
Copyright (c) 2000, 2005 IBM Corporation and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * IBM Corporation - initial API and implementation *****************************************************************************/ /* Call IImageEncoder::GetEncodeSink to retrieve an IImageSink interface. The IImageSink interface is implemented as follows: a. Call IImageSink::BeginSink to negotiate the values contained in the ImageInfo structure for encoding the current frame. b. Call IImageSink::SetPalette to pass color palette information about the current image frame to the image sink. c. If you need to pass property data to the image sink, you can optionally call IImageSink::GetPropertyBuffer to obtain a buffer that will contain the property data. d. If GetPropertyBuffer is called above, you must next call IImageSink::PushPropertyItems to transfer the property data to the image sink. The buffer that was allocated by GetPropertyBuffer must be deallocated in the implementation for PushPropertyItems. e. Call IImageSink::PushPixelData or IImageSink::GetPixelDataBuffer to begin the data transfer, depending on how the image data is stored in the source: * If the image source has allocated memory for the image, use PushPixelData. * If the image source has not allocated memory for the image, use GetPixelDataBuffer. For each call to GetPixelDataBuffer, IImageSink::ReleasePixelDataBuffer must also be called. f. Call ImageSink::EndSink to complete the encoding process. g. Call ImageSink::Release to free the IImagesink interface. 
*/ HRESULT SavePNG(HDC hDC, HBITMAP hBitmap, const TCHAR* szPath, IImagingFactory * pFactory) { HRESULT hr; // Set the particular encoder to use TCHAR * pszMimeType = TEXT("image/png"); UINT count; ImageCodecInfo* imageCodecInfo = NULL; hr = pFactory->GetInstalledEncoders(&count, &imageCodecInfo); CHR(hr); CBR(count > 0); CLSID encoderClassId; for (int i = 0; i < (int)count; i++) { if (wcscmp(imageCodecInfo[i].MimeType, pszMimeType) == 0) { encoderClassId = imageCodecInfo[i].Clsid; free(imageCodecInfo); break; } else { continue; } } IImageEncoder* imageEncoder = NULL; hr = pFactory->CreateImageEncoderToFile(&encoderClassId, szPath, &imageEncoder); CHR(hr); IImageSink* imageSink = NULL; hr = imageEncoder->GetEncodeSink(&imageSink); CHR(hr); BITMAP bm; GetObject ((HGDIOBJ)hBitmap, sizeof(BITMAP), &bm); ImageInfo* imageInfo = new ImageInfo(); imageInfo->Height = bm.bmHeight; imageInfo->Width = bm.bmWidth; //imageInfo->RawDataFormat = ImageFormatMemoryBMP;// ???? imageInfo->Flags |= SinkFlagsTopDown | SinkFlagsFullWidth; imageInfo->Xdpi = 192; imageInfo->Ydpi = 192; // Get pixel format from hBitmap switch (bm.bmBitsPixel) { case 1: imageInfo->PixelFormat = PixelFormat1bppIndexed; break; case 4: imageInfo->PixelFormat = PixelFormat4bppIndexed; break; case 8: imageInfo->PixelFormat = PixelFormat8bppIndexed; break; case 16: imageInfo->PixelFormat = PixelFormat16bppARGB1555; imageInfo->Flags |= SinkFlagsHasAlpha; break; case 24: imageInfo->PixelFormat = PixelFormat24bppRGB; break; default: imageInfo->PixelFormat = PixelFormat32bppARGB; imageInfo->Flags |= SinkFlagsHasAlpha; break; } hr = imageSink->BeginSink(imageInfo, NULL); CHR(hr); ColorPalette* palette = (ColorPalette*)malloc(sizeof(ColorPalette)); palette->Count = 0; if (imageInfo->Flags & SinkFlagsHasAlpha) palette->Flags = PALFLAG_HASALPHA; hr = imageSink->SetPalette(palette); CHR(hr); BitmapData* bmData = new BitmapData(); bmData->Height = bm.bmHeight; bmData->Width = bm.bmWidth; bmData->Scan0 = bm.bmBits; 
bmData->PixelFormat = imageInfo->PixelFormat; UINT bitsPerLine = imageInfo->Width * bm.bmBitsPixel; UINT bitAlignment = sizeof(LONG) * 8; // The image buffer is always padded to LONG boundaries UINT bitStride = bitAlignment * (bitsPerLine / bitAlignment); // Add a bit more for the leftover values if ((bitsPerLine % bitAlignment) != 0) bitStride += bitAlignment; bmData->Stride = bitStride / 8; RECT rect; SetRect(&rect, 0, 0, bm.bmWidth, bm.bmHeight); hr = imageSink->PushPixelData(&rect, bmData, TRUE); CHR(hr); hr = imageSink->EndSink(S_OK); CHR(hr); imageSink->Release(); hr = imageEncoder->TerminateEncoder(); CHR(hr); hr = S_OK; Error: RELEASE_OBJ(imageSink); return hr; }<|fim▁end|>
CBR(pStream != NULL && pulSize != NULL);
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>""" Django settings for librarymanagementsystem project. For more information on this file, see https://docs.djangoproject.com/en/1.6/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/1.6/ref/settings/ """ # Build paths inside the project like this: os.path.join(BASE_DIR, ...) import os BASE_DIR = os.path.dirname(os.path.dirname(__file__)) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = '=^%a6&@*aq8$sa$_f_r&b_gczd@sr77hv$xys7k!8f85g6-$e1' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True TEMPLATE_DEBUG = True ALLOWED_HOSTS = ['*'] # Application definition INSTALLED_APPS = ( 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'social.apps.django_app.default', 'django_extensions', 'djangosecure', 'mainapp', ) AUTHENTICATION_BACKENDS = ( 'django.contrib.auth.backends.ModelBackend', 'social.backends.google.GoogleOpenId', ) SECURE_FRAME_DENY = True <|fim▁hole|> 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', 'social.apps.django_app.middleware.SocialAuthExceptionMiddleware', "djangosecure.middleware.SecurityMiddleware" ) TEMPLATE_CONTEXT_PROCESSORS = ( 'django.contrib.auth.context_processors.auth', 'social.apps.django_app.context_processors.backends', 'social.apps.django_app.context_processors.login_redirect', 'django.contrib.messages.context_processors.messages', ) CRISPY_TEMPLATE_PACK = 'bootstrap3' MESSAGE_STORAGE = 'django.contrib.messages.storage.session.SessionStorage' LOGIN_REDIRECT_URL = 
'/admin/' SOCIAL_AUTH_LOGIN_REDIRECT_URL = '/admin/' LOGIN_ERROR_URL = '/login-error/' ROOT_URLCONF = 'librarymanagementsystem.urls' WSGI_APPLICATION = 'librarymanagementsystem.wsgi.application' # Database # https://docs.djangoproject.com/en/1.6/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } } # Internationalization # https://docs.djangoproject.com/en/1.6/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/1.6/howto/static-files/ STATIC_URL = '/static/' STATIC_ROOT = os.path.join(BASE_DIR, 'static') STATIC_DIRS = ( os.path.join(BASE_DIR, 'static'), ) TEMPLATE_DIRS = ( os.path.join(BASE_DIR, 'templates'), ) TEMPLATE_LOADERS = ( 'django.template.loaders.app_directories.Loader', ) SOCIAL_AUTH_GOOGLE_WHITELISTED_DOMAINS = ['gmail.com'] try: from local_settings import * except ImportError: pass<|fim▁end|>
MIDDLEWARE_CLASSES = ( 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware',
<|file_name|>doc.go<|end_file_name|><|fim▁begin|>// Copyright 2014 CoreOS, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 //<|fim▁hole|>// distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Package rkt (main) implements the command line interface to rkt package main<|fim▁end|>
// Unless required by applicable law or agreed to in writing, software
<|file_name|>interfaces.go<|end_file_name|><|fim▁begin|>/* Copyright 2021 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.<|fim▁hole|>*/ package controller import "net/http" // Interface defines the base of a controller managed by a controller manager type Interface interface { // Name returns the canonical name of the controller. Name() string } // Debuggable defines a controller that allows the controller manager // to expose a debugging handler for the controller // // If a controller implements Debuggable, and the returned handler is // not nil, the controller manager can mount the handler during startup. type Debuggable interface { // DebuggingHandler returns a Handler that expose debugging information // for the controller, or nil if a debugging handler is not desired. // // The handler will be accessible at "/debug/controllers/{controllerName}/". DebuggingHandler() http.Handler }<|fim▁end|>
<|file_name|>object-one-type-two-traits.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Testing creating two vtables with the same self type, but different // traits. <|fim▁hole|>#![feature(box_syntax)] use std::any::Any; trait Wrap { fn get(&self) -> int; fn wrap(self: Box<Self>) -> Box<Any+'static>; } impl Wrap for int { fn get(&self) -> int { *self } fn wrap(self: Box<int>) -> Box<Any+'static> { self as Box<Any+'static> } } fn is<T:'static>(x: &Any) -> bool { x.is::<T>() } fn main() { let x = box 22i as Box<Wrap>; println!("x={}", x.get()); let y = x.wrap(); }<|fim▁end|>
#![allow(unknown_features)]
<|file_name|>read.js<|end_file_name|><|fim▁begin|>'use strict' const t = require('tcomb') const fetch = require('node-fetch') const { requestProperties, validateResponse, parseJSON } = require('../utils') const PATH = '/designs' module.exports = function getDesign(id) { t.String(id) return new Promise((resolve, reject) => { const { href, headers } = requestProperties( this.token, this.version, [PATH, id].join('/')<|fim▁hole|> fetch(href, { headers }) .then(validateResponse) .then(parseJSON) .then(resolve) .catch(reject) }) }<|fim▁end|>
)
<|file_name|>test_directory.py<|end_file_name|><|fim▁begin|>import gc import os import hashlib import inspect import shutil import tempfile import yaml import zipfile from juju.errors import CharmError, FileNotFound from juju.charm.errors import InvalidCharmFile from juju.charm.metadata import MetaData from juju.charm.directory import CharmDirectory from juju.charm.bundle import CharmBundle from juju.lib.filehash import compute_file_hash from juju.charm import tests from juju.charm.tests.test_repository import RepositoryTestBase sample_directory = os.path.join( os.path.dirname( inspect.getabsfile(tests)), "repository", "series", "dummy") class DirectoryTest(RepositoryTestBase): def setUp(self): super(DirectoryTest, self).setUp() # Ensure the empty/ directory exists under the dummy sample<|fim▁hole|> # charm. Depending on how the source code is exported, # empty directories may be ignored. empty_dir = os.path.join(sample_directory, "empty") if not os.path.isdir(empty_dir): os.mkdir(empty_dir) def copy_charm(self): dir_ = os.path.join(self.makeDir(), "sample") shutil.copytree(sample_directory, dir_) return dir_ def delete_revision(self, dir_): os.remove(os.path.join(dir_, "revision")) def set_metadata_revision(self, dir_, revision): metadata_path = os.path.join(dir_, "metadata.yaml") with open(metadata_path) as f: data = yaml.load(f.read()) data["revision"] = 999 with open(metadata_path, "w") as f: f.write(yaml.dump(data)) def test_metadata_is_required(self): directory = self.makeDir() self.assertRaises(FileNotFound, CharmDirectory, directory) def test_no_revision(self): dir_ = self.copy_charm() self.delete_revision(dir_) charm = CharmDirectory(dir_) self.assertEquals(charm.get_revision(), 0) with open(os.path.join(dir_, "revision")) as f: self.assertEquals(f.read(), "0\n") def test_nonsense_revision(self): dir_ = self.copy_charm() with open(os.path.join(dir_, "revision"), "w") as f: f.write("shifty look") err = self.assertRaises(CharmError, CharmDirectory, dir_) 
self.assertEquals( str(err), "Error processing %r: invalid charm revision 'shifty look'" % dir_) def test_revision_in_metadata(self): dir_ = self.copy_charm() self.delete_revision(dir_) self.set_metadata_revision(dir_, 999) log = self.capture_logging("juju.charm") charm = CharmDirectory(dir_) self.assertEquals(charm.get_revision(), 999) self.assertIn( "revision field is obsolete. Move it to the 'revision' file.", log.getvalue()) def test_competing_revisions(self): dir_ = self.copy_charm() self.set_metadata_revision(dir_, 999) log = self.capture_logging("juju.charm") charm = CharmDirectory(dir_) self.assertEquals(charm.get_revision(), 1) self.assertIn( "revision field is obsolete. Move it to the 'revision' file.", log.getvalue()) def test_set_revision(self): dir_ = self.copy_charm() charm = CharmDirectory(dir_) charm.set_revision(123) self.assertEquals(charm.get_revision(), 123) with open(os.path.join(dir_, "revision")) as f: self.assertEquals(f.read(), "123\n") def test_info(self): directory = CharmDirectory(sample_directory) self.assertTrue(directory.metadata is not None) self.assertTrue(isinstance(directory.metadata, MetaData)) self.assertEquals(directory.metadata.name, "dummy") self.assertEquals(directory.type, "dir") def test_make_archive(self): # make archive from sample directory directory = CharmDirectory(sample_directory) f = tempfile.NamedTemporaryFile(suffix=".charm") directory.make_archive(f.name) # open archive in .zip-format and assert integrity from zipfile import ZipFile zf = ZipFile(f.name) self.assertEqual(zf.testzip(), None) # assert included included = [info.filename for info in zf.infolist()] self.assertEqual( set(included), set(("metadata.yaml", "empty/", "src/", "src/hello.c", "config.yaml", "hooks/", "hooks/install", "revision"))) def test_as_bundle(self): directory = CharmDirectory(self.sample_dir1) charm_bundle = directory.as_bundle() self.assertEquals(type(charm_bundle), CharmBundle) self.assertEquals(charm_bundle.metadata.name, "sample") 
self.assertIn("sample-1.charm", charm_bundle.path) total_compressed = 0 total_uncompressed = 0 zip_file = zipfile.ZipFile(charm_bundle.path) for n in zip_file.namelist(): info = zip_file.getinfo(n) total_compressed += info.compress_size total_uncompressed += info.file_size self.assertTrue(total_compressed < total_uncompressed) def test_as_bundle_file_lifetime(self): """ The temporary bundle file created should have a life time equivalent to that of the directory object itself. """ directory = CharmDirectory(self.sample_dir1) charm_bundle = directory.as_bundle() gc.collect() self.assertTrue(os.path.isfile(charm_bundle.path)) del directory gc.collect() self.assertFalse(os.path.isfile(charm_bundle.path)) def test_compute_sha256(self): """ Computing the sha256 of a directory will use the bundled charm, since the hash of the file itself is needed. """ directory = CharmDirectory(self.sample_dir1) sha256 = directory.compute_sha256() charm_bundle = directory.as_bundle() self.assertEquals(type(charm_bundle), CharmBundle) self.assertEquals(compute_file_hash(hashlib.sha256, charm_bundle.path), sha256) def test_as_bundle_with_relative_path(self): """ Ensure that as_bundle works correctly with relative paths. """ current_dir = os.getcwd() os.chdir(self.sample_dir2) self.addCleanup(os.chdir, current_dir) charm_dir = "../%s" % os.path.basename(self.sample_dir1) directory = CharmDirectory(charm_dir) charm_bundle = directory.as_bundle() self.assertEquals(type(charm_bundle), CharmBundle) self.assertEquals(charm_bundle.metadata.name, "sample") def test_charm_base_inheritance(self): """ get_sha256() should be implemented in the base class, and should use compute_sha256 to calculate the digest. 
""" directory = CharmDirectory(self.sample_dir1) bundle = directory.as_bundle() digest = compute_file_hash(hashlib.sha256, bundle.path) self.assertEquals(digest, directory.get_sha256()) def test_as_directory(self): directory = CharmDirectory(self.sample_dir1) self.assertIs(directory.as_directory(), directory) def test_config(self): """Validate that ConfigOptions are available on the charm""" from juju.charm.tests.test_config import sample_yaml_data directory = CharmDirectory(sample_directory) self.assertEquals(directory.config.get_serialization_data(), sample_yaml_data) def test_file_type(self): charm_dir = self.copy_charm() os.mkfifo(os.path.join(charm_dir, "foobar")) directory = CharmDirectory(charm_dir) e = self.assertRaises(InvalidCharmFile, directory.as_bundle) self.assertIn("foobar' Invalid file type for a charm", str(e)) def test_internal_symlink(self): charm_path = self.copy_charm() os.symlink("/etc/lsb-release", os.path.join(charm_path, "foobar")) directory = CharmDirectory(charm_path) e = self.assertRaises(InvalidCharmFile, directory.as_bundle) self.assertIn("foobar' Absolute links are invalid", str(e)) def test_extract_symlink(self): charm_path = self.copy_charm() os.symlink("/etc/lsb-release", os.path.join(charm_path, "foobar")) directory = CharmDirectory(charm_path) e = self.assertRaises(InvalidCharmFile, directory.as_bundle) self.assertIn("foobar' Absolute links are invalid", str(e))<|fim▁end|>
<|file_name|>SearchProvider.ts<|end_file_name|><|fim▁begin|>/// <reference path="../_References.ts" /> /** Providers @namespace App.Providers */ module App.Providers { export class LazyLoadHandle { private lazyLoadCount: number = 0; private successCallback: () => void; constructor(successCallback: () => void ) { this.successCallback = successCallback; } public isLoading(): boolean { return this.lazyLoadCount > 0; } public lazyLoadStarted(): void { this.lazyLoadCount++; } public lazyLoadComplete(): void { this.lazyLoadCount--; if (this.lazyLoadCount == 0) { this.successCallback(); } } } export interface SearchSuccessCallback { (searchContext: SearchHandle): void; } export interface SearchErrorCallback { (errorMessage: string, searchContext: SearchHandle): void; } export class SearchHandle { public haltet: boolean = false; public searchCount: number = 0; public searchStatus: { [sourceName: string]: boolean; } = {}; } export class PoiChooser { public poi: App.Models.PointOfInterest = null; public choose: () => void; } export class SearchProvider { public static SourceDigitalArkivet: string = "Digitalarkivet"; public static SourceNorvegiana: string = "Norvegiana"; public static SourceWikipedia: string = "Wikipedia"; private pageNumber: number; private pageSize: number; private searchNorvegiana: boolean = true; private searchDigitalArkivet: boolean = true; private searchWikipedia: boolean = true; private successCallback: { (searchResult: App.Models.SearchResult): void; } = null; private searchProviderNorvegiana: App.SearchProviders.DataSourceNorvegiana; private searchProviderDigitalarkivet: App.SearchProviders.DataSourceDigitalarkivetProperty; private searchProviderWikipedia: App.SearchProviders.DataSourceWikiLocation; private currentSearch: SearchHandle = null; private resultCount: number = -1; private results: Array<App.Models.PointOfInterest> = new Array<App.Models.PointOfInterest>(); /** SearchProvider @class App.Providers.SearchProvider @classdesc */ 
constructor(searchCriteria: App.Models.SearchCriteria, successCallback: { (searchResult: App.Models.SearchResult): void; }) { this.pageNumber = 1; this.pageSize = searchCriteria.rows(); this.successCallback = successCallback; var norvegianaQueryFields: string = null; if (searchCriteria.category() && searchCriteria.category().length > 0 && searchCriteria.category() != "*") { var category = searchCriteria.category(); if (category == config.digitalArkivetPropertyCategory) { this.searchNorvegiana = false; this.searchWikipedia = false; } else if (category == config.wikiPropertyCategory) { this.searchNorvegiana = false; this.searchDigitalArkivet = false; } else { this.searchWikipedia = false; this.searchDigitalArkivet = false; } } if (searchCriteria.mediaType() && searchCriteria.mediaType().length > 0 && searchCriteria.mediaType() != "*") { var mediaType = searchCriteria.mediaType(); if (mediaType != "TEXT") { this.searchDigitalArkivet = false; this.searchWikipedia = false; } } if (searchCriteria.genre() && searchCriteria.genre().length > 0 && searchCriteria.genre() != "*") { var genre = searchCriteria.genre(); if (genre == "wikipedia") { this.searchNorvegiana = false; this.searchDigitalArkivet = false; } else if (genre == "digitaltfortalt") { this.searchWikipedia = false; this.searchDigitalArkivet = false; norvegianaQueryFields = 'abm_contentProvider_text:"Digitalt fortalt"' + ' OR abm_contentProvider_text:Industrimuseum'; } else if (genre == "fagdata") { this.searchWikipedia = false; norvegianaQueryFields = 'abm_contentProvider_text:Artsdatabanken' + ' OR abm_contentProvider_text:DigitaltMuseum' + ' OR abm_contentProvider_text:Kulturminnesøk' + ' OR abm_contentProvider_text:MUSIT' + ' OR abm_contentProvider_text:Naturbase' + ' OR abm_contentProvider_text:"Sentralt stedsnavnregister"'; } } if (this.searchNorvegiana) { this.searchProviderNorvegiana = new App.SearchProviders.DataSourceNorvegiana(searchCriteria, norvegianaQueryFields, (searchHandle: SearchHandle) => 
this.searchWithSuccess(SearchProvider.SourceNorvegiana, searchHandle), (errorMessage: string, searchHandle: SearchHandle) => this.searchWithError(SearchProvider.SourceNorvegiana ,errorMessage, searchHandle)); } if (this.searchDigitalArkivet) { this.searchProviderDigitalarkivet = new App.SearchProviders.DataSourceDigitalarkivetProperty(searchCriteria, (searchHandle: SearchHandle) => this.searchWithSuccess(SearchProvider.SourceDigitalArkivet, searchHandle), (errorMessage: string, searchHandle: SearchHandle) => this.searchWithError(SearchProvider.SourceDigitalArkivet, errorMessage, searchHandle)); } if (this.searchWikipedia) { this.searchProviderWikipedia = new App.SearchProviders.DataSourceWikiLocation(searchCriteria, <|fim▁hole|> } public haltSearch() { if (this.currentSearch != null) { this.currentSearch.haltet = true; } } public search(pageNumber: number) { var searchHandle = new SearchHandle(); this.pageNumber = pageNumber; this.currentSearch = searchHandle; log.debug("SearchProvider", "Searching..."); if (this.searchNorvegiana) { searchHandle.searchStatus[SearchProvider.SourceNorvegiana] = true; if (this.searchProviderNorvegiana.search(searchHandle)) { searchHandle.searchCount++; } } if (this.searchDigitalArkivet) { searchHandle.searchStatus[SearchProvider.SourceDigitalArkivet] = true; if (this.searchProviderDigitalarkivet.search(searchHandle)) { searchHandle.searchCount++; } } if (this.searchWikipedia) { searchHandle.searchStatus[SearchProvider.SourceWikipedia] = true; if (this.searchProviderWikipedia.search(searchHandle)) { searchHandle.searchCount++; } } if (searchHandle.searchCount > 0) { setTimeout(() => this.checkTimeout(searchHandle), config.searchTimeoutSeconds * 1000); } else { this.prepareSearchResult(); } } private prepareSearchResult() { this.haltSearch(); if (this.resultCount == -1) { this.resultCount = 0; if (this.searchNorvegiana) this.resultCount += this.searchProviderNorvegiana.getResultCount(); if (this.searchDigitalArkivet) this.resultCount += 
this.searchProviderDigitalarkivet.getResultCount(); if (this.searchWikipedia) this.resultCount += this.searchProviderWikipedia.getResultCount(); } var start = this.pageSize * (this.pageNumber - 1); var end = Math.min(start + this.pageSize, this.resultCount); while (this.results.length < end) { var poiChooser = new PoiChooser(); if (this.searchNorvegiana) this.searchProviderNorvegiana.getNextPoi(poiChooser); if (this.searchDigitalArkivet) this.searchProviderDigitalarkivet.getNextPoi(poiChooser); if (this.searchWikipedia) this.searchProviderWikipedia.getNextPoi(poiChooser); poiChooser.choose(); this.results.push(poiChooser.poi); } var pageItems = this.results.slice(start, end); var searchResult = new App.Models.SearchResult(); searchResult.items(pageItems); searchResult.numFound(this.resultCount); searchResult.numPages(Math.ceil(this.resultCount / this.pageSize)); this.ensureLoadedResultSetAndSignalSuccess(searchResult); } private ensureLoadedResultSetAndSignalSuccess(searchResult: App.Models.SearchResult) { var items = searchResult.items(); var lazyLoadHandle = new LazyLoadHandle(() => this.successCallback(searchResult)); for (var index in items) { items[index].ensureLoaded(lazyLoadHandle); } if (!lazyLoadHandle.isLoading()) { this.successCallback(searchResult); } } private searchWithSuccess(searchProviderName: string, searchHandle: SearchHandle): void { log.debug("SearchProvider", "successCallback - " + searchProviderName); if (searchHandle == null || searchHandle.haltet) { return; } searchHandle.searchStatus[searchProviderName] = false; searchHandle.searchCount--; if (searchHandle.searchCount == 0) { this.prepareSearchResult(); } } private searchWithError(searchProviderName: string, errorMessage: string, searchHandle: SearchHandle): void { log.error("SearchProvider", "errorCallback - " + searchProviderName + " - " + errorMessage); if (searchHandle == null || searchHandle.haltet) { return; } searchHandle.searchCount--; if (!networkHelper.isConnected()) { 
networkHelper.displayNetworkError(); return; } if (errorMessage != "Not Found") { userPopupController.sendError(tr.translate("Error searching"), tr.translate("Error searching") + " (" + searchProviderName + ")"); } if (searchHandle.searchCount == 0) { this.prepareSearchResult(); } } private checkTimeout(searchHandle: SearchHandle): void { if (searchHandle == null || searchHandle.haltet) { return; } var errorSourceString = ""; for (var source in searchHandle.searchStatus) { if (searchHandle.searchStatus[source]) { errorSourceString += (errorSourceString == "" ? "" : ", "); errorSourceString += source; } } log.error("Timeout", "Timeout while searching all datasources: Halting search."); userPopupController.sendError(tr.translate("Search"), tr.translate("Some sources did not return content in time", [errorSourceString])); this.prepareSearchResult(); } } }<|fim▁end|>
(searchHandle: SearchHandle) => this.searchWithSuccess(SearchProvider.SourceWikipedia, searchHandle), (errorMessage: string, searchHandle: SearchHandle) => this.searchWithError(SearchProvider.SourceWikipedia, errorMessage, searchHandle)); }
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#[cfg(test)] mod tests { #[test] fn it_works() { assert_eq!(2 + 2, 4) }<|fim▁hole|> #[test] fn another() { panic!("Make this eat fail"); } // 还可以通过 should_panic 注解 #[test] #[should_panic(expected = "Panic")] fn test_panic() { panic!("Panic") } // 还可以通过 Result<T, E> 用于测试 #[test] fn test_result() -> Result<(), String> { if 2 + 2 == 4 { Ok(()) } else { Err(String::from("two plus two does equal four")) } } }<|fim▁end|>
<|file_name|>drude.py<|end_file_name|><|fim▁begin|># Copyright (C) 2010-2018 The ESPResSo project # # This file is part of ESPResSo. # # ESPResSo is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # ESPResSo is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import numpy as np import unittest as ut import espressomd import espressomd.electrostatics import espressomd.interactions from espressomd import drude_helpers class Drude(ut.TestCase): @ut.skipIf(not espressomd.has_features("P3M", "THOLE", "LANGEVIN_PER_PARTICLE"), "Test needs P3M, THOLE and LANGEVIN_PER_PARTICLE") def test(self): """ Sets up a BMIM PF6 pair separated in y-direction with fixed cores. Adds the Drude particles and related features (intramolecular exclusion bonds, Thole screening) via helper functions. Calculates the induced dipole moment and the diagonals of the polarization tensor and compares against reference results, which where reproduced with LAMMPS. 
""" box_l = 50 system = espressomd.System(box_l=[box_l, box_l, box_l]) system.seed = system.cell_system.get_state()['n_nodes'] * [12] np.random.seed(12) #Reference Results, reproduced with LAMMPS #Dipole Moments ref_mu0_pf6 = [0.00177594, 0.16480996, -0.01605161] ref_mu0_c1 = [0.00076652, 0.15238767, 0.00135291] ref_mu0_c2 = [-0.00020222, 0.11084197, 0.00135842] ref_mu0_c3 = [0.00059177, 0.23949626, -0.05238468] ref_mu0_bmim = [0.00115606, 0.5027259, -0.04967335] #Polarisation Tensor diagonals ref_pol_pf6 = [ 4.5535698335873445, 4.7558611769477697, 4.5546580162000554] ref_pol_bmim = [ 13.126868394164262, 14.392582501485913, 16.824150151623762] #TIMESTEP fs_to_md_time = 1.0e-2 time_step_fs = 0.5 time_step_ns = time_step_fs * 1e-6 dt = time_step_fs * fs_to_md_time #COM TEMPERATURE #Global thermostat temperature, for com and langevin. #LangevinPerParticle temperature is set to 0 for drude and core to properly account for com forces. # Like that, langevin thermostat can still be used for non-drude # particles SI_temperature = 300.0 gamma_com = 1.0 kb_kjmol = 0.0083145 temperature_com = SI_temperature * kb_kjmol # COULOMB PREFACTOR (elementary charge)^2 / (4*pi*epsilon_0) in # Angstrom * kJ/mol coulomb_prefactor = 1.67101e5 * kb_kjmol #POLARIZATION #polarization = 1.0 #In (Angstrom^3)_CGS # alpha_SI = 4*Pi*eps_0 alpha_CGS; # 4*Pi*epsilon_0*Angstrom^3/((elementary charge)^2*Angstrom^2*N_A/kJ) conv_pol_CGS_SI = 7.197586e-4 #alpha = conv_pol_CGS_SI*args.polarization #DRUDE/TOTAL MASS #lamoureux03 used values 0.1-0.8 g/mol for drude mass mass_drude = 0.8 mass_tot = 100.0 mass_core = mass_tot - mass_drude mass_red_drude = mass_drude * mass_core / mass_tot #SPRING CONSTANT DRUDE #Used 1000kcal/mol/A^2 from lamoureux03a table 1 p 3031 k_drude = 4184.0 # in kJ/mol/A^2 T_spring = 2.0 * np.pi * np.sqrt(mass_drude / k_drude)<|fim▁hole|> #Period of free oscillation: T_spring = 2Pi/w; w = sqrt(k_d/m_d) #TEMP DRUDE # Used T* = 1K from lamoureux03a p 3031 (2) 'Cold drude oscillators # 
regime' SI_temperature_drude = 1.0 temperature_drude = SI_temperature_drude * kb_kjmol #GAMMA DRUDE #Thermostat relaxation time should be similar to T_spring gamma_drude = mass_red_drude / T_spring system.cell_system.skin = 0.4 system.time_step = dt #Forcefield types = {"PF6": 0, "BMIM_C1": 1, "BMIM_C2": 2, "BMIM_C3": 3, "BMIM_COM": 4, "PF6_D": 5, "BMIM_C1_D": 6, "BMIM_C2_D": 7, "BMIM_C3_D": 8} charges = {"PF6": -0.78, "BMIM_C1": 0.4374, "BMIM_C2": 0.1578, "BMIM_C3": 0.1848, "BMIM_COM": 0} polarizations = {"PF6": 4.653, "BMIM_C1": 5.693, "BMIM_C2": 2.103, "BMIM_C3": 7.409} masses = {"PF6": 144.96, "BMIM_C1": 67.07, "BMIM_C2": 15.04, "BMIM_C3": 57.12, "BMIM_COM": 0} masses["BMIM_COM"] = masses["BMIM_C1"] + \ masses["BMIM_C2"] + masses["BMIM_C3"] box_center = 0.5 * np.array(3 * [box_l]) system.min_global_cut = 3.5 #Place Particles dmol = 5.0 #Test Anion pos_pf6 = box_center + np.array([0, dmol, 0]) system.part.add(id=0, type=types["PF6"], pos=pos_pf6, q=charges[ "PF6"], mass=masses["PF6"], fix=[1, 1, 1]) pos_com = box_center - np.array([0, dmol, 0]) system.part.add(id=2, type=types["BMIM_C1"], pos=pos_com + [ 0, -0.527, 1.365], q=charges["BMIM_C1"], mass=masses["BMIM_C1"], fix=[1, 1, 1]) system.part.add(id=4, type=types["BMIM_C2"], pos=pos_com + [ 0, 1.641, 2.987], q=charges["BMIM_C2"], mass=masses["BMIM_C2"], fix=[1, 1, 1]) system.part.add(id=6, type=types["BMIM_C3"], pos=pos_com + [ 0, 0.187, -2.389], q=charges["BMIM_C3"], mass=masses["BMIM_C3"], fix=[1, 1, 1]) system.thermostat.set_langevin(kT=temperature_com, gamma=gamma_com) p3m = espressomd.electrostatics.P3M( prefactor=coulomb_prefactor, accuracy=1e-4, mesh=[18, 18, 18], cao=5) system.actors.add(p3m) #Drude related Bonds thermalized_dist_bond = espressomd.interactions.ThermalizedBond( temp_com=temperature_com, gamma_com=gamma_com, temp_distance=temperature_drude, gamma_distance=gamma_drude, r_cut=1.0) harmonic_bond = espressomd.interactions.HarmonicBond( k=k_drude, r_0=0.0, r_cut=1.0) 
system.bonded_inter.add(thermalized_dist_bond) system.bonded_inter.add(harmonic_bond) drude_helpers.add_drude_particle_to_core(system, harmonic_bond, thermalized_dist_bond, system.part[ 0], 1, types["PF6_D"], polarizations["PF6"], mass_drude, coulomb_prefactor, 2.0) drude_helpers.add_drude_particle_to_core(system, harmonic_bond, thermalized_dist_bond, system.part[ 2], 3, types["BMIM_C1_D"], polarizations["BMIM_C1"], mass_drude, coulomb_prefactor, 2.0) drude_helpers.add_drude_particle_to_core(system, harmonic_bond, thermalized_dist_bond, system.part[ 4], 5, types["BMIM_C2_D"], polarizations["BMIM_C2"], mass_drude, coulomb_prefactor, 2.0) drude_helpers.add_drude_particle_to_core(system, harmonic_bond, thermalized_dist_bond, system.part[ 6], 7, types["BMIM_C3_D"], polarizations["BMIM_C3"], mass_drude, coulomb_prefactor, 2.0) #Setup and add Drude-Core SR exclusion bonds drude_helpers.setup_and_add_drude_exclusion_bonds(system) #Setup intramol SR exclusion bonds once drude_helpers.setup_intramol_exclusion_bonds( system, [6, 7, 8], [1, 2, 3], [charges["BMIM_C1"], charges["BMIM_C2"], charges["BMIM_C3"]]) #Add bonds per molecule drude_helpers.add_intramol_exclusion_bonds( system, [3, 5, 7], [2, 4, 6]) #Thole drude_helpers.add_all_thole(system) def dipole_moment(id_core, id_drude): pc = system.part[id_core] pd = system.part[id_drude] v = pd.pos - pc.pos return pd.q * v def measure_dipole_moments(): dm_pf6 = [] dm_C1 = [] dm_C2 = [] dm_C3 = [] system.integrator.run(115) for i in range(100): system.integrator.run(1) dm_pf6.append(dipole_moment(0, 1)) dm_C1.append(dipole_moment(2, 3)) dm_C2.append(dipole_moment(4, 5)) dm_C3.append(dipole_moment(6, 7)) dm_pf6_m = np.mean(dm_pf6, axis=0) dm_C1_m = np.mean(dm_C1, axis=0) dm_C2_m = np.mean(dm_C2, axis=0) dm_C3_m = np.mean(dm_C3, axis=0) dm_sum_bmim = dm_C1_m + dm_C2_m + dm_C3_m res = dm_pf6_m, dm_C1_m, dm_C2_m, dm_C3_m, dm_sum_bmim return res def setElectricField(E): E = np.array(E) for p in system.part: p.ext_force = p.q * E def 
calc_pol(mu0, muE, E): pol = (muE - mu0) / E / conv_pol_CGS_SI return pol def measure_pol(Es, dim): E = [0.0, 0.0, 0.0] E[dim] = Es setElectricField(E) mux_pf6, mux_c1, mux_c2, mux_c3, mux_bmim = measure_dipole_moments( ) return calc_pol(mu0_pf6[dim], mux_pf6[dim], Es), calc_pol(mu0_bmim[dim], mux_bmim[dim], Es) mu0_pf6, mu0_c1, mu0_c2, mu0_c3, mu0_bmim = measure_dipole_moments() eA_to_Debye = 4.8032047 atol = 1e-2 rtol = 1e-2 np.testing.assert_allclose( ref_mu0_pf6, eA_to_Debye * mu0_pf6, atol=atol, rtol=rtol) np.testing.assert_allclose( ref_mu0_c1, eA_to_Debye * mu0_c1, atol=atol, rtol=rtol) np.testing.assert_allclose( ref_mu0_c2, eA_to_Debye * mu0_c2, atol=atol, rtol=rtol) np.testing.assert_allclose( ref_mu0_c3, eA_to_Debye * mu0_c3, atol=atol, rtol=rtol) np.testing.assert_allclose( ref_mu0_bmim, eA_to_Debye * mu0_bmim, atol=atol, rtol=rtol) pol_pf6 = [] pol_bmim = [] Efield = 96.48536 # = 1 V/A in kJ / (Avogadro Number) / Angstrom / elementary charge res = measure_pol(Efield, 0) pol_pf6.append(res[0]) pol_bmim.append(res[1]) res = measure_pol(Efield, 1) pol_pf6.append(res[0]) pol_bmim.append(res[1]) res = measure_pol(Efield, 2) pol_pf6.append(res[0]) pol_bmim.append(res[1]) np.testing.assert_allclose( ref_pol_pf6, pol_pf6, atol=atol, rtol=rtol) np.testing.assert_allclose( ref_pol_bmim, pol_bmim, atol=atol, rtol=rtol) if __name__ == "__main__": ut.main()<|fim▁end|>
#T_spring_fs = T_spring/fs_to_md_time
<|file_name|>Heap.java<|end_file_name|><|fim▁begin|>public class Heap { public static <E extends Comparable<E>> void sort(E[] array) { constructHeap(array); sortHeap(array); String debug = ""; } private static <E extends Comparable<E>> void sortHeap(E[] array) { for (int i = array.length - 1; i >= 1; i--) { swap(array, 0, i); heapifyDown(array, 0, i); } } private static <E extends Comparable<E>> void constructHeap(E[] array) { for (int i = array.length / 2; i >= 0; i--) { heapifyDown(array, i, array.length); } } private static <E extends Comparable<E>> void heapifyDown(E[] array, int index, int limit) { while (index < array.length / 2) { int childIndex = (2 * index) + 1; if(childIndex >= limit){ break; } if (childIndex + 1 < limit && array[childIndex].compareTo(array[childIndex + 1]) < 0) { childIndex += 1; } int compare = array[index].compareTo(array[childIndex]); if (compare > 0) { break; } <|fim▁hole|> swap(array, index, childIndex); index = childIndex; } } private static <E extends Comparable<E>> void swap(E[] array, int index, int childIndex) { E element = array[index]; array[index] = array[childIndex]; array[childIndex] = element; } }<|fim▁end|>
<|file_name|>data_source_sakuracloud_packet_filter_test.go<|end_file_name|><|fim▁begin|>// Copyright 2016-2022 terraform-provider-sakuracloud authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package sakuracloud import ( "testing" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource" ) func TestAccSakuraCloudDataSourcePacketFilter_basic(t *testing.T) { resourceName := "data.sakuracloud_packet_filter.foobar" rand := randomName() resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, ProviderFactories: testAccProviderFactories, Steps: []resource.TestStep{ { Config: buildConfigWithArgs(testAccSakuraCloudDataSourcePacketFilter_basic, rand), Check: resource.ComposeTestCheckFunc( testCheckSakuraCloudDataSourceExists(resourceName), resource.TestCheckResourceAttr(resourceName, "name", rand), resource.TestCheckResourceAttr(resourceName, "description", "description"), resource.TestCheckResourceAttr(resourceName, "expression.#", "2"), resource.TestCheckResourceAttr(resourceName, "expression.0.protocol", "tcp"), resource.TestCheckResourceAttr(resourceName, "expression.0.source_network", "0.0.0.0/0"), resource.TestCheckResourceAttr(resourceName, "expression.0.source_port", "0-65535"), resource.TestCheckResourceAttr(resourceName, "expression.0.destination_port", "80"), resource.TestCheckResourceAttr(resourceName, "expression.0.allow", "true"), ), }, }, }) } var testAccSakuraCloudDataSourcePacketFilter_basic = ` resource 
"sakuracloud_packet_filter" "foobar" { name = "{{ .arg0 }}" description = "description" expression { protocol = "tcp"<|fim▁hole|> } expression { protocol = "udp" source_network = "0.0.0.0/0" source_port = "0-65535" destination_port = "80" allow = true } } data "sakuracloud_packet_filter" "foobar" { filter { names = [sakuracloud_packet_filter.foobar.name] } }`<|fim▁end|>
source_network = "0.0.0.0/0" source_port = "0-65535" destination_port = "80" allow = true
<|file_name|>yid_test.go<|end_file_name|><|fim▁begin|>package yid /* import "testing" // S = eps | S func rec_alt1() Grammar { s := Alt{ Eps, Eps } s.Right = s return s } // S = eps . S func rec_cat1() Grammar { s := Cat{ Eps, Eps } s.Second = s return s } // S = S . eps func rec_cat2() Grammar { s := &Cat{ &Eps{}, &Eps{} } s.First = s return s } // // Nullable // var nullable_tests = []struct { grammar Grammar nullable bool }{ { Empty, false }, { Eps, true }, { Cat(func() Grammar { return Empty }, func() Grammar { return Eps }), false }, { Cat(func() Grammar { return Eps }, func() { return Empty }), false }, { Cat(func() Grammar { return Eps}, func() { return Eps }}, true }, { rec_alt1(), true }, { rec_cat1(), false }, { rec_cat2(), false }, } func TestNullable(t *testing.T) { for idx, test_case := range nullable_tests { if (Nullable(test_case.grammar) != test_case.nullable) { t.Errorf("Nullable test at index %d failed test (should be %b)", idx, test_case.nullable) } } } // // Compact // var compact_tests = []struct { grammar Grammar compacted Grammar }{ { &Empty{}, TheEmpty }, { &Eps{}, TheEps }, { &Lit{ "hello" }, &Lit{ "hello" } }, { &Cat{ &Empty{}, &Empty{} }, TheEmpty }, { &Cat{ &Eps{}, &Empty{} }, TheEmpty },<|fim▁hole|> { &Cat{ &Lit{ "hello" }, &Eps{} }, &Lit{ "hello" } }, { &Alt{ &Empty{}, &Empty{} }, TheEmpty }, { &Alt{ &Empty{}, &Eps{} }, TheEps }, { &Alt{ &Eps{}, &Empty{} }, TheEps }, { &Alt{ &Eps{}, &Eps{} }, TheEps }, { &Alt{ &Empty{}, &Lit{ "foo" } }, &Lit{ "foo" } }, { &Alt{ &Lit{ "foo" }, &Empty{} }, &Lit{ "foo" } }, } func TestCompact(t *testing.T) { for idx, test_case := range compact_tests { if !Eq(Compact(test_case.grammar), test_case.compacted) { t.Errorf("Compact test at index %d failed", idx) } } } // // Deriv // var deriv_tests = []struct { grammar Grammar next string deriv Grammar }{ { &Empty{}, "a", &Empty{} }, { &Eps{}, "b", &Empty{} }, { &Lit{"x"}, "x", &Eps{} }, { &Lit{"x"}, "y", &Empty{} }, { &Cat{ &Eps{}, &Eps{} }, "foo", &Empty{} }, 
{ &Cat{ &Lit{ "foo" }, &Eps{} }, "foo", &Eps{} }, { &Cat{ &Lit{ "foo" }, &Lit{ "baz" } }, "foo", &Lit{ "baz" } }, } func TestDeriv(t *testing.T) { for idx, test_case := range deriv_tests { if !Eq(Compact(Deriv(test_case.next, test_case.grammar)), test_case.deriv) { t.Errorf("Deriv test at index %d failed test", idx) } } } // // TODO: test equiv-acceptance on many strings of a grammar and its compaction // */<|fim▁end|>
{ &Cat{ &Empty{}, &Eps{} }, TheEmpty }, { &Cat{ &Eps{}, &Eps{} }, TheEps }, { &Cat{ &Eps{}, &Lit{ "hello" } }, &Lit{ "hello" } },
<|file_name|>known_bug_excentric_convex.rs<|end_file_name|><|fim▁begin|>/*! * # Expected behaviour: * Same as the box_vee3d demo. * * It seems to behave as expected if the excentricity is not too big (tested with 10 and 100). * * # Symptoms: * Some object just fall through the ground, missing any collison. Then, after a while they seem * to notice that they are deep into the plane, and "jump" due to penetration resolution. * Thus, some collision are missed. * Identically, some boxes just dont collide to each other some times. * * # Cause: * Not sure, but this seems to be an accuracy limitation of the contact manifold generators * (OneShotContactManifoldGenerator and IncrementalContactManifoldGenerator). The repetitive * transformations of the saved contact might invalidate them. * * However, this might be a real bug for some other non-accuracy-related reasons. For example, when * a box is deep on the plane without contact why does the one-shot contact manifold generator * fails? Something wrong with the perturbation? * * This might be (but it is very unlikely) a problem with the DBVT that might become invalid. * Though this seems very unlikely as the AABBs seem to be fine and the plane has an infinite aabb * anyway. Thurthermore, the ray-cast (which uses the dbvt…) works fine, even for "jumpy" objects. * * * # Solution: * * * # Limitations of the solution: * */ extern crate nalgebra as na; extern crate ncollide; extern crate nphysics; extern crate nphysics_testbed3d; use na::{Pnt3, Vec3, Translation}; use ncollide::shape::{Plane, Convex}; use ncollide::procedural; use nphysics::world::World; use nphysics::object::RigidBody; use nphysics_testbed3d::Testbed; <|fim▁hole|> /* * World */ let mut world = World::new(); world.set_gravity(Vec3::new(0.0, -9.81, 0.0)); /* * Plane */ let geom = Plane::new(Vec3::new(0.0, 1.0, 0.0)); world.add_body(RigidBody::new_static(geom, 0.3, 0.6)); /* * Create the convex geometries. 
*/ let num = 8; let shift = 2.0; let centerx = shift * (num as f32) / 2.0; let centery = shift / 2.0; let centerz = shift * (num as f32) / 2.0; for i in 0usize .. num { for j in 0usize .. num { for k in 0usize .. num { let excentricity = 5000.0; let x = i as f32 * shift - centerx - excentricity; let y = j as f32 * shift + centery - excentricity; let z = k as f32 * shift - centerz - excentricity; let mut shape = procedural::cuboid(&Vec3::new(2.0 - 0.08, 2.0 - 0.08, 2.0 - 0.08)); for c in shape.coords.iter_mut() { *c = *c + Vec3::new(excentricity, excentricity, excentricity); } let geom = Convex::new(shape.coords); let mut rb = RigidBody::new_dynamic(geom, 1.0, 0.3, 0.5); rb.set_deactivation_threshold(None); rb.append_translation(&Vec3::new(x, y, z)); world.add_body(rb); } } } /* * Set up the testbed. */ let mut testbed = Testbed::new(world); testbed.look_at(Pnt3::new(-30.0, 30.0, -30.0), Pnt3::new(0.0, 0.0, 0.0)); testbed.run(); }<|fim▁end|>
fn main() {
<|file_name|>ablation.py<|end_file_name|><|fim▁begin|># =============================================================================== # Copyright 2013 Jake Ross # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # =============================================================================== # ============= enthought library imports ======================= from __future__ import absolute_import from traitsui.api import View, Item, VGroup, InstanceEditor, UItem, EnumEditor, \ RangeEditor, spring, HGroup, Group, ButtonEditor # ============= standard library imports ======================== # ============= local library imports ========================== from pychron.core.ui.custom_label_editor import CustomLabel from pychron.core.ui.led_editor import LEDEditor from pychron.envisage.icon_button_editor import icon_button_editor from pychron.lasers.tasks.laser_panes import ClientPane class AblationCO2ClientPane(ClientPane): def trait_context(self): ctx = super(AblationCO2ClientPane, self).trait_context() ctx['tray_calibration'] = self.model.stage_manager.tray_calibration_manager ctx['stage_manager'] = self.model.stage_manager return ctx def traits_view(self): pos_grp = VGroup(UItem('move_enabled_button', editor=ButtonEditor(label_value='move_enabled_label')), VGroup(HGroup(Item('position'), UItem('stage_manager.stage_map_name', editor=EnumEditor(name='stage_manager.stage_map_names')), UItem('stage_stop_button')), Item('x', editor=RangeEditor(low_name='stage_manager.xmin', 
high_name='stage_manager.xmax')),<|fim▁hole|> high_name='stage_manager.ymax')), Item('z', editor=RangeEditor(low_name='stage_manager.zmin', high_name='stage_manager.zmax')), enabled_when='_move_enabled'), label='Positioning') calibration_grp = VGroup(UItem('tray_calibration.style', enabled_when='not tray_calibration.isCalibrating()'), UItem('tray_calibration.calibrate', editor=ButtonEditor(label_value='tray_calibration.calibration_step')), HGroup(Item('tray_calibration.cx', format_str='%0.3f', style='readonly'), Item('tray_calibration.cy', format_str='%0.3f', style='readonly')), Item('tray_calibration.rotation', format_str='%0.3f', style='readonly'), Item('tray_calibration.scale', format_str='%0.4f', style='readonly'), Item('tray_calibration.error', format_str='%0.2f', style='readonly'), UItem('tray_calibration.calibrator', style='custom', editor=InstanceEditor()), CustomLabel('tray_calibration.calibration_help', color='green', height=75, width=300), label='Tray Calibration') tgrp = Group(pos_grp, calibration_grp, layout='tabbed') egrp = HGroup(UItem('enabled', editor=LEDEditor(colors=['red', 'green'])), UItem('enable', editor=ButtonEditor(label_value='enable_label')), UItem('fire_laser_button', editor=ButtonEditor(label_value='fire_label'), enabled_when='enabled'), Item('output_power', label='Power'), UItem('units'), spring, icon_button_editor('snapshot_button', 'camera'), icon_button_editor('test_connection_button', 'connect', tooltip='Test Connection')) v = View(VGroup(egrp, tgrp)) return v # ============= EOF =============================================<|fim▁end|>
Item('y', editor=RangeEditor(low_name='stage_manager.ymin',
<|file_name|>MinimumWindowSubstring.py<|end_file_name|><|fim▁begin|>__source__ = 'https://leetcode.com/problems/minimum-window-substring/' # https://github.com/kamyu104/LeetCode/blob/master/Python/minimum-window-substring.py # Time: O(n) # Space: O(k), k is the number of different characters # Hashtable # # Description: Leetcode # 76. Minimum Window Substring # # Given a string S and a string T, find the minimum window in S # which will contain all the characters in T in complexity O(n). # # For example, # S = "ADOBECODEBANC" # T = "ABC" # Minimum window is "BANC". # # Note: # If there is no such window in S that covers all characters in T, return the emtpy string "". # # If there are multiple such windows, you are guaranteed that there will always be only one unique minimum window in S. # Companies # LinkedIn Snapchat Uber Facebook # Related Topics # Hash Table Two Pointers String # Similar Questions # Substring with Concatenation of All Words Minimum Size Subarray Sum # Sliding Window Maximum Permutation in String Smallest Range # import unittest import collections class Solution: # @return a string def minWindow(self, S, T): current_count = [0 for i in xrange(52)] #Radix a-zA-Z -> 52 expected_count = [0 for i in xrange(52)] for char in T: expected_count[ord(char) - ord('a')] += 1 i, count, start, min_width, min_start = 0, 0, 0, float("inf"), 0 while i < len(S): current_count[ord(S[i]) - ord('a')] += 1 if current_count[ord(S[i]) - ord('a')] <= expected_count[ord(S[i]) - ord('a')]: count += 1 if count == len(T): while expected_count[ord(S[start]) - ord('a')] == 0 or \ current_count[ord(S[start]) - ord('a')] > expected_count[ord(S[start]) - ord('a')]: current_count[ord(S[start]) - ord('a')] -= 1 start += 1 if min_width > i - start + 1: min_width = i - start + 1 min_start = start i += 1 if min_width == float("inf"): return "" return S[min_start:min_width+min_start] class Solution2: def minWindow(self, s, t): """ :type s: str :type t: str :rtype: str """ res = "" 
len_s = len(s) len_t = len(t) dict = collections.defaultdict(int) cnt = 0 minLen = float("inf") for i in xrange(len_t): dict[t[i]] += 1 s_idx = 0 for i in xrange(len_s): if s[i] in dict: dict[s[i]] -= 1 if dict[s[i]] >= 0: cnt += 1 while cnt == len(t): if s[s_idx] in dict: dict[s[s_idx]] += 1 if dict[s[s_idx]] > 0: if minLen > i - s_idx + 1: minLen = i - s_idx + 1 res = s[s_idx: i+1] cnt -= 1 s_idx += 1 return res class TestMethods(unittest.TestCase): def test_Local(self): self.assertEqual(1, 1) self.assertEqual("BANC", Solution2().minWindow("ADOBECODEBANC", "ABC")) self.assertEqual("BANC", Solution().minWindow("ADOBECODEBANC", "ABC")) if __name__ == '__main__': unittest.main() Java = ''' # Thought: https://leetcode.com/problems/minimum-window-substring/solution/ # 25ms 52.80% class Solution { public String minWindow(String s, String t) { String res = ""; if(s == null || t == null || s.length() == 0 || t.length() == 0) return res; int minLen = Integer.MAX_VALUE; Map<Character, Integer> map = new HashMap<>(); for( int i = 0; i < t.length(); i++){ if(!map.containsKey(t.charAt(i))){ map.put(t.charAt(i), 0); } map.put(t.charAt(i), map.get(t.charAt(i)) + 1); } int cnt = 0; int prev = 0; for(int i = 0 ; i < s.length(); i++){ char c = s.charAt(i); if(map.containsKey(c)){ map.put(c, map.get(c) - 1); if(map.get(c) >= 0){<|fim▁hole|> while(cnt == t.length()){ char p = s.charAt(prev); if(map.containsKey(p)){ map.put(p, map.get(p) + 1); if(map.get(p) > 0){ if(minLen > i - prev + 1){ minLen = i - prev + 1; res = s.substring(prev, i + 1); } cnt --; } } prev ++; } } } return res; } } #76.01% 7ms class Solution { public String minWindow(String s, String t) { int lenS = s.length(); int lenT = t.length(); if (lenS == 0 || lenT == 0) { return ""; } int[] sCount = new int[128]; int[] tCount = new int[128]; int count = lenT; int[] result = new int[] {-1, -1}; int start = 0; for (int i = 0; i < lenT; i++) { tCount[t.charAt(i)]++; } for (int i = 0; i < lenS; i++) { char c = s.charAt(i); 
sCount[c]++; if (sCount[c] <= tCount[c]) { count--; } if (count == 0) { while (true) { char remove = s.charAt(start); if (sCount[remove] <= tCount[remove]) { break; } sCount[remove]--; start++; } if (result[0] < 0 || result[1] - result[0] > i + 1 - start) { result[0] = start; result[1] = i + 1; } sCount[s.charAt(start++)]--; count++; } } return result[0] < 0 ? "" : s.substring(result[0], result[1]); } } # 4ms 96.63% class Solution { public String minWindow(String s, String t) { if (s == null || t == null || s.length() == 0 || t.length() == 0) { return null; } int start = -1; int end = s.length() + 1; int left = 0; int windowSize = 0; int[] count = new int[256]; for (char c : t.toCharArray()) { count[c]++; } for (int i = 0; i < s.length(); ++i) { if(--count[s.charAt(i)] >= 0) { windowSize++; } if (windowSize == t.length()) { while (++count[s.charAt(left)] <= 0) { left++; } if (i - left < end - start) { start = left; end = i; } left++; windowSize--; } } return start == -1 ? "" : s.substring(start, end + 1); } } '''<|fim▁end|>
cnt += 1; }
<|file_name|>1028_musical_all_of_us.py<|end_file_name|><|fim▁begin|>''' NPR Puzzle 2018-10-28 https://www.npr.org/2018/10/28/660936138/sunday-puzzle-row-row-row Think of a famous Broadway musical in two words. Change one letter in it to the preceding letter of the alphabet — so B would become A, C would become B, etc. Remove the space so you have a solid word. The result will name something that all of us are part of. What is it? ''' import sys sys.path.append('..') import nprcommontools as nct from nltk.corpus import wordnet as wn import re #%%<|fim▁hole|>musicals = set(x for x in nct.wikipedia_category_members('Broadway_musicals') if x.count(' ') == 1) #musicals = musicals.union(wikipedia_category_members('Off-Broadway_musicals')) #musicals = musicals.union(wikipedia_category_members('American musical films')) words = set(x for x in wn.all_lemma_names() if x.count('_') == 0) #%% # Go through musicals and look for ones that work for musical in musicals: musical_nospace = re.sub(r'[^A-Za-z]+','',musical).lower() for i in range(len(musical_nospace)): letter = musical_nospace[i] myword = musical_nospace[:i] + nct.letter_shift(letter,-1) + musical_nospace[i+1:] if myword in words: print(musical,myword)<|fim▁end|>
# Get a list of musicals from Wikipedia
<|file_name|>eric6_doc.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 # -*- coding: utf-8 -*- # Copyright (c) 2003 - 2014 Detlev Offenbach <[email protected]> # """ Eric6 Documentation Generator. This is the main Python script of the documentation generator. It is this script that gets called via the source documentation interface. This script can be used via the commandline as well. """ from __future__ import unicode_literals import Toolbox.PyQt4ImportHook # __IGNORE_WARNING__ try: # Only for Py2 import Utilities.compatibility_fixes # __IGNORE_WARNING__ except (ImportError): pass import glob import os import sys import fnmatch import Utilities.ModuleParser from DocumentationTools.ModuleDocumentor import ModuleDocument from DocumentationTools.IndexGenerator import IndexGenerator from DocumentationTools.QtHelpGenerator import QtHelpGenerator from DocumentationTools.Config import eric6docDefaultColors from UI.Info import Version import Utilities # list of supported filename extensions supportedExtensions = [".py", ".pyw", ".ptl", ".rb"] def usage(): """ Function to print some usage information. It prints a reference of all commandline parameters that may be used and ends the application. 
""" print("eric6_doc") print() print("Copyright (c) 2003 - 2014 Detlev Offenbach" " <[email protected]>.") print() print("Usage:") print() print(" eric6_doc [options] files...") print() print("where files can be either python modules, package") print("directories or ordinary directories.") print() print("Options:") print() print(" -c filename or --style-sheet=filename") print(" Specify a CSS style sheet file to be used.") print(" -e or --noempty") print(" Don't include empty modules.") print(" --eol=eol-type") print(" Use the given eol type to terminate lines.") print(" Valid values are 'cr', 'lf' and 'crlf'.") print(" --exclude-file=pattern") print(" Specify a filename pattern of files to be excluded.") print(" This option may be repeated multiple times.") print(" -h or --help") print(" Show this help and exit.") print(" -i or --noindex") print(" Don't generate index files.") print(" -o directory or --outdir=directory") print(" Generate files in the named directory.") print(" -R, -r or --recursive") print(" Perform a recursive search for Python files.") print(" -t ext or --extension=ext") print(" Add the given extension to the list of file extensions.") print(" This option may be given multiple times.") print(" -V or --version") print(" Show version information and exit.") print(" -x directory or --exclude=directory") print(" Specify a directory basename to be excluded.") print(" This option may be repeated multiple times.") print() print(" --body-color=color") print(" Specify the text color.") print(" --body-background-color=color") print(" Specify the text background color.") print(" --l1header-color=color") print(" Specify the text color of level 1 headers.") print(" --l1header-background-color=color") print(" Specify the text background color of level 1 headers.") print(" --l2header-color=color") print(" Specify the text color of level 2 headers.") print(" --l2header-background-color=color") print(" Specify the text background color of level 2 headers.") 
print(" --cfheader-color=color") print(" Specify the text color of class and function headers.") print(" --cfheader-background-color=color") print(" Specify the text background color of class and" " function headers.") print(" --link-color=color") print(" Specify the text color of hyperlinks.") print() print(" --create-qhp") print(" Enable generation of QtHelp files.") print(" --qhp-outdir=directory") print(" Generate QtHelp files in the named directory.") print(" --qhp-namespace=namespace") print(" Use the given namespace (mandatory).") print(" --qhp-virtualfolder=folder") print(" Use the given virtual folder (mandatory).") print(" The virtual folder must not contain '/'.") print(" --qhp-filtername=name") print(" Use the given name for the custom filter.") print(" --qhp-filterattribs=attributes") print(" Add the given attributes to the filter list.") print(" Attributes must be separated by ':'.") print(" --qhp-title=title") print(" Use this as the title for the generated help (mandatory).") print(" --create-qhc") print(" Enable generation of QtHelp Collection files.") sys.exit(1) def version(): """ Function to show the version information. """ print( """eric6_doc {0}\n""" """\n""" """Eric6 API documentation generator.\n""" """\n""" """Copyright (c) 2003-2014 Detlev Offenbach""" """ <[email protected]>\n""" """This is free software; see the LICENSE.GPL3 for copying""" """ conditions.\n""" """There is NO warranty; not even for MERCHANTABILITY or FITNESS""" """ FOR A\n""" """PARTICULAR PURPOSE.""".format(Version)) sys.exit(1) def main(): """ Main entry point into the application. 
""" import getopt try: opts, args = getopt.getopt( sys.argv[1:], "c:ehio:Rrt:Vx:", ["exclude=", "extension=", "help", "noindex", "noempty", "outdir=", "recursive", "style-sheet=", "version", "exclude-file=", "eol=", "body-color=", "body-background-color=", "l1header-color=", "l1header-background-color=", "l2header-color=", "l2header-background-color=", "cfheader-color=", "cfheader-background-color=", "link-color=", "create-qhp", "qhp-outdir=", "qhp-namespace=", "qhp-virtualfolder=", "qhp-filtername=", "qhp-filterattribs=", "qhp-title=", "create-qhc", ]) except getopt.error: usage() excludeDirs = ["CVS", ".svn", "_svn", ".ropeproject", "_ropeproject", ".eric6project", "_eric6project", "dist", "build", "doc", "docs"]<|fim▁hole|> doIndex = True noempty = False newline = None stylesheetFile = "" colors = eric6docDefaultColors.copy() qtHelpCreation = False qtHelpOutputDir = "help" qtHelpNamespace = "" qtHelpFolder = "source" qtHelpFilterName = "unknown" qtHelpFilterAttribs = "" qtHelpTitle = "" qtHelpCreateCollection = False for k, v in opts: if k in ["-o", "--outdir"]: outputDir = v elif k in ["-R", "-r", "--recursive"]: recursive = True elif k in ["-x", "--exclude"]: excludeDirs.append(v) elif k == "--exclude-file": excludePatterns.append(v) elif k in ["-i", "--noindex"]: doIndex = False elif k in ["-e", "--noempty"]: noempty = True elif k in ["-h", "--help"]: usage() elif k in ["-V", "--version"]: version() elif k in ["-c", "--style-sheet"]: stylesheetFile = v elif k in ["-t", "--extension"]: if not v.startswith("."): v = ".{0}".format(v) supportedExtensions.append(v) elif k == "--eol": if v.lower() == "cr": newline = '\r' elif v.lower() == "lf": newline = '\n' elif v.lower() == "crlf": newline = '\r\n' elif k == "--body-color": colors['BodyColor'] = v elif k == "--body-background-color": colors['BodyBgColor'] = v elif k == "--l1header-color": colors['Level1HeaderColor'] = v elif k == "--l1header-background-color": colors['Level1HeaderBgColor'] = v elif k == 
"--l2header-color": colors['Level2HeaderColor'] = v elif k == "--l2header-background-color": colors['Level2HeaderBgColor'] = v elif k == "--cfheader-color": colors['CFColor'] = v elif k == "--cfheader-background-color": colors['CFBgColor'] = v elif k == "--link-color": colors['LinkColor'] = v elif k == "--create-qhp": qtHelpCreation = True elif k == "--qhp-outdir": qtHelpOutputDir = v elif k == "--qhp-namespace": qtHelpNamespace = v elif k == "--qhp-virtualfolder": qtHelpFolder = v elif k == "--qhp-filtername": qtHelpFilterName = v elif k == "--qhp-filterattribs": qtHelpFilterAttribs = v elif k == "--qhp-title": qtHelpTitle = v elif k == "--create-qhc": qtHelpCreateCollection = True if not args: usage() if qtHelpCreation and \ (qtHelpNamespace == "" or qtHelpFolder == "" or '/' in qtHelpFolder or qtHelpTitle == ""): usage() if qtHelpCreation: from PyQt5.QtCore import QCoreApplication app = QCoreApplication(sys.argv) # __IGNORE_WARNING__ input = output = 0 basename = "" if outputDir: if not os.path.isdir(outputDir): try: os.makedirs(outputDir) except EnvironmentError: sys.stderr.write( "Could not create output directory {0}.".format(outputDir)) sys.exit(2) else: outputDir = os.getcwd() outputDir = os.path.abspath(outputDir) if stylesheetFile: try: sf = open(stylesheetFile, "r", encoding="utf-8") stylesheet = sf.read() sf.close() except IOError: sys.stderr.write( "The CSS stylesheet '{0}' does not exist\n".format( stylesheetFile)) sys.stderr.write("Disabling CSS usage.\n") stylesheet = None else: stylesheet = None indexGenerator = IndexGenerator(outputDir, colors, stylesheet) if qtHelpCreation: if qtHelpOutputDir: if not os.path.isdir(qtHelpOutputDir): try: os.makedirs(qtHelpOutputDir) except EnvironmentError: sys.stderr.write( "Could not create QtHelp output directory {0}.".format( qtHelpOutputDir)) sys.exit(2) else: qtHelpOutputDir = os.getcwd() qtHelpOutputDir = os.path.abspath(qtHelpOutputDir) qtHelpGenerator = QtHelpGenerator(outputDir, qtHelpOutputDir, 
qtHelpNamespace, qtHelpFolder, qtHelpFilterName, qtHelpFilterAttribs, qtHelpTitle, qtHelpCreateCollection) for arg in args: if os.path.isdir(arg): if os.path.exists(os.path.join( arg, Utilities.joinext("__init__", ".py"))): basename = os.path.dirname(arg) if arg == '.': sys.stderr.write("The directory '.' is a package.\n") sys.stderr.write( "Please repeat the call giving its real name.\n") sys.stderr.write("Ignoring the directory.\n") continue else: basename = arg if basename: basename = "{0}{1}".format(basename, os.sep) if recursive and not os.path.islink(arg): names = [arg] + Utilities.getDirs(arg, excludeDirs) else: names = [arg] else: basename = "" names = [arg] for filename in names: inpackage = False if os.path.isdir(filename): files = [] for ext in supportedExtensions: files.extend(glob.glob(os.path.join( filename, Utilities.joinext("*", ext)))) initFile = os.path.join( filename, Utilities.joinext("__init__", ext)) if initFile in files: inpackage = True files.remove(initFile) files.insert(0, initFile) else: if Utilities.isWindowsPlatform() and glob.has_magic(filename): files = glob.glob(filename) else: files = [filename] for file in files: skipIt = False for pattern in excludePatterns: if fnmatch.fnmatch(os.path.basename(file), pattern): skipIt = True break if skipIt: continue try: module = Utilities.ModuleParser.readModule( file, basename=basename, inpackage=inpackage, extensions=supportedExtensions) moduleDocument = ModuleDocument(module, colors, stylesheet) doc = moduleDocument.genDocument() except IOError as v: sys.stderr.write("{0} error: {1}\n".format(file, v[1])) continue except ImportError as v: sys.stderr.write("{0} error: {1}\n".format(file, v)) continue input = input + 1 f = Utilities.joinext(os.path.join( outputDir, moduleDocument.name()), ".html") # remember for index file generation indexGenerator.remember(file, moduleDocument, basename) # remember for QtHelp generation if qtHelpCreation: qtHelpGenerator.remember(file, moduleDocument, basename) 
if (noempty or file.endswith('__init__.py')) \ and moduleDocument.isEmpty(): continue # generate output try: out = open(f, "w", encoding="utf-8", newline=newline) out.write(doc) out.close() except IOError as v: sys.stderr.write("{0} error: {1}\n".format(file, v[1])) else: sys.stdout.write("{0} ok\n".format(f)) output = output + 1 sys.stdout.flush() sys.stderr.flush() # write index files if doIndex: indexGenerator.writeIndices(basename, newline=newline) # generate the QtHelp files if qtHelpCreation: qtHelpGenerator.generateFiles(newline=newline) sys.exit(0) if __name__ == '__main__': main()<|fim▁end|>
excludePatterns = [] outputDir = "doc" recursive = False
<|file_name|>EventTrackingField.java<|end_file_name|><|fim▁begin|>package com.github.dandelion.gua.core.field; public enum EventTrackingField implements AnalyticsField, AnalyticsCreateField {<|fim▁hole|> @AnalyticsFieldControl(AnalyticsFieldControl.Policy.TEXT) eventCategory, @AnalyticsFieldControl(AnalyticsFieldControl.Policy.TEXT) eventAction, @AnalyticsFieldControl(AnalyticsFieldControl.Policy.TEXT) eventLabel, @AnalyticsFieldControl(AnalyticsFieldControl.Policy.INTEGER) eventValue, }<|fim▁end|>
<|file_name|>wrapper.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ #![allow(unsafe_code)] //! Wrapper definitions on top of Gecko types in order to be used in the style //! system. //! //! This really follows the Servo pattern in //! `components/script/layout_wrapper.rs`. //! //! This theoretically should live in its own crate, but now it lives in the //! style system it's kind of pointless in the Stylo case, and only Servo forces //! the separation between the style system implementation and everything else. use crate::applicable_declarations::ApplicableDeclarationBlock; use crate::author_styles::AuthorStyles; use crate::context::{PostAnimationTasks, QuirksMode, SharedStyleContext, UpdateAnimationsTasks}; use crate::data::ElementData; use crate::dom::{LayoutIterator, NodeInfo, OpaqueNode, TDocument, TElement, TNode, TShadowRoot}; use crate::element_state::{DocumentState, ElementState}; use crate::font_metrics::{FontMetrics, FontMetricsOrientation, FontMetricsProvider}; use crate::gecko::data::GeckoStyleSheet; use crate::gecko::selector_parser::{NonTSPseudoClass, PseudoElement, SelectorImpl}; use crate::gecko::snapshot_helpers; use crate::gecko_bindings::bindings; use crate::gecko_bindings::bindings::Gecko_ElementHasAnimations; use crate::gecko_bindings::bindings::Gecko_ElementHasCSSAnimations; use crate::gecko_bindings::bindings::Gecko_ElementHasCSSTransitions; use crate::gecko_bindings::bindings::Gecko_GetActiveLinkAttrDeclarationBlock; use crate::gecko_bindings::bindings::Gecko_GetAnimationEffectCount; use crate::gecko_bindings::bindings::Gecko_GetAnimationRule; use crate::gecko_bindings::bindings::Gecko_GetExtraContentStyleDeclarations; use crate::gecko_bindings::bindings::Gecko_GetHTMLPresentationAttrDeclarationBlock; use 
crate::gecko_bindings::bindings::Gecko_GetStyleAttrDeclarationBlock; use crate::gecko_bindings::bindings::Gecko_GetUnvisitedLinkAttrDeclarationBlock; use crate::gecko_bindings::bindings::Gecko_GetVisitedLinkAttrDeclarationBlock; use crate::gecko_bindings::bindings::Gecko_IsSignificantChild; use crate::gecko_bindings::bindings::Gecko_MatchLang; use crate::gecko_bindings::bindings::Gecko_UnsetDirtyStyleAttr; use crate::gecko_bindings::bindings::Gecko_UpdateAnimations; use crate::gecko_bindings::bindings::{Gecko_ElementState, Gecko_GetDocumentLWTheme}; use crate::gecko_bindings::bindings::{Gecko_SetNodeFlags, Gecko_UnsetNodeFlags}; use crate::gecko_bindings::structs; use crate::gecko_bindings::structs::nsChangeHint; use crate::gecko_bindings::structs::Document_DocumentTheme as DocumentTheme; use crate::gecko_bindings::structs::EffectCompositor_CascadeLevel as CascadeLevel; use crate::gecko_bindings::structs::ELEMENT_HANDLED_SNAPSHOT; use crate::gecko_bindings::structs::ELEMENT_HAS_ANIMATION_ONLY_DIRTY_DESCENDANTS_FOR_SERVO; use crate::gecko_bindings::structs::ELEMENT_HAS_DIRTY_DESCENDANTS_FOR_SERVO; use crate::gecko_bindings::structs::ELEMENT_HAS_SNAPSHOT; use crate::gecko_bindings::structs::NODE_DESCENDANTS_NEED_FRAMES; use crate::gecko_bindings::structs::NODE_NEEDS_FRAME; use crate::gecko_bindings::structs::{nsAtom, nsIContent, nsINode_BooleanFlag}; use crate::gecko_bindings::structs::{nsINode as RawGeckoNode, Element as RawGeckoElement}; use crate::gecko_bindings::sugar::ownership::{HasArcFFI, HasSimpleFFI}; use crate::global_style_data::GLOBAL_STYLE_DATA; use crate::hash::FxHashMap; use crate::invalidation::element::restyle_hints::RestyleHint; use crate::media_queries::Device; use crate::properties::animated_properties::{AnimationValue, AnimationValueMap}; use crate::properties::{ComputedValues, LonghandId}; use crate::properties::{Importance, PropertyDeclaration, PropertyDeclarationBlock}; use crate::rule_tree::CascadeLevel as ServoCascadeLevel; use 
crate::selector_parser::{AttrValue, HorizontalDirection, Lang}; use crate::shared_lock::Locked; use crate::string_cache::{Atom, Namespace, WeakAtom, WeakNamespace}; use crate::stylist::CascadeData; use crate::values::computed::font::GenericFontFamily; use crate::values::computed::Length; use crate::values::specified::length::FontBaseSize; use crate::CaseSensitivityExt; use app_units::Au; use atomic_refcell::{AtomicRefCell, AtomicRefMut}; use selectors::attr::{AttrSelectorOperation, AttrSelectorOperator}; use selectors::attr::{CaseSensitivity, NamespaceConstraint}; use selectors::matching::VisitedHandlingMode; use selectors::matching::{ElementSelectorFlags, MatchingContext}; use selectors::sink::Push; use selectors::{Element, OpaqueElement}; use servo_arc::{Arc, ArcBorrow, RawOffsetArc}; use std::cell::RefCell; use std::fmt; use std::hash::{Hash, Hasher}; use std::mem; use std::ptr; #[inline] fn elements_with_id<'a, 'le>( array: *const structs::nsTArray<*mut RawGeckoElement>, ) -> &'a [GeckoElement<'le>] { unsafe { if array.is_null() { return &[]; } let elements: &[*mut RawGeckoElement] = &**array; // NOTE(emilio): We rely on the in-memory representation of // GeckoElement<'ld> and *mut RawGeckoElement being the same. #[allow(dead_code)] unsafe fn static_assert() { mem::transmute::<*mut RawGeckoElement, GeckoElement<'static>>(0xbadc0de as *mut _); } mem::transmute(elements) } } /// A simple wrapper over `Document`. 
#[derive(Clone, Copy)] pub struct GeckoDocument<'ld>(pub &'ld structs::Document); impl<'ld> TDocument for GeckoDocument<'ld> { type ConcreteNode = GeckoNode<'ld>; #[inline] fn as_node(&self) -> Self::ConcreteNode { GeckoNode(&self.0._base) } #[inline] fn is_html_document(&self) -> bool { self.0.mType == structs::Document_Type::eHTML } #[inline] fn quirks_mode(&self) -> QuirksMode { self.0.mCompatMode.into() } #[inline] fn elements_with_id<'a>(&self, id: &Atom) -> Result<&'a [GeckoElement<'ld>], ()> where Self: 'a, { Ok(elements_with_id(unsafe { bindings::Gecko_Document_GetElementsWithId(self.0, id.as_ptr()) })) } } /// A simple wrapper over `ShadowRoot`. #[derive(Clone, Copy)] pub struct GeckoShadowRoot<'lr>(pub &'lr structs::ShadowRoot); impl<'ln> fmt::Debug for GeckoShadowRoot<'ln> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { // TODO(emilio): Maybe print the host or something? write!(f, "<shadow-root> ({:#x})", self.as_node().opaque().0) } } impl<'lr> PartialEq for GeckoShadowRoot<'lr> { #[inline] fn eq(&self, other: &Self) -> bool { self.0 as *const _ == other.0 as *const _ } } impl<'lr> TShadowRoot for GeckoShadowRoot<'lr> { type ConcreteNode = GeckoNode<'lr>; #[inline] fn as_node(&self) -> Self::ConcreteNode { GeckoNode(&self.0._base._base._base._base) } #[inline] fn host(&self) -> GeckoElement<'lr> { GeckoElement(unsafe { &*self.0._base.mHost.mRawPtr }) } #[inline] fn style_data<'a>(&self) -> Option<&'a CascadeData> where Self: 'a, { let author_styles = unsafe { self.0.mServoStyles.mPtr.as_ref()? 
}; let author_styles = AuthorStyles::<GeckoStyleSheet>::from_ffi(author_styles); Some(&author_styles.data) } #[inline] fn elements_with_id<'a>(&self, id: &Atom) -> Result<&'a [GeckoElement<'lr>], ()> where Self: 'a, { Ok(elements_with_id(unsafe { bindings::Gecko_ShadowRoot_GetElementsWithId(self.0, id.as_ptr()) })) } #[inline] fn parts<'a>(&self) -> &[<Self::ConcreteNode as TNode>::ConcreteElement] where Self: 'a, { let slice: &[*const RawGeckoElement] = &*self.0.mParts; #[allow(dead_code)] unsafe fn static_assert() { mem::transmute::<*const RawGeckoElement, GeckoElement<'static>>(0xbadc0de as *const _); } unsafe { mem::transmute(slice) } } } /// A simple wrapper over a non-null Gecko node (`nsINode`) pointer. /// /// Important: We don't currently refcount the DOM, because the wrapper lifetime /// magic guarantees that our LayoutFoo references won't outlive the root, and /// we don't mutate any of the references on the Gecko side during restyle. /// /// We could implement refcounting if need be (at a potentially non-trivial /// performance cost) by implementing Drop and making LayoutFoo non-Copy. #[derive(Clone, Copy)] pub struct GeckoNode<'ln>(pub &'ln RawGeckoNode); impl<'ln> PartialEq for GeckoNode<'ln> { #[inline] fn eq(&self, other: &Self) -> bool { self.0 as *const _ == other.0 as *const _ } } impl<'ln> fmt::Debug for GeckoNode<'ln> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if let Some(el) = self.as_element() { return el.fmt(f); } if self.is_text_node() { return write!(f, "<text node> ({:#x})", self.opaque().0); } if self.is_document() { return write!(f, "<document> ({:#x})", self.opaque().0); } if let Some(sr) = self.as_shadow_root() { return sr.fmt(f); } write!(f, "<non-text node> ({:#x})", self.opaque().0) } } impl<'ln> GeckoNode<'ln> { #[inline] fn is_document(&self) -> bool { // This is a DOM constant that isn't going to change. 
const DOCUMENT_NODE: u16 = 9; self.node_info().mInner.mNodeType == DOCUMENT_NODE } #[inline] fn is_shadow_root(&self) -> bool { self.is_in_shadow_tree() && self.parent_node().is_none() } #[inline] fn from_content(content: &'ln nsIContent) -> Self { GeckoNode(&content._base) } #[inline] fn flags(&self) -> u32 { (self.0)._base._base_1.mFlags } #[inline] fn node_info(&self) -> &structs::NodeInfo { debug_assert!(!self.0.mNodeInfo.mRawPtr.is_null()); unsafe { &*self.0.mNodeInfo.mRawPtr } } // These live in different locations depending on processor architecture. #[cfg(target_pointer_width = "64")] #[inline] fn bool_flags(&self) -> u32 { (self.0)._base._base_1.mBoolFlags } #[cfg(target_pointer_width = "32")] #[inline] fn bool_flags(&self) -> u32 { (self.0).mBoolFlags } #[inline] fn get_bool_flag(&self, flag: nsINode_BooleanFlag) -> bool { self.bool_flags() & (1u32 << flag as u32) != 0 } /// This logic is duplicate in Gecko's nsINode::IsInShadowTree(). #[inline] fn is_in_shadow_tree(&self) -> bool { use crate::gecko_bindings::structs::NODE_IS_IN_SHADOW_TREE; self.flags() & (NODE_IS_IN_SHADOW_TREE as u32) != 0 } /// WARNING: This logic is duplicated in Gecko's FlattenedTreeParentIsParent. /// Make sure to mirror any modifications in both places. #[inline] fn flattened_tree_parent_is_parent(&self) -> bool { use crate::gecko_bindings::structs::*; let flags = self.flags(); // FIXME(emilio): The shadow tree condition seems it shouldn't be needed // anymore, if we check for slots. 
if self.is_in_shadow_tree() { return false; } let parent = unsafe { self.0.mParent.as_ref() }.map(GeckoNode); let parent_el = parent.and_then(|p| p.as_element()); if flags & (NODE_IS_NATIVE_ANONYMOUS_ROOT as u32) != 0 && parent_el.map_or(false, |el| el.is_root()) { return false; } if let Some(parent) = parent_el { if parent.shadow_root().is_some() { return false; } } true } #[inline] fn flattened_tree_parent(&self) -> Option<Self> { // TODO(emilio): Measure and consider not doing this fast-path and take // always the common path, it's only a function call and from profiles // it seems that keeping this fast path makes the compiler not inline // `flattened_tree_parent`. if self.flattened_tree_parent_is_parent() { debug_assert_eq!( unsafe { bindings::Gecko_GetFlattenedTreeParentNode(self.0) .as_ref() .map(GeckoNode) }, self.parent_node(), "Fast path stopped holding!" ); return self.parent_node(); } // NOTE(emilio): If this call is too expensive, we could manually // inline more aggressively. unsafe { bindings::Gecko_GetFlattenedTreeParentNode(self.0) .as_ref() .map(GeckoNode) } } #[inline] fn contains_non_whitespace_content(&self) -> bool { unsafe { Gecko_IsSignificantChild(self.0, false) } } } impl<'ln> NodeInfo for GeckoNode<'ln> { #[inline] fn is_element(&self) -> bool { self.get_bool_flag(nsINode_BooleanFlag::NodeIsElement) } fn is_text_node(&self) -> bool { // This is a DOM constant that isn't going to change. 
const TEXT_NODE: u16 = 3; self.node_info().mInner.mNodeType == TEXT_NODE } } impl<'ln> TNode for GeckoNode<'ln> { type ConcreteDocument = GeckoDocument<'ln>; type ConcreteShadowRoot = GeckoShadowRoot<'ln>; type ConcreteElement = GeckoElement<'ln>; #[inline] fn parent_node(&self) -> Option<Self> { unsafe { self.0.mParent.as_ref().map(GeckoNode) } } #[inline] fn first_child(&self) -> Option<Self> { unsafe { self.0 .mFirstChild .raw::<nsIContent>() .as_ref() .map(GeckoNode::from_content) } } #[inline] fn last_child(&self) -> Option<Self> { unsafe { bindings::Gecko_GetLastChild(self.0).as_ref().map(GeckoNode) } } #[inline] fn prev_sibling(&self) -> Option<Self> { unsafe { bindings::Gecko_GetPreviousSibling(self.0) .as_ref() .map(GeckoNode) } } #[inline] fn next_sibling(&self) -> Option<Self> { unsafe { self.0 .mNextSibling .raw::<nsIContent>() .as_ref() .map(GeckoNode::from_content) } } #[inline] fn owner_doc(&self) -> Self::ConcreteDocument { debug_assert!(!self.node_info().mDocument.is_null()); GeckoDocument(unsafe { &*self.node_info().mDocument }) } #[inline] fn is_in_document(&self) -> bool { self.get_bool_flag(nsINode_BooleanFlag::IsInDocument) } fn traversal_parent(&self) -> Option<GeckoElement<'ln>> { self.flattened_tree_parent().and_then(|n| n.as_element()) } #[inline] fn opaque(&self) -> OpaqueNode { let ptr: usize = self.0 as *const _ as usize; OpaqueNode(ptr) } fn debug_id(self) -> usize { unimplemented!() } #[inline] fn as_element(&self) -> Option<GeckoElement<'ln>> { if !self.is_element() { return None; } Some(GeckoElement(unsafe { &*(self.0 as *const _ as *const RawGeckoElement) })) } #[inline] fn as_document(&self) -> Option<Self::ConcreteDocument> { if !self.is_document() { return None; } debug_assert_eq!(self.owner_doc().as_node(), *self, "How?"); Some(self.owner_doc()) } #[inline] fn as_shadow_root(&self) -> Option<Self::ConcreteShadowRoot> { if !self.is_shadow_root() { return None; } Some(GeckoShadowRoot(unsafe { &*(self.0 as *const _ as *const 
structs::ShadowRoot) })) } } /// A wrapper on top of two kind of iterators, depending on the parent being /// iterated. /// /// We generally iterate children by traversing the light-tree siblings of the /// first child like Servo does. /// /// However, for nodes with anonymous children, we use a custom (heavier-weight) /// Gecko-implemented iterator. /// /// FIXME(emilio): If we take into account shadow DOM, we're going to need the /// flat tree pretty much always. We can try to optimize the case where there's /// no shadow root sibling, probably. pub enum GeckoChildrenIterator<'a> { /// A simple iterator that tracks the current node being iterated and /// replaces it with the next sibling when requested. Current(Option<GeckoNode<'a>>), /// A Gecko-implemented iterator we need to drop appropriately. GeckoIterator(structs::StyleChildrenIterator), } impl<'a> Drop for GeckoChildrenIterator<'a> { fn drop(&mut self) { if let GeckoChildrenIterator::GeckoIterator(ref mut it) = *self { unsafe { bindings::Gecko_DestroyStyleChildrenIterator(it); } } } } impl<'a> Iterator for GeckoChildrenIterator<'a> { type Item = GeckoNode<'a>; fn next(&mut self) -> Option<GeckoNode<'a>> { match *self { GeckoChildrenIterator::Current(curr) => { let next = curr.and_then(|node| node.next_sibling()); *self = GeckoChildrenIterator::Current(next); curr }, GeckoChildrenIterator::GeckoIterator(ref mut it) => unsafe { // We do this unsafe lengthening of the lifetime here because // structs::StyleChildrenIterator is actually StyleChildrenIterator<'a>, // however we can't express this easily with bindgen, and it would // introduce functions with two input lifetimes into bindgen, // which would be out of scope for elision. bindings::Gecko_GetNextStyleChild(&mut *(it as *mut _)) .as_ref() .map(GeckoNode) }, } } } /// A simple wrapper over a non-null Gecko `Element` pointer. 
#[derive(Clone, Copy)] pub struct GeckoElement<'le>(pub &'le RawGeckoElement); impl<'le> fmt::Debug for GeckoElement<'le> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { use nsstring::nsCString; write!(f, "<{}", self.local_name())?; let mut attrs = nsCString::new(); unsafe { bindings::Gecko_Element_DebugListAttributes(self.0, &mut attrs); } write!(f, "{}", attrs)?; write!(f, "> ({:#x})", self.as_node().opaque().0) } } impl<'le> GeckoElement<'le> { #[inline(always)] fn attrs(&self) -> &[structs::AttrArray_InternalAttr] { unsafe { let attrs = match self.0.mAttrs.mImpl.mPtr.as_ref() { Some(attrs) => attrs, None => return &[], }; attrs.mBuffer.as_slice(attrs.mAttrCount as usize) } } #[inline(always)] fn get_part_attr(&self) -> Option<&structs::nsAttrValue> { if !self.has_part_attr() { return None; } snapshot_helpers::find_attr(self.attrs(), &atom!("part")) } #[inline(always)] fn get_class_attr(&self) -> Option<&structs::nsAttrValue> { if !self.may_have_class() { return None; } if self.is_svg_element() { let svg_class = unsafe { bindings::Gecko_GetSVGAnimatedClass(self.0).as_ref() }; if let Some(c) = svg_class { return Some(c); } } snapshot_helpers::find_attr(self.attrs(), &atom!("class")) } #[inline] fn closest_anon_subtree_root_parent(&self) -> Option<Self> { debug_assert!(self.is_in_native_anonymous_subtree()); let mut current = *self; loop { if current.is_root_of_native_anonymous_subtree() { return current.traversal_parent(); } current = current.traversal_parent()?; } } #[inline] fn may_have_anonymous_children(&self) -> bool { self.as_node() .get_bool_flag(nsINode_BooleanFlag::ElementMayHaveAnonymousChildren) } #[inline] fn flags(&self) -> u32 { self.as_node().flags() } // FIXME: We can implement this without OOL calls, but we can't easily given // GeckoNode is a raw reference. // // We can use a Cell<T>, but that's a bit of a pain. 
#[inline] fn set_flags(&self, flags: u32) { unsafe { Gecko_SetNodeFlags(self.as_node().0, flags) } } #[inline] unsafe fn unset_flags(&self, flags: u32) { Gecko_UnsetNodeFlags(self.as_node().0, flags) } /// Returns true if this element has descendants for lazy frame construction. #[inline] pub fn descendants_need_frames(&self) -> bool { self.flags() & (NODE_DESCENDANTS_NEED_FRAMES as u32) != 0 } /// Returns true if this element needs lazy frame construction. #[inline] pub fn needs_frame(&self) -> bool { self.flags() & (NODE_NEEDS_FRAME as u32) != 0 } /// Returns a reference to the DOM slots for this Element, if they exist. #[inline] fn dom_slots(&self) -> Option<&structs::FragmentOrElement_nsDOMSlots> { let slots = self.as_node().0.mSlots as *const structs::FragmentOrElement_nsDOMSlots; unsafe { slots.as_ref() } } /// Returns a reference to the extended DOM slots for this Element. #[inline] fn extended_slots(&self) -> Option<&structs::FragmentOrElement_nsExtendedDOMSlots> { self.dom_slots().and_then(|s| unsafe { // For the bit usage, see nsContentSlots::GetExtendedSlots. 
let e_slots = s._base.mExtendedSlots & !structs::nsIContent_nsContentSlots_sNonOwningExtendedSlotsFlag; (e_slots as *const structs::FragmentOrElement_nsExtendedDOMSlots).as_ref() }) } #[inline] fn namespace_id(&self) -> i32 { self.as_node().node_info().mInner.mNamespaceID } #[inline] fn has_id(&self) -> bool { self.as_node() .get_bool_flag(nsINode_BooleanFlag::ElementHasID) } #[inline] fn state_internal(&self) -> u64 { if !self .as_node() .get_bool_flag(nsINode_BooleanFlag::ElementHasLockedStyleStates) { return self.0.mState.mStates; } unsafe { Gecko_ElementState(self.0) } } #[inline] fn document_state(&self) -> DocumentState { DocumentState::from_bits_truncate(self.as_node().owner_doc().0.mDocumentState.mStates) } #[inline] fn may_have_class(&self) -> bool { self.as_node() .get_bool_flag(nsINode_BooleanFlag::ElementMayHaveClass) } #[inline] fn has_properties(&self) -> bool { use crate::gecko_bindings::structs::NODE_HAS_PROPERTIES; (self.flags() & NODE_HAS_PROPERTIES as u32) != 0 } #[inline] fn before_or_after_pseudo(&self, is_before: bool) -> Option<Self> { if !self.has_properties() { return None; } unsafe { bindings::Gecko_GetBeforeOrAfterPseudo(self.0, is_before) .as_ref() .map(GeckoElement) } } #[inline] fn may_have_style_attribute(&self) -> bool { self.as_node() .get_bool_flag(nsINode_BooleanFlag::ElementMayHaveStyle) } #[inline] fn document_theme(&self) -> DocumentTheme { let node = self.as_node(); unsafe { Gecko_GetDocumentLWTheme(node.owner_doc().0) } } /// Only safe to call on the main thread, with exclusive access to the /// element and its ancestors. /// /// This function is also called after display property changed for SMIL /// animation. /// /// Also this function schedules style flush. 
pub unsafe fn note_explicit_hints(&self, restyle_hint: RestyleHint, change_hint: nsChangeHint) { use crate::gecko::restyle_damage::GeckoRestyleDamage; let damage = GeckoRestyleDamage::new(change_hint); debug!( "note_explicit_hints: {:?}, restyle_hint={:?}, change_hint={:?}", self, restyle_hint, change_hint ); debug_assert!( !(restyle_hint.has_animation_hint() && restyle_hint.has_non_animation_hint()), "Animation restyle hints should not appear with non-animation restyle hints" ); let mut data = match self.mutate_data() { Some(d) => d, None => { debug!("(Element not styled, discarding hints)"); return; }, }; debug_assert!(data.has_styles(), "how?"); // Propagate the bit up the chain. if restyle_hint.has_animation_hint() { bindings::Gecko_NoteAnimationOnlyDirtyElement(self.0); } else { bindings::Gecko_NoteDirtyElement(self.0); } data.hint.insert(restyle_hint); data.damage |= damage; } /// This logic is duplicated in Gecko's nsIContent::IsRootOfNativeAnonymousSubtree. #[inline] fn is_root_of_native_anonymous_subtree(&self) -> bool { use crate::gecko_bindings::structs::NODE_IS_NATIVE_ANONYMOUS_ROOT; return self.flags() & (NODE_IS_NATIVE_ANONYMOUS_ROOT as u32) != 0; } /// Returns true if this node is the shadow root of an use-element shadow tree. 
#[inline] fn is_root_of_use_element_shadow_tree(&self) -> bool { if !self.as_node().is_in_shadow_tree() { return false; } if !self.parent_node_is_shadow_root() { return false; } let host = self.containing_shadow_host().unwrap(); host.is_svg_element() && host.local_name() == &*local_name!("use") } fn css_transitions_info(&self) -> FxHashMap<LonghandId, Arc<AnimationValue>> { use crate::gecko_bindings::bindings::Gecko_ElementTransitions_EndValueAt; use crate::gecko_bindings::bindings::Gecko_ElementTransitions_Length; let collection_length = unsafe { Gecko_ElementTransitions_Length(self.0) } as usize; let mut map = FxHashMap::with_capacity_and_hasher(collection_length, Default::default()); for i in 0..collection_length { let raw_end_value = unsafe { Gecko_ElementTransitions_EndValueAt(self.0, i).as_ref() }; let end_value = AnimationValue::arc_from_borrowed(&raw_end_value) .expect("AnimationValue not found in ElementTransitions"); let property = end_value.id(); debug_assert!(!property.is_logical()); map.insert(property, end_value.clone_arc()); } map } fn needs_transitions_update_per_property(<|fim▁hole|> combined_duration: f32, before_change_style: &ComputedValues, after_change_style: &ComputedValues, existing_transitions: &FxHashMap<LonghandId, Arc<AnimationValue>>, ) -> bool { use crate::values::animated::{Animate, Procedure}; debug_assert!(!longhand_id.is_logical()); // If there is an existing transition, update only if the end value // differs. // // If the end value has not changed, we should leave the currently // running transition as-is since we don't want to interrupt its timing // function. 
if let Some(ref existing) = existing_transitions.get(&longhand_id) { let after_value = AnimationValue::from_computed_values(longhand_id, after_change_style).unwrap(); return ***existing != after_value; } let from = AnimationValue::from_computed_values(longhand_id, before_change_style); let to = AnimationValue::from_computed_values(longhand_id, after_change_style); debug_assert_eq!(to.is_some(), from.is_some()); combined_duration > 0.0f32 && from != to && from.unwrap() .animate( to.as_ref().unwrap(), Procedure::Interpolate { progress: 0.5 }, ) .is_ok() } } /// Converts flags from the layout used by rust-selectors to the layout used /// by Gecko. We could align these and then do this without conditionals, but /// it's probably not worth the trouble. fn selector_flags_to_node_flags(flags: ElementSelectorFlags) -> u32 { use crate::gecko_bindings::structs::*; let mut gecko_flags = 0u32; if flags.contains(ElementSelectorFlags::HAS_SLOW_SELECTOR) { gecko_flags |= NODE_HAS_SLOW_SELECTOR as u32; } if flags.contains(ElementSelectorFlags::HAS_SLOW_SELECTOR_LATER_SIBLINGS) { gecko_flags |= NODE_HAS_SLOW_SELECTOR_LATER_SIBLINGS as u32; } if flags.contains(ElementSelectorFlags::HAS_EDGE_CHILD_SELECTOR) { gecko_flags |= NODE_HAS_EDGE_CHILD_SELECTOR as u32; } if flags.contains(ElementSelectorFlags::HAS_EMPTY_SELECTOR) { gecko_flags |= NODE_HAS_EMPTY_SELECTOR as u32; } gecko_flags } fn get_animation_rule( element: &GeckoElement, cascade_level: CascadeLevel, ) -> Option<Arc<Locked<PropertyDeclarationBlock>>> { use crate::properties::longhands::ANIMATABLE_PROPERTY_COUNT; // There's a very rough correlation between the number of effects // (animations) on an element and the number of properties it is likely to // animate, so we use that as an initial guess for the size of the // AnimationValueMap in order to reduce the number of re-allocations needed. 
    // Ask Gecko how many animation effects are attached; used purely as a
    // capacity hint for the value map below.
    let effect_count = unsafe { Gecko_GetAnimationEffectCount(element.0) };
    // Also, we should try to reuse the PDB, to avoid creating extra rule nodes.
    let mut animation_values = AnimationValueMap::with_capacity_and_hasher(
        effect_count.min(ANIMATABLE_PROPERTY_COUNT),
        Default::default(),
    );
    if unsafe {
        Gecko_GetAnimationRule(
            element.0,
            cascade_level,
            AnimationValueMap::as_ffi_mut(&mut animation_values),
        )
    } {
        let shared_lock = &GLOBAL_STYLE_DATA.shared_lock;
        Some(Arc::new(shared_lock.wrap(
            PropertyDeclarationBlock::from_animation_value_map(&animation_values),
        )))
    } else {
        // Gecko reported no animation rule for this cascade level.
        None
    }
}

#[derive(Debug)]
/// Gecko font metrics provider
pub struct GeckoFontMetricsProvider {
    /// Cache of base font sizes for each language
    ///
    /// Usually will have 1 element.
    ///
    // This may be slow on pages using more languages, might be worth optimizing
    // by caching lang->group mapping separately and/or using a hashmap on larger
    // loads.
    pub font_size_cache: RefCell<Vec<(Atom, crate::gecko_bindings::structs::FontSizePrefs)>>,
}

impl GeckoFontMetricsProvider {
    /// Construct
    pub fn new() -> Self {
        GeckoFontMetricsProvider {
            font_size_cache: RefCell::new(Vec::new()),
        }
    }
}

impl FontMetricsProvider for GeckoFontMetricsProvider {
    fn create_from(_: &SharedStyleContext) -> GeckoFontMetricsProvider {
        GeckoFontMetricsProvider::new()
    }

    /// Returns the base size for `font_family` in the language group named by
    /// `font_name`, consulting (and populating) the linear cache first.
    fn get_size(&self, font_name: &Atom, font_family: GenericFontFamily) -> Length {
        let mut cache = self.font_size_cache.borrow_mut();
        if let Some(sizes) = cache.iter().find(|el| el.0 == *font_name) {
            return sizes.1.size_for_generic(font_family);
        }
        let sizes = unsafe { bindings::Gecko_GetBaseSize(font_name.as_ptr()) };
        cache.push((font_name.clone(), sizes));
        sizes.size_for_generic(font_family)
    }

    /// Queries Gecko for font metrics (x-height, ch advance) for the font of
    /// the current or inherited style. Returns defaults when there is no
    /// pres context (e.g. display:none documents).
    fn query(
        &self,
        context: &crate::values::computed::Context,
        base_size: FontBaseSize,
        orientation: FontMetricsOrientation,
    ) -> FontMetrics {
        let pc = match context.device().pres_context() {
            Some(pc) => pc,
            None => return Default::default(),
        };
        let size =
            Au::from(base_size.resolve(context));
        let style = context.style();
        let (wm, font) = match base_size {
            FontBaseSize::CurrentStyle => (style.writing_mode, style.get_font()),
            // This is only used for font-size computation.
            FontBaseSize::InheritedStyle => {
                (*style.inherited_writing_mode(), style.get_parent_font())
            },
        };

        // Vertical metrics are only meaningful for upright vertical writing
        // modes when matching the context.
        let vertical_metrics = match orientation {
            FontMetricsOrientation::MatchContext => wm.is_vertical() && wm.is_upright(),
            FontMetricsOrientation::Horizontal => false,
        };
        let gecko_metrics = unsafe {
            bindings::Gecko_GetFontMetrics(
                pc,
                vertical_metrics,
                font.gecko(),
                size.0,
                // we don't use the user font set in a media query
                !context.in_media_query,
            )
        };
        FontMetrics {
            x_height: Some(Au(gecko_metrics.mXSize).into()),
            // A negative mChSize means "no zero-advance measure available".
            zero_advance_measure: if gecko_metrics.mChSize >= 0 {
                Some(Au(gecko_metrics.mChSize).into())
            } else {
                None
            },
        }
    }
}

impl structs::FontSizePrefs {
    /// Maps a generic font family to the corresponding default size pref.
    fn size_for_generic(&self, font_family: GenericFontFamily) -> Length {
        Au(match font_family {
            GenericFontFamily::None => self.mDefaultVariableSize,
            GenericFontFamily::Serif => self.mDefaultSerifSize,
            GenericFontFamily::SansSerif => self.mDefaultSansSerifSize,
            GenericFontFamily::Monospace => self.mDefaultMonospaceSize,
            GenericFontFamily::Cursive => self.mDefaultCursiveSize,
            GenericFontFamily::Fantasy => self.mDefaultFantasySize,
            GenericFontFamily::MozEmoji => unreachable!(
                "Should never get here, since this doesn't (yet) appear on font family"
            ),
        })
        .into()
    }
}

impl<'le> TElement for GeckoElement<'le> {
    type ConcreteNode = GeckoNode<'le>;
    type FontMetricsProvider = GeckoFontMetricsProvider;
    type TraversalChildrenIterator = GeckoChildrenIterator<'le>;

    /// The parent we inherit computed style from: the originating element for
    /// pseudo-elements, the flattened-tree parent otherwise.
    fn inheritance_parent(&self) -> Option<Self> {
        if self.is_pseudo_element() {
            return self.pseudo_element_originating_element();
        }

        self.as_node()
            .flattened_tree_parent()
            .and_then(|n| n.as_element())
    }

    fn traversal_children(&self) -> LayoutIterator<GeckoChildrenIterator<'le>> {
        // This condition is similar to the check that
        // StyleChildrenIterator::IsNeeded does, except that it might return
        // true if we used to (but no longer) have anonymous content from
        // ::before/::after, or nsIAnonymousContentCreators.
        if self.is_in_native_anonymous_subtree() ||
            self.is_html_slot_element() ||
            self.shadow_root().is_some() ||
            self.may_have_anonymous_children()
        {
            unsafe {
                let mut iter: structs::StyleChildrenIterator = ::std::mem::zeroed();
                bindings::Gecko_ConstructStyleChildrenIterator(self.0, &mut iter);
                return LayoutIterator(GeckoChildrenIterator::GeckoIterator(iter));
            }
        }

        // Fast path: plain DOM children.
        LayoutIterator(GeckoChildrenIterator::Current(self.as_node().first_child()))
    }

    fn before_pseudo_element(&self) -> Option<Self> {
        self.before_or_after_pseudo(/* is_before = */ true)
    }

    fn after_pseudo_element(&self) -> Option<Self> {
        self.before_or_after_pseudo(/* is_before = */ false)
    }

    fn marker_pseudo_element(&self) -> Option<Self> {
        if !self.has_properties() {
            return None;
        }

        unsafe {
            bindings::Gecko_GetMarkerPseudo(self.0)
                .as_ref()
                .map(GeckoElement)
        }
    }

    #[inline]
    fn is_html_element(&self) -> bool {
        self.namespace_id() == structs::kNameSpaceID_XHTML as i32
    }

    #[inline]
    fn is_mathml_element(&self) -> bool {
        self.namespace_id() == structs::kNameSpaceID_MathML as i32
    }

    #[inline]
    fn is_svg_element(&self) -> bool {
        self.namespace_id() == structs::kNameSpaceID_SVG as i32
    }

    #[inline]
    fn is_xul_element(&self) -> bool {
        self.namespace_id() == structs::root::kNameSpaceID_XUL as i32
    }

    #[inline]
    fn local_name(&self) -> &WeakAtom {
        unsafe { WeakAtom::new(self.as_node().node_info().mInner.mName) }
    }

    #[inline]
    fn namespace(&self) -> &WeakNamespace {
        unsafe {
            let namespace_manager = structs::nsContentUtils_sNameSpaceManager;
            WeakNamespace::new((*namespace_manager).mURIArray[self.namespace_id() as usize].mRawPtr)
        }
    }

    /// Return the list of slotted nodes of this node.
    #[inline]
    fn slotted_nodes(&self) -> &[Self::ConcreteNode] {
        if !self.is_html_slot_element() || !self.as_node().is_in_shadow_tree() {
            return &[];
        }

        // Safe only because the two checks above established that this is an
        // HTML <slot> element inside a shadow tree.
        let slot: &structs::HTMLSlotElement = unsafe { mem::transmute(self.0) };

        if cfg!(debug_assertions) {
            let base: &RawGeckoElement = &slot._base._base._base._base;
            assert_eq!(base as *const _, self.0 as *const _, "Bad cast");
        }

        // FIXME(emilio): Workaround a bindgen bug on Android that causes
        // mAssignedNodes to be at the wrong offset. See bug 1466406.
        //
        // Bug 1466580 tracks running the Android layout tests on automation.
        //
        // The actual bindgen bug still needs reduction.
        let assigned_nodes: &[structs::RefPtr<structs::nsINode>] = if !cfg!(target_os = "android") {
            debug_assert_eq!(
                unsafe { bindings::Gecko_GetAssignedNodes(self.0) },
                &slot.mAssignedNodes as *const _,
            );

            &*slot.mAssignedNodes
        } else {
            unsafe { &**bindings::Gecko_GetAssignedNodes(self.0) }
        };

        // The transmute below reinterprets RefPtr<nsINode> as GeckoNode; this
        // assertion guards the layout assumption it relies on.
        debug_assert_eq!(
            mem::size_of::<structs::RefPtr<structs::nsINode>>(),
            mem::size_of::<Self::ConcreteNode>(),
            "Bad cast!"
        );

        unsafe { mem::transmute(assigned_nodes) }
    }

    #[inline]
    fn shadow_root(&self) -> Option<GeckoShadowRoot<'le>> {
        let slots = self.extended_slots()?;
        unsafe { slots.mShadowRoot.mRawPtr.as_ref().map(GeckoShadowRoot) }
    }

    #[inline]
    fn containing_shadow(&self) -> Option<GeckoShadowRoot<'le>> {
        let slots = self.extended_slots()?;
        unsafe {
            slots
                ._base
                .mContainingShadow
                .mRawPtr
                .as_ref()
                .map(GeckoShadowRoot)
        }
    }

    /// Invokes `f` on each element in this element's native anonymous
    /// content list, if any.
    fn each_anonymous_content_child<F>(&self, mut f: F)
    where
        F: FnMut(Self),
    {
        if !self.may_have_anonymous_children() {
            return;
        }

        let array: *mut structs::nsTArray<*mut nsIContent> =
            unsafe { bindings::Gecko_GetAnonymousContentForElement(self.0) };

        if array.is_null() {
            return;
        }

        for content in unsafe { &**array } {
            let node = GeckoNode::from_content(unsafe { &**content });
            let element = match node.as_element() {
                Some(e) => e,
                None => continue,
            };

            f(element);
        }

        // The list returned by Gecko_GetAnonymousContentForElement is owned
        // by us and must be freed.
        unsafe { bindings::Gecko_DestroyAnonymousContentList(array) };
    }

    #[inline]
    fn as_node(&self) -> Self::ConcreteNode {
        unsafe { GeckoNode(&*(self.0 as *const _ as *const RawGeckoNode)) }
    }

    fn owner_doc_matches_for_testing(&self, device: &Device) -> bool {
        self.as_node().owner_doc().0 as *const structs::Document == device.document() as *const _
    }

    fn style_attribute(&self) -> Option<ArcBorrow<Locked<PropertyDeclarationBlock>>> {
        if !self.may_have_style_attribute() {
            return None;
        }

        let declarations = unsafe { Gecko_GetStyleAttrDeclarationBlock(self.0).as_ref() };
        let declarations: Option<&RawOffsetArc<Locked<PropertyDeclarationBlock>>> =
            declarations.and_then(|s| s.as_arc_opt());
        declarations.map(|s| s.borrow_arc())
    }

    fn unset_dirty_style_attribute(&self) {
        if !self.may_have_style_attribute() {
            return;
        }

        unsafe { Gecko_UnsetDirtyStyleAttr(self.0) };
    }

    /// Returns the SMIL override declaration block for this element, if any.
    fn smil_override(&self) -> Option<ArcBorrow<Locked<PropertyDeclarationBlock>>> {
        unsafe {
            let slots = self.extended_slots()?;

            let declaration: &structs::DeclarationBlock =
                slots.mSMILOverrideStyleDeclaration.mRawPtr.as_ref()?;

            let raw:
                &structs::RawServoDeclarationBlock = declaration.mRaw.mRawPtr.as_ref()?;

            Some(
                Locked::<PropertyDeclarationBlock>::as_arc(
                    &*(&raw as *const &structs::RawServoDeclarationBlock),
                )
                .borrow_arc(),
            )
        }
    }

    fn animation_rule(&self) -> Option<Arc<Locked<PropertyDeclarationBlock>>> {
        get_animation_rule(self, CascadeLevel::Animations)
    }

    fn transition_rule(&self) -> Option<Arc<Locked<PropertyDeclarationBlock>>> {
        get_animation_rule(self, CascadeLevel::Transitions)
    }

    #[inline]
    fn state(&self) -> ElementState {
        ElementState::from_bits_truncate(self.state_internal())
    }

    #[inline]
    fn has_attr(&self, namespace: &Namespace, attr: &Atom) -> bool {
        unsafe { bindings::Gecko_HasAttr(self.0, namespace.0.as_ptr(), attr.as_ptr()) }
    }

    #[inline]
    fn has_part_attr(&self) -> bool {
        self.as_node()
            .get_bool_flag(nsINode_BooleanFlag::ElementHasPart)
    }

    #[inline]
    fn exports_any_part(&self) -> bool {
        snapshot_helpers::find_attr(self.attrs(), &atom!("exportparts")).is_some()
    }

    // FIXME(emilio): we should probably just return a reference to the Atom.
    #[inline]
    fn id(&self) -> Option<&WeakAtom> {
        if !self.has_id() {
            return None;
        }
        snapshot_helpers::get_id(self.attrs())
    }

    /// Invokes `callback` for each atom in the `class` attribute.
    fn each_class<F>(&self, callback: F)
    where
        F: FnMut(&Atom),
    {
        let attr = match self.get_class_attr() {
            Some(c) => c,
            None => return,
        };

        snapshot_helpers::each_class_or_part(attr, callback)
    }

    /// Invokes `callback` for each atom in the `part` attribute.
    fn each_part<F>(&self, callback: F)
    where
        F: FnMut(&Atom),
    {
        let attr = match self.get_part_attr() {
            Some(c) => c,
            None => return,
        };

        snapshot_helpers::each_class_or_part(attr, callback)
    }

    #[inline]
    fn has_snapshot(&self) -> bool {
        self.flags() & (ELEMENT_HAS_SNAPSHOT as u32) != 0
    }

    #[inline]
    fn handled_snapshot(&self) -> bool {
        self.flags() & (ELEMENT_HANDLED_SNAPSHOT as u32) != 0
    }

    unsafe fn set_handled_snapshot(&self) {
        debug_assert!(self.get_data().is_some());
        self.set_flags(ELEMENT_HANDLED_SNAPSHOT as u32)
    }

    #[inline]
    fn has_dirty_descendants(&self) -> bool {
        self.flags() & (ELEMENT_HAS_DIRTY_DESCENDANTS_FOR_SERVO as u32) != 0
    }

    unsafe fn set_dirty_descendants(&self) {
        debug_assert!(self.get_data().is_some());
        debug!("Setting dirty descendants: {:?}", self);
        self.set_flags(ELEMENT_HAS_DIRTY_DESCENDANTS_FOR_SERVO as u32)
    }

    unsafe fn unset_dirty_descendants(&self) {
        self.unset_flags(ELEMENT_HAS_DIRTY_DESCENDANTS_FOR_SERVO as u32)
    }

    #[inline]
    fn has_animation_only_dirty_descendants(&self) -> bool {
        self.flags() & (ELEMENT_HAS_ANIMATION_ONLY_DIRTY_DESCENDANTS_FOR_SERVO as u32) != 0
    }

    unsafe fn set_animation_only_dirty_descendants(&self) {
        self.set_flags(ELEMENT_HAS_ANIMATION_ONLY_DIRTY_DESCENDANTS_FOR_SERVO as u32)
    }

    unsafe fn unset_animation_only_dirty_descendants(&self) {
        self.unset_flags(ELEMENT_HAS_ANIMATION_ONLY_DIRTY_DESCENDANTS_FOR_SERVO as u32)
    }

    unsafe fn clear_descendant_bits(&self) {
        self.unset_flags(
            ELEMENT_HAS_DIRTY_DESCENDANTS_FOR_SERVO as u32 |
                ELEMENT_HAS_ANIMATION_ONLY_DIRTY_DESCENDANTS_FOR_SERVO as u32 |
                NODE_DESCENDANTS_NEED_FRAMES as u32,
        )
    }

    fn is_visited_link(&self) -> bool {
        self.state().intersects(ElementState::IN_VISITED_STATE)
    }
    /// This logic is duplicated in Gecko's nsINode::IsInNativeAnonymousSubtree.
    #[inline]
    fn is_in_native_anonymous_subtree(&self) -> bool {
        use crate::gecko_bindings::structs::NODE_IS_IN_NATIVE_ANONYMOUS_SUBTREE;
        self.flags() & (NODE_IS_IN_NATIVE_ANONYMOUS_SUBTREE as u32) != 0
    }

    #[inline]
    fn matches_user_and_author_rules(&self) -> bool {
        !self.is_in_native_anonymous_subtree()
    }

    #[inline]
    fn implemented_pseudo_element(&self) -> Option<PseudoElement> {
        if !self.is_in_native_anonymous_subtree() {
            return None;
        }

        if !self.has_properties() {
            return None;
        }

        let pseudo_type = unsafe { bindings::Gecko_GetImplementedPseudo(self.0) };
        PseudoElement::from_pseudo_type(pseudo_type)
    }

    #[inline]
    fn store_children_to_process(&self, _: isize) {
        // This is only used for bottom-up traversal, and is thus a no-op for Gecko.
    }

    fn did_process_child(&self) -> isize {
        panic!("Atomic child count not implemented in Gecko");
    }

    #[inline(always)]
    fn get_data(&self) -> Option<&AtomicRefCell<ElementData>> {
        unsafe { self.0.mServoData.get().as_ref() }
    }

    /// Lazily allocates the per-element style data if it does not exist yet.
    unsafe fn ensure_data(&self) -> AtomicRefMut<ElementData> {
        if self.get_data().is_none() {
            debug!("Creating ElementData for {:?}", self);
            let ptr = Box::into_raw(Box::new(AtomicRefCell::new(ElementData::default())));
            self.0.mServoData.set(ptr);
        }
        self.mutate_data().unwrap()
    }

    /// Drops the per-element style data and clears the related node flags.
    unsafe fn clear_data(&self) {
        let ptr = self.0.mServoData.get();
        self.unset_flags(
            ELEMENT_HAS_SNAPSHOT as u32 |
                ELEMENT_HANDLED_SNAPSHOT as u32 |
                structs::Element_kAllServoDescendantBits |
                NODE_NEEDS_FRAME as u32,
        );
        if !ptr.is_null() {
            debug!("Dropping ElementData for {:?}", self);
            let data = Box::from_raw(self.0.mServoData.get());
            self.0.mServoData.set(ptr::null_mut());

            // Perform a mutable borrow of the data in debug builds. This
            // serves as an assertion that there are no outstanding borrows
            // when we destroy the data.
            debug_assert!({
                let _ = data.borrow_mut();
                true
            });
        }
    }

    #[inline]
    fn skip_item_display_fixup(&self) -> bool {
        debug_assert!(
            !self.is_pseudo_element(),
            "Just don't call me if I'm a pseudo, you should know the answer already"
        );
        self.is_root_of_native_anonymous_subtree()
    }

    unsafe fn set_selector_flags(&self, flags: ElementSelectorFlags) {
        debug_assert!(!flags.is_empty());
        self.set_flags(selector_flags_to_node_flags(flags));
    }

    fn has_selector_flags(&self, flags: ElementSelectorFlags) -> bool {
        let node_flags = selector_flags_to_node_flags(flags);
        (self.flags() & node_flags) == node_flags
    }

    #[inline]
    fn may_have_animations(&self) -> bool {
        if let Some(pseudo) = self.implemented_pseudo_element() {
            if !pseudo.is_before_or_after() {
                return false;
            }
            // For ::before/::after, the animation flag lives on the
            // originating (parent) element.
            //
            // FIXME(emilio): When would the parent of a ::before / ::after
            // pseudo-element be null?
            return self.parent_element().map_or(false, |p| {
                p.as_node()
                    .get_bool_flag(nsINode_BooleanFlag::ElementHasAnimations)
            });
        }
        self.as_node()
            .get_bool_flag(nsINode_BooleanFlag::ElementHasAnimations)
    }

    /// Process various tasks that are a result of animation-only restyle.
    fn process_post_animation(&self, tasks: PostAnimationTasks) {
        debug_assert!(!tasks.is_empty(), "Should be involved a task");

        // If display style was changed from none to other, we need to resolve
        // the descendants in the display:none subtree. Instead of resolving
        // those styles in animation-only restyle, we defer it to a subsequent
        // normal restyle.
        if tasks.intersects(PostAnimationTasks::DISPLAY_CHANGED_FROM_NONE_FOR_SMIL) {
            debug_assert!(
                self.implemented_pseudo_element()
                    .map_or(true, |p| !p.is_before_or_after()),
                "display property animation shouldn't run on pseudo elements \
                 since it's only for SMIL"
            );
            unsafe {
                self.note_explicit_hints(
                    RestyleHint::restyle_subtree(),
                    nsChangeHint::nsChangeHint_Empty,
                );
            }
        }
    }

    /// Update various animation-related state on a given (pseudo-)element as
    /// results of normal restyle.
    fn update_animations(
        &self,
        before_change_style: Option<Arc<ComputedValues>>,
        tasks: UpdateAnimationsTasks,
    ) {
        // We have to update animations even if the element has no computed
        // style since it means the element is in a display:none subtree, we
        // should destroy all CSS animations in display:none subtree.
        let computed_data = self.borrow_data();
        let computed_values = computed_data.as_ref().map(|d| d.styles.primary());
        let before_change_values = before_change_style
            .as_ref()
            .map_or(ptr::null(), |x| x.as_gecko_computed_style());
        let computed_values_opt = computed_values
            .as_ref()
            .map_or(ptr::null(), |x| x.as_gecko_computed_style());
        unsafe {
            Gecko_UpdateAnimations(
                self.0,
                before_change_values,
                computed_values_opt,
                tasks.bits(),
            );
        }
    }

    fn has_animations(&self) -> bool {
        self.may_have_animations() && unsafe { Gecko_ElementHasAnimations(self.0) }
    }

    fn has_css_animations(&self) -> bool {
        self.may_have_animations() && unsafe { Gecko_ElementHasCSSAnimations(self.0) }
    }

    fn has_css_transitions(&self) -> bool {
        self.may_have_animations() && unsafe { Gecko_ElementHasCSSTransitions(self.0) }
    }

    /// Cheap early-out check before the more expensive
    /// needs_transitions_update below.
    fn might_need_transitions_update(
        &self,
        old_style: Option<&ComputedValues>,
        new_style: &ComputedValues,
    ) -> bool {
        let old_style = match old_style {
            Some(v) => v,
            None => return false,
        };

        let new_box_style = new_style.get_box();
        if !self.has_css_transitions() && !new_box_style.specifies_transitions() {
            return false;
        }

        // Transitions don't run on display:none elements.
        if new_box_style.clone_display().is_none() || old_style.clone_display().is_none() {
            return false;
        }

        return true;
    }

    // Detect if there are any changes that require us to update transitions.
    //
    // This is used as a more thorough check than the cheaper
    // might_need_transitions_update check.
    //
    // The following logic shadows the logic used on the Gecko side
    // (nsTransitionManager::DoUpdateTransitions) where we actually perform the
    // update.
    //
    // https://drafts.csswg.org/css-transitions/#starting
    fn needs_transitions_update(
        &self,
        before_change_style: &ComputedValues,
        after_change_style: &ComputedValues,
    ) -> bool {
        use crate::gecko_bindings::structs::nsCSSPropertyID;
        use crate::properties::LonghandIdSet;
        use crate::values::computed::TransitionProperty;

        debug_assert!(
            self.might_need_transitions_update(Some(before_change_style), after_change_style),
            "We should only call needs_transitions_update if \
             might_need_transitions_update returns true"
        );

        let after_change_box_style = after_change_style.get_box();
        let transitions_count = after_change_box_style.transition_property_count();
        let existing_transitions = self.css_transitions_info();

        // Check if this property is none, custom or unknown.
        let is_none_or_custom_property = |property: nsCSSPropertyID| -> bool {
            return property == nsCSSPropertyID::eCSSPropertyExtra_no_properties ||
                property == nsCSSPropertyID::eCSSPropertyExtra_variable ||
                property == nsCSSPropertyID::eCSSProperty_UNKNOWN;
        };

        let mut transitions_to_keep = LonghandIdSet::new();
        for i in 0..transitions_count {
            let property = after_change_box_style.transition_nscsspropertyid_at(i);
            let combined_duration = after_change_box_style.transition_combined_duration_at(i);

            // We don't need to update transition for none/custom properties.
            if is_none_or_custom_property(property) {
                continue;
            }

            let transition_property: TransitionProperty = property.into();

            // Checks one longhand; also records it as a transition we keep.
            let mut property_check_helper = |property: LonghandId| -> bool {
                let property = property.to_physical(after_change_style.writing_mode);
                transitions_to_keep.insert(property);
                self.needs_transitions_update_per_property(
                    property,
                    combined_duration,
                    before_change_style,
                    after_change_style,
                    &existing_transitions,
                )
            };
            match transition_property {
                TransitionProperty::Custom(..) | TransitionProperty::Unsupported(..)
                => {},
                TransitionProperty::Shorthand(ref shorthand) => {
                    if shorthand.longhands().any(property_check_helper) {
                        return true;
                    }
                },
                TransitionProperty::Longhand(longhand_id) => {
                    if property_check_helper(longhand_id) {
                        return true;
                    }
                },
            }
        }

        // Check if we have to cancel the running transition because this is not
        // a matching transition-property value.
        existing_transitions
            .keys()
            .any(|property| !transitions_to_keep.contains(*property))
    }

    #[inline]
    fn lang_attr(&self) -> Option<AttrValue> {
        let ptr = unsafe { bindings::Gecko_LangValue(self.0) };
        if ptr.is_null() {
            None
        } else {
            Some(unsafe { Atom::from_addrefed(ptr) })
        }
    }

    fn match_element_lang(&self, override_lang: Option<Option<AttrValue>>, value: &Lang) -> bool {
        // Gecko supports :lang() from CSS Selectors 3, which only accepts a
        // single language tag, and which performs simple dash-prefix matching
        // on it.
        let override_lang_ptr = match &override_lang {
            &Some(Some(ref atom)) => atom.as_ptr(),
            _ => ptr::null_mut(),
        };
        unsafe {
            Gecko_MatchLang(
                self.0,
                override_lang_ptr,
                override_lang.is_some(),
                value.as_slice().as_ptr(),
            )
        }
    }

    fn is_html_document_body_element(&self) -> bool {
        if self.local_name() != &*local_name!("body") {
            return false;
        }

        if !self.is_html_element() {
            return false;
        }

        unsafe { bindings::Gecko_IsDocumentBody(self.0) }
    }

    /// Pushes presentation-hint declaration blocks derived from legacy HTML
    /// attributes (align, bgcolor, link/vlink/alink, lang, etc.) onto `hints`.
    fn synthesize_presentational_hints_for_legacy_attributes<V>(
        &self,
        visited_handling: VisitedHandlingMode,
        hints: &mut V,
    ) where
        V: Push<ApplicableDeclarationBlock>,
    {
        use crate::properties::longhands::_x_lang::SpecifiedValue as SpecifiedLang;
        use crate::properties::longhands::_x_text_zoom::SpecifiedValue as SpecifiedZoom;
        use crate::properties::longhands::color::SpecifiedValue as SpecifiedColor;
        use crate::properties::longhands::text_align::SpecifiedValue as SpecifiedTextAlign;
        use crate::values::specified::color::Color;

        // Shared, lazily-built singleton rules reused across all elements.
        lazy_static! {
            static ref TH_RULE: ApplicableDeclarationBlock = {
                let global_style_data = &*GLOBAL_STYLE_DATA;
                let pdb = PropertyDeclarationBlock::with_one(
                    PropertyDeclaration::TextAlign(SpecifiedTextAlign::MozCenterOrInherit),
                    Importance::Normal,
                );
                let arc = Arc::new_leaked(global_style_data.shared_lock.wrap(pdb));
                ApplicableDeclarationBlock::from_declarations(arc, ServoCascadeLevel::PresHints)
            };
            static ref TABLE_COLOR_RULE: ApplicableDeclarationBlock = {
                let global_style_data = &*GLOBAL_STYLE_DATA;
                let pdb = PropertyDeclarationBlock::with_one(
                    PropertyDeclaration::Color(SpecifiedColor(Color::InheritFromBodyQuirk.into())),
                    Importance::Normal,
                );
                let arc = Arc::new_leaked(global_style_data.shared_lock.wrap(pdb));
                ApplicableDeclarationBlock::from_declarations(arc, ServoCascadeLevel::PresHints)
            };
            static ref MATHML_LANG_RULE: ApplicableDeclarationBlock = {
                let global_style_data = &*GLOBAL_STYLE_DATA;
                let pdb = PropertyDeclarationBlock::with_one(
                    PropertyDeclaration::XLang(SpecifiedLang(atom!("x-math"))),
                    Importance::Normal,
                );
                let arc = Arc::new_leaked(global_style_data.shared_lock.wrap(pdb));
                ApplicableDeclarationBlock::from_declarations(arc, ServoCascadeLevel::PresHints)
            };
            static ref SVG_TEXT_DISABLE_ZOOM_RULE: ApplicableDeclarationBlock = {
                let global_style_data = &*GLOBAL_STYLE_DATA;
                let pdb = PropertyDeclarationBlock::with_one(
                    PropertyDeclaration::XTextZoom(SpecifiedZoom(false)),
                    Importance::Normal,
                );
                let arc = Arc::new_leaked(global_style_data.shared_lock.wrap(pdb));
                ApplicableDeclarationBlock::from_declarations(arc, ServoCascadeLevel::PresHints)
            };
        };

        let ns = self.namespace_id();
        // <th> elements get a default MozCenterOrInherit which may get overridden
        if ns == structs::kNameSpaceID_XHTML as i32 {
            if self.local_name().as_ptr() == atom!("th").as_ptr() {
                hints.push(TH_RULE.clone());
            } else if self.local_name().as_ptr() == atom!("table").as_ptr() &&
                self.as_node().owner_doc().quirks_mode() == QuirksMode::Quirks
            {
                hints.push(TABLE_COLOR_RULE.clone());
            }
        }
        if ns == structs::kNameSpaceID_SVG as i32 {
            if self.local_name().as_ptr() == atom!("text").as_ptr() {
                hints.push(SVG_TEXT_DISABLE_ZOOM_RULE.clone());
            }
        }
        let declarations =
            unsafe { Gecko_GetHTMLPresentationAttrDeclarationBlock(self.0).as_ref() };
        let declarations: Option<&RawOffsetArc<Locked<PropertyDeclarationBlock>>> =
            declarations.and_then(|s| s.as_arc_opt());
        if let Some(decl) = declarations {
            hints.push(ApplicableDeclarationBlock::from_declarations(
                decl.clone_arc(),
                ServoCascadeLevel::PresHints,
            ));
        }
        let declarations = unsafe { Gecko_GetExtraContentStyleDeclarations(self.0).as_ref() };
        let declarations: Option<&RawOffsetArc<Locked<PropertyDeclarationBlock>>> =
            declarations.and_then(|s| s.as_arc_opt());
        if let Some(decl) = declarations {
            hints.push(ApplicableDeclarationBlock::from_declarations(
                decl.clone_arc(),
                ServoCascadeLevel::PresHints,
            ));
        }

        // Support for link, vlink, and alink presentation hints on <body>
        if self.is_link() {
            // Unvisited vs. visited styles are computed up-front based on the
            // visited mode (not the element's actual state).
            let declarations = match visited_handling {
                VisitedHandlingMode::AllLinksVisitedAndUnvisited => {
                    unreachable!(
                        "We should never try to selector match with \
                         AllLinksVisitedAndUnvisited"
                    );
                },
                VisitedHandlingMode::AllLinksUnvisited => unsafe {
                    Gecko_GetUnvisitedLinkAttrDeclarationBlock(self.0).as_ref()
                },
                VisitedHandlingMode::RelevantLinkVisited => unsafe {
                    Gecko_GetVisitedLinkAttrDeclarationBlock(self.0).as_ref()
                },
            };
            let declarations: Option<&RawOffsetArc<Locked<PropertyDeclarationBlock>>> =
                declarations.and_then(|s| s.as_arc_opt());
            if let Some(decl) = declarations {
                hints.push(ApplicableDeclarationBlock::from_declarations(
                    decl.clone_arc(),
                    ServoCascadeLevel::PresHints,
                ));
            }

            let active = self
                .state()
                .intersects(NonTSPseudoClass::Active.state_flag());
            if active {
                let declarations =
                    unsafe { Gecko_GetActiveLinkAttrDeclarationBlock(self.0).as_ref() };
                let declarations: Option<&RawOffsetArc<Locked<PropertyDeclarationBlock>>> =
                    declarations.and_then(|s| s.as_arc_opt());
                if let Some(decl) = declarations {
                    hints.push(ApplicableDeclarationBlock::from_declarations(
                        decl.clone_arc(),
                        ServoCascadeLevel::PresHints,
                    ));
                }
            }
        }

        // xml:lang has precedence over lang, which can be
        // set by Gecko_GetHTMLPresentationAttrDeclarationBlock
        //
        // http://www.whatwg.org/specs/web-apps/current-work/multipage/elements.html#language
        let ptr = unsafe { bindings::Gecko_GetXMLLangValue(self.0) };
        if !ptr.is_null() {
            let global_style_data = &*GLOBAL_STYLE_DATA;
            let pdb = PropertyDeclarationBlock::with_one(
                PropertyDeclaration::XLang(SpecifiedLang(unsafe { Atom::from_addrefed(ptr) })),
                Importance::Normal,
            );
            let arc = Arc::new(global_style_data.shared_lock.wrap(pdb));
            hints.push(ApplicableDeclarationBlock::from_declarations(
                arc,
                ServoCascadeLevel::PresHints,
            ))
        }
        // MathML's default lang has precedence over both `lang` and `xml:lang`
        if ns == structs::kNameSpaceID_MathML as i32 {
            if self.local_name().as_ptr() == atom!("math").as_ptr() {
                hints.push(MATHML_LANG_RULE.clone());
            }
        }
    }
}
// Element identity is pointer identity of the underlying Gecko element.
impl<'le> PartialEq for GeckoElement<'le> {
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        self.0 as *const _ == other.0 as *const _
    }
}

impl<'le> Eq for GeckoElement<'le> {}

impl<'le> Hash for GeckoElement<'le> {
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Hash the pointer, consistent with the pointer-identity PartialEq.
        (self.0 as *const RawGeckoElement).hash(state);
    }
}

impl<'le> ::selectors::Element for GeckoElement<'le> {
    type Impl = SelectorImpl;

    #[inline]
    fn opaque(&self) -> OpaqueElement {
        OpaqueElement::new(self.0)
    }

    #[inline]
    fn parent_element(&self) -> Option<Self> {
        let parent_node = self.as_node().parent_node();
        parent_node.and_then(|n| n.as_element())
    }

    #[inline]
    fn parent_node_is_shadow_root(&self) -> bool {
        self.as_node()
            .parent_node()
            .map_or(false, |p| p.is_shadow_root())
    }

    #[inline]
    fn containing_shadow_host(&self) -> Option<Self> {
        let shadow = self.containing_shadow()?;
        Some(shadow.host())
    }

    #[inline]
    fn is_pseudo_element(&self) -> bool {
        self.implemented_pseudo_element().is_some()
    }

    #[inline]
    fn pseudo_element_originating_element(&self) -> Option<Self> {
        debug_assert!(self.is_pseudo_element());
        self.closest_anon_subtree_root_parent()
    }

    #[inline]
    fn assigned_slot(&self) -> Option<Self> {
        let slot = self.extended_slots()?._base.mAssignedSlot.mRawPtr;
        unsafe { Some(GeckoElement(&slot.as_ref()?._base._base._base._base)) }
    }

    #[inline]
    fn prev_sibling_element(&self) -> Option<Self> {
        // Walk previous siblings, skipping non-element nodes (text, comments).
        let mut sibling = self.as_node().prev_sibling();
        while let Some(sibling_node) = sibling {
            if let Some(el) = sibling_node.as_element() {
                return Some(el);
            }
            sibling = sibling_node.prev_sibling();
        }
        None
    }

    #[inline]
    fn next_sibling_element(&self) -> Option<Self> {
        // Walk next siblings, skipping non-element nodes.
        let mut sibling = self.as_node().next_sibling();
        while let Some(sibling_node) = sibling {
            if let Some(el) = sibling_node.as_element() {
                return Some(el);
            }
            sibling = sibling_node.next_sibling();
        }
        None
    }

    /// Attribute selector matching, delegated to the matching Gecko binding
    /// for each operator.
    fn attr_matches(
        &self,
        ns: &NamespaceConstraint<&Namespace>,
        local_name: &Atom,
        operation: &AttrSelectorOperation<&Atom>,
    ) -> bool {
        unsafe {
            match *operation {
                AttrSelectorOperation::Exists => {
                    bindings::Gecko_HasAttr(self.0, ns.atom_or_null(), local_name.as_ptr())
                },
                AttrSelectorOperation::WithValue {
                    operator,
                    case_sensitivity,
                    expected_value,
                } => {
                    let ignore_case = match case_sensitivity {
                        CaseSensitivity::CaseSensitive => false,
                        CaseSensitivity::AsciiCaseInsensitive => true,
                    };
                    // FIXME: case sensitivity for operators other than Equal
                    match operator {
                        AttrSelectorOperator::Equal => bindings::Gecko_AttrEquals(
                            self.0,
                            ns.atom_or_null(),
                            local_name.as_ptr(),
                            expected_value.as_ptr(),
                            ignore_case,
                        ),
                        AttrSelectorOperator::Includes => bindings::Gecko_AttrIncludes(
                            self.0,
                            ns.atom_or_null(),
                            local_name.as_ptr(),
                            expected_value.as_ptr(),
                            ignore_case,
                        ),
                        AttrSelectorOperator::DashMatch => bindings::Gecko_AttrDashEquals(
                            self.0,
                            ns.atom_or_null(),
                            local_name.as_ptr(),
                            expected_value.as_ptr(),
                            ignore_case,
                        ),
                        AttrSelectorOperator::Prefix => bindings::Gecko_AttrHasPrefix(
                            self.0,
                            ns.atom_or_null(),
                            local_name.as_ptr(),
                            expected_value.as_ptr(),
                            ignore_case,
                        ),
                        AttrSelectorOperator::Suffix => bindings::Gecko_AttrHasSuffix(
                            self.0,
                            ns.atom_or_null(),
                            local_name.as_ptr(),
                            expected_value.as_ptr(),
                            ignore_case,
                        ),
                        AttrSelectorOperator::Substring => bindings::Gecko_AttrHasSubstring(
                            self.0,
                            ns.atom_or_null(),
                            local_name.as_ptr(),
                            expected_value.as_ptr(),
                            ignore_case,
                        ),
                    }
                },
            }
        }
    }

    #[inline]
    fn is_root(&self) -> bool {
        if self
            .as_node()
            .get_bool_flag(nsINode_BooleanFlag::ParentIsContent)
        {
            return false;
        }

        if !self.as_node().is_in_document() {
            return false;
        }

        debug_assert!(self
            .as_node()
            .parent_node()
            .map_or(false, |p| p.is_document()));
        unsafe { bindings::Gecko_IsRootElement(self.0) }
    }

    fn is_empty(&self) -> bool {
        !self
            .as_node()
            .dom_children()
            .any(|child| unsafe { Gecko_IsSignificantChild(child.0, true) })
    }

    #[inline]
    fn has_local_name(&self, name: &WeakAtom) -> bool {
        self.local_name() == name
    }

    #[inline]
    fn has_namespace(&self, ns: &WeakNamespace) -> bool {
        self.namespace() == ns
    }

    #[inline]
    fn is_same_type(&self, other: &Self) -> bool {
        self.local_name() == other.local_name() && self.namespace() == other.namespace()
    }

    fn match_non_ts_pseudo_class<F>(
        &self,
        pseudo_class: &NonTSPseudoClass,
        context: &mut MatchingContext<Self::Impl>,
        flags_setter: &mut F,
    ) -> bool
    where
        F: FnMut(&Self, ElementSelectorFlags),
    {
        use selectors::matching::*;
        match *pseudo_class {
            // All of these map directly to an element-state bit.
            NonTSPseudoClass::Defined |
            NonTSPseudoClass::Focus |
            NonTSPseudoClass::Enabled |
            NonTSPseudoClass::Disabled |
            NonTSPseudoClass::Checked |
            NonTSPseudoClass::Fullscreen |
            NonTSPseudoClass::Indeterminate |
            NonTSPseudoClass::PlaceholderShown |
            NonTSPseudoClass::Target |
            NonTSPseudoClass::Valid |
            NonTSPseudoClass::Invalid |
            NonTSPseudoClass::MozUIValid |
            NonTSPseudoClass::MozBroken |
            NonTSPseudoClass::MozUserDisabled |
            NonTSPseudoClass::MozSuppressed |
            NonTSPseudoClass::MozLoading |
            NonTSPseudoClass::MozHandlerBlocked |
            NonTSPseudoClass::MozHandlerDisabled |
            NonTSPseudoClass::MozHandlerCrashed |
            NonTSPseudoClass::Required |
            NonTSPseudoClass::Optional |
            NonTSPseudoClass::MozReadOnly |
            NonTSPseudoClass::MozReadWrite |
            NonTSPseudoClass::FocusWithin |
            NonTSPseudoClass::MozDragOver |
            NonTSPseudoClass::MozDevtoolsHighlighted |
            NonTSPseudoClass::MozStyleeditorTransitioning |
            NonTSPseudoClass::MozFocusRing |
            NonTSPseudoClass::MozHandlerClickToPlay |
            NonTSPseudoClass::MozHandlerVulnerableUpdatable |
            NonTSPseudoClass::MozHandlerVulnerableNoUpdate |
            NonTSPseudoClass::MozMathIncrementScriptLevel |
            NonTSPseudoClass::InRange |
            NonTSPseudoClass::OutOfRange |
            NonTSPseudoClass::Default |
            NonTSPseudoClass::MozSubmitInvalid |
            NonTSPseudoClass::MozUIInvalid |
            NonTSPseudoClass::MozMeterOptimum |
            NonTSPseudoClass::MozMeterSubOptimum |
            NonTSPseudoClass::MozMeterSubSubOptimum |
            NonTSPseudoClass::MozHasDirAttr |
            NonTSPseudoClass::MozDirAttrLTR |
            NonTSPseudoClass::MozDirAttrRTL |
            NonTSPseudoClass::MozDirAttrLikeAuto |
            NonTSPseudoClass::MozAutofill |
            NonTSPseudoClass::Active |
            NonTSPseudoClass::Hover |
            NonTSPseudoClass::MozAutofillPreview => {
                self.state().intersects(pseudo_class.state_flag())
            },
            NonTSPseudoClass::AnyLink => self.is_link(),
            NonTSPseudoClass::Link => {
                self.is_link() && context.visited_handling().matches_unvisited()
            },
            NonTSPseudoClass::Visited => {
                self.is_link() && context.visited_handling().matches_visited()
            },
            NonTSPseudoClass::MozFirstNode => {
                flags_setter(self, ElementSelectorFlags::HAS_EDGE_CHILD_SELECTOR);
                let mut elem = self.as_node();
                while let Some(prev) = elem.prev_sibling() {
                    if prev.contains_non_whitespace_content() {
                        return false;
                    }
                    elem = prev;
                }
                true
            },
            NonTSPseudoClass::MozLastNode => {
                flags_setter(self, ElementSelectorFlags::HAS_EDGE_CHILD_SELECTOR);
                let mut elem = self.as_node();
                while let Some(next) = elem.next_sibling() {
                    if next.contains_non_whitespace_content() {
                        return false;
                    }
                    elem = next;
                }
                true
            },
            NonTSPseudoClass::MozOnlyWhitespace => {
                flags_setter(self, ElementSelectorFlags::HAS_EMPTY_SELECTOR);
                if self
                    .as_node()
                    .dom_children()
                    .any(|c| c.contains_non_whitespace_content())
                {
                    return false;
                }
                true
            },
            NonTSPseudoClass::MozNativeAnonymous |
            NonTSPseudoClass::MozNativeAnonymousNoSpecificity => {
                self.is_in_native_anonymous_subtree()
            },
            NonTSPseudoClass::MozUseShadowTreeRoot => self.is_root_of_use_element_shadow_tree(),
            NonTSPseudoClass::MozTableBorderNonzero => unsafe {
                bindings::Gecko_IsTableBorderNonzero(self.0)
            },
            NonTSPseudoClass::MozBrowserFrame => unsafe { bindings::Gecko_IsBrowserFrame(self.0) },
            NonTSPseudoClass::MozIsHTML => self.is_html_element_in_html_document(),
            NonTSPseudoClass::MozLWTheme => self.document_theme() != DocumentTheme::Doc_Theme_None,
            NonTSPseudoClass::MozLWThemeBrightText => {
                self.document_theme() == DocumentTheme::Doc_Theme_Bright
            },
            NonTSPseudoClass::MozLWThemeDarkText => {
                self.document_theme() == DocumentTheme::Doc_Theme_Dark
            },
            NonTSPseudoClass::MozWindowInactive => {
                let state_bit = DocumentState::NS_DOCUMENT_STATE_WINDOW_INACTIVE;
                if context.extra_data.document_state.intersects(state_bit) {
                    return !context.in_negation();
                }

                self.document_state().contains(state_bit)
            },
            NonTSPseudoClass::MozPlaceholder => false,
            NonTSPseudoClass::MozAny(ref sels) => context.nest(|context| {
                sels.iter()
                    .any(|s| matches_complex_selector(s.iter(), self, context, flags_setter))
            }),
            NonTSPseudoClass::Lang(ref lang_arg) => self.match_element_lang(None, lang_arg),
            NonTSPseudoClass::MozLocaleDir(ref dir) => {
                let state_bit = DocumentState::NS_DOCUMENT_STATE_RTL_LOCALE;
                if context.extra_data.document_state.intersects(state_bit) {
                    // NOTE(emilio): We could still return false for values
                    // other than "ltr" and "rtl", but we don't bother.
                    return !context.in_negation();
                }

                let doc_is_rtl = self.document_state().contains(state_bit);
                match dir.as_horizontal_direction() {
                    Some(HorizontalDirection::Ltr) => !doc_is_rtl,
                    Some(HorizontalDirection::Rtl) => doc_is_rtl,
                    None => false,
                }
            },
            NonTSPseudoClass::Dir(ref dir) => self.state().intersects(dir.element_state()),
        }
    }

    fn match_pseudo_element(
        &self,
        pseudo_element: &PseudoElement,
        _context: &mut MatchingContext<Self::Impl>,
    ) -> bool {
        // TODO(emilio): I believe we could assert we are a pseudo-element and
        // match the proper pseudo-element, given how we rulehash the stuff
        // based on the pseudo.
match self.implemented_pseudo_element() { Some(ref pseudo) => *pseudo == *pseudo_element, None => false, } } #[inline] fn is_link(&self) -> bool { self.state() .intersects(ElementState::IN_VISITED_OR_UNVISITED_STATE) } #[inline] fn has_id(&self, id: &Atom, case_sensitivity: CaseSensitivity) -> bool { if !self.has_id() { return false; } let element_id = match snapshot_helpers::get_id(self.attrs()) { Some(id) => id, None => return false, }; case_sensitivity.eq_atom(element_id, id) } #[inline] fn is_part(&self, name: &Atom) -> bool { let attr = match self.get_part_attr() { Some(c) => c, None => return false, }; snapshot_helpers::has_class_or_part(name, CaseSensitivity::CaseSensitive, attr) } #[inline] fn exported_part(&self, name: &Atom) -> Option<Atom> { snapshot_helpers::exported_part(self.attrs(), name) } #[inline] fn imported_part(&self, name: &Atom) -> Option<Atom> { snapshot_helpers::imported_part(self.attrs(), name) } #[inline(always)] fn has_class(&self, name: &Atom, case_sensitivity: CaseSensitivity) -> bool { let attr = match self.get_class_attr() { Some(c) => c, None => return false, }; snapshot_helpers::has_class_or_part(name, case_sensitivity, attr) } #[inline] fn is_html_element_in_html_document(&self) -> bool { self.is_html_element() && self.as_node().owner_doc().is_html_document() } #[inline] fn is_html_slot_element(&self) -> bool { self.is_html_element() && self.local_name().as_ptr() == local_name!("slot").as_ptr() } #[inline] fn ignores_nth_child_selectors(&self) -> bool { self.is_root_of_native_anonymous_subtree() } } /// A few helpers to help with attribute selectors and snapshotting. pub trait NamespaceConstraintHelpers { /// Returns the namespace of the selector, or null otherwise. 
fn atom_or_null(&self) -> *mut nsAtom; } impl<'a> NamespaceConstraintHelpers for NamespaceConstraint<&'a Namespace> { fn atom_or_null(&self) -> *mut nsAtom { match *self { NamespaceConstraint::Any => ptr::null_mut(), NamespaceConstraint::Specific(ref ns) => ns.0.as_ptr(), } } }<|fim▁end|>
&self, longhand_id: LonghandId,
<|file_name|>test_parser.py<|end_file_name|><|fim▁begin|>""" This file is part of Maml. Maml is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Maml is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.<|fim▁hole|> along with Maml. If not, see <http://www.gnu.org/licenses/>. Copyright 2010 Brian Hawthorne """ from unittest import TestCase from maml.parser import * example1 = """ -def A(z) %ul %li %html %body %h3 """ example2 = """ -def A(z) %ul -for x in range(z) .list-item#item_id = x foo %html %body %h3 yup = A(6) """ class TestParser (TestCase): def test_tag_attrs(self): good_results = { '()': ('(', '', ')'), '{}': ('{', '', '}'), '(borp="baz" dorp="daz" blarp="blaz")': ('(', 'borp="baz" dorp="daz" blarp="blaz"', ')'), '{borp:"baz", dorp:"daz", blarp:"blaz"}': ('{', 'borp:"baz", dorp:"daz", blarp:"blaz"', '}'), } for input, output in good_results.items(): self.assertEqual(tuple(tag_attrs.parseString(input)), output) def test_tag_decl(self): good_results = { '%html': ('%', 'html', ''), '%html foo': ('%', 'html', 'foo'), '%html= foo': ('%', 'html', '=', 'foo'), '%html()= foo': ('%', 'html', '(', '', ')', '=', 'foo'), '%html.class-name()= foo': ('%', 'html', '.', 'class-name', '(', '', ')', '=', 'foo'), '%html.class-name(borp="baz")= foo': ('%', 'html', '.', 'class-name', '(', 'borp="baz"', ')', '=', 'foo'), '#foo.boo': ('#', 'foo', '.', 'boo', ''), '.foo(){}': ('.', 'foo', '(', '', ')', '{', '', '}', ''), } for input, output in good_results.items(): self.assertEqual(tuple(tag_decl.parseString(input)), output) def test_namespace(self): namespace_example = "-namespace(/common/defs.mak, bnorp)" assert 
Parser().parse(namespace_example).render_string()<|fim▁end|>
You should have received a copy of the GNU General Public License
<|file_name|>layout.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2013-2015 Sandstorm Development Group, Inc. and contributors // Licensed under the MIT License: // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. 
use std::mem; use std::ptr; use data; use text; use private::capability::{ClientHook}; use private::arena::*; use private::endian::{WireValue, Endian}; use private::mask::*; use private::units::*; use private::zero; use {MessageSize, Result, Word}; pub use self::ElementSize::{Void, Bit, Byte, TwoBytes, FourBytes, EightBytes, Pointer, InlineComposite}; #[repr(u8)] #[derive(Clone, Copy, PartialEq)] pub enum ElementSize { Void = 0, Bit = 1, Byte = 2, TwoBytes = 3, FourBytes = 4, EightBytes = 5, Pointer = 6, InlineComposite = 7 } impl ElementSize { fn from(val: u8) -> ElementSize { match val { 0 => ElementSize::Void, 1 => ElementSize::Bit, 2 => ElementSize::Byte, 3 => ElementSize::TwoBytes, 4 => ElementSize::FourBytes, 5 => ElementSize::EightBytes, 6 => ElementSize::Pointer, 7 => ElementSize::InlineComposite, _ => panic!("illegal element size: {}", val), } } } pub fn data_bits_per_element(size: ElementSize) -> BitCount32 { match size { Void => 0, Bit => 1, Byte => 8, TwoBytes => 16, FourBytes => 32, EightBytes => 64, Pointer => 0, InlineComposite => 0 } } pub fn pointers_per_element(size: ElementSize) -> WirePointerCount32 { match size { Pointer => 1, _ => 0 } } #[derive(Clone, Copy)] pub struct StructSize { pub data: WordCount16, pub pointers: WirePointerCount16, } impl StructSize { pub fn total(&self) -> WordCount32 { self.data as WordCount32 + self.pointers as WordCount32 * WORDS_PER_POINTER as WordCount32 } } #[repr(u8)] #[derive(Clone, Copy, PartialEq)] pub enum WirePointerKind { Struct = 0, List = 1, Far = 2, Other = 3 } impl WirePointerKind { fn from(val: u8) -> WirePointerKind { match val { 0 => WirePointerKind::Struct, 1 => WirePointerKind::List, 2 => WirePointerKind::Far, 3 => WirePointerKind::Other, _ => panic!("illegal element size: {}", val), } } } #[repr(C)] pub struct WirePointer { offset_and_kind: WireValue<u32>, upper32bits: u32, } #[repr(C)] pub struct StructRef { data_size: WireValue<WordCount16>, ptr_count: WireValue<WirePointerCount16> } #[repr(C)] 
pub struct ListRef { element_size_and_count: WireValue<u32> } #[repr(C)] pub struct FarRef { segment_id: WireValue<u32> } #[repr(C)] pub struct CapRef { index: WireValue<u32> } impl StructRef { pub fn word_size(&self) -> WordCount32 { self.data_size.get() as WordCount32 + self.ptr_count.get() as WordCount32 * WORDS_PER_POINTER as u32 } #[inline] pub fn set_from_struct_size(&mut self, size: StructSize) { self.data_size.set(size.data); self.ptr_count.set(size.pointers); } #[inline] pub fn set(&mut self, ds: WordCount16, rc: WirePointerCount16) { self.data_size.set(ds); self.ptr_count.set(rc); } } impl ListRef { #[inline] pub fn element_size(&self) -> ElementSize { ElementSize::from(self.element_size_and_count.get() as u8 & 7) } #[inline] pub fn element_count(&self) -> ElementCount32 { self.element_size_and_count.get() >> 3 } #[inline] pub fn inline_composite_word_count(&self) -> WordCount32 { self.element_count() } #[inline] pub fn set(&mut self, es: ElementSize, ec: ElementCount32) { assert!(ec < (1 << 29), "Lists are limited to 2**29 elements"); self.element_size_and_count.set((ec << 3 ) | (es as u32)); } #[inline] pub fn set_inline_composite(&mut self, wc: WordCount32) { assert!(wc < (1 << 29), "Inline composite lists are limited to 2**29 words"); self.element_size_and_count.set((wc << 3) | (InlineComposite as u32)); } } impl FarRef { #[inline] pub fn set(&mut self, si: SegmentId) { self.segment_id.set(si); } } impl CapRef { #[inline] pub fn set(&mut self, index: u32) { self.index.set(index); } } impl WirePointer { #[inline] pub fn kind(&self) -> WirePointerKind { WirePointerKind::from(self.offset_and_kind.get() as u8 & 3) } #[inline] pub fn is_capability(&self) -> bool { self.offset_and_kind.get() == WirePointerKind::Other as u32 } #[inline] pub fn target(&self) -> *const Word { let this_addr: *const Word = self as *const _ as *const _; unsafe { this_addr.offset((1 + ((self.offset_and_kind.get() as i32) >> 2)) as isize) } } #[inline] pub fn mut_target(&mut self) 
-> *mut Word { let this_addr: *mut Word = self as *mut _ as *mut _; unsafe { this_addr.offset((1 + ((self.offset_and_kind.get() as i32) >> 2)) as isize) } } #[inline] pub fn set_kind_and_target(&mut self, kind: WirePointerKind, target: *mut Word) { let this_addr: isize = self as *const _ as isize; let target_addr: isize = target as *const _ as isize; self.offset_and_kind.set( ((((target_addr - this_addr) / BYTES_PER_WORD as isize) as i32 - 1) << 2) as u32 | (kind as u32)) } #[inline] pub fn set_kind_with_zero_offset(&mut self, kind: WirePointerKind) { self.offset_and_kind.set(kind as u32) } #[inline] pub fn set_kind_and_target_for_empty_struct(&mut self) { //# This pointer points at an empty struct. Assuming the //# WirePointer itself is in-bounds, we can set the target to //# point either at the WirePointer itself or immediately after //# it. The latter would cause the WirePointer to be "null" //# (since for an empty struct the upper 32 bits are going to //# be zero). So we set an offset of -1, as if the struct were //# allocated immediately before this pointer, to distinguish //# it from null. 
self.offset_and_kind.set(0xfffffffc); } #[inline] pub fn inline_composite_list_element_count(&self) -> ElementCount32 { self.offset_and_kind.get() >> 2 } #[inline] pub fn set_kind_and_inline_composite_list_element_count(&mut self, kind: WirePointerKind, element_count: ElementCount32) { self.offset_and_kind.set(( element_count << 2) | (kind as u32)) } #[inline] pub fn far_position_in_segment(&self) -> WordCount32 { self.offset_and_kind.get() >> 3 } #[inline] pub fn is_double_far(&self) -> bool { ((self.offset_and_kind.get() >> 2) & 1) != 0 } #[inline] pub fn set_far(&mut self, is_double_far: bool, pos: WordCount32) { self.offset_and_kind .set(( pos << 3) | ((is_double_far as u32) << 2) | WirePointerKind::Far as u32); } #[inline] pub fn set_cap(&mut self, index: u32) { self.offset_and_kind.set(WirePointerKind::Other as u32); self.mut_cap_ref().set(index); } #[inline] pub fn struct_ref<'a>(&'a self) -> &'a StructRef { unsafe { mem::transmute(&self.upper32bits) } } #[inline] pub fn mut_struct_ref<'a>(&'a mut self) -> &'a mut StructRef { unsafe { mem::transmute(&mut self.upper32bits) } } #[inline] pub fn list_ref<'a>(&'a self) -> &'a ListRef { unsafe { mem::transmute(&self.upper32bits) } } #[inline] pub fn mut_list_ref<'a>(&'a mut self) -> &'a mut ListRef { unsafe { mem::transmute(&mut self.upper32bits) } } #[inline] pub fn far_ref<'a>(&'a self) -> &'a FarRef { unsafe { mem::transmute(&self.upper32bits) } } #[inline] pub fn mut_far_ref<'a>(&'a mut self) -> &'a mut FarRef { unsafe { mem::transmute(&mut self.upper32bits) } } #[inline] pub fn cap_ref<'a>(&'a self) -> &'a CapRef { unsafe { mem::transmute(&self.upper32bits) } } #[inline] pub fn mut_cap_ref<'a>(&'a mut self) -> &'a mut CapRef { unsafe { mem::transmute(&mut self.upper32bits) } } #[inline] pub fn is_null(&self) -> bool { self.offset_and_kind.get() == 0 && self.upper32bits == 0 } } mod wire_helpers { use std::ptr; use std::slice; use private::capability::ClientHook; use private::arena::*; use private::layout::*; 
use private::units::*; use data; use text; use {Error, MessageSize, Result, Word}; pub struct SegmentAnd<T> { #[allow(dead_code)] segment: *mut SegmentBuilder, pub value: T, } #[inline] pub fn round_bytes_up_to_words(bytes: ByteCount32) -> WordCount32 { //# This code assumes 64-bit words. (bytes + 7) / BYTES_PER_WORD as u32 } //# The maximum object size is 4GB - 1 byte. If measured in bits, //# this would overflow a 32-bit counter, so we need to accept //# BitCount64. However, 32 bits is enough for the returned //# ByteCounts and WordCounts. #[inline] pub fn round_bits_up_to_words(bits: BitCount64) -> WordCount32 { //# This code assumes 64-bit words. ((bits + 63) / (BITS_PER_WORD as u64)) as WordCount32 } #[allow(dead_code)] #[inline] pub fn round_bits_up_to_bytes(bits: BitCount64) -> ByteCount32 { ((bits + 7) / (BITS_PER_BYTE as u64)) as ByteCount32 } #[inline] pub unsafe fn bounds_check(segment: *const SegmentReader, start: *const Word, end: *const Word, kind: WirePointerKind) -> Result<()> { //# If segment is null, this is an unchecked message, so we don't do bounds checks. 
if segment.is_null() || (*segment).contains_interval(start, end) { Ok(()) } else { let desc = match kind { WirePointerKind::List => "Message contained out-of-bounds list pointer.", WirePointerKind::Struct => "Message contained out-of-bounds struct pointer.", WirePointerKind::Far => "Message contained out-of-bounds far pointer.", WirePointerKind::Other => "Message contained out-of-bounds other pointer.", }; Err(Error::failed(desc.to_string())) } } #[inline] pub unsafe fn amplified_read(segment: *const SegmentReader, virtual_amount: u64) -> Result<()> { if segment.is_null() || (*segment).amplified_read(virtual_amount) { Ok(()) } else { Err(Error::failed("Message contained amplified list pointer.".to_string())) } } #[inline] pub unsafe fn allocate(reff: &mut *mut WirePointer, segment: &mut *mut SegmentBuilder, amount: WordCount32, kind: WirePointerKind) -> *mut Word { let is_null = (**reff).is_null(); if !is_null { zero_object(*segment, *reff) } if amount == 0 && kind == WirePointerKind::Struct { (**reff).set_kind_and_target_for_empty_struct(); return *reff as *mut _; } match (**segment).allocate(amount) { None => { //# Need to allocate in a new segment. We'll need to //# allocate an extra pointer worth of space to act as //# the landing pad for a far pointer. let amount_plus_ref = amount + POINTER_SIZE_IN_WORDS as u32; let allocation = (*(**segment).get_arena()).allocate(amount_plus_ref); *segment = allocation.0; let ptr = allocation.1; //# Set up the original pointer to be a far pointer to //# the new segment. (**reff).set_far(false, (**segment).get_word_offset_to(ptr)); (**reff).mut_far_ref().segment_id.set((**segment).id); //# Initialize the landing pad to indicate that the //# data immediately follows the pad. 
*reff = ptr as *mut _; let ptr1 = ptr.offset(POINTER_SIZE_IN_WORDS as isize); (**reff).set_kind_and_target(kind, ptr1); return ptr1; } Some(ptr) => { (**reff).set_kind_and_target(kind, ptr); return ptr; } } } #[inline] pub unsafe fn follow_builder_fars(reff: &mut *mut WirePointer, ref_target: *mut Word, segment: &mut *mut SegmentBuilder) -> Result<*mut Word> { // If `ref` is a far pointer, follow it. On return, `ref` will have been updated to point at // a WirePointer that contains the type information about the target object, and a pointer // to the object contents is returned. The caller must NOT use `ref->target()` as this may // or may not actually return a valid pointer. `segment` is also updated to point at the // segment which actually contains the object. // // If `ref` is not a far pointer, this simply returns `ref_target`. Usually, `ref_target` // should be the same as `ref->target()`, but may not be in cases where `ref` is only a tag. if (**reff).kind() == WirePointerKind::Far { *segment = try!((*(**segment).get_arena()).get_segment((**reff).far_ref().segment_id.get())); let pad: *mut WirePointer = (**segment).get_ptr_unchecked((**reff).far_position_in_segment()) as *mut _; if !(**reff).is_double_far() { *reff = pad; return Ok((*pad).mut_target()); } //# Landing pad is another far pointer. It is followed by a //# tag describing the pointed-to object. *reff = pad.offset(1); *segment = try!((*(**segment).get_arena()).get_segment((*pad).far_ref().segment_id.get())); return Ok((**segment).get_ptr_unchecked((*pad).far_position_in_segment())); } else { Ok(ref_target) } } #[inline] pub unsafe fn follow_fars(reff: &mut *const WirePointer, ref_target: *const Word, segment: &mut *const SegmentReader) -> Result<*const Word> { // If the segment is null, this is an unchecked message, so there are no FAR pointers. 
if !(*segment).is_null() && (**reff).kind() == WirePointerKind::Far { *segment = try!((**segment).arena.try_get_segment((**reff).far_ref().segment_id.get())); let ptr: *const Word = (**segment).get_start_ptr().offset( (**reff).far_position_in_segment() as isize); let pad_words: isize = if (**reff).is_double_far() { 2 } else { 1 }; try!(bounds_check(*segment, ptr, ptr.offset(pad_words), WirePointerKind::Far)); let pad: *const WirePointer = ptr as *const _; if !(**reff).is_double_far() { *reff = pad; return Ok((*pad).target()); } else { //# Landing pad is another far pointer. It is //# followed by a tag describing the pointed-to //# object. *reff = pad.offset(1); *segment = try!((**segment).arena.try_get_segment((*pad).far_ref().segment_id.get())); return Ok((**segment).get_start_ptr().offset((*pad).far_position_in_segment() as isize)); } } else { return Ok(ref_target); } } pub unsafe fn zero_object(mut segment: *mut SegmentBuilder, reff: *mut WirePointer) { //# Zero out the pointed-to object. Use when the pointer is //# about to be overwritten making the target object no longer //# reachable. 
match (*reff).kind() { WirePointerKind::Struct | WirePointerKind::List | WirePointerKind::Other => { zero_object_helper(segment, reff, (*reff).mut_target()) } WirePointerKind::Far => { segment = (*(*segment).get_arena()).get_segment((*reff).far_ref().segment_id.get()).unwrap(); let pad: *mut WirePointer = (*segment).get_ptr_unchecked((*reff).far_position_in_segment()) as *mut _; if (*reff).is_double_far() { segment = (*(*segment).get_arena()).get_segment((*pad).far_ref().segment_id.get()).unwrap(); zero_object_helper(segment, pad.offset(1), (*segment).get_ptr_unchecked((*pad).far_position_in_segment())); ptr::write_bytes(pad, 0u8, 2); } else { zero_object(segment, pad); ptr::write_bytes(pad, 0u8, 1); } } } } pub unsafe fn zero_object_helper(segment: *mut SegmentBuilder, tag: *mut WirePointer, ptr: *mut Word) { match (*tag).kind() { WirePointerKind::Other => { panic!("Don't know how to handle OTHER") } WirePointerKind::Struct => { let pointer_section: *mut WirePointer = ptr.offset((*tag).struct_ref().data_size.get() as isize) as *mut _; let count = (*tag).struct_ref().ptr_count.get() as isize; for i in 0..count { zero_object(segment, pointer_section.offset(i)); } ptr::write_bytes(ptr, 0u8, (*tag).struct_ref().word_size() as usize); } WirePointerKind::List => { match (*tag).list_ref().element_size() { Void => { } Bit | Byte | TwoBytes | FourBytes | EightBytes => { ptr::write_bytes( ptr, 0u8, round_bits_up_to_words(( (*tag).list_ref().element_count() * data_bits_per_element( (*tag).list_ref().element_size())) as u64) as usize) } Pointer => { let count = (*tag).list_ref().element_count() as usize; for i in 0..count as isize { zero_object(segment, ptr.offset(i) as *mut _); } ptr::write_bytes(ptr, 0u8, count); } InlineComposite => { let element_tag: *mut WirePointer = ptr as *mut _; assert!((*element_tag).kind() == WirePointerKind::Struct, "Don't know how to handle non-STRUCT inline composite"); let data_size = (*element_tag).struct_ref().data_size.get(); let 
pointer_count = (*element_tag).struct_ref().ptr_count.get(); let mut pos: *mut Word = ptr.offset(1); let count = (*element_tag).inline_composite_list_element_count(); for _ in 0..count { pos = pos.offset(data_size as isize); for _ in 0..pointer_count { zero_object(segment, pos as *mut WirePointer); pos = pos.offset(1); } } ::std::ptr::write_bytes(ptr, 0u8, ((*element_tag).struct_ref().word_size() * count + 1) as usize); } } } WirePointerKind::Far => { panic!("Unexpected FAR pointer") } } } #[inline] pub unsafe fn zero_pointer_and_fars(segment: *mut SegmentBuilder, reff: *mut WirePointer) -> Result<()> { // Zero out the pointer itself and, if it is a far pointer, zero the landing pad as well, // but do not zero the object body. Used when upgrading. if (*reff).kind() == WirePointerKind::Far { let pad = (* try!((* (*segment).get_arena()).get_segment((*reff).far_ref().segment_id.get()))) .get_ptr_unchecked((*reff).far_position_in_segment()); let num_elements = if (*reff).is_double_far() { 2 } else { 1 }; ::std::ptr::write_bytes(pad, 0, num_elements); } ::std::ptr::write_bytes(reff, 0, 1); Ok(()) } pub unsafe fn total_size(mut segment: *const SegmentReader, mut reff: *const WirePointer, mut nesting_limit: i32) -> Result<MessageSize> { let mut result = MessageSize { word_count: 0, cap_count: 0}; if (*reff).is_null() { return Ok(result) }; if nesting_limit <= 0 { return Err(Error::failed("Message is too deeply nested.".to_string())); } nesting_limit -= 1; let ptr = try!(follow_fars(&mut reff, (*reff).target(), &mut segment)); match (*reff).kind() { WirePointerKind::Struct => { try!(bounds_check(segment, ptr, ptr.offset((*reff).struct_ref().word_size() as isize), WirePointerKind::Struct)); result.word_count += (*reff).struct_ref().word_size() as u64; let pointer_section: *const WirePointer = ptr.offset((*reff).struct_ref().data_size.get() as isize) as *const _; let count: isize = (*reff).struct_ref().ptr_count.get() as isize; for i in 0..count { 
result.plus_eq(try!(total_size(segment, pointer_section.offset(i), nesting_limit))); } } WirePointerKind::List => { match (*reff).list_ref().element_size() { Void => {} Bit | Byte | TwoBytes | FourBytes | EightBytes => { let total_words = round_bits_up_to_words( (*reff).list_ref().element_count() as u64 * data_bits_per_element((*reff).list_ref().element_size()) as u64); try!(bounds_check(segment, ptr, ptr.offset(total_words as isize), WirePointerKind::List)); result.word_count += total_words as u64; } Pointer => { let count = (*reff).list_ref().element_count(); try!(bounds_check(segment, ptr, ptr.offset((count * WORDS_PER_POINTER as u32) as isize), WirePointerKind::List)); result.word_count += count as u64 * WORDS_PER_POINTER as u64; for i in 0..count as isize { result.plus_eq( try!(total_size(segment, (ptr as *const WirePointer).offset(i), nesting_limit))); } } InlineComposite => { let word_count = (*reff).list_ref().inline_composite_word_count(); try!(bounds_check(segment, ptr, ptr.offset(word_count as isize + POINTER_SIZE_IN_WORDS as isize), WirePointerKind::List)); result.word_count += word_count as u64 + POINTER_SIZE_IN_WORDS as u64; if word_count == 0 { return Ok(result); } let element_tag: *const WirePointer = ptr as *const _; let count = (*element_tag).inline_composite_list_element_count(); if (*element_tag).kind() != WirePointerKind::Struct { return Err(Error::failed( "Don't know how to handle non-STRUCT inline composite.".to_string())); } if (*element_tag).struct_ref().word_size() as u64 * count as u64 > word_count as u64 { return Err(Error::failed( "InlineComposite list's elements overrun its word count.".to_string())); } let data_size = (*element_tag).struct_ref().data_size.get(); let pointer_count = (*element_tag).struct_ref().ptr_count.get(); let mut pos: *const Word = ptr.offset(POINTER_SIZE_IN_WORDS as isize); for _ in 0..count { pos = pos.offset(data_size as isize); for _ in 0..pointer_count { result.plus_eq( try!(total_size(segment, pos as *const 
WirePointer, nesting_limit))); pos = pos.offset(POINTER_SIZE_IN_WORDS as isize); } } } } } WirePointerKind::Far => { panic!("Unexpected FAR pointer."); } WirePointerKind::Other => { if (*reff).is_capability() { result.cap_count += 1; } else { return Err(Error::failed("Unknown pointer type.".to_string())); } } } Ok(result) } pub unsafe fn transfer_pointer(dst_segment: *mut SegmentBuilder, dst: *mut WirePointer, src_segment: *mut SegmentBuilder, src: *mut WirePointer) { //# Make *dst point to the same object as *src. Both must //# reside in the same message, but can be in different //# segments. Not always-inline because this is rarely used. // //# Caller MUST zero out the source pointer after calling this, //# to make sure no later code mistakenly thinks the source //# location still owns the object. transferPointer() doesn't //# do this zeroing itself because many callers transfer //# several pointers in a loop then zero out the whole section. assert!((*dst).is_null()); // We expect the caller to ensure the target is already null so won't leak. if (*src).is_null() { ::std::ptr::write_bytes(dst, 0, 1); } else if (*src).kind() == WirePointerKind::Far { ::std::ptr::copy_nonoverlapping(src, dst, 1); } else { transfer_pointer_split(dst_segment, dst, src_segment, src, (*src).mut_target()); } } pub unsafe fn transfer_pointer_split(dst_segment: *mut SegmentBuilder, dst: *mut WirePointer, src_segment: *mut SegmentBuilder, src_tag: *mut WirePointer, src_ptr: *mut Word) { // Like the other transfer_pointer, but splits src into a tag and a // target. Particularly useful for OrphanBuilder. if dst_segment == src_segment { // Same segment, so create a direct pointer. if (*src_tag).kind() == WirePointerKind::Struct && (*src_tag).struct_ref().word_size() == 0 { (*dst).set_kind_and_target_for_empty_struct(); } else { (*dst).set_kind_and_target((*src_tag).kind(), src_ptr); } // We can just copy the upper 32 bits. (Use memcpy() to comply with aliasing rules.) 
::std::ptr::copy_nonoverlapping(&(*src_tag).upper32bits, &mut (*dst).upper32bits, 1); } else { // Need to create a far pointer. Try to allocate it in the same segment as the source, // so that it doesn't need to be a double-far. match (*src_segment).allocate(1) { None => { //# Darn, need a double-far. panic!("unimplemented"); } Some(landing_pad_word) => { //# Simple landing pad is just a pointer. let landing_pad: *mut WirePointer = landing_pad_word as *mut _; (*landing_pad).set_kind_and_target((*src_tag).kind(), src_ptr); ::std::ptr::copy_nonoverlapping(&(*src_tag).upper32bits, &mut (*landing_pad).upper32bits, 1); (*dst).set_far(false, (*src_segment).get_word_offset_to(landing_pad_word)); (*dst).mut_far_ref().set((*src_segment).get_segment_id()); } } } } #[inline]<|fim▁hole|> mut segment_builder: *mut SegmentBuilder, cap_table: CapTableBuilder, size: StructSize) -> StructBuilder<'a> { let ptr: *mut Word = allocate(&mut reff, &mut segment_builder, size.total(), WirePointerKind::Struct); (*reff).mut_struct_ref().set_from_struct_size(size); StructBuilder { marker: ::std::marker::PhantomData::<&'a ()>, segment: segment_builder, cap_table: cap_table, data: ptr as *mut _, pointers: ptr.offset((size.data as usize) as isize) as *mut _, data_size: size.data as WordCount32 * (BITS_PER_WORD as BitCount32), pointer_count: size.pointers } } #[inline] pub unsafe fn get_writable_struct_pointer<'a>(mut reff: *mut WirePointer, mut segment: *mut SegmentBuilder, cap_table: CapTableBuilder, size: StructSize, default_value: *const Word) -> Result<StructBuilder<'a>> { let ref_target = (*reff).mut_target(); if (*reff).is_null() { if default_value.is_null() || (*(default_value as *const WirePointer)).is_null() { return Ok(init_struct_pointer(reff, segment, cap_table, size)); } unimplemented!() } let mut old_ref = reff; let mut old_segment = segment; let old_ptr = try!(follow_builder_fars(&mut old_ref, ref_target, &mut old_segment)); if (*old_ref).kind() != WirePointerKind::Struct { return 
Err(Error::failed(
    "Message contains non-struct pointer where struct pointer was expected.".to_string()));
    }
    let old_data_size = (*old_ref).struct_ref().data_size.get();
    let old_pointer_count = (*old_ref).struct_ref().ptr_count.get();
    let old_pointer_section: *mut WirePointer = old_ptr.offset(old_data_size as isize) as *mut _;
    if old_data_size < size.data || old_pointer_count < size.pointers {
        //# The space allocated for this struct is too small.
        //# Unlike with readers, we can't just run with it and do
        //# bounds checks at access time, because how would we
        //# handle writes? Instead, we have to copy the struct to a
        //# new space now.
        let new_data_size = ::std::cmp::max(old_data_size, size.data);
        let new_pointer_count = ::std::cmp::max(old_pointer_count, size.pointers);
        let total_size = new_data_size as u32 + new_pointer_count as u32 * WORDS_PER_POINTER as u32;
        //# Don't let allocate() zero out the object just yet.
        try!(zero_pointer_and_fars(segment, reff));
        let ptr = allocate(&mut reff, &mut segment, total_size, WirePointerKind::Struct);
        (*reff).mut_struct_ref().set(new_data_size, new_pointer_count);
        // Copy data section.
        // Note: copy_nonoverlapping's third argument is an element count, not a byte count.
        ::std::ptr::copy_nonoverlapping(old_ptr, ptr, old_data_size as usize);
        //# Copy pointer section.
        let new_pointer_section: *mut WirePointer = ptr.offset(new_data_size as isize) as *mut _;
        for i in 0..old_pointer_count as isize {
            transfer_pointer(segment, new_pointer_section.offset(i),
                             old_segment, old_pointer_section.offset(i));
        }
        // Zero out the old location (data words plus pointer words).
        ::std::ptr::write_bytes(old_ptr, 0, old_data_size as usize + old_pointer_count as usize);
        Ok(StructBuilder {
            marker: ::std::marker::PhantomData::<&'a ()>,
            segment: segment,
            cap_table: cap_table,
            data: ptr as *mut _,
            pointers: new_pointer_section,
            data_size: new_data_size as u32 * BITS_PER_WORD as u32,
            pointer_count: new_pointer_count
        })
    } else {
        // Existing allocation is already big enough; hand back a builder over it as-is.
        Ok(StructBuilder {
            marker: ::std::marker::PhantomData::<&'a ()>,
            segment: old_segment,
            cap_table: cap_table,
            data: old_ptr as *mut _,
            pointers: old_pointer_section,
            data_size: old_data_size as u32 * BITS_PER_WORD as u32,
            pointer_count: old_pointer_count
        })
    }
}

/// Allocates a new list of `element_count` elements of primitive/pointer size
/// `element_size` and points `reff` at it. Struct lists must use
/// `init_struct_list_pointer` instead (enforced by the assert).
#[inline]
pub unsafe fn init_list_pointer<'a>(mut reff: *mut WirePointer,
                                    mut segment_builder: *mut SegmentBuilder,
                                    cap_table: CapTableBuilder,
                                    element_count: ElementCount32,
                                    element_size: ElementSize) -> ListBuilder<'a> {
    assert!(element_size != InlineComposite,
            "Should have called initStructListPointer() instead");
    let data_size = data_bits_per_element(element_size);
    let pointer_count = pointers_per_element(element_size);
    // Distance, in bits, from the start of one element to the start of the next.
    let step = data_size + pointer_count * BITS_PER_POINTER as u32;
    let word_count = round_bits_up_to_words(element_count as ElementCount64 * (step as u64));
    let ptr = allocate(&mut reff, &mut segment_builder, word_count, WirePointerKind::List);
    (*reff).mut_list_ref().set(element_size, element_count);
    ListBuilder {
        marker: ::std::marker::PhantomData::<&'a ()>,
        segment: segment_builder,
        cap_table: cap_table,
        ptr: ptr as *mut _,
        step: step,
        element_count: element_count,
        struct_data_size: data_size,
        struct_pointer_count: pointer_count as u16
    }
}

/// Allocates a new inline-composite (struct) list of `element_count` elements,
/// prefixed by a tag word, and points `reff` at it.
#[inline]
pub unsafe fn init_struct_list_pointer<'a>(mut reff: *mut WirePointer,
                                           mut segment_builder: *mut SegmentBuilder,
                                           cap_table: CapTableBuilder,
                                           element_count: ElementCount32,
element_size: StructSize) -> ListBuilder<'a> {
    // (continuation of init_struct_list_pointer's parameter list, begun above)
    let words_per_element = element_size.total();
    //# Allocate the list, prefixed by a single WirePointer.
    let word_count: WordCount32 = element_count * words_per_element;
    let ptr: *mut WirePointer = allocate(&mut reff, &mut segment_builder,
                                         POINTER_SIZE_IN_WORDS as u32 + word_count,
                                         WirePointerKind::List) as *mut _;
    //# Initialize the pointer.
    (*reff).mut_list_ref().set_inline_composite(word_count);
    (*ptr).set_kind_and_inline_composite_list_element_count(WirePointerKind::Struct, element_count);
    (*ptr).mut_struct_ref().set_from_struct_size(element_size);
    // Element data starts one word past the tag word.
    let ptr1 = ptr.offset(POINTER_SIZE_IN_WORDS as isize);
    ListBuilder {
        marker: ::std::marker::PhantomData::<&'a ()>,
        segment: segment_builder,
        cap_table: cap_table,
        ptr: ptr1 as *mut _,
        step: words_per_element * BITS_PER_WORD as u32,
        element_count: element_count,
        struct_data_size: element_size.data as u32 * (BITS_PER_WORD as u32),
        struct_pointer_count: element_size.pointers
    }
}

/// Returns a builder for the existing non-struct list pointed at by `orig_ref`,
/// validating that the stored element size is compatible with `element_size`.
#[inline]
pub unsafe fn get_writable_list_pointer<'a>(orig_ref: *mut WirePointer,
                                            orig_segment: *mut SegmentBuilder,
                                            cap_table: CapTableBuilder,
                                            element_size: ElementSize,
                                            default_value: *const Word) -> Result<ListBuilder<'a>> {
    assert!(element_size != InlineComposite,
            "Use get_struct_list_{element,field}() for structs");
    let orig_ref_target = (*orig_ref).mut_target();
    if (*orig_ref).is_null() {
        if default_value.is_null() || (*(default_value as *const WirePointer)).is_null() {
            return Ok(ListBuilder::new_default());
        }
        // Non-null list default values are not supported yet.
        unimplemented!()
    }
    // We must verify that the pointer has the right size. Unlike in
    // get_writable_struct_list_pointer(), we never need to "upgrade" the data, because this
    // method is called only for non-struct lists, and there is no allowed upgrade path *to* a
    // non-struct list, only *from* them.
    let mut reff = orig_ref;
    let mut segment = orig_segment;
    let mut ptr = try!(follow_builder_fars(&mut reff, orig_ref_target, &mut segment));
    if (*reff).kind() != WirePointerKind::List {
        return Err(Error::failed(
            "Called get_list_{{field,element}}() but existing pointer is not a list.".to_string()));
    }
    let old_size = (*reff).list_ref().element_size();
    if old_size == InlineComposite {
        // The existing element size is InlineComposite, which means that it is at least two
        // words, which makes it bigger than the expected element size. Since fields can only
        // grow when upgraded, the existing data must have been written with a newer version of
        // the protocol. We therefore never need to upgrade the data in this case, but we do
        // need to validate that it is a valid upgrade from what we expected.

        // Read the tag to get the actual element count.
        let tag: *const WirePointer = ptr as *const _;
        if (*tag).kind() != WirePointerKind::Struct {
            return Err(Error::failed(
                "InlineComposite list with non-STRUCT elements not supported.".to_string()));
        }
        ptr = ptr.offset(POINTER_SIZE_IN_WORDS as isize);
        let data_size = (*tag).struct_ref().data_size.get();
        let pointer_count = (*tag).struct_ref().ptr_count.get();
        match element_size {
            Void => {} // Anything is a valid upgrade from Void.
            Bit => {
                return Err(Error::failed(
                    "Found struct list where bit list was expected.".to_string()));
            }
            Byte | TwoBytes | FourBytes | EightBytes => {
                // Data elements require at least one data word per struct.
                if data_size < 1 {
                    return Err(Error::failed(
                        "Existing list value is incompatible with expected type.".to_string()));
                }
            }
            Pointer => {
                if pointer_count < 1 {
                    return Err(Error::failed(
                        "Existing list value is incompatible with expected type.".to_string()));
                }
                // Adjust the pointer to point at the reference segment.
                ptr = ptr.offset(data_size as isize);
            }
            InlineComposite => { unreachable!() }
        }
        // OK, looks valid.
        Ok(ListBuilder {
            marker: ::std::marker::PhantomData::<&'a ()>,
            segment: segment,
            cap_table: cap_table,
            ptr: ptr as *mut _,
            element_count: (*tag).inline_composite_list_element_count(),
            step: (*tag).struct_ref().word_size() * BITS_PER_WORD as u32,
            struct_data_size: data_size as u32 * BITS_PER_WORD as u32,
            struct_pointer_count: pointer_count
        })
    } else {
        // Existing list is a plain primitive/pointer list; check size compatibility.
        let data_size = data_bits_per_element(old_size);
        let pointer_count = pointers_per_element(old_size);
        if data_size < data_bits_per_element(element_size) ||
            pointer_count < pointers_per_element(element_size) {
            return Err(Error::failed(
                "Existing list value is incompatible with expected type.".to_string()));
        }
        let step = data_size + pointer_count * BITS_PER_POINTER as u32;
        Ok(ListBuilder {
            marker: ::std::marker::PhantomData::<&'a ()>,
            segment: segment,
            cap_table: cap_table,
            ptr: ptr as *mut _,
            step: step,
            element_count: (*reff).list_ref().element_count(),
            struct_data_size: data_size,
            struct_pointer_count: pointer_count as u16
        })
    }
}

/// Returns a builder for the existing struct list pointed at by `orig_ref`,
/// upgrading the stored list (continuation below) when needed.
#[inline]
pub unsafe fn get_writable_struct_list_pointer<'a>(orig_ref: *mut WirePointer,
                                                   orig_segment: *mut SegmentBuilder,
                                                   cap_table: CapTableBuilder,
                                                   element_size: StructSize,
                                                   default_value: *const Word)
                                                   -> Result<ListBuilder<'a>> {
    let orig_ref_target = (*orig_ref).mut_target();
    if (*orig_ref).is_null() {
        if default_value.is_null() || (*(default_value as *const WirePointer)).is_null() {
            return Ok(ListBuilder::new_default());
        }
        // Non-null list default values are not supported yet.
        unimplemented!()
    }
    // We must verify that the pointer has the right size and potentially upgrade it if not.
let mut old_ref = orig_ref;
    let mut old_segment = orig_segment;
    let mut old_ptr = try!(follow_builder_fars(&mut old_ref, orig_ref_target, &mut old_segment));
    if (*old_ref).kind() != WirePointerKind::List {
        return Err(Error::failed(
            "Called getList{{Field,Element}} but existing pointer is not a list.".to_string()));
    }
    let old_size = (*old_ref).list_ref().element_size();
    if old_size == InlineComposite {
        // Existing list is InlineComposite, but we need to verify that the sizes match.
        let old_tag: *const WirePointer = old_ptr as *const _;
        old_ptr = old_ptr.offset(POINTER_SIZE_IN_WORDS as isize);
        if (*old_tag).kind() != WirePointerKind::Struct {
            return Err(Error::failed(
                "InlineComposite list with non-STRUCT elements not supported.".to_string()));
        }
        let old_data_size = (*old_tag).struct_ref().data_size.get();
        let old_pointer_count = (*old_tag).struct_ref().ptr_count.get();
        let old_step = old_data_size as u32 + old_pointer_count as u32 * WORDS_PER_POINTER as u32;
        let element_count = (*old_tag).inline_composite_list_element_count();
        if old_data_size >= element_size.data && old_pointer_count >= element_size.pointers {
            // Old size is at least as large as we need. Ship it.
            return Ok(ListBuilder {
                marker: ::std::marker::PhantomData::<&'a ()>,
                segment: old_segment,
                cap_table: cap_table,
                ptr: old_ptr as *mut _,
                element_count: element_count,
                step: old_step * BITS_PER_WORD as u32,
                struct_data_size: old_data_size as u32 * BITS_PER_WORD as u32,
                struct_pointer_count: old_pointer_count
            });
        }
        // The structs in this list are smaller than expected, probably written using an older
        // version of the protocol. We need to make a copy and expand them.
        unimplemented!();
    } else {
        // We're upgrading from a non-struct list.
        let old_data_size = data_bits_per_element(old_size);
        let old_pointer_count = pointers_per_element(old_size);
        let old_step = old_data_size + old_pointer_count * BITS_PER_POINTER as u32;
        let element_count = (*old_ref).list_ref().element_count();
        if old_size == ElementSize::Void {
            // Nothing to copy, just allocate a new list.
            return Ok(init_struct_list_pointer(orig_ref, orig_segment, cap_table,
                                               element_count, element_size));
        } else {
            // Upgrade to an inline composite list.
            if old_size == ElementSize::Bit {
                return Err(Error::failed(
                    "Found bit list where struct list was expected; upgrading boolean \
                     lists to struct lists is no longer supported.".to_string()));
            }
            let mut new_data_size = element_size.data;
            let mut new_pointer_count = element_size.pointers;
            if old_size == ElementSize::Pointer {
                new_pointer_count = ::std::cmp::max(new_pointer_count, 1);
            } else {
                // Old list contains data elements, so we need at least one word of data.
                new_data_size = ::std::cmp::max(new_data_size, 1);
            }
            let new_step = new_data_size as u32 + new_pointer_count as u32 * WORDS_PER_POINTER as u32;
            let total_words = element_count * new_step;
            // Don't let allocate() zero out the object just yet.
            try!(zero_pointer_and_fars(orig_segment, orig_ref));
            let mut new_ref = orig_ref;
            let mut new_segment = orig_segment;
            // Allocate the new list, prefixed by a tag word.
            let mut new_ptr = allocate(&mut new_ref, &mut new_segment,
                                       total_words + POINTER_SIZE_IN_WORDS as u32,
                                       WirePointerKind::List);
            (*new_ref).mut_list_ref().set_inline_composite(total_words);
            let tag: *mut WirePointer = new_ptr as *mut _;
            (*tag).set_kind_and_inline_composite_list_element_count(WirePointerKind::Struct, element_count);
            (*tag).mut_struct_ref().set(new_data_size, new_pointer_count);
            new_ptr = new_ptr.offset(POINTER_SIZE_IN_WORDS as isize);
            if old_size == ElementSize::Pointer {
                // Each old element is a single pointer; move it into the new element's
                // pointer section (which begins after new_data_size words).
                let mut dst: *mut Word = new_ptr.offset(new_data_size as isize);
                let mut src: *mut WirePointer = old_ptr as *mut _;
                for _ in 0..element_count {
                    transfer_pointer(new_segment, dst as *mut _, old_segment, src);
                    dst = dst.offset(new_step as isize / WORDS_PER_POINTER as isize);
                    src = src.offset(1);
                }
            } else {
                // Each old element is raw data; copy it into the new element's data section.
                let mut dst: *mut Word = new_ptr;
                let mut src: *mut u8 = old_ptr as *mut u8;
                let old_byte_step = old_data_size / BITS_PER_BYTE as u32;
                for _ in 0..element_count {
                    ptr::copy_nonoverlapping(src, dst as *mut _, old_byte_step as usize);
                    src = src.offset(old_byte_step as isize);
                    dst = dst.offset(new_step as isize);
                }
            }
            // Zero out old location.
            ptr::write_bytes(old_ptr as *mut u8, 0,
                             round_bits_up_to_bytes(old_step as u64 * element_count as u64) as usize);
            return Ok(ListBuilder {
                marker: ::std::marker::PhantomData::<&'a ()>,
                segment: new_segment,
                cap_table: cap_table,
                ptr: new_ptr as *mut _,
                element_count: element_count,
                step: new_step * BITS_PER_WORD as u32,
                struct_data_size: new_data_size as u32 * BITS_PER_WORD as u32,
                struct_pointer_count: new_pointer_count
            });
        }
    }
}

/// Allocates a NUL-terminated text blob holding `size` user-visible bytes and points
/// `reff` at it.
#[inline]
pub unsafe fn init_text_pointer<'a>(mut reff: *mut WirePointer,
                                    mut segment: *mut SegmentBuilder,
                                    size: ByteCount32) -> SegmentAnd<text::Builder<'a>> {
    //# The byte list must include a NUL terminator.
    let byte_size = size + 1;
    //# Allocate the space.
let ptr = allocate(&mut reff, &mut segment, round_bytes_up_to_words(byte_size),
                   WirePointerKind::List);
    //# Initialize the pointer.
    (*reff).mut_list_ref().set(Byte, byte_size);
    return SegmentAnd {
        segment: segment,
        // Expose only the `size` user-visible bytes; the NUL terminator stays hidden.
        value: text::Builder::new(slice::from_raw_parts_mut(ptr as *mut _, size as usize), 0).unwrap()
    }
}

/// Copies `value` into a freshly-allocated text blob pointed at by `reff`.
#[inline]
pub unsafe fn set_text_pointer<'a>(reff: *mut WirePointer,
                                   segment: *mut SegmentBuilder,
                                   value: &str) -> SegmentAnd<text::Builder<'a>> {
    let value_bytes = value.as_bytes();
    // TODO make sure the string is not longer than 2 ** 29.
    let mut allocation = init_text_pointer(reff, segment, value_bytes.len() as u32);
    allocation.value.push_str(value);
    allocation
}

/// Returns a builder over the existing text blob pointed at by `reff`, validating that
/// it is a NUL-terminated byte list.
#[inline]
pub unsafe fn get_writable_text_pointer<'a>(mut reff: *mut WirePointer,
                                            mut segment: *mut SegmentBuilder,
                                            _default_value: *const Word,
                                            default_size: ByteCount32) -> Result<text::Builder<'a>> {
    if (*reff).is_null() {
        if default_size == 0 {
            return text::Builder::new(slice::from_raw_parts_mut(ptr::null_mut(), 0), 0);
        } else {
            // Copying a non-empty default into the fresh blob is not implemented yet.
            let _builder = init_text_pointer(reff, segment, default_size).value;
            unimplemented!()
        }
    }
    let ref_target = (*reff).mut_target();
    let ptr = try!(follow_builder_fars(&mut reff, ref_target, &mut segment));
    let cptr: *mut u8 = ptr as *mut _;
    if (*reff).kind() != WirePointerKind::List {
        return Err(Error::failed(
            "Called getText{{Field,Element}}() but existing pointer is not a list.".to_string()));
    }
    if (*reff).list_ref().element_size() != Byte {
        return Err(Error::failed(
            "Called getText{{Field,Element}}() but existing list pointer is not byte-sized.".to_string()));
    }
    let count = (*reff).list_ref().element_count();
    if count <= 0 || *cptr.offset((count - 1) as isize) != 0 {
        return Err(Error::failed(
            "Text blob missing NUL terminator.".to_string()));
    }
    // Subtract 1 from the size for the NUL terminator.
    return text::Builder::new(slice::from_raw_parts_mut(cptr, (count - 1) as usize), count - 1);
}

/// Allocates a data blob of `size` bytes and points `reff` at it.
#[inline]
pub unsafe fn init_data_pointer<'a>(mut reff: *mut WirePointer,
                                    mut segment: *mut SegmentBuilder,
                                    size: ByteCount32) -> SegmentAnd<data::Builder<'a>> {
    //# Allocate the space.
    let ptr = allocate(&mut reff, &mut segment, round_bytes_up_to_words(size),
                       WirePointerKind::List);
    //# Initialize the pointer.
    (*reff).mut_list_ref().set(Byte, size);
    return SegmentAnd { segment: segment, value: data::new_builder(ptr as *mut _, size) };
}

/// Copies `value` into a freshly-allocated data blob pointed at by `reff`.
#[inline]
pub unsafe fn set_data_pointer<'a>(reff: *mut WirePointer,
                                   segment: *mut SegmentBuilder,
                                   value: &[u8]) -> SegmentAnd<data::Builder<'a>> {
    let allocation = init_data_pointer(reff, segment, value.len() as u32);
    ptr::copy_nonoverlapping(value.as_ptr(), allocation.value.as_mut_ptr(), value.len());
    return allocation;
}

/// Returns a builder over the existing data blob pointed at by `reff`, validating that
/// it is a byte list; if the pointer is null, initializes it from the default value.
#[inline]
pub unsafe fn get_writable_data_pointer<'a>(mut reff: *mut WirePointer,
                                            mut segment: *mut SegmentBuilder,
                                            default_value: *const Word,
                                            default_size: ByteCount32) -> Result<data::Builder<'a>> {
    if (*reff).is_null() {
        if default_size == 0 {
            return Ok(data::new_builder(::std::ptr::null_mut(), 0));
        } else {
            let builder = init_data_pointer(reff, segment, default_size).value;
            ::std::ptr::copy_nonoverlapping(default_value as *const _,
                                            builder.as_mut_ptr() as *mut _,
                                            default_size as usize);
            return Ok(builder);
        }
    }
    let ref_target = (*reff).mut_target();
    let ptr = try!(follow_builder_fars(&mut reff, ref_target, &mut segment));
    if (*reff).kind() != WirePointerKind::List {
        return Err(Error::failed(
            "Called getData{{Field,Element}}() but existing pointer is not a list.".to_string()));
    }
    if (*reff).list_ref().element_size() != Byte {
        return Err(Error::failed(
            "Called getData{{Field,Element}}() but existing list pointer is not byte-sized.".to_string()));
    }
    return Ok(data::new_builder(ptr as *mut _, (*reff).list_ref().element_count()));
}

/// Deep-copies the struct `value` into this message, pointing `reff` at the copy.
pub unsafe fn set_struct_pointer<'a>(mut segment: *mut SegmentBuilder, cap_table:
CapTableBuilder, mut reff: *mut WirePointer,
                                     value: StructReader) -> Result<SegmentAnd<*mut Word>> {
    let data_size: WordCount32 = round_bits_up_to_words(value.data_size as u64);
    let total_size: WordCount32 = data_size + value.pointer_count as u32 * WORDS_PER_POINTER as u32;
    let ptr = allocate(&mut reff, &mut segment, total_size, WirePointerKind::Struct);
    (*reff).mut_struct_ref().set(data_size as u16, value.pointer_count);
    if value.data_size == 1 {
        // A one-bit data section is copied as a single bool byte.
        *(ptr as *mut u8) = value.get_bool_field(0) as u8
    } else {
        ptr::copy_nonoverlapping::<Word>(value.data as *const _, ptr,
                                         value.data_size as usize / BITS_PER_WORD);
    }
    let pointer_section: *mut WirePointer = ptr.offset(data_size as isize) as *mut _;
    // Deep-copy each pointer in the source struct's pointer section.
    for i in 0..value.pointer_count as isize {
        try!(copy_pointer(segment, cap_table, pointer_section.offset(i),
                          value.segment, value.cap_table, value.pointers.offset(i),
                          value.nesting_limit));
    }
    Ok(SegmentAnd { segment: segment, value: ptr })
}

/// Stores capability `cap` in the cap table and writes its index into `reff`.
pub unsafe fn set_capability_pointer(_segment: *mut SegmentBuilder,
                                     mut cap_table: CapTableBuilder,
                                     reff: *mut WirePointer,
                                     cap: Box<ClientHook>) {
    // TODO if ref is null, zero object.
    (*reff).set_cap(cap_table.inject_cap(cap) as u32);
}

/// Deep-copies the list `value` into this message, pointing `reff` at the copy.
pub unsafe fn set_list_pointer<'a>(mut segment: *mut SegmentBuilder,
                                   cap_table: CapTableBuilder,
                                   mut reff: *mut WirePointer,
                                   value: ListReader) -> Result<SegmentAnd<*mut Word>> {
    let total_size = round_bits_up_to_words((value.element_count * value.step) as u64);
    if value.step <= BITS_PER_WORD as u32 {
        //# List of non-structs.
        let ptr = allocate(&mut reff, &mut segment, total_size, WirePointerKind::List);
        if value.struct_pointer_count == 1 {
            //# List of pointers.
            (*reff).mut_list_ref().set(Pointer, value.element_count);
            for i in 0.. value.element_count as isize {
                try!(copy_pointer(segment, cap_table, (ptr as *mut _).offset(i),
                                  value.segment, value.cap_table,
                                  (value.ptr as *const _).offset(i),
                                  value.nesting_limit));
            }
        } else {
            //# List of data.
            // Recover the element size from the per-element bit step.
            let element_size = match value.step {
                0 => Void,
                1 => Bit,
                8 => Byte,
                16 => TwoBytes,
                32 => FourBytes,
                64 => EightBytes,
                _ => { panic!("invalid list step size: {}", value.step) }
            };
            (*reff).mut_list_ref().set(element_size, value.element_count);
            ::std::ptr::copy_nonoverlapping(value.ptr as *const Word, ptr, total_size as usize);
        }
        Ok(SegmentAnd { segment: segment, value: ptr })
    } else {
        //# List of structs.
        let ptr = allocate(&mut reff, &mut segment,
                           total_size + POINTER_SIZE_IN_WORDS as u32, WirePointerKind::List);
        (*reff).mut_list_ref().set_inline_composite(total_size);
        let data_size = round_bits_up_to_words(value.struct_data_size as u64);
        let pointer_count = value.struct_pointer_count;
        let tag: *mut WirePointer = ptr as *mut _;
        (*tag).set_kind_and_inline_composite_list_element_count(WirePointerKind::Struct,
                                                                value.element_count);
        (*tag).mut_struct_ref().set(data_size as u16, pointer_count);
        let mut dst = ptr.offset(POINTER_SIZE_IN_WORDS as isize);
        let mut src: *const Word = value.ptr as *const _;
        for _ in 0..value.element_count {
            // Copy the element's data section, then deep-copy each of its pointers.
            ::std::ptr::copy_nonoverlapping(src, dst,
                                            value.struct_data_size as usize / BITS_PER_WORD);
            dst = dst.offset(data_size as isize);
            src = src.offset(data_size as isize);
            for _ in 0..pointer_count {
                try!(copy_pointer(segment, cap_table, dst as *mut _,
                                  value.segment, value.cap_table, src as *const _,
                                  value.nesting_limit));
                dst = dst.offset(POINTER_SIZE_IN_WORDS as isize);
                src = src.offset(POINTER_SIZE_IN_WORDS as isize);
            }
        }
        Ok(SegmentAnd { segment: segment, value: ptr })
    }
}

/// Deep-copies the pointer `src` (struct, list, or capability) into `dst`, enforcing
/// bounds checks, the nesting limit, and amplification-attack guards.
pub unsafe fn copy_pointer(dst_segment: *mut SegmentBuilder, dst_cap_table: CapTableBuilder,
                           dst: *mut WirePointer,
                           mut src_segment: *const SegmentReader, src_cap_table: CapTableReader,
                           mut src: *const WirePointer,
                           nesting_limit: i32) -> Result<SegmentAnd<*mut Word>> {
    let src_target = (*src).target();
    if (*src).is_null() {
        // A null source copies as a zeroed (null) destination pointer.
        ptr::write_bytes(dst, 0, 1);
        return Ok(SegmentAnd { segment: dst_segment, value: ::std::ptr::null_mut() });
    }
    let mut ptr = try!(follow_fars(&mut src, src_target, &mut src_segment));
    match (*src).kind() {
        WirePointerKind::Struct => {
            if nesting_limit <= 0 {
                return Err(Error::failed(
                    "Message is too deeply-nested or contains cycles. See ReaderOptions.".to_string()));
            }
            try!(bounds_check(src_segment, ptr,
                              ptr.offset((*src).struct_ref().word_size() as isize),
                              WirePointerKind::Struct));
            return set_struct_pointer(
                dst_segment, dst_cap_table, dst,
                StructReader {
                    marker: ::std::marker::PhantomData,
                    segment: src_segment,
                    cap_table: src_cap_table,
                    data: ptr as *mut _,
                    pointers: ptr.offset((*src).struct_ref().data_size.get() as isize) as *mut _,
                    data_size: (*src).struct_ref().data_size.get() as u32 * BITS_PER_WORD as u32,
                    pointer_count: (*src).struct_ref().ptr_count.get(),
                    nesting_limit: nesting_limit - 1
                });
        }
        WirePointerKind::List => {
            let element_size = (*src).list_ref().element_size();
            if nesting_limit <= 0 {
                return Err(Error::failed(
                    "Message is too deeply-nested or contains cycles. See ReaderOptions.".to_string()));
            }
            if element_size == InlineComposite {
                let word_count = (*src).list_ref().inline_composite_word_count();
                let tag: *const WirePointer = ptr as *const _;
                ptr = ptr.offset(POINTER_SIZE_IN_WORDS as isize);
                // Bounds check includes the tag word (ptr - 1).
                try!(bounds_check(src_segment, ptr.offset(-1), ptr.offset(word_count as isize),
                                  WirePointerKind::List));
                if (*tag).kind() != WirePointerKind::Struct {
                    return Err(Error::failed(
                        "InlineComposite lists of non-STRUCT type are not supported.".to_string()));
                }
                let element_count = (*tag).inline_composite_list_element_count();
                let words_per_element = (*tag).struct_ref().word_size();
                if words_per_element as u64 * element_count as u64 > word_count as u64 {
                    return Err(Error::failed(
                        "InlineComposite list's elements overrun its word count.".to_string()));
                }
                if words_per_element == 0 {
                    // Watch out for lists of zero-sized structs, which can claim to be
                    // arbitrarily large without having sent actual data.
                    try!(amplified_read(src_segment, element_count as u64));
                }
                return set_list_pointer(
                    dst_segment, dst_cap_table, dst,
                    ListReader {
                        marker: ::std::marker::PhantomData,
                        segment: src_segment,
                        cap_table: src_cap_table,
                        ptr: ptr as *mut _,
                        element_count: element_count,
                        step: words_per_element * BITS_PER_WORD as u32,
                        struct_data_size: (*tag).struct_ref().data_size.get() as u32 * BITS_PER_WORD as u32,
                        struct_pointer_count: (*tag).struct_ref().ptr_count.get(),
                        nesting_limit: nesting_limit - 1
                    })
            } else {
                let data_size = data_bits_per_element(element_size);
                let pointer_count = pointers_per_element(element_size);
                let step = data_size + pointer_count * BITS_PER_POINTER as u32;
                let element_count = (*src).list_ref().element_count();
                let word_count = round_bits_up_to_words(element_count as u64 * step as u64);
                try!(bounds_check(src_segment, ptr, ptr.offset(word_count as isize),
                                  WirePointerKind::List));
                if element_size == Void {
                    // Watch out for lists of void, which can claim to be arbitrarily large
                    // without having sent actual data.
                    try!(amplified_read(src_segment, element_count as u64));
                }
                return set_list_pointer(
                    dst_segment, dst_cap_table, dst,
                    ListReader {
                        marker: ::std::marker::PhantomData,
                        segment: src_segment,
                        cap_table: src_cap_table,
                        ptr: ptr as *mut _,
                        element_count: element_count,
                        step: step,
                        struct_data_size: data_size,
                        struct_pointer_count: pointer_count as u16,
                        nesting_limit: nesting_limit - 1
                    })
            }
        }
        WirePointerKind::Far => {
            // follow_fars() above already resolved any far pointer.
            panic!("Far pointer should have been handled above");
        }
        WirePointerKind::Other => {
            if !(*src).is_capability() {
                return Err(Error::failed("Unknown pointer type.".to_string()));
            }
            match src_cap_table.extract_cap((*src).cap_ref().index.get() as usize) {
                Some(cap) => {
                    set_capability_pointer(dst_segment, dst_cap_table, dst, cap);
                    return Ok(SegmentAnd { segment: dst_segment, value: ::std::ptr::null_mut() });
                }
                None => {
                    return Err(Error::failed(
                        "Message contained invalid capability pointer.".to_string()));
                }
            }
        }
    }
}

/// Reads the struct pointed at by `reff`, following far pointers and enforcing the
/// nesting limit and segment bounds.
#[inline]
pub unsafe fn read_struct_pointer<'a>(mut segment: *const SegmentReader,
                                      cap_table: CapTableReader,
                                      mut reff: *const WirePointer,
                                      default_value: *const Word,
                                      nesting_limit: i32) -> Result<StructReader<'a>> {
    let ref_target: *const Word = (*reff).target();
    if (*reff).is_null() {
        if default_value.is_null() || (*(default_value as *const WirePointer)).is_null() {
            return Ok(StructReader::new_default());
        }
        //segment = ::std::ptr::null();
        //reff = default_value as *const WirePointer;
        unimplemented!()
    }
    if nesting_limit <= 0 {
        return Err(Error::failed("Message is too deeply-nested or contains cycles.".to_string()));
    }
    let ptr = try!(follow_fars(&mut reff, ref_target, &mut segment));
    let data_size_words = (*reff).struct_ref().data_size.get();
    if (*reff).kind() != WirePointerKind::Struct {
        return Err(Error::failed(
            "Message contains non-struct pointer where struct pointer was expected.".to_string()));
    }
    try!(bounds_check(segment, ptr,
                      ptr.offset((*reff).struct_ref().word_size() as isize),
                      WirePointerKind::Struct));
    return Ok(StructReader {
        marker:
::std::marker::PhantomData::<&'a ()>,
        segment: segment,
        cap_table: cap_table,
        data: ptr as *mut _,
        pointers: ptr.offset(data_size_words as isize) as *mut _,
        data_size: data_size_words as u32 * BITS_PER_WORD as BitCount32,
        pointer_count: (*reff).struct_ref().ptr_count.get(),
        nesting_limit: nesting_limit - 1
    });
}

/// Extracts the capability referenced by `reff` from `cap_table`.
#[inline]
pub unsafe fn read_capability_pointer(_segment: *const SegmentReader,
                                      cap_table: CapTableReader,
                                      reff: *const WirePointer,
                                      _nesting_limit: i32) -> Result<Box<ClientHook>> {
    if (*reff).is_null() {
        Err(Error::failed(
            "Message contains null capability pointer.".to_string()))
    } else if !(*reff).is_capability() {
        Err(Error::failed(
            "Message contains non-capability pointer where capability pointer was expected.".to_string()))
    } else {
        let n = (*reff).cap_ref().index.get() as usize;
        match cap_table.extract_cap(n) {
            Some(client_hook) => { Ok(client_hook) }
            None => {
                Err(Error::failed(
                    format!("Message contains invalid capability pointer. Index: {}", n)))
            }
        }
    }
}

/// Reads the list pointed at by `reff`, validating element-size compatibility with
/// `expected_element_size` and guarding against overruns and amplification attacks.
#[inline]
pub unsafe fn read_list_pointer<'a>(mut segment: *const SegmentReader,
                                    cap_table: CapTableReader,
                                    mut reff: *const WirePointer,
                                    default_value: *const Word,
                                    expected_element_size: ElementSize,
                                    nesting_limit: i32) -> Result<ListReader<'a>> {
    let ref_target: *const Word = (*reff).target();
    if (*reff).is_null() {
        if default_value.is_null() || (*(default_value as *const WirePointer)).is_null() {
            return Ok(ListReader::new_default());
        }
        panic!("list default values unimplemented");
    }
    if nesting_limit <= 0 {
        return Err(Error::failed("nesting limit exceeded".to_string()));
    }
    let mut ptr: *const Word = try!(follow_fars(&mut reff, ref_target, &mut segment));
    if (*reff).kind() != WirePointerKind::List {
        return Err(Error::failed(
            "Message contains non-list pointer where list pointer was expected".to_string()));
    }
    let list_ref = (*reff).list_ref();
    let element_size = list_ref.element_size();
    match element_size {
        InlineComposite => {
            let word_count = list_ref.inline_composite_word_count();
            let tag: *const WirePointer = ::std::mem::transmute(ptr);
            ptr = ptr.offset(1);
            // Bounds check includes the tag word (ptr - 1).
            try!(bounds_check(segment, ptr.offset(-1), ptr.offset(word_count as isize),
                              WirePointerKind::List));
            if (*tag).kind() != WirePointerKind::Struct {
                return Err(Error::failed(
                    "InlineComposite lists of non-STRUCT type are not supported.".to_string()));
            }
            let size = (*tag).inline_composite_list_element_count();
            let struct_ref = (*tag).struct_ref();
            let words_per_element = struct_ref.word_size();
            if size as u64 * words_per_element as u64 > word_count as u64 {
                return Err(Error::failed(
                    "InlineComposite list's elements overrun its word count.".to_string()));
            }
            if words_per_element == 0 {
                // Watch out for lists of zero-sized structs, which can claim to be
                // arbitrarily large without having sent actual data.
                try!(amplified_read(segment, size as u64));
            }

            // If a struct list was not expected, then presumably a non-struct list was upgraded
            // to a struct list. We need to manipulate the pointer to point at the first field
            // of the struct. Together with the "stepBits", this will allow the struct list to
            // be accessed as if it were a primitive list without branching.

            // Check whether the size is compatible.
            match expected_element_size {
                Void => {}
                Bit => {
                    return Err(Error::failed(
                        "Found struct list where bit list was expected.".to_string()));
                }
                Byte | TwoBytes | FourBytes | EightBytes => {
                    if struct_ref.data_size.get() <= 0 {
                        return Err(Error::failed(
                            "Expected a primitive list, but got a list of pointer-only structs".to_string()));
                    }
                }
                Pointer => {
                    // We expected a list of pointers but got a list of structs. Assuming the
                    // first field in the struct is the pointer we were looking for, we want to
                    // munge the pointer to point at the first element's pointer section.
                    ptr = ptr.offset(struct_ref.data_size.get() as isize);
                    if struct_ref.ptr_count.get() <= 0 {
                        return Err(Error::failed(
                            "Expected a pointer list, but got a list of data-only structs".to_string()));
                    }
                }
                InlineComposite => {}
            }
            return Ok(ListReader {
                marker: ::std::marker::PhantomData::<&'a ()>,
                segment: segment,
                cap_table: cap_table,
                ptr: ::std::mem::transmute(ptr),
                element_count: size,
                step: words_per_element * BITS_PER_WORD as u32,
                struct_data_size: struct_ref.data_size.get() as u32 * (BITS_PER_WORD as u32),
                struct_pointer_count: struct_ref.ptr_count.get(),
                nesting_limit: nesting_limit - 1
            });
        }
        _ => {
            // This is a primitive or pointer list, but all such lists can also be interpreted
            // as struct lists. We need to compute the data size and pointer count for such
            // structs.
            let data_size = data_bits_per_element(list_ref.element_size());
            let pointer_count = pointers_per_element(list_ref.element_size());
            let element_count = list_ref.element_count();
            let step = data_size + pointer_count * BITS_PER_POINTER as u32;
            let word_count = round_bits_up_to_words(list_ref.element_count() as u64 * step as u64);
            try!(bounds_check(segment, ptr, ptr.offset(word_count as isize),
                              WirePointerKind::List));
            if element_size == Void {
                // Watch out for lists of void, which can claim to be arbitrarily large
                // without having sent actual data.
                try!(amplified_read(segment, element_count as u64));
            }

            // Verify that the elements are at least as large as the expected type. Note that if
            // we expected InlineComposite, the expected sizes here will be zero, because bounds
            // checking will be performed at field access time. So this check here is for the
            // case where we expected a list of some primitive or pointer type.
            let expected_data_bits_per_element = data_bits_per_element(expected_element_size);
            let expected_pointers_per_element = pointers_per_element(expected_element_size);
            if expected_data_bits_per_element > data_size ||
                expected_pointers_per_element > pointer_count {
                return Err(Error::failed(
                    "Message contains list with incompatible element type.".to_string()));
            }
            return Ok(ListReader {
                marker: ::std::marker::PhantomData::<&'a ()>,
                segment: segment,
                cap_table: cap_table,
                ptr: ::std::mem::transmute(ptr),
                element_count: list_ref.element_count(),
                step: step,
                struct_data_size: data_size,
                struct_pointer_count: pointer_count as u16,
                nesting_limit: nesting_limit - 1
            });
        }
    }
}

/// Reads the NUL-terminated text blob pointed at by `reff`.
#[inline]
pub unsafe fn read_text_pointer<'a>(mut segment: *const SegmentReader,
                                    mut reff: *const WirePointer,
                                    default_value: *const Word,
                                    default_size: ByteCount32) -> Result<text::Reader<'a>> {
    if (*reff).is_null() {
        // TODO? if default_value.is_null() { default_value = &"" }
        return text::new_reader(
            slice::from_raw_parts(::std::mem::transmute(default_value), default_size as usize));
    }
    let ref_target = (*reff).target();
    let ptr: *const Word = try!(follow_fars(&mut reff, ref_target, &mut segment));
    let list_ref = (*reff).list_ref();
    let size = list_ref.element_count();
    if (*reff).kind() != WirePointerKind::List {
        return Err(Error::failed(
            "Message contains non-list pointer where text was expected.".to_string()));
    }
    if list_ref.element_size() != Byte {
        return Err(Error::failed(
            "Message contains list pointer of non-bytes where text was expected.".to_string()));
    }
    try!(bounds_check(segment, ptr, ptr.offset(round_bytes_up_to_words(size) as isize),
                      WirePointerKind::List));
    if size <= 0 {
        return Err(Error::failed("Message contains text that is not NUL-terminated.".to_string()));
    }
    let str_ptr = ptr as *const u8;
    if (*str_ptr.offset((size - 1) as isize)) != 0u8 {
        return Err(Error::failed(
            "Message contains text that is not NUL-terminated".to_string()));
    }
    // Subtract 1 to exclude the NUL terminator from the reader's view.
    Ok(try!(text::new_reader(slice::from_raw_parts(str_ptr,
size as usize -1)))) } #[inline] pub unsafe fn read_data_pointer<'a>(mut segment: *const SegmentReader, mut reff: *const WirePointer, default_value: *const Word, default_size: ByteCount32) -> Result<data::Reader<'a>> { if (*reff).is_null() { return Ok(data::new_reader(default_value as *const _, default_size)); } let ref_target = (*reff).target(); let ptr: *const Word = try!(follow_fars(&mut reff, ref_target, &mut segment)); let list_ref = (*reff).list_ref(); let size: u32 = list_ref.element_count(); if (*reff).kind() != WirePointerKind::List { return Err(Error::failed( "Message contains non-list pointer where data was expected.".to_string())); } if list_ref.element_size() != Byte { return Err(Error::failed( "Message contains list pointer of non-bytes where data was expected.".to_string())); } try!(bounds_check(segment, ptr, ptr.offset(round_bytes_up_to_words(size) as isize), WirePointerKind::List)); Ok(data::new_reader(::std::mem::transmute(ptr), size)) } } static ZERO: u64 = 0; fn zero_pointer() -> *const WirePointer { &ZERO as *const _ as *const _ } pub type CapTable = Vec<Option<Box<ClientHook>>>; #[derive(Copy, Clone)] pub enum CapTableReader { Dummy, Plain(*const Vec<Option<Box<ClientHook>>>), } impl CapTableReader { pub fn extract_cap(&self, index: usize) -> Option<Box<ClientHook>> { match self { &CapTableReader::Dummy => None, &CapTableReader::Plain(hooks) => { let hooks: &Vec<Option<Box<ClientHook>>> = unsafe { &*hooks }; if index >= hooks.len() { None } else { match hooks[index] { None => None, Some(ref hook) => Some(hook.add_ref()) } } } } } } #[derive(Copy, Clone)] pub enum CapTableBuilder { Dummy, Plain(*mut Vec<Option<Box<ClientHook>>>), } impl CapTableBuilder { pub fn as_reader(self) -> CapTableReader { match self { CapTableBuilder::Dummy => CapTableReader::Dummy, CapTableBuilder::Plain(hooks) => CapTableReader::Plain(hooks), } } pub fn extract_cap(&self, index: usize) -> Option<Box<ClientHook>> { match self { &CapTableBuilder::Dummy => None, 
&CapTableBuilder::Plain(hooks) => { let hooks: &Vec<Option<Box<ClientHook>>> = unsafe { &*hooks }; if index >= hooks.len() { None } else { match hooks[index] { None => None, Some(ref hook) => Some(hook.add_ref()) } } } } } pub fn inject_cap(&mut self, cap: Box<ClientHook>) -> usize { match self { &mut CapTableBuilder::Dummy => 0, // XXX maybe we shouldn't swallow this. &mut CapTableBuilder::Plain(hooks) => { let hooks: &mut Vec<Option<Box<ClientHook>>> = unsafe { &mut *hooks }; hooks.push(Some(cap)); hooks.len() - 1 } } } pub fn drop_cap(&mut self, index: usize) { match self { &mut CapTableBuilder::Dummy => (), // XXX maybe we shouldn't swallow this. &mut CapTableBuilder::Plain(hooks) => { let hooks: &mut Vec<Option<Box<ClientHook>>> = unsafe { &mut *hooks }; if index < hooks.len() { hooks[index] = None; } } } } } #[derive(Clone, Copy)] pub struct PointerReader<'a> { marker: ::std::marker::PhantomData<&'a ()>, segment: *const SegmentReader, cap_table: CapTableReader, pointer: *const WirePointer, nesting_limit: i32 } impl <'a> PointerReader<'a> { pub fn new_default<'b>() -> PointerReader<'b> { PointerReader { marker: ::std::marker::PhantomData::<&'b ()>, segment: ::std::ptr::null(), cap_table: CapTableReader::Dummy, pointer: ::std::ptr::null(), nesting_limit: 0x7fffffff } } pub fn get_root<'b>(segment: *const SegmentReader, location: *const Word, nesting_limit: i32) -> Result<PointerReader<'b>> { unsafe { try!(wire_helpers::bounds_check(segment, location, location.offset(POINTER_SIZE_IN_WORDS as isize), WirePointerKind::Struct)); Ok(PointerReader { marker: ::std::marker::PhantomData::<&'b ()>, segment: segment, cap_table: CapTableReader::Dummy, pointer: location as *mut _, nesting_limit: nesting_limit }) } } pub fn get_root_unchecked<'b>(location: *const Word) -> PointerReader<'b> { PointerReader { marker: ::std::marker::PhantomData::<&'b ()>, segment: ::std::ptr::null(), cap_table: CapTableReader::Dummy, pointer: location as *mut _, nesting_limit: 0x7fffffff } } 
pub fn imbue(&mut self, cap_table: CapTableReader) { self.cap_table = cap_table; } pub fn is_null(&self) -> bool { self.pointer.is_null() || unsafe { (*self.pointer).is_null() } } pub fn total_size(&self) -> Result<MessageSize> { if self.pointer.is_null() { Ok( MessageSize { word_count: 0, cap_count: 0 } ) } else { unsafe { wire_helpers::total_size(self.segment, self.pointer, self.nesting_limit) } } } pub fn get_struct(&self, default_value: *const Word) -> Result<StructReader<'a>> { let reff: *const WirePointer = if self.pointer.is_null() { zero_pointer() } else { self.pointer }; unsafe { wire_helpers::read_struct_pointer(self.segment, self.cap_table, reff, default_value, self.nesting_limit) } } pub fn get_list(&self, expected_element_size: ElementSize, default_value: *const Word) -> Result<ListReader<'a>> { let reff = if self.pointer.is_null() { zero_pointer() } else { self.pointer }; unsafe { wire_helpers::read_list_pointer(self.segment, self.cap_table, reff, default_value, expected_element_size, self.nesting_limit) } } pub fn get_text(&self, default_value: *const Word, default_size: ByteCount32) -> Result<text::Reader<'a>> { let reff = if self.pointer.is_null() { zero_pointer() } else { self.pointer }; unsafe { wire_helpers::read_text_pointer(self.segment, reff, default_value, default_size) } } pub fn get_data(&self, default_value: *const Word, default_size: ByteCount32) -> Result<data::Reader<'a>> { let reff = if self.pointer.is_null() { zero_pointer() } else { self.pointer }; unsafe { wire_helpers::read_data_pointer(self.segment, reff, default_value, default_size) } } pub fn get_capability(&self) -> Result<Box<ClientHook>> { let reff: *const WirePointer = if self.pointer.is_null() { zero_pointer() } else { self.pointer }; unsafe { wire_helpers::read_capability_pointer(self.segment, self.cap_table, reff, self.nesting_limit) } } } #[derive(Clone, Copy)] pub struct PointerBuilder<'a> { marker: ::std::marker::PhantomData<&'a ()>, segment: *mut SegmentBuilder, 
cap_table: CapTableBuilder, pointer: *mut WirePointer } impl <'a> PointerBuilder<'a> { #[inline] pub fn get_root(segment: *mut SegmentBuilder, location: *mut Word) -> PointerBuilder<'a> { PointerBuilder { marker: ::std::marker::PhantomData::<&'a ()>, cap_table: CapTableBuilder::Dummy, segment: segment, pointer: location as *mut _, } } pub fn borrow<'b>(&'b mut self) -> PointerBuilder<'b> { PointerBuilder { .. *self } } pub fn imbue(&mut self, cap_table: CapTableBuilder) { self.cap_table = cap_table; } pub fn is_null(&self) -> bool { unsafe { (*self.pointer).is_null() } } pub fn get_struct(&self, size: StructSize, default_value: *const Word) -> Result<StructBuilder<'a>> { unsafe { wire_helpers::get_writable_struct_pointer( self.pointer, self.segment, self.cap_table, size, default_value) } } pub fn get_list(&self, element_size: ElementSize, default_value: *const Word) -> Result<ListBuilder<'a>> { unsafe { wire_helpers::get_writable_list_pointer( self.pointer, self.segment, self.cap_table, element_size, default_value) } } pub fn get_struct_list(&self, element_size: StructSize, default_value: *const Word) -> Result<ListBuilder<'a>> { unsafe { wire_helpers::get_writable_struct_list_pointer( self.pointer, self.segment, self.cap_table, element_size, default_value) } } pub fn get_text(&self, default_value: *const Word, default_size: ByteCount32) -> Result<text::Builder<'a>> { unsafe { wire_helpers::get_writable_text_pointer( self.pointer, self.segment, default_value, default_size) } } pub fn get_data(&self, default_value: *const Word, default_size: ByteCount32) -> Result<data::Builder<'a>> { unsafe { wire_helpers::get_writable_data_pointer( self.pointer, self.segment, default_value, default_size) } } pub fn get_capability(&self) -> Result<Box<ClientHook>> { unsafe { wire_helpers::read_capability_pointer( &(*self.segment).reader, self.cap_table.as_reader(), self.pointer, ::std::i32::MAX) } } pub fn init_struct(&self, size: StructSize) -> StructBuilder<'a> { unsafe { 
wire_helpers::init_struct_pointer(self.pointer, self.segment, self.cap_table, size) } } pub fn init_list(&self, element_size: ElementSize, element_count: ElementCount32) -> ListBuilder<'a> { unsafe { wire_helpers::init_list_pointer( self.pointer, self.segment, self.cap_table, element_count, element_size) } } pub fn init_struct_list(&self, element_count: ElementCount32, element_size: StructSize) -> ListBuilder<'a> { unsafe { wire_helpers::init_struct_list_pointer( self.pointer, self.segment, self.cap_table, element_count, element_size) } } pub fn init_text(&self, size: ByteCount32) -> text::Builder<'a> { unsafe { wire_helpers::init_text_pointer(self.pointer, self.segment, size).value } } pub fn init_data(&self, size: ByteCount32) -> data::Builder<'a> { unsafe { wire_helpers::init_data_pointer(self.pointer, self.segment, size).value } } pub fn set_struct(&self, value: &StructReader) -> Result<()> { unsafe { try!(wire_helpers::set_struct_pointer(self.segment, self.cap_table, self.pointer, *value)); Ok(()) } } pub fn set_list(&self, value: &ListReader) -> Result<()> { unsafe { try!(wire_helpers::set_list_pointer(self.segment, self.cap_table, self.pointer, *value)); Ok(()) } } pub fn set_text(&self, value: &str) { unsafe { wire_helpers::set_text_pointer(self.pointer, self.segment, value); } } pub fn set_data(&self, value: &[u8]) { unsafe { wire_helpers::set_data_pointer(self.pointer, self.segment, value); } } pub fn set_capability(&self, cap: Box<ClientHook>) { unsafe { wire_helpers::set_capability_pointer(self.segment, self.cap_table, self.pointer, cap); } } pub fn copy_from(&mut self, other: PointerReader) -> Result<()> { if other.pointer.is_null() { if !self.pointer.is_null() { unsafe { wire_helpers::zero_object(self.segment, self.pointer); *self.pointer = ::std::mem::zeroed(); } } } else { unsafe { try!(wire_helpers::copy_pointer(self.segment, self.cap_table, self.pointer, other.segment, other.cap_table, other.pointer, other.nesting_limit)); } } Ok(()) } pub fn 
clear(&self) { unsafe { wire_helpers::zero_object(self.segment, self.pointer); ptr::write_bytes(self.pointer, 0, 1); } } pub fn as_reader(self) -> PointerReader<'a> { unsafe { let segment_reader = &(*self.segment).reader; PointerReader { marker: ::std::marker::PhantomData::<&'a ()>, segment: segment_reader, cap_table: self.cap_table.as_reader(), pointer: self.pointer, nesting_limit: 0x7fffffff } } } } #[derive(Clone, Copy)] pub struct StructReader<'a> { marker: ::std::marker::PhantomData<&'a ()>, segment: *const SegmentReader, cap_table: CapTableReader, data: *const u8, pointers: *const WirePointer, data_size: BitCount32, pointer_count: WirePointerCount16, nesting_limit: i32 } impl <'a> StructReader<'a> { pub fn new_default<'b>() -> StructReader<'b> { StructReader { marker: ::std::marker::PhantomData::<&'b ()>, segment: ::std::ptr::null(), cap_table: CapTableReader::Dummy, data: ::std::ptr::null(), pointers: ::std::ptr::null(), data_size: 0, pointer_count: 0, nesting_limit: 0x7fffffff} } pub fn imbue(&mut self, cap_table: CapTableReader) { self.cap_table = cap_table } pub fn get_data_section_size(&self) -> BitCount32 { self.data_size } pub fn get_pointer_section_size(&self) -> WirePointerCount16 { self.pointer_count } pub fn get_data_section_as_blob(&self) -> usize { panic!("unimplemented") } #[inline] pub fn get_data_field<T:Endian + zero::Zero>(&self, offset: ElementCount) -> T { // We need to check the offset because the struct may have // been created with an old version of the protocol that did // not contain the field. 
if (offset + 1) * bits_per_element::<T>() <= self.data_size as usize { unsafe { let dwv: *const WireValue<T> = self.data as *const _; (*dwv.offset(offset as isize)).get() } } else { return T::zero(); } } #[inline] pub fn get_bool_field(&self, offset: ElementCount) -> bool { let boffset: BitCount32 = offset as BitCount32; if boffset < self.data_size { unsafe { let b: *const u8 = self.data.offset((boffset as usize / BITS_PER_BYTE) as isize); ((*b) & (1u8 << (boffset % BITS_PER_BYTE as u32) as usize)) != 0 } } else { false } } #[inline] pub fn get_data_field_mask<T:Endian + zero::Zero + Mask>(&self, offset: ElementCount, mask: <T as Mask>::T) -> T { Mask::mask(self.get_data_field(offset), mask) } #[inline] pub fn get_bool_field_mask(&self, offset: ElementCount, mask: bool) -> bool { self.get_bool_field(offset) ^ mask } #[inline] pub fn get_pointer_field(&self, ptr_index: WirePointerCount) -> PointerReader<'a> { if ptr_index < self.pointer_count as WirePointerCount { PointerReader { marker: ::std::marker::PhantomData::<&'a ()>, segment: self.segment, cap_table: self.cap_table, pointer: unsafe { self.pointers.offset(ptr_index as isize) }, nesting_limit: self.nesting_limit } } else { PointerReader::new_default() } } pub fn total_size(&self) -> Result<MessageSize> { let mut result = MessageSize { word_count: wire_helpers::round_bits_up_to_words(self.data_size as u64) as u64 + self.pointer_count as u64 * WORDS_PER_POINTER as u64, cap_count: 0 }; for i in 0.. 
self.pointer_count as isize { unsafe { result.plus_eq(try!(wire_helpers::total_size(self.segment, self.pointers.offset(i), self.nesting_limit))); } } // TODO when we have read limiting: segment->unread() Ok(result) } } #[derive(Clone, Copy)] pub struct StructBuilder<'a> { marker: ::std::marker::PhantomData<&'a ()>, segment: *mut SegmentBuilder, cap_table: CapTableBuilder, data: *mut u8, pointers: *mut WirePointer, data_size: BitCount32, pointer_count: WirePointerCount16 } impl <'a> StructBuilder<'a> { pub fn as_reader(self) -> StructReader<'a> { unsafe { let segment_reader = &(*self.segment).reader; StructReader { marker: ::std::marker::PhantomData::<&'a ()>, segment: segment_reader, cap_table: self.cap_table.as_reader(), data: self.data as *mut _, pointers: self.pointers as *mut _, data_size: self.data_size, pointer_count: self.pointer_count, nesting_limit: 0x7fffffff } } } pub fn imbue(&mut self, cap_table: CapTableBuilder) { self.cap_table = cap_table } #[inline] pub fn set_data_field<T:Endian>(&self, offset: ElementCount, value: T) { unsafe { let ptr: *mut WireValue<T> = self.data as *mut _; (*ptr.offset(offset as isize)).set(value) } } #[inline] pub fn set_data_field_mask<T:Endian + Mask>(&self, offset: ElementCount, value: T, mask: <T as Mask>::T) { self.set_data_field(offset, Mask::mask(value, mask)); } #[inline] pub fn get_data_field<T: Endian>(&self, offset: ElementCount) -> T { unsafe { let ptr: *mut WireValue<T> = self.data as *mut _; (*ptr.offset(offset as isize)).get() } } #[inline] pub fn get_data_field_mask<T:Endian + Mask>(&self, offset: ElementCount, mask: <T as Mask>::T) -> T { Mask::mask(self.get_data_field(offset), mask) } #[inline] pub fn set_bool_field(&self, offset: ElementCount, value: bool) { //# This branch should be compiled out whenever this is //# inlined with a constant offset. 
let boffset: BitCount0 = offset; let b = unsafe { self.data.offset((boffset / BITS_PER_BYTE) as isize)}; let bitnum = boffset % BITS_PER_BYTE; unsafe { (*b) = ( (*b) & !(1 << bitnum)) | ((value as u8) << bitnum) } } #[inline] pub fn set_bool_field_mask(&self, offset: ElementCount, value: bool, mask: bool) { self.set_bool_field(offset , value ^ mask); } #[inline] pub fn get_bool_field(&self, offset: ElementCount) -> bool { let boffset: BitCount0 = offset; let b = unsafe { self.data.offset((boffset / BITS_PER_BYTE) as isize) }; unsafe { ((*b) & (1 << (boffset % BITS_PER_BYTE ))) != 0 } } #[inline] pub fn get_bool_field_mask(&self, offset: ElementCount, mask: bool) -> bool { self.get_bool_field(offset) ^ mask } #[inline] pub fn get_pointer_field(&self, ptr_index: WirePointerCount) -> PointerBuilder<'a> { PointerBuilder { marker: ::std::marker::PhantomData::<&'a ()>, segment: self.segment, cap_table: self.cap_table, pointer: unsafe { self.pointers.offset(ptr_index as isize) } } } } #[derive(Clone, Copy)] pub struct ListReader<'a> { marker: ::std::marker::PhantomData<&'a ()>, segment: *const SegmentReader, cap_table: CapTableReader, ptr: *const u8, element_count: ElementCount32, step: BitCount32, struct_data_size: BitCount32, struct_pointer_count: WirePointerCount16, nesting_limit: i32 } impl <'a> ListReader<'a> { pub fn new_default<'b>() -> ListReader<'b> { ListReader { marker: ::std::marker::PhantomData::<&'b ()>, segment: ::std::ptr::null(), cap_table: CapTableReader::Dummy, ptr: ::std::ptr::null(), element_count: 0, step: 0, struct_data_size: 0, struct_pointer_count: 0, nesting_limit: 0x7fffffff} } pub fn imbue(&mut self, cap_table: CapTableReader) { self.cap_table = cap_table } #[inline] pub fn len(&self) -> ElementCount32 { self.element_count } pub fn get_struct_element(&self, index: ElementCount32) -> StructReader<'a> { let index_bit: BitCount64 = index as ElementCount64 * (self.step as BitCount64); let struct_data: *const u8 = unsafe { self.ptr.offset((index_bit 
as usize / BITS_PER_BYTE) as isize) }; let struct_pointers: *const WirePointer = unsafe { struct_data.offset((self.struct_data_size as usize / BITS_PER_BYTE) as isize) as *const _ }; StructReader { marker: ::std::marker::PhantomData::<&'a ()>, segment: self.segment, cap_table: self.cap_table, data: struct_data, pointers: struct_pointers, data_size: self.struct_data_size, pointer_count: self.struct_pointer_count, nesting_limit: self.nesting_limit - 1 } } #[inline] pub fn get_pointer_element(&self, index: ElementCount32) -> PointerReader<'a> { PointerReader { marker: ::std::marker::PhantomData::<&'a ()>, segment: self.segment, cap_table: self.cap_table, pointer: unsafe { self.ptr.offset((index * self.step / BITS_PER_BYTE as u32) as isize) as *mut _ }, nesting_limit: self.nesting_limit } } } #[derive(Clone, Copy)] pub struct ListBuilder<'a> { marker: ::std::marker::PhantomData<&'a ()>, segment: *mut SegmentBuilder, cap_table: CapTableBuilder, ptr: *mut u8, element_count: ElementCount32, step: BitCount32, struct_data_size: BitCount32, struct_pointer_count: WirePointerCount16 } impl <'a> ListBuilder<'a> { #[inline] pub fn new_default<'b>() -> ListBuilder<'b> { ListBuilder { marker: ::std::marker::PhantomData::<&'b ()>, segment: ::std::ptr::null_mut(), cap_table: CapTableBuilder::Dummy, ptr: ::std::ptr::null_mut(), element_count: 0, step: 0, struct_data_size: 0, struct_pointer_count: 0 } } pub fn imbue(&mut self, cap_table: CapTableBuilder) { self.cap_table = cap_table } #[inline] pub fn len(&self) -> ElementCount32 { self.element_count } pub fn get_struct_element(&self, index: ElementCount32) -> StructBuilder<'a> { let index_bit = index * self.step; let struct_data = unsafe{ self.ptr.offset((index_bit / BITS_PER_BYTE as u32) as isize)}; let struct_pointers = unsafe { struct_data.offset(((self.struct_data_size as usize) / BITS_PER_BYTE) as isize) as *mut _ }; StructBuilder { marker: ::std::marker::PhantomData::<&'a ()>, segment: self.segment, cap_table: self.cap_table, 
data: struct_data, pointers: struct_pointers, data_size: self.struct_data_size, pointer_count: self.struct_pointer_count, } } #[inline] pub fn get_pointer_element(&self, index: ElementCount32) -> PointerBuilder<'a> { PointerBuilder { marker: ::std::marker::PhantomData::<&'a ()>, segment: self.segment, cap_table: self.cap_table, pointer: unsafe { self.ptr.offset((index * self.step / BITS_PER_BYTE as u32) as isize) as *mut _ } } } } pub trait PrimitiveElement: Endian { #[inline] fn get(list_reader: &ListReader, index: ElementCount32) -> Self { unsafe { let ptr: *const u8 = list_reader.ptr.offset( (index as ElementCount * list_reader.step as usize / BITS_PER_BYTE) as isize); (*(ptr as *const WireValue<Self>)).get() } } #[inline] fn get_from_builder(list_builder: &ListBuilder, index: ElementCount32) -> Self { unsafe { let ptr: *mut WireValue<Self> = list_builder.ptr.offset( (index as ElementCount * list_builder.step as usize / BITS_PER_BYTE) as isize) as *mut _; (*ptr).get() } } #[inline] fn set(list_builder: &ListBuilder, index: ElementCount32, value: Self) { unsafe { let ptr: *mut WireValue<Self> = list_builder.ptr.offset( (index as ElementCount * list_builder.step as usize / BITS_PER_BYTE) as isize) as *mut _; (*ptr).set(value); } } fn element_size() -> ElementSize { match ::std::mem::size_of::<Self>() { 0 => Void, 1 => Byte, 2 => TwoBytes, 4 => FourBytes, 8 => EightBytes, _ => unreachable!(), } } } impl PrimitiveElement for u8 { } impl PrimitiveElement for u16 { } impl PrimitiveElement for u32 { } impl PrimitiveElement for u64 { } impl PrimitiveElement for i8 { } impl PrimitiveElement for i16 { } impl PrimitiveElement for i32 { } impl PrimitiveElement for i64 { } impl PrimitiveElement for f32 { } impl PrimitiveElement for f64 { } impl PrimitiveElement for bool { #[inline] fn get(list: &ListReader, index: ElementCount32) -> bool { let bindex: BitCount0 = index as ElementCount * list.step as usize; unsafe { let b: *const u8 = list.ptr.offset((bindex / BITS_PER_BYTE) 
as isize); ((*b) & (1 << (bindex % BITS_PER_BYTE))) != 0 } } #[inline] fn get_from_builder(list: &ListBuilder, index: ElementCount32) -> bool { let bindex: BitCount0 = index as ElementCount * list.step as usize; let b = unsafe { list.ptr.offset((bindex / BITS_PER_BYTE) as isize) }; unsafe { ((*b) & (1 << (bindex % BITS_PER_BYTE ))) != 0 } } #[inline] fn set(list: &ListBuilder, index: ElementCount32, value: bool) { let bindex: BitCount0 = index as ElementCount * list.step as usize; let b = unsafe { list.ptr.offset((bindex / BITS_PER_BYTE) as isize) }; let bitnum = bindex % BITS_PER_BYTE; unsafe { (*b) = ((*b) & !(1 << bitnum)) | ((value as u8) << bitnum) } } fn element_size() -> ElementSize { Bit } } impl PrimitiveElement for () { #[inline] fn get(_list: &ListReader, _index: ElementCount32) -> () { () } #[inline] fn get_from_builder(_list: &ListBuilder, _index: ElementCount32) -> () { () } #[inline] fn set(_list: &ListBuilder, _index: ElementCount32, _value: ()) { } }<|fim▁end|>
pub unsafe fn init_struct_pointer<'a>(mut reff: *mut WirePointer,
<|file_name|>CargaUsuario.java<|end_file_name|><|fim▁begin|>/* * * Copyright (c) 2013 - 2014 INT - National Institute of Technology & COPPE - Alberto Luiz Coimbra Institute - Graduate School and Research in Engineering. * See the file license.txt for copyright permission. * */ package cargaDoSistema; import modelo.TipoUsuario; import modelo.Usuario; import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; import service.TipoUsuarioAppService; import service.UsuarioAppService; import service.controleTransacao.FabricaDeAppService; import service.exception.AplicacaoException; import util.JPAUtil; /** * Classe responsável pela inclusão de Tipos de Usuário e de Usuário. * É usada na carga do sistema e deve ser a primeira a ser executada. * Está criando um usuário para cada tipo. (dma) * * @author marques * */ public class CargaUsuario { // Services public TipoUsuarioAppService tipoUsuarioService; public UsuarioAppService usuarioService; @BeforeClass public void setupClass(){ try { tipoUsuarioService = FabricaDeAppService.getAppService(TipoUsuarioAppService.class); usuarioService = FabricaDeAppService.getAppService(UsuarioAppService.class); } catch (Exception e) { e.printStackTrace(); } } @Test public void incluirTiposDeUsuario() { TipoUsuario tipoUsuarioAdmin = new TipoUsuario(); TipoUsuario tipoUsuarioAluno = new TipoUsuario(); TipoUsuario tipoUsuarioGestor = new TipoUsuario(); TipoUsuario tipoUsuarioEngenheiro = new TipoUsuario(); tipoUsuarioAdmin.setTipoUsuario(TipoUsuario.ADMINISTRADOR); tipoUsuarioAdmin.setDescricao("O usuário ADMINISTRADOR pode realizar qualquer operação no Sistema."); tipoUsuarioAluno.setTipoUsuario(TipoUsuario.ALUNO); tipoUsuarioAluno.setDescricao("O usuário ALUNO pode realizar apenas consultas e impressão de relatórios nas telas " + "relativas ao Horizonte de Planejamento (HP,Periodo PMP, Periodo PAP) e não acessa " + "Administração e Eng. 
Conhecimento"); tipoUsuarioGestor.setTipoUsuario(TipoUsuario.GESTOR); tipoUsuarioGestor.setDescricao("O usuário GESTOR pode realizar qualquer operação no Sistema, porém não possui acesso" + "as áreas de Administração e Engenharia de Conhecimento."); tipoUsuarioEngenheiro.setTipoUsuario(TipoUsuario.ENGENHEIRO_DE_CONHECIMENTO); tipoUsuarioEngenheiro.setDescricao("O usuário ENGENHEIRO pode realizar a parte de Logica Fuzzy (Engenharia de Conhecimento)" + <|fim▁hole|> tipoUsuarioService.inclui(tipoUsuarioAdmin); tipoUsuarioService.inclui(tipoUsuarioAluno); tipoUsuarioService.inclui(tipoUsuarioGestor); tipoUsuarioService.inclui(tipoUsuarioEngenheiro); Usuario usuarioAdmin = new Usuario(); Usuario usuarioAluno = new Usuario(); Usuario usuarioGestor = new Usuario(); Usuario usuarioEngenheiro = new Usuario(); usuarioAdmin.setNome("Administrador"); usuarioAdmin.setLogin("dgep"); usuarioAdmin.setSenha("admgesplan2@@8"); usuarioAdmin.setTipoUsuario(tipoUsuarioAdmin); usuarioAluno.setNome("Alberto da Silva"); usuarioAluno.setLogin("alberto"); usuarioAluno.setSenha("alberto"); usuarioAluno.setTipoUsuario(tipoUsuarioAluno); usuarioEngenheiro.setNome("Bernadete da Silva"); usuarioEngenheiro.setLogin("bernadete"); usuarioEngenheiro.setSenha("bernadete"); usuarioEngenheiro.setTipoUsuario(tipoUsuarioEngenheiro); usuarioGestor.setNome("Carlos da Silva"); usuarioGestor.setLogin("carlos"); usuarioGestor.setSenha("carlos"); usuarioGestor.setTipoUsuario(tipoUsuarioGestor); try { usuarioService.inclui(usuarioAdmin, usuarioAdmin.getSenha()); usuarioService.inclui(usuarioEngenheiro, usuarioEngenheiro.getSenha()); usuarioService.inclui(usuarioGestor, usuarioGestor.getSenha()); usuarioService.inclui(usuarioAluno, usuarioAluno.getSenha()); } catch (AplicacaoException e) { //e.printStackTrace(); System.out.println("Erro na inclusao do usuario: "+ e.getMessage()); } } }<|fim▁end|>
"no Sistema. Porém, não possui acesso a área Administrativa.");
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>import os import shutil from codecs import open as codecs_open import numpy as np from setuptools import setup, find_packages from distutils.core import Distribution, Extension from distutils.command.build_ext import build_ext from distutils import errors from Cython.Build import cythonize from Cython.Compiler.Errors import CompileError def check_for_openmp(): """ There does not seem to be a cross platform and standard way to check for OpenMP support. Attempt to compile a test script. Proceed with OpenMP implementation if it works. """ distribution = Distribution() ext_options = { 'extra_compile_args': ['-fopenmp'], 'extra_link_args': ['-fopenmp'] } extensions = [ Extension('geoblend.openmp_check', ['geoblend/openmp_check.pyx'], **ext_options) ] build_extension = build_ext(distribution) build_extension.finalize_options() build_extension.extensions = cythonize(extensions, force=True) build_extension.run() ext_options = {<|fim▁hole|> Extension('geoblend.vector', ['geoblend/vector.pyx'], **ext_options), Extension('geoblend.convolve', ['geoblend/convolve.pyx'], **ext_options) ] pkg_dir = os.path.dirname(os.path.realpath(__file__)) dst = os.path.join(pkg_dir, 'geoblend', 'coefficients.pyx') try: check_for_openmp() ext_options['extra_compile_args'] = ['-fopenmp'] ext_options['extra_link_args'] = ['-fopenmp'] src = os.path.join(pkg_dir, 'geoblend', '_coefficients_omp.pyx') except (errors.LinkError, errors.CompileError, CompileError): src = os.path.join(pkg_dir, 'geoblend', '_coefficients.pyx') shutil.copy(src, dst) extensions.append( Extension('geoblend.coefficients', ['geoblend/coefficients.pyx'], **ext_options), ) # Get the long description from the relevant file with codecs_open('README.rst', encoding='utf-8') as f: long_description = f.read() setup(name='geoblend', version='0.2.3', description=u"Geo-aware poisson blending.", long_description=long_description, classifiers=[], keywords='', author=u"Amit Kapadia", 
author_email='[email protected]', url='https://github.com/kapadia/geoblend', license='MIT', packages=find_packages(exclude=['ez_setup', 'examples', 'tests']), include_package_data=True, ext_modules=cythonize(extensions), zip_safe=False, install_requires=[ 'click', # 'rasterio', 'pyamg', 'scipy', 'scikit-image' ], extras_require={ 'test': ['pytest'], 'development': [ 'cython>=0.23.0', 'benchmark' ] }, entry_points=""" [console_scripts] geoblend=geoblend.scripts.cli:geoblend """ )<|fim▁end|>
'include_dirs': [ np.get_include() ] } extensions = [
<|file_name|>main.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>extern crate piston; use std::path::Path; use sdl2_window::Sdl2Window; use opengl_graphics::{ GlGraphics, Texture, OpenGL }; use graphics::math::Matrix2d; use piston::window::WindowSettings; use piston::event::*; fn render_text(face: &mut ft::Face, gl: &mut GlGraphics, t: Matrix2d, text: &str) { use graphics::*; let mut x = 10; let mut y = 0; for ch in text.chars() { face.load_char(ch as usize, ft::face::RENDER).unwrap(); let g = face.glyph(); let bitmap = g.bitmap(); let texture = Texture::from_memory_alpha(bitmap.buffer(), bitmap.width() as u32, bitmap.rows() as u32).unwrap(); Image::new_colored(color::BLACK).draw( &texture, default_draw_state(), t.trans((x + g.bitmap_left()) as f64, (y - g.bitmap_top()) as f64), gl ); x += (g.advance().x >> 6) as i32; y += (g.advance().y >> 6) as i32; } } fn main() { let opengl = OpenGL::_3_2; let window = Sdl2Window::new( opengl, WindowSettings::new("piston-example-freetype", [300, 300]) .exit_on_esc(true) ); let freetype = ft::Library::init().unwrap(); let font = Path::new("./bin/assets/FiraSans-Regular.ttf"); let mut face = freetype.new_face(&font, 0).unwrap(); face.set_pixel_sizes(0, 48).unwrap(); let ref mut gl = GlGraphics::new(opengl); for e in window.events() { if let Some(args) = e.render_args() { use graphics::*; gl.draw(args.viewport(), |c, gl| { let transform = c.transform.trans(0.0, 100.0); clear(color::WHITE, gl); render_text(&mut face, gl, transform, "Hello Piston!"); }); } } }<|fim▁end|>
extern crate graphics; extern crate freetype as ft; extern crate sdl2_window; extern crate opengl_graphics;
<|file_name|>channel_announcement_test.go<|end_file_name|><|fim▁begin|>package lnwire import ( "bytes" "reflect" "testing" "github.com/roasbeef/btcd/btcec" "github.com/roasbeef/btcd/chaincfg/chainhash" ) func TestChannelAnnoucementEncodeDecode(t *testing.T) { ca := &ChannelAnnouncement{ FirstNodeSig: someSig, SecondNodeSig: someSig, ChannelID: someChannelID, FirstBitcoinSig: someSig, SecondBitcoinSig: someSig, FirstNodeID: pubKey, SecondNodeID: pubKey, FirstBitcoinKey: pubKey, SecondBitcoinKey: pubKey, } // Next encode the CA message into an empty bytes buffer. var b bytes.Buffer if err := ca.Encode(&b, 0); err != nil { t.Fatalf("unable to encode ChannelAnnouncement: %v", err) } // Ensure the max payload estimate is correct. serializedLength := uint32(b.Len()) if serializedLength != ca.MaxPayloadLength(0) { t.Fatalf("payload length estimate is incorrect: expected %v "+ "got %v", serializedLength, ca.MaxPayloadLength(0)) } // Deserialize the encoded CA message into a new empty struct. ca2 := &ChannelAnnouncement{} if err := ca2.Decode(&b, 0); err != nil { t.Fatalf("unable to decode ChannelAnnouncement: %v", err) } // Assert equality of the two instances. 
if !reflect.DeepEqual(ca, ca2) { t.Fatalf("encode/decode error messages don't match %#v vs %#v", ca, ca2) } } func TestChannelAnnoucementValidation(t *testing.T) { getKeys := func(s string) (*btcec.PrivateKey, *btcec.PublicKey) { return btcec.PrivKeyFromBytes(btcec.S256(), []byte(s)) } firstNodePrivKey, firstNodePubKey := getKeys("node-id-1") secondNodePrivKey, secondNodePubKey := getKeys("node-id-2") firstBitcoinPrivKey, firstBitcoinPubKey := getKeys("bitcoin-key-1") secondBitcoinPrivKey, secondBitcoinPubKey := getKeys("bitcoin-key-2") var hash []byte hash = chainhash.DoubleHashB(firstNodePubKey.SerializeCompressed()) firstBitcoinSig, _ := firstBitcoinPrivKey.Sign(hash) <|fim▁hole|> ca := &ChannelAnnouncement{ ChannelID: someChannelID, FirstBitcoinSig: firstBitcoinSig, SecondBitcoinSig: secondBitcoinSig, FirstNodeID: firstNodePubKey, SecondNodeID: secondNodePubKey, FirstBitcoinKey: firstBitcoinPubKey, SecondBitcoinKey: secondBitcoinPubKey, } dataToSign, _ := ca.DataToSign() hash = chainhash.DoubleHashB(dataToSign) firstNodeSign, _ := firstNodePrivKey.Sign(hash) ca.FirstNodeSig = firstNodeSign secondNodeSign, _ := secondNodePrivKey.Sign(hash) ca.SecondNodeSig = secondNodeSign if err := ca.Validate(); err != nil { t.Fatal(err) } }<|fim▁end|>
hash = chainhash.DoubleHashB(secondNodePubKey.SerializeCompressed()) secondBitcoinSig, _ := secondBitcoinPrivKey.Sign(hash)
<|file_name|>kubeconfig_test.go<|end_file_name|><|fim▁begin|>/* Copyright (c) 2019 the Octant contributors. All Rights Reserved. SPDX-License-Identifier: Apache-2.0 */ package kubeconfig import ( "context" "io/ioutil" "os" "path/filepath" "strings" "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" internalCluster "github.com/vmware-tanzu/octant/internal/cluster" "github.com/vmware-tanzu/octant/pkg/cluster" ) func Test_NewKubeConfigs(t *testing.T) { kubeConfig := filepath.Join("testdata", "kubeconfig.yaml") config := cluster.RESTConfigOptions{} _, err := NewKubeConfigContextManager( context.TODO(), WithKubeConfigList(kubeConfig), FromClusterOption(internalCluster.WithRESTConfigOptions(config)), ) require.NoError(t, err) } func Test_SwitchContextUpdatesCurrentContext(t *testing.T) { kubeConfigs, err := NewKubeConfigContextManager( context.TODO(), WithKubeConfigList(filepath.Join("testdata", "kubeconfig.yaml")), ) require.NoError(t, err) kubeConfigs.SwitchContext(context.TODO(), "other-context") require.Equal(t, "other-context", kubeConfigs.CurrentContext()) } func Test_SwitchContextToEmptyUpdatesCurrentContextFromFileSystem(t *testing.T) { kubeConfigs, err := NewKubeConfigContextManager( context.TODO(), WithKubeConfigList(filepath.Join("testdata", "kubeconfig.yaml")), ) require.NoError(t, err) kubeConfigs.SwitchContext(context.TODO(), "") require.Equal(t, "my-cluster", kubeConfigs.CurrentContext()) } func Test_SwitchContextUpdatesClientNamespace(t *testing.T) { kubeConfigs, err := NewKubeConfigContextManager( context.TODO(), WithKubeConfigList(filepath.Join("testdata", "kubeconfig.yaml")),<|fim▁hole|> require.Equal(t, "non-default", kubeConfigs.ClusterClient().DefaultNamespace()) } func TestFSLoader_Load(t *testing.T) { dir, err := ioutil.TempDir("", "loader-test") require.NoError(t, err) defer func() { require.NoError(t, os.RemoveAll(dir)) }() inputs := []string{"kubeconfig-1.yaml", "kubeconfig-2.yaml"} var paths []string for i := 
range inputs { data, err := ioutil.ReadFile(filepath.Join("testdata", inputs[i])) require.NoError(t, err) kubeConfigPath := filepath.Join(dir, inputs[i]) require.NoError(t, ioutil.WriteFile(kubeConfigPath, data, 0644)) paths = append(paths, kubeConfigPath) } kc, err := NewKubeConfigContextManager( context.TODO(), WithKubeConfigList(strings.Join(paths, string(os.PathListSeparator))), ) require.NoError(t, err) assert.Equal(t, "dev-frontend", kc.CurrentContext()) assert.Equal(t, []Context{ {Name: "dev-frontend"}, {Name: "dev-storage"}, {Name: "exp-scratch"}, }, kc.Contexts()) } func Test_NewKubeConfigNoCluster(t *testing.T) { noClusterOptions := KubeConfigOption{nil, nil} _, err := NewKubeConfigContextManager( context.TODO(), WithKubeConfigList(filepath.Join("testdata", "kubeconfig.yaml")), FromClusterOption(internalCluster.WithRESTConfigOptions(cluster.RESTConfigOptions{})), noClusterOptions, ) require.NoError(t, err) }<|fim▁end|>
) require.NoError(t, err) kubeConfigs.SwitchContext(context.TODO(), "other-context")
<|file_name|>ascii.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms.<|fim▁hole|> //! Operations on ASCII strings and characters. //! //! Most string operations in Rust act on UTF-8 strings. However, at times it //! makes more sense to only consider the ASCII character set for a specific //! operation. //! //! The [`AsciiExt`] trait provides methods that allow for character //! operations that only act on the ASCII subset and leave non-ASCII characters //! alone. //! //! The [`escape_default`] function provides an iterator over the bytes of an //! escaped version of the character given. //! //! [`AsciiExt`]: trait.AsciiExt.html //! [`escape_default`]: fn.escape_default.html #![stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")] pub use core::ascii::{EscapeDefault, escape_default}; /// Extension methods for ASCII-subset only operations. /// /// Be aware that operations on seemingly non-ASCII characters can sometimes /// have unexpected results. Consider this example: /// /// ``` /// use std::ascii::AsciiExt; /// /// assert_eq!(AsciiExt::to_ascii_uppercase("café"), "CAFÉ"); /// assert_eq!(AsciiExt::to_ascii_uppercase("café"), "CAFé"); /// ``` /// /// In the first example, the lowercased string is represented `"cafe\u{301}"` /// (the last character is an acute accent [combining character]). Unlike the /// other characters in the string, the combining character will not get mapped /// to an uppercase variant, resulting in `"CAFE\u{301}"`. 
In the second /// example, the lowercased string is represented `"caf\u{e9}"` (the last /// character is a single Unicode character representing an 'e' with an acute /// accent). Since the last character is defined outside the scope of ASCII, /// it will not get mapped to an uppercase variant, resulting in `"CAF\u{e9}"`. /// /// [combining character]: https://en.wikipedia.org/wiki/Combining_character #[stable(feature = "rust1", since = "1.0.0")] #[rustc_deprecated(since = "1.26.0", reason = "use inherent methods instead")] pub trait AsciiExt { /// Container type for copied ASCII characters. #[stable(feature = "rust1", since = "1.0.0")] type Owned; /// Checks if the value is within the ASCII range. /// /// # Note /// /// This method will be deprecated in favor of the identically-named /// inherent methods on `u8`, `char`, `[u8]` and `str`. #[stable(feature = "rust1", since = "1.0.0")] fn is_ascii(&self) -> bool; /// Makes a copy of the value in its ASCII upper case equivalent. /// /// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z', /// but non-ASCII letters are unchanged. /// /// To uppercase the value in-place, use [`make_ascii_uppercase`]. /// /// To uppercase ASCII characters in addition to non-ASCII characters, use /// [`str::to_uppercase`]. /// /// # Note /// /// This method will be deprecated in favor of the identically-named /// inherent methods on `u8`, `char`, `[u8]` and `str`. /// /// [`make_ascii_uppercase`]: #tymethod.make_ascii_uppercase /// [`str::to_uppercase`]: ../primitive.str.html#method.to_uppercase #[stable(feature = "rust1", since = "1.0.0")] #[allow(deprecated)] fn to_ascii_uppercase(&self) -> Self::Owned; /// Makes a copy of the value in its ASCII lower case equivalent. /// /// ASCII letters 'A' to 'Z' are mapped to 'a' to 'z', /// but non-ASCII letters are unchanged. /// /// To lowercase the value in-place, use [`make_ascii_lowercase`]. 
/// /// To lowercase ASCII characters in addition to non-ASCII characters, use /// [`str::to_lowercase`]. /// /// # Note /// /// This method will be deprecated in favor of the identically-named /// inherent methods on `u8`, `char`, `[u8]` and `str`. /// /// [`make_ascii_lowercase`]: #tymethod.make_ascii_lowercase /// [`str::to_lowercase`]: ../primitive.str.html#method.to_lowercase #[stable(feature = "rust1", since = "1.0.0")] #[allow(deprecated)] fn to_ascii_lowercase(&self) -> Self::Owned; /// Checks that two values are an ASCII case-insensitive match. /// /// Same as `to_ascii_lowercase(a) == to_ascii_lowercase(b)`, /// but without allocating and copying temporaries. /// /// # Note /// /// This method will be deprecated in favor of the identically-named /// inherent methods on `u8`, `char`, `[u8]` and `str`. #[stable(feature = "rust1", since = "1.0.0")] fn eq_ignore_ascii_case(&self, other: &Self) -> bool; /// Converts this type to its ASCII upper case equivalent in-place. /// /// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z', /// but non-ASCII letters are unchanged. /// /// To return a new uppercased value without modifying the existing one, use /// [`to_ascii_uppercase`]. /// /// # Note /// /// This method will be deprecated in favor of the identically-named /// inherent methods on `u8`, `char`, `[u8]` and `str`. /// /// [`to_ascii_uppercase`]: #tymethod.to_ascii_uppercase #[stable(feature = "ascii", since = "1.9.0")] fn make_ascii_uppercase(&mut self); /// Converts this type to its ASCII lower case equivalent in-place. /// /// ASCII letters 'A' to 'Z' are mapped to 'a' to 'z', /// but non-ASCII letters are unchanged. /// /// To return a new lowercased value without modifying the existing one, use /// [`to_ascii_lowercase`]. /// /// # Note /// /// This method will be deprecated in favor of the identically-named /// inherent methods on `u8`, `char`, `[u8]` and `str`. 
/// /// [`to_ascii_lowercase`]: #tymethod.to_ascii_lowercase #[stable(feature = "ascii", since = "1.9.0")] fn make_ascii_lowercase(&mut self); } macro_rules! delegating_ascii_methods { () => { #[inline] fn is_ascii(&self) -> bool { self.is_ascii() } #[inline] fn to_ascii_uppercase(&self) -> Self::Owned { self.to_ascii_uppercase() } #[inline] fn to_ascii_lowercase(&self) -> Self::Owned { self.to_ascii_lowercase() } #[inline] fn eq_ignore_ascii_case(&self, o: &Self) -> bool { self.eq_ignore_ascii_case(o) } #[inline] fn make_ascii_uppercase(&mut self) { self.make_ascii_uppercase(); } #[inline] fn make_ascii_lowercase(&mut self) { self.make_ascii_lowercase(); } } } #[stable(feature = "rust1", since = "1.0.0")] #[allow(deprecated)] impl AsciiExt for u8 { type Owned = u8; delegating_ascii_methods!(); } #[stable(feature = "rust1", since = "1.0.0")] #[allow(deprecated)] impl AsciiExt for char { type Owned = char; delegating_ascii_methods!(); } #[stable(feature = "rust1", since = "1.0.0")] #[allow(deprecated)] impl AsciiExt for [u8] { type Owned = Vec<u8>; delegating_ascii_methods!(); } #[stable(feature = "rust1", since = "1.0.0")] #[allow(deprecated)] impl AsciiExt for str { type Owned = String; delegating_ascii_methods!(); }<|fim▁end|>
<|file_name|>multihash.rs<|end_file_name|><|fim▁begin|>use crate::Error; #[cfg(feature = "alloc")] use alloc::vec::Vec; use core::convert::TryFrom; use core::convert::TryInto; use core::fmt::Debug; #[cfg(feature = "serde-codec")] use serde_big_array::BigArray; use unsigned_varint::encode as varint_encode; #[cfg(feature = "std")] use std::io; #[cfg(not(feature = "std"))] use core2::io; /// Trait that implements hashing. /// /// It is usually implemented by a custom code table enum that derives the [`Multihash` derive]. /// /// [`Multihash` derive]: crate::derive pub trait MultihashDigest<const S: usize>: TryFrom<u64> + Into<u64> + Send + Sync + Unpin + Copy + Eq + Debug + 'static { /// Calculate the hash of some input data. /// /// # Example /// /// ``` /// // `Code` implements `MultihashDigest` /// use multihash::{Code, MultihashDigest}; /// /// let hash = Code::Sha3_256.digest(b"Hello world!"); /// println!("{:02x?}", hash); /// ``` fn digest(&self, input: &[u8]) -> Multihash<S>; /// Create a multihash from an existing multihash digest. /// /// # Example /// /// ``` /// use multihash::{Code, Hasher, MultihashDigest, Sha3_256}; /// /// let mut hasher = Sha3_256::default(); /// hasher.update(b"Hello world!"); /// let hash = Code::Sha3_256.wrap(&hasher.finalize()).unwrap(); /// println!("{:02x?}", hash); /// ``` fn wrap(&self, digest: &[u8]) -> Result<Multihash<S>, Error>; } /// A Multihash instance that only supports the basic functionality and no hashing. /// /// With this Multihash implementation you can operate on Multihashes in a generic way, but /// no hasher implementation is associated with the code. 
/// /// # Example /// /// ``` /// use multihash::Multihash; /// /// const Sha3_256: u64 = 0x16; /// let digest_bytes = [ /// 0x16, 0x20, 0x64, 0x4b, 0xcc, 0x7e, 0x56, 0x43, 0x73, 0x04, 0x09, 0x99, 0xaa, 0xc8, 0x9e, /// 0x76, 0x22, 0xf3, 0xca, 0x71, 0xfb, 0xa1, 0xd9, 0x72, 0xfd, 0x94, 0xa3, 0x1c, 0x3b, 0xfb, /// 0xf2, 0x4e, 0x39, 0x38, /// ]; /// let mh = Multihash::from_bytes(&digest_bytes).unwrap(); /// assert_eq!(mh.code(), Sha3_256); /// assert_eq!(mh.size(), 32); /// assert_eq!(mh.digest(), &digest_bytes[2..]); /// ``` #[cfg_attr(feature = "serde-codec", derive(serde::Deserialize))] #[cfg_attr(feature = "serde-codec", derive(serde::Serialize))] #[derive(Clone, Copy, Debug, Eq, Ord, PartialOrd)] pub struct Multihash<const S: usize> { /// The code of the Multihash. code: u64, /// The actual size of the digest in bytes (not the allocated size). size: u8, /// The digest. #[cfg_attr(feature = "serde-codec", serde(with = "BigArray"))] digest: [u8; S], } impl<const S: usize> Default for Multihash<S> { fn default() -> Self { Self { code: 0, size: 0, digest: [0; S], } } } impl<const S: usize> Multihash<S> { /// Wraps the digest in a multihash. pub const fn wrap(code: u64, input_digest: &[u8]) -> Result<Self, Error> { if input_digest.len() > S { return Err(Error::InvalidSize(input_digest.len() as _)); } let size = input_digest.len(); let mut digest = [0; S]; let mut i = 0; while i < size { digest[i] = input_digest[i]; i += 1; } Ok(Self { code, size: size as u8, digest, }) } /// Returns the code of the multihash. pub const fn code(&self) -> u64 { self.code } /// Returns the size of the digest. pub const fn size(&self) -> u8 { self.size } /// Returns the digest. pub fn digest(&self) -> &[u8] { &self.digest[..self.size as usize] } /// Reads a multihash from a byte stream. pub fn read<R: io::Read>(r: R) -> Result<Self, Error> where Self: Sized, { let (code, size, digest) = read_multihash(r)?; Ok(Self { code, size, digest }) } /// Parses a multihash from a bytes. 
/// /// You need to make sure the passed in bytes have the correct length. The digest length /// needs to match the `size` value of the multihash. pub fn from_bytes(mut bytes: &[u8]) -> Result<Self, Error> where Self: Sized, { let result = Self::read(&mut bytes)?; // There were more bytes supplied than read if !bytes.is_empty() { return Err(Error::InvalidSize(bytes.len().try_into().expect( "Currently the maximum size is 255, therefore always fits into usize", ))); } Ok(result) } /// Writes a multihash to a byte stream. pub fn write<W: io::Write>(&self, w: W) -> Result<(), Error> { write_multihash(w, self.code(), self.size(), self.digest()) } #[cfg(feature = "alloc")] /// Returns the bytes of a multihash. pub fn to_bytes(&self) -> Vec<u8> { let mut bytes = Vec::with_capacity(self.size().into()); self.write(&mut bytes) .expect("writing to a vec should never fail"); bytes } /// Truncates the multihash to the given size. It's up to the caller to ensure that the new size /// is secure (cryptographically) to use. /// /// If the new size is larger than the current size, this method does nothing. /// /// ``` /// use multihash::{Code, MultihashDigest}; /// /// let hash = Code::Sha3_256.digest(b"Hello world!").truncate(20); /// ``` pub fn truncate(&self, size: u8) -> Self { let mut mh = *self; mh.size = mh.size.min(size); mh } /// Resizes the backing multihash buffer. This function fails if the hash digest is larger than /// the target size. 
/// /// ``` /// use multihash::{Code, MultihashDigest, MultihashGeneric}; /// /// let hash = Code::Sha3_256.digest(b"Hello world!"); /// let large_hash: MultihashGeneric<32> = hash.resize().unwrap(); /// ``` pub fn resize<const R: usize>(&self) -> Result<Multihash<R>, Error> { let size = self.size as usize; if size > R { return Err(Error::InvalidSize(self.size as u64)); } let mut mh = Multihash { code: self.code, size: self.size, digest: [0; R], }; mh.digest[..size].copy_from_slice(&self.digest[..size]); Ok(mh) } } // Don't hash the whole allocated space, but just the actual digest #[allow(clippy::derive_hash_xor_eq)] impl<const S: usize> core::hash::Hash for Multihash<S> { fn hash<T: core::hash::Hasher>(&self, state: &mut T) { self.code.hash(state); self.digest().hash(state); } } #[cfg(feature = "alloc")] impl<const S: usize> From<Multihash<S>> for Vec<u8> { fn from(multihash: Multihash<S>) -> Self { multihash.to_bytes() } } impl<const A: usize, const B: usize> PartialEq<Multihash<B>> for Multihash<A> { fn eq(&self, other: &Multihash<B>) -> bool { // NOTE: there's no need to explicitly check the sizes, that's implicit in the digest. self.code == other.code && self.digest() == other.digest() } } #[cfg(feature = "scale-codec")] impl<const S: usize> parity_scale_codec::Encode for Multihash<S> { fn encode_to<EncOut: parity_scale_codec::Output + ?Sized>(&self, dest: &mut EncOut) { self.code.encode_to(dest); self.size.encode_to(dest); // **NOTE** We write the digest directly to dest, since we have known the size of digest. // // We do not choose to encode &[u8] directly, because it will add extra bytes (the compact length of digest). // For a valid multihash, the length of digest must equal to `size`. // Therefore, we can only read raw bytes whose length is equal to `size` when decoding. 
dest.write(self.digest()); } } #[cfg(feature = "scale-codec")] impl<const S: usize> parity_scale_codec::EncodeLike for Multihash<S> {} #[cfg(feature = "scale-codec")] impl<const S: usize> parity_scale_codec::Decode for Multihash<S> { fn decode<DecIn: parity_scale_codec::Input>( input: &mut DecIn, ) -> Result<Self, parity_scale_codec::Error> { let mut mh = Multihash { code: parity_scale_codec::Decode::decode(input)?, size: parity_scale_codec::Decode::decode(input)?, digest: [0; S], }; if mh.size as usize > S { return Err(parity_scale_codec::Error::from("invalid size")); } // For a valid multihash, the length of digest must equal to the size. input.read(&mut mh.digest[..mh.size as usize])?; Ok(mh) } } /// Writes the multihash to a byte stream. pub fn write_multihash<W>(mut w: W, code: u64, size: u8, digest: &[u8]) -> Result<(), Error> where W: io::Write, { let mut code_buf = varint_encode::u64_buffer(); let code = varint_encode::u64(code, &mut code_buf); let mut size_buf = varint_encode::u8_buffer(); let size = varint_encode::u8(size, &mut size_buf); w.write_all(code)?; w.write_all(size)?; w.write_all(digest)?; Ok(()) } /// Reads a multihash from a byte stream that contains a full multihash (code, size and the digest) /// /// Returns the code, size and the digest. The size is the actual size and not the /// maximum/allocated size of the digest. /// /// Currently the maximum size for a digest is 255 bytes. 
pub fn read_multihash<R, const S: usize>(mut r: R) -> Result<(u64, u8, [u8; S]), Error> where R: io::Read, { let code = read_u64(&mut r)?; let size = read_u64(&mut r)?; if size > S as u64 || size > u8::MAX as u64 { return Err(Error::InvalidSize(size)); } let mut digest = [0; S]; r.read_exact(&mut digest[..size as usize])?; Ok((code, size as u8, digest)) } #[cfg(feature = "std")] pub(crate) use unsigned_varint::io::read_u64; /// Reads 64 bits from a byte array into a u64 /// Adapted from unsigned-varint's generated read_u64 function at /// https://github.com/paritytech/unsigned-varint/blob/master/src/io.rs #[cfg(not(feature = "std"))] pub(crate) fn read_u64<R: io::Read>(mut r: R) -> Result<u64, Error> { use unsigned_varint::decode; let mut b = varint_encode::u64_buffer(); for i in 0..b.len() { let n = r.read(&mut (b[i..i + 1]))?; if n == 0 { return Err(Error::Varint(decode::Error::Insufficient)); } else if decode::is_last(b[i]) { return Ok(decode::u64(&b[..=i]).unwrap().0); } } Err(Error::Varint(decode::Error::Overflow)) } #[cfg(test)] mod tests { use super::*; use crate::multihash_impl::Code; #[test] fn roundtrip() { let hash = Code::Sha2_256.digest(b"hello world"); let mut buf = [0u8; 35]; hash.write(&mut buf[..]).unwrap(); let hash2 = Multihash::<32>::read(&buf[..]).unwrap(); assert_eq!(hash, hash2); } #[test] fn test_truncate_down() { let hash = Code::Sha2_256.digest(b"hello world"); let small = hash.truncate(20); assert_eq!(small.size(), 20); } #[test] fn test_truncate_up() { let hash = Code::Sha2_256.digest(b"hello world"); let small = hash.truncate(100); assert_eq!(small.size(), 32); } #[test] fn test_resize_fits() { let hash = Code::Sha2_256.digest(b"hello world"); let _: Multihash<32> = hash.resize().unwrap(); } <|fim▁hole|> let _: Multihash<100> = hash.resize().unwrap(); } #[test] fn test_resize_truncate() { let hash = Code::Sha2_256.digest(b"hello world"); hash.resize::<20>().unwrap_err(); } #[test] #[cfg(feature = "scale-codec")] fn test_scale() { use 
parity_scale_codec::{Decode, Encode}; let mh1 = Code::Sha2_256.digest(b"hello world"); // println!("mh1: code = {}, size = {}, digest = {:?}", mh1.code(), mh1.size(), mh1.digest()); let mh1_bytes = mh1.encode(); // println!("Multihash<32>: {}", hex::encode(&mh1_bytes)); let mh2: Multihash<32> = Decode::decode(&mut &mh1_bytes[..]).unwrap(); assert_eq!(mh1, mh2); let mh3: Multihash<64> = Code::Sha2_256.digest(b"hello world"); // println!("mh3: code = {}, size = {}, digest = {:?}", mh3.code(), mh3.size(), mh3.digest()); let mh3_bytes = mh3.encode(); // println!("Multihash<64>: {}", hex::encode(&mh3_bytes)); let mh4: Multihash<64> = Decode::decode(&mut &mh3_bytes[..]).unwrap(); assert_eq!(mh3, mh4); assert_eq!(mh1_bytes, mh3_bytes); } #[test] #[cfg(feature = "serde-codec")] fn test_serde() { let mh = Multihash::<32>::default(); let bytes = serde_json::to_string(&mh).unwrap(); let mh2: Multihash<32> = serde_json::from_str(&bytes).unwrap(); assert_eq!(mh, mh2); } #[test] fn test_eq_sizes() { let mh1 = Multihash::<32>::default(); let mh2 = Multihash::<64>::default(); assert_eq!(mh1, mh2); } }<|fim▁end|>
#[test] fn test_resize_up() { let hash = Code::Sha2_256.digest(b"hello world");
<|file_name|>joint.rs<|end_file_name|><|fim▁begin|>use detection::joint::anchor::Anchor; // FIXME: this wont be very helpful to mix several joints. /// Trait implemented by every joint. pub trait Joint<A> { /// The first anchor affected by this joint. fn anchor1(&self) -> &Anchor<A>; /// The second anchor affected by this joint. fn anchor2(&self) -> &Anchor<A>; /// The first attach point in global coordinates. fn anchor1_pos(&self) -> A;<|fim▁hole|><|fim▁end|>
/// The second attach point in global coordinates. fn anchor2_pos(&self) -> A; }
<|file_name|>shinryu1_h4.js<|end_file_name|><|fim▁begin|>{ id: "shinryu1_h4", name: "竜帝級 雷鳴轟く至天塔", desc: "", overlap: false, aprnum: 5, data: [ { appearance: [ 1 ], enemy: [ { name: "ボルケーノドラゴン", hp: 45000, imageno: 1660, attr: 0, spec: 0, isStrong: false, move: { on_move: [ s_enemy_force_reservoir(), s_enemy_attack(1500, 1, 1, true), s_enemy_attack(750, 1, 1, true) ], atrandom: false, turn: 1, wait: 1 } }, { name: "ブリザードドラゴン", hp: 22500, imageno: 1662, attr: 1, spec: 0, isStrong: false, move: { on_move: [ s_enemy_attack_attrsp(666, 222, [1,0,0,0,0], 5, 1, false) ], atrandom: false, turn: 2, wait: 2 } }, { name: "ライトニングドラゴン", hp: 90000, imageno: 1664, attr: 2, spec: 0, isStrong: false, move: { on_move: [ s_enemy_attack(1500, 1, 1, true), s_enemy_attack(1500, 1, 1, true), s_enemy_as_sealed(3, 3), ], atrandom: false, turn: 1, wait: 1 } } ] }, { appearance: [ 2 ], enemy: [ { name: "ライトニングドラゴン", hp: 90000, imageno: 1664, attr: 2, spec: 0, isStrong: false, move: { on_move: [ s_enemy_attack(1500, 1, 1, true), s_enemy_attack(1500, 1, 1, true), s_enemy_as_sealed(3, 3), ], atrandom: false, turn: 1, wait: 1 } }, { name: "ルビードラゴン", hp: 52500, imageno: 1666,<|fim▁hole|> attr: 0, spec: 0, isStrong: false, move: { on_move: [ s_enemy_force_reservoir(), s_enemy_attack(1000, 5, 1, true) ], atrandom: false, turn: 1, wait: 1 } }, { name: "ライトニングドラゴン", hp: 90000, imageno: 1664, attr: 2, spec: 0, isStrong: false, move: { on_move: [ s_enemy_attack(1500, 1, 1, true), s_enemy_attack(1500, 1, 1, true), s_enemy_as_sealed(3, 3), ], atrandom: false, turn: 1, wait: 1 } } ] }, { appearance: [ 3 ], enemy: [ { name: "ライトニングドラゴン", hp: 90000, imageno: 1664, attr: 2, spec: 0, isStrong: false, move: { on_popup: [ m_enemy_once(skill_counter_func(s_enemy_as_sealed, "-", 100, false, 5, 4)) ], on_move: [ s_enemy_attack(1500, 1, 1, true), s_enemy_attack(1500, 1, 1, true), s_enemy_as_sealed(3, 3) ], atrandom: false, turn: 1, wait: 1 } }, { name: "サファイアドラゴン", hp: 30000, imageno: 1668, attr: 1, spec: 0, 
isStrong: false, move: { on_move: [ s_enemy_attack_attrsp(840, 280, [1,0,0,0,0], 5, 1, false) ], atrandom: false, turn: 2, wait: 2 } }, { name: "ライトニングドラゴン", hp: 90000, imageno: 1664, attr: 2, spec: 0, isStrong: false, move: { on_popup: [ m_enemy_once(skill_counter_func(s_enemy_as_sealed, "-", 100, false, 5, 4)) ], on_move: [ s_enemy_attack(1500, 1, 1, true), s_enemy_attack(1500, 1, 1, true), s_enemy_as_sealed(3, 3) ], atrandom: false, turn: 1, wait: 1 } } ] }, { appearance: [ 4 ], enemy: [ { name: "ボルケーノドラゴン", hp: 60000, imageno: 1660, attr: 0, spec: 0, isStrong: false, move: { on_move: [ s_enemy_force_reservoir(), s_enemy_attack(1500, 1, 1, true), s_enemy_attack(750, 1, 1, true) ], atrandom: false, turn: 1, wait: 1 } }, { name: "トパーズドラゴン", hp: 150000, imageno: 1670, attr: 2, spec: 0, isStrong: false, move: { on_move: [ s_enemy_as_sealed(3, 3), s_enemy_attack(1500, 1, 1, true), s_enemy_attack(1500, 1, 1, true) ], atrandom: false, turn: 1, wait: 1 } }, { name: "ボルケーノドラゴン", hp: 60000, imageno: 1660, attr: 0, spec: 0, isStrong: false, move: { on_move: [ s_enemy_force_reservoir(), s_enemy_attack(1500, 1, 1, true), s_enemy_attack(750, 1, 1, true) ], atrandom: false, turn: 1, wait: 1 } } ] }, { appearance: [ 5 ], enemy: [ { name: "ボルケーノドラゴン", hp: 150000, imageno: 1660, attr: 0, spec: 0, isStrong: false, move: { on_move: [ s_enemy_force_reservoir(), s_enemy_attack(1500, 1, 1, true), s_enemy_attack(750, 1, 1, true) ], atrandom: false, turn: 1, wait: 1 } }, { name: "雷を司る竜帝 ワイバーン", hp: 900000, imageno: 1498, attr: 2, spec: 0, isStrong: true, move: { on_popup: [ m_enemy_once(skill_counter_func(s_enemy_attack, "-", 100, false, 1200, 5, 5, true)), damage_switch(s_enemy_when_hpdown(0.5), m_enemy_angry(), true) ], on_move: [ s_enemy_force_reservoir(), s_enemy_attack(3000, 5, 1, true) ], on_angry: [ s_enemy_attr_weaken([1,1,1,1,1], 1.5, 5, 4)/*!*/ ], on_move_angry: [ s_enemy_attack(1500, 5, 1, true) ], atrandom: false, turn: 1, wait: 1 } }, { name: "トパーズドラゴン", hp: 450000, 
imageno: 1670, attr: 2, spec: 0, isStrong: false, move: { on_popup: [ m_enemy_once(skill_counter_func(s_enemy_as_sealed, "-", 100, false, 5, 4)) ], on_move: [ s_enemy_attack(1500, 5, 1, true), s_enemy_as_sealed(5, 3), s_enemy_attack(1500, 5, 1, true) ], atrandom: false, turn: 1, wait: 1 } } ] } ] }<|fim▁end|>
<|file_name|>main.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 from linker import Linker<|fim▁hole|># TODO put into config spbBudgetXlsPath='../spb-budget-xls' if __name__=='__main__': linker=Linker('filelists',{ 'csv':['csv'], 'xls':['xls'], 'db':['zip','sql','xlsx'], }) htmlPage.HtmlPage('index.html','Данные бюджета Санкт-Петербурга',content.index.content,linker).write('output/index.html') htmlPage.HtmlPage('xls.html','Ведомственная структура расходов бюджета Санкт-Петербурга в csv и xls',htmlPage.importContent(spbBudgetXlsPath+'/index.html'),linker).write('output/xls.html') htmlPage.HtmlPage('db.html','БД и таблицы расходов бюджета Санкт-Петербурга из разных источников',content.db.content,linker).write('output/db.html') htmlPage.HtmlPage('fincom.html','Что можно найти на сайте Комитета финансов',content.fincom.content,linker).write('output/fincom.html')<|fim▁end|>
import htmlPage import content.index,content.db,content.fincom
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>#![cfg_attr(feature = "clippy", feature(plugin))] #![cfg_attr(feature = "clippy", plugin(clippy))] #[macro_use] extern crate clap; #[macro_use] extern crate gfx; extern crate gfx_window_glutin; extern crate glutin; extern crate image; #[macro_use] extern crate log; extern crate env_logger; extern crate failure;<|fim▁hole|>extern crate notify; extern crate reqwest; extern crate serde_json; #[macro_use] extern crate failure_derive; extern crate old_school_gfx_glutin_ext; mod argvalues; mod download; mod error; mod loader; mod runner; use argvalues::ArgValues; fn main() { env_logger::init().expect("Unable to initialize logger"); if let Err(e) = ArgValues::from_cli().and_then(|av| runner::run(av)) { error!("{}", e); } }<|fim▁end|>
<|file_name|>testFarray.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python from __future__ import division, absolute_import, print_function # System imports from distutils.util import get_platform import os import sys import unittest # Import NumPy import numpy as np major, minor = [ int(d) for d in np.__version__.split(".")[:2] ] if major == 0: BadListError = TypeError else: BadListError = ValueError # Add the distutils-generated build directory to the python search path and then # import the extension module libDir = "lib.%s-%s" % (get_platform(), sys.version[:3]) sys.path.insert(0, os.path.join("build", libDir)) import Farray ###################################################################### class FarrayTestCase(unittest.TestCase): def setUp(self): self.nrows = 5 self.ncols = 4 self.array = Farray.Farray(self.nrows, self.ncols) def testConstructor1(self): "Test Farray size constructor"<|fim▁hole|> def testConstructor2(self): "Test Farray copy constructor" for i in range(self.nrows): for j in range(self.ncols): self.array[i, j] = i + j arrayCopy = Farray.Farray(self.array) self.failUnless(arrayCopy == self.array) def testConstructorBad1(self): "Test Farray size constructor, negative nrows" self.assertRaises(ValueError, Farray.Farray, -4, 4) def testConstructorBad2(self): "Test Farray size constructor, negative ncols" self.assertRaises(ValueError, Farray.Farray, 4, -4) def testNrows(self): "Test Farray nrows method" self.failUnless(self.array.nrows() == self.nrows) def testNcols(self): "Test Farray ncols method" self.failUnless(self.array.ncols() == self.ncols) def testLen(self): "Test Farray __len__ method" self.failUnless(len(self.array) == self.nrows*self.ncols) def testSetGet(self): "Test Farray __setitem__, __getitem__ methods" m = self.nrows n = self.ncols for i in range(m): for j in range(n): self.array[i, j] = i*j for i in range(m): for j in range(n): self.failUnless(self.array[i, j] == i*j) def testSetBad1(self): "Test Farray __setitem__ method, 
negative row" self.assertRaises(IndexError, self.array.__setitem__, (-1, 3), 0) def testSetBad2(self): "Test Farray __setitem__ method, negative col" self.assertRaises(IndexError, self.array.__setitem__, (1, -3), 0) def testSetBad3(self): "Test Farray __setitem__ method, out-of-range row" self.assertRaises(IndexError, self.array.__setitem__, (self.nrows+1, 0), 0) def testSetBad4(self): "Test Farray __setitem__ method, out-of-range col" self.assertRaises(IndexError, self.array.__setitem__, (0, self.ncols+1), 0) def testGetBad1(self): "Test Farray __getitem__ method, negative row" self.assertRaises(IndexError, self.array.__getitem__, (-1, 3)) def testGetBad2(self): "Test Farray __getitem__ method, negative col" self.assertRaises(IndexError, self.array.__getitem__, (1, -3)) def testGetBad3(self): "Test Farray __getitem__ method, out-of-range row" self.assertRaises(IndexError, self.array.__getitem__, (self.nrows+1, 0)) def testGetBad4(self): "Test Farray __getitem__ method, out-of-range col" self.assertRaises(IndexError, self.array.__getitem__, (0, self.ncols+1)) def testAsString(self): "Test Farray asString method" result = """\ [ [ 0, 1, 2, 3 ], [ 1, 2, 3, 4 ], [ 2, 3, 4, 5 ], [ 3, 4, 5, 6 ], [ 4, 5, 6, 7 ] ] """ for i in range(self.nrows): for j in range(self.ncols): self.array[i, j] = i+j self.failUnless(self.array.asString() == result) def testStr(self): "Test Farray __str__ method" result = """\ [ [ 0, -1, -2, -3 ], [ 1, 0, -1, -2 ], [ 2, 1, 0, -1 ], [ 3, 2, 1, 0 ], [ 4, 3, 2, 1 ] ] """ for i in range(self.nrows): for j in range(self.ncols): self.array[i, j] = i-j self.failUnless(str(self.array) == result) def testView(self): "Test Farray view method" for i in range(self.nrows): for j in range(self.ncols): self.array[i, j] = i+j a = self.array.view() self.failUnless(isinstance(a, np.ndarray)) self.failUnless(a.flags.f_contiguous) for i in range(self.nrows): for j in range(self.ncols): self.failUnless(a[i, j] == i+j) 
###################################################################### if __name__ == "__main__": # Build the test suite suite = unittest.TestSuite() suite.addTest(unittest.makeSuite(FarrayTestCase)) # Execute the test suite print("Testing Classes of Module Farray") print("NumPy version", np.__version__) print() result = unittest.TextTestRunner(verbosity=2).run(suite) sys.exit(bool(result.errors + result.failures))<|fim▁end|>
self.failUnless(isinstance(self.array, Farray.Farray))
<|file_name|>part2.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python """Docstring.""" import re from functools import reduce from collections import Counter from common import get_input class SSLTester: """.""" def __init__(self, input_list=[]): """Initialize.""" self.input_list = input_list def find_aba(self, seq): """Return list: all 'aba's found in the string <seq>.""" i = 0 abas = [] while i < len(seq) - 2: if (seq[i] == seq[i + 2] and seq[i] != seq[i + 1]): abas.append(seq[i:i + 3]) i += 1 return abas def batch_find_aba(self, seqs): """Return list: all 'aba's found from all strings in <seqs>.""" return reduce(lambda x, y: x + y, [self.find_aba(x) for x in seqs], []) def has_corresponding_bab(self, input_key, seqs): """Return bool: <key> exists for any string in <seqs>.""" key = input_key[1::] + input_key[1] return True in [key in x for x in seqs] def ip_supports_ssl(self, ip_string): """Return bool: <ip_string> supports SSL. True: some string in <unbracketed> contains 'aba' pattern AND some string in <bracketed> contains matching 'bab' pattern """ all_segments = re.split(r"\[(\w+)\]", ip_string) bracketed = re.findall(r"\[(\w+)\]", ip_string) unbracketed = [x for x in all_segments if x not in bracketed] return True in [self.has_corresponding_bab(x, bracketed) for x in self.batch_find_aba(unbracketed)] def count_supported_ips(self, input_list=None):<|fim▁hole|>if __name__ == "__main__": tester = SSLTester(get_input()) print("Out of {} IPs, {} support SSL.".format( len(tester.input_list), tester.count_supported_ips() ))<|fim▁end|>
"""Return int: number of items in <input_list> which support SSL.""" input_list = input_list if input_list else self.input_list return Counter([self.ip_supports_ssl(x) for x in input_list])[True]
<|file_name|>Resources.hpp<|end_file_name|><|fim▁begin|>#include <cpp3ds/Config.hpp> #include <map> namespace cpp3ds { namespace priv { struct ResourceInfo { ResourceInfo() : data(nullptr), size(0) { } ResourceInfo(const Uint8 *x, const Uint32 y) : data(x), size(y) { }<|fim▁hole|> const Uint32 size; }; // Defined by source file generated by res_compile.py extern std::map<std::string, ResourceInfo> core_resources; } // namespace priv } // namespace cpp3ds<|fim▁end|>
const Uint8 *data;
<|file_name|>tabs.js<|end_file_name|><|fim▁begin|>/** * Magento * * NOTICE OF LICENSE * * This source file is subject to the Academic Free License (AFL 3.0) * that is bundled with this package in the file LICENSE_AFL.txt. * It is also available through the world-wide-web at this URL: * http://opensource.org/licenses/afl-3.0.php * If you did not receive a copy of the license and are unable to * obtain it through the world-wide-web, please send an email * to [email protected] so we can send you a copy immediately. * * DISCLAIMER * * Do not edit or add to this file if you wish to upgrade Magento to newer * versions in the future. If you wish to customize Magento for your * needs please refer to http://www.magentocommerce.com for more information. * * @copyright Copyright (c) 2008 Irubin Consulting Inc. DBA Varien (http://www.varien.com) * @license http://opensource.org/licenses/afl-3.0.php Academic Free License (AFL 3.0) */ var varienTabs = new Class.create(); varienTabs.prototype = { initialize : function(containerId, destElementId, activeTabId, shadowTabs){ this.containerId = containerId; this.destElementId = destElementId; this.activeTab = null; this.tabOnClick = this.tabMouseClick.bindAsEventListener(this); this.tabs = $$('#'+this.containerId+' li a.tab-item-link'); this.hideAllTabsContent(); for (var tab=0; tab<this.tabs.length; tab++) { Event.observe(this.tabs[tab],'click',this.tabOnClick); // move tab contents to destination element if($(this.destElementId)){ var tabContentElement = $(this.getTabContentElementId(this.tabs[tab])); if(tabContentElement && tabContentElement.parentNode.id != this.destElementId){ $(this.destElementId).appendChild(tabContentElement); tabContentElement.container = this; tabContentElement.statusBar = this.tabs[tab]; tabContentElement.tabObject = this.tabs[tab]; this.tabs[tab].contentMoved = true; this.tabs[tab].container = this; this.tabs[tab].show = function(){ this.container.showTabContent(this); } if(varienGlobalEvents){ 
varienGlobalEvents.fireEvent('moveTab', {tab:this.tabs[tab]}); } } } /* // this code is pretty slow in IE, so lets do it in tabs*.phtml // mark ajax tabs as not loaded if (Element.hasClassName($(this.tabs[tab].id), 'ajax')) { Element.addClassName($(this.tabs[tab].id), 'notloaded'); } */ // bind shadow tabs if (this.tabs[tab].id && shadowTabs && shadowTabs[this.tabs[tab].id]) { this.tabs[tab].shadowTabs = shadowTabs[this.tabs[tab].id]; } } this.displayFirst = activeTabId; Event.observe(window,'load',this.moveTabContentInDest.bind(this)); }, setSkipDisplayFirstTab : function(){ this.displayFirst = null; }, moveTabContentInDest : function(){ for(var tab=0; tab<this.tabs.length; tab++){ if($(this.destElementId) && !this.tabs[tab].contentMoved){ var tabContentElement = $(this.getTabContentElementId(this.tabs[tab])); if(tabContentElement && tabContentElement.parentNode.id != this.destElementId){ $(this.destElementId).appendChild(tabContentElement); tabContentElement.container = this; tabContentElement.statusBar = this.tabs[tab]; tabContentElement.tabObject = this.tabs[tab]; this.tabs[tab].container = this; this.tabs[tab].show = function(){ this.container.showTabContent(this); } if(varienGlobalEvents){ varienGlobalEvents.fireEvent('moveTab', {tab:this.tabs[tab]}); } } } } if (this.displayFirst) { this.showTabContent($(this.displayFirst)); this.displayFirst = null; } }, getTabContentElementId : function(tab){ if(tab){ return tab.id+'_content'; } return false; }, tabMouseClick : function(event) { var tab = Event.findElement(event, 'a'); // go directly to specified url or switch tab if ((tab.href.indexOf('#') != tab.href.length-1) && !(Element.hasClassName(tab, 'ajax')) ) { location.href = tab.href; } else { this.showTabContent(tab); } Event.stop(event); }, hideAllTabsContent : function(){ for(var tab in this.tabs){ this.hideTabContent(this.tabs[tab]); } }, // show tab, ready or not showTabContentImmediately : function(tab) { this.hideAllTabsContent(); var tabContentElement 
= $(this.getTabContentElementId(tab)); if (tabContentElement) { Element.show(tabContentElement); Element.addClassName(tab, 'active'); // load shadow tabs, if any if (tab.shadowTabs && tab.shadowTabs.length) { for (var k in tab.shadowTabs) { this.loadShadowTab($(tab.shadowTabs[k])); } } if (!Element.hasClassName(tab, 'ajax only')) { Element.removeClassName(tab, 'notloaded'); } this.activeTab = tab; } if (varienGlobalEvents) { varienGlobalEvents.fireEvent('showTab', {tab:tab}); } }, // the lazy show tab method showTabContent : function(tab) { var tabContentElement = $(this.getTabContentElementId(tab)); if (tabContentElement) { if (this.activeTab != tab) { if (varienGlobalEvents) { if (varienGlobalEvents.fireEvent('tabChangeBefore', $(this.getTabContentElementId(this.activeTab))).indexOf('cannotchange') != -1) { return; }; } } // wait for ajax request, if defined var isAjax = Element.hasClassName(tab, 'ajax'); var isEmpty = tabContentElement.innerHTML=='' && tab.href.indexOf('#')!=tab.href.length-1; var isNotLoaded = Element.hasClassName(tab, 'notloaded'); if ( isAjax && (isEmpty || isNotLoaded) ) { new Ajax.Request(tab.href, { parameters: {form_key: FORM_KEY}, evalScripts: true, onSuccess: function(transport) { try { if (transport.responseText.isJSON()) { var response = transport.responseText.evalJSON() if (response.error) { alert(response.message); } if(response.ajaxExpired && response.ajaxRedirect) { setLocation(response.ajaxRedirect); } } else { $(tabContentElement.id).update(transport.responseText); this.showTabContentImmediately(tab) } } catch (e) { $(tabContentElement.id).update(transport.responseText); this.showTabContentImmediately(tab) } }.bind(this) }); } else { this.showTabContentImmediately(tab); } } }, loadShadowTab : function(tab) { var tabContentElement = $(this.getTabContentElementId(tab)); if (tabContentElement && Element.hasClassName(tab, 'ajax') && Element.hasClassName(tab, 'notloaded')) { new Ajax.Request(tab.href, { parameters: {form_key: 
FORM_KEY}, evalScripts: true, onSuccess: function(transport) { try { if (transport.responseText.isJSON()) { var response = transport.responseText.evalJSON() if (response.error) { alert(response.message); } if(response.ajaxExpired && response.ajaxRedirect) { setLocation(response.ajaxRedirect);<|fim▁hole|> } } else { $(tabContentElement.id).update(transport.responseText); if (!Element.hasClassName(tab, 'ajax only')) { Element.removeClassName(tab, 'notloaded'); } } } catch (e) { $(tabContentElement.id).update(transport.responseText); if (!Element.hasClassName(tab, 'ajax only')) { Element.removeClassName(tab, 'notloaded'); } } }.bind(this) }); } }, hideTabContent : function(tab){ var tabContentElement = $(this.getTabContentElementId(tab)); if($(this.destElementId) && tabContentElement){ Element.hide(tabContentElement); Element.removeClassName(tab, 'active'); } if(varienGlobalEvents){ varienGlobalEvents.fireEvent('hideTab', {tab:tab}); } } }<|fim▁end|>
<|file_name|>clean_mac_info_plist.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # Jonas Schnelli, 2013 # make sure the Nichts-Qt.app contains the right plist (including the right version) # fix made because of serval bugs in Qt mac deployment (https://bugreports.qt-project.org/browse/QTBUG-21267) from string import Template from datetime import date bitcoinDir = "./"; <|fim▁hole|>inFile = bitcoinDir+"/share/qt/Info.plist" outFile = "Nichts-Qt.app/Contents/Info.plist" version = "unknown"; fileForGrabbingVersion = bitcoinDir+"bitcoin-qt.pro" for line in open(fileForGrabbingVersion): lineArr = line.replace(" ", "").split("="); if lineArr[0].startswith("VERSION"): version = lineArr[1].replace("\n", ""); fIn = open(inFile, "r") fileContent = fIn.read() s = Template(fileContent) newFileContent = s.substitute(VERSION=version,YEAR=date.today().year) fOut = open(outFile, "w"); fOut.write(newFileContent); print "Info.plist fresh created"<|fim▁end|>
<|file_name|>__openerp__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # © 2009 Pexego/Comunitea # © 2011-2012 Iker Coranti (www.avanzosc.es) # © 2014 Juanjo Algaz (gutierrezweb.es) # © 2014-2016 Pedro M. Baeza # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl-3.0). { "name": "Account balance reporting engine", "version": "8.0.1.2.0", "author": "Pexego, " "AvanzOSC, " "Tecnativa, " "Odoo Community Association (OCA)", "website": "http://www.pexego.es", "category": "Accounting & Finance", "contributors": [ "Juanjo Algaz <[email protected]>", "Joaquín Gutierrez <[email protected]>", "Pedro M. Baeza <[email protected]>", "Oihane Crucelaegui <[email protected]>", ], "license": 'AGPL-3', "depends": [ "account", ], "data": [ "security/ir.model.access.csv", "views/account_account_view.xml", "views/account_balance_reporting_template_view.xml", "views/account_balance_reporting_report_view.xml",<|fim▁hole|> "wizard/wizard_print_view.xml", ], "installable": True, }<|fim▁end|>
"views/account_balance_reporting_menu.xml", "report/account_balance_reporting_reports.xml", "report/report_generic.xml",
<|file_name|>test_iptables_firewall.py<|end_file_name|><|fim▁begin|># Copyright 2012, Nachi Ueno, NTT MCL, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import copy import mock from neutron_lib import constants from oslo_config import cfg import six import testtools from neutron.agent.common import config as a_cfg from neutron.agent import firewall from neutron.agent.linux import ipset_manager from neutron.agent.linux import iptables_comments as ic from neutron.agent.linux import iptables_firewall from neutron.common import exceptions as n_exc from neutron.common import utils from neutron.conf.agent import securitygroups_rpc as security_config from neutron.tests import base from neutron.tests.unit.api.v2 import test_base _uuid = test_base._uuid #TODO(mangelajo): replace all 'IPv4', 'IPv6' to constants FAKE_PREFIX = {'IPv4': '10.0.0.0/24', 'IPv6': 'fe80::/48'} FAKE_IP = {'IPv4': '10.0.0.1', 'IPv6': 'fe80::1'} #TODO(mangelajo): replace all '*_sgid' strings for the constants FAKE_SGID = 'fake_sgid' OTHER_SGID = 'other_sgid' _IPv6 = constants.IPv6 _IPv4 = constants.IPv4 RAW_TABLE_OUTPUT = """ # Generated by iptables-save v1.4.21 on Fri Jul 31 16:13:28 2015 *raw :PREROUTING ACCEPT [11561:3470468] :OUTPUT ACCEPT [11504:4064044] :neutron-openvswi-OUTPUT - [0:0] :neutron-openvswi-PREROUTING - [0:0] -A PREROUTING -j neutron-openvswi-PREROUTING -A OUTPUT -j neutron-openvswi-OUTPUT -A neutron-openvswi-PREROUTING -m physdev --physdev-in qvbe804433b-61 -j CT 
--zone 1 -A neutron-openvswi-PREROUTING -m physdev --physdev-in tape804433b-61 -j CT --zone 1 -A neutron-openvswi-PREROUTING -m physdev --physdev-in qvb95c24827-02 -j CT --zone 2 -A neutron-openvswi-PREROUTING -m physdev --physdev-in tap95c24827-02 -j CT --zone 2 -A neutron-openvswi-PREROUTING -m physdev --physdev-in qvb61634509-31 -j CT --zone 2 -A neutron-openvswi-PREROUTING -m physdev --physdev-in tap61634509-31 -j CT --zone 2 -A neutron-openvswi-PREROUTING -m physdev --physdev-in qvb8f46cf18-12 -j CT --zone 9 -A neutron-openvswi-PREROUTING -m physdev --physdev-in tap8f46cf18-12 -j CT --zone 9 COMMIT # Completed on Fri Jul 31 16:13:28 2015 """ # noqa class BaseIptablesFirewallTestCase(base.BaseTestCase): def setUp(self): super(BaseIptablesFirewallTestCase, self).setUp() cfg.CONF.register_opts(a_cfg.ROOT_HELPER_OPTS, 'AGENT') security_config.register_securitygroups_opts() cfg.CONF.set_override('comment_iptables_rules', False, 'AGENT') self.utils_exec_p = mock.patch( 'neutron.agent.linux.utils.execute') self.utils_exec = self.utils_exec_p.start() self.iptables_cls_p = mock.patch( 'neutron.agent.linux.iptables_manager.IptablesManager') iptables_cls = self.iptables_cls_p.start() self.iptables_inst = mock.Mock() self.v4filter_inst = mock.Mock() self.v6filter_inst = mock.Mock() self.iptables_inst.ipv4 = {'filter': self.v4filter_inst, 'raw': self.v4filter_inst } self.iptables_inst.ipv6 = {'filter': self.v6filter_inst, 'raw': self.v6filter_inst } iptables_cls.return_value = self.iptables_inst self.iptables_inst.get_rules_for_table.return_value = ( RAW_TABLE_OUTPUT.splitlines()) self.firewall = iptables_firewall.IptablesFirewallDriver() self.firewall.iptables = self.iptables_inst class IptablesFirewallTestCase(BaseIptablesFirewallTestCase): def _fake_port(self): return {'device': 'tapfake_dev', 'mac_address': 'ff:ff:ff:ff:ff:ff', 'network_id': 'fake_net', 'fixed_ips': [FAKE_IP['IPv4'], FAKE_IP['IPv6']]} def test_prepare_port_filter_with_no_sg(self): port = 
self._fake_port() self.firewall.prepare_port_filter(port) calls = [mock.call.add_chain('sg-fallback'), mock.call.add_rule( 'sg-fallback', '-j DROP', comment=ic.UNMATCH_DROP), mock.call.remove_chain('sg-chain'), mock.call.add_chain('sg-chain'), mock.call.add_chain('ifake_dev'), mock.call.add_rule('FORWARD', '-m physdev --physdev-out tapfake_dev ' '--physdev-is-bridged ' '-j $sg-chain', comment=ic.VM_INT_SG), mock.call.add_rule('sg-chain', '-m physdev --physdev-out tapfake_dev ' '--physdev-is-bridged ' '-j $ifake_dev', comment=ic.SG_TO_VM_SG), mock.call.add_rule( 'ifake_dev', '-m state --state RELATED,ESTABLISHED -j RETURN', comment=None), mock.call.add_rule( 'ifake_dev', '-m state --state INVALID -j DROP', comment=None), mock.call.add_rule( 'ifake_dev', '-j $sg-fallback', comment=None), mock.call.add_chain('ofake_dev'), mock.call.add_rule('FORWARD', '-m physdev --physdev-in tapfake_dev ' '--physdev-is-bridged ' '-j $sg-chain', comment=ic.VM_INT_SG), mock.call.add_rule('sg-chain', '-m physdev --physdev-in tapfake_dev ' '--physdev-is-bridged -j $ofake_dev', comment=ic.SG_TO_VM_SG), mock.call.add_rule('INPUT', '-m physdev --physdev-in tapfake_dev ' '--physdev-is-bridged -j $ofake_dev', comment=ic.INPUT_TO_SG), mock.call.add_chain('sfake_dev'), mock.call.add_rule( 'sfake_dev', '-s 10.0.0.1/32 -m mac --mac-source FF:FF:FF:FF:FF:FF ' '-j RETURN', comment=ic.PAIR_ALLOW), mock.call.add_rule( 'sfake_dev', '-j DROP', comment=ic.PAIR_DROP), mock.call.add_rule( 'ofake_dev', '-s 0.0.0.0/32 -d 255.255.255.255/32 -p udp -m udp ' '--sport 68 --dport 67 -j RETURN', comment=None), mock.call.add_rule('ofake_dev', '-j $sfake_dev', comment=None), mock.call.add_rule( 'ofake_dev', '-p udp -m udp --sport 68 --dport 67 -j RETURN', comment=None), mock.call.add_rule( 'ofake_dev', '-p udp -m udp --sport 67 -m udp --dport 68 -j DROP', comment=None), mock.call.add_rule( 'ofake_dev', '-m state --state RELATED,ESTABLISHED -j RETURN', comment=None), mock.call.add_rule( 'ofake_dev', '-m state 
--state INVALID -j DROP', comment=None), mock.call.add_rule( 'ofake_dev', '-j $sg-fallback', comment=None), mock.call.add_rule('sg-chain', '-j ACCEPT')] self.v4filter_inst.assert_has_calls(calls) def test_filter_ipv4_ingress(self): rule = {'ethertype': 'IPv4', 'direction': 'ingress'} ingress = mock.call.add_rule('ifake_dev', '-j RETURN', comment=None) egress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv4_ingress_prefix(self): prefix = FAKE_PREFIX['IPv4'] rule = {'ethertype': 'IPv4', 'direction': 'ingress', 'source_ip_prefix': prefix} ingress = mock.call.add_rule( 'ifake_dev', '-s %s -j RETURN' % prefix, comment=None) egress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv4_ingress_tcp(self): rule = {'ethertype': 'IPv4', 'direction': 'ingress', 'protocol': 'tcp'} ingress = mock.call.add_rule( 'ifake_dev', '-p tcp -j RETURN', comment=None) egress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv4_ingress_tcp_prefix(self): prefix = FAKE_PREFIX['IPv4'] rule = {'ethertype': 'IPv4', 'direction': 'ingress', 'protocol': 'tcp', 'source_ip_prefix': prefix} ingress = mock.call.add_rule('ifake_dev', '-s %s -p tcp -j RETURN' % prefix, comment=None) egress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv4_ingress_icmp(self): rule = {'ethertype': 'IPv4', 'direction': 'ingress', 'protocol': 'icmp'} ingress = mock.call.add_rule('ifake_dev', '-p icmp -j RETURN', comment=None) egress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv4_ingress_icmp_prefix(self): prefix = FAKE_PREFIX['IPv4'] rule = {'ethertype': 'IPv4', 'direction': 'ingress', 'protocol': 'icmp', 'source_ip_prefix': prefix} ingress = mock.call.add_rule( 'ifake_dev', '-s %s -p icmp -j RETURN' % prefix, comment=None) egress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv4_ingress_tcp_port(self): rule = {'ethertype': 'IPv4', 
'direction': 'ingress', 'protocol': 'tcp', 'port_range_min': 10, 'port_range_max': 10} ingress = mock.call.add_rule('ifake_dev', '-p tcp -m tcp --dport 10 -j RETURN', comment=None) egress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv4_ingress_tcp_mport(self): rule = {'ethertype': 'IPv4', 'direction': 'ingress', 'protocol': 'tcp', 'port_range_min': 10, 'port_range_max': 100} ingress = mock.call.add_rule( 'ifake_dev', '-p tcp -m tcp -m multiport --dports 10:100 -j RETURN', comment=None) egress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv4_ingress_tcp_mport_prefix(self): prefix = FAKE_PREFIX['IPv4'] rule = {'ethertype': 'IPv4', 'direction': 'ingress', 'protocol': 'tcp', 'port_range_min': 10, 'port_range_max': 100, 'source_ip_prefix': prefix} ingress = mock.call.add_rule( 'ifake_dev', '-s %s -p tcp -m tcp -m multiport --dports 10:100 ' '-j RETURN' % prefix, comment=None) egress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv4_ingress_udp(self): rule = {'ethertype': 'IPv4', 'direction': 'ingress', 'protocol': 'udp'} ingress = mock.call.add_rule( 'ifake_dev', '-p udp -j RETURN', comment=None) egress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv4_ingress_udp_prefix(self): prefix = FAKE_PREFIX['IPv4'] rule = {'ethertype': 'IPv4', 'direction': 'ingress', 'protocol': 'udp', 'source_ip_prefix': prefix} ingress = mock.call.add_rule('ifake_dev', '-s %s -p udp -j RETURN' % prefix, comment=None) egress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv4_ingress_udp_port(self): rule = {'ethertype': 'IPv4', 'direction': 'ingress', 'protocol': 'udp', 'port_range_min': 10, 'port_range_max': 10} ingress = mock.call.add_rule('ifake_dev', '-p udp -m udp --dport 10 -j RETURN', comment=None) egress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv4_ingress_udp_mport(self): rule = 
{'ethertype': 'IPv4', 'direction': 'ingress', 'protocol': 'udp', 'port_range_min': 10, 'port_range_max': 100} ingress = mock.call.add_rule( 'ifake_dev', '-p udp -m udp -m multiport --dports 10:100 -j RETURN', comment=None) egress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv4_ingress_udp_mport_prefix(self): prefix = FAKE_PREFIX['IPv4'] rule = {'ethertype': 'IPv4', 'direction': 'ingress', 'protocol': 'udp', 'port_range_min': 10, 'port_range_max': 100, 'source_ip_prefix': prefix} ingress = mock.call.add_rule( 'ifake_dev', '-s %s -p udp -m udp -m multiport --dports 10:100 ' '-j RETURN' % prefix, comment=None) egress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv4_egress(self): rule = {'ethertype': 'IPv4', 'direction': 'egress'} egress = mock.call.add_rule('ofake_dev', '-j RETURN', comment=None) ingress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv4_egress_prefix(self): prefix = FAKE_PREFIX['IPv4'] rule = {'ethertype': 'IPv4', 'direction': 'egress', 'source_ip_prefix': prefix} egress = mock.call.add_rule( 'ofake_dev', '-s %s -j RETURN' % prefix, comment=None) ingress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv4_egress_tcp(self): rule = {'ethertype': 'IPv4', 'direction': 'egress', 'protocol': 'tcp'} egress = mock.call.add_rule( 'ofake_dev', '-p tcp -j RETURN', comment=None) ingress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv4_egress_tcp_prefix(self): prefix = FAKE_PREFIX['IPv4'] rule = {'ethertype': 'IPv4', 'direction': 'egress', 'protocol': 'tcp', 'source_ip_prefix': prefix} egress = mock.call.add_rule('ofake_dev', '-s %s -p tcp -j RETURN' % prefix, comment=None) ingress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv4_egress_icmp(self): rule = {'ethertype': 'IPv4', 'direction': 'egress', 'protocol': 'icmp'} egress = mock.call.add_rule('ofake_dev', '-p icmp -j 
RETURN', comment=None) ingress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv4_egress_icmp_prefix(self): prefix = FAKE_PREFIX['IPv4'] rule = {'ethertype': 'IPv4', 'direction': 'egress', 'protocol': 'icmp', 'source_ip_prefix': prefix} egress = mock.call.add_rule( 'ofake_dev', '-s %s -p icmp -j RETURN' % prefix, comment=None) ingress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv4_egress_icmp_type(self): prefix = FAKE_PREFIX['IPv4'] rule = {'ethertype': 'IPv4', 'direction': 'egress', 'protocol': 'icmp', 'source_port_range_min': 8, 'source_ip_prefix': prefix} egress = mock.call.add_rule( 'ofake_dev', '-s %s -p icmp -m icmp --icmp-type 8 -j RETURN' % prefix, comment=None) ingress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv4_egress_icmp_type_name(self): prefix = FAKE_PREFIX['IPv4'] rule = {'ethertype': 'IPv4', 'direction': 'egress', 'protocol': 'icmp', 'source_port_range_min': 'echo-request', 'source_ip_prefix': prefix} egress = mock.call.add_rule( 'ofake_dev', '-s %s -p icmp -m icmp --icmp-type echo-request ' '-j RETURN' % prefix, comment=None) ingress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv4_egress_icmp_type_code(self): prefix = FAKE_PREFIX['IPv4'] rule = {'ethertype': 'IPv4', 'direction': 'egress', 'protocol': 'icmp', 'source_port_range_min': 8, 'source_port_range_max': 0, 'source_ip_prefix': prefix} egress = mock.call.add_rule( 'ofake_dev', '-s %s -p icmp -m icmp --icmp-type 8/0 -j RETURN' % prefix, comment=None) ingress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv4_egress_tcp_port(self): rule = {'ethertype': 'IPv4', 'direction': 'egress', 'protocol': 'tcp', 'port_range_min': 10, 'port_range_max': 10} egress = mock.call.add_rule('ofake_dev', '-p tcp -m tcp --dport 10 -j RETURN', comment=None) ingress = None self._test_prepare_port_filter(rule, ingress, egress) def 
test_filter_ipv4_egress_tcp_mport(self): rule = {'ethertype': 'IPv4', 'direction': 'egress', 'protocol': 'tcp', 'port_range_min': 10, 'port_range_max': 100} egress = mock.call.add_rule( 'ofake_dev', '-p tcp -m tcp -m multiport --dports 10:100 -j RETURN', comment=None) ingress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv4_egress_tcp_mport_prefix(self): prefix = FAKE_PREFIX['IPv4'] rule = {'ethertype': 'IPv4', 'direction': 'egress', 'protocol': 'tcp', 'port_range_min': 10, 'port_range_max': 100, 'source_ip_prefix': prefix} egress = mock.call.add_rule( 'ofake_dev', '-s %s -p tcp -m tcp -m multiport --dports 10:100 ' '-j RETURN' % prefix, comment=None) ingress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv4_egress_udp(self): rule = {'ethertype': 'IPv4', 'direction': 'egress', 'protocol': 'udp'} egress = mock.call.add_rule( 'ofake_dev', '-p udp -j RETURN', comment=None) ingress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv4_egress_udp_prefix(self): prefix = FAKE_PREFIX['IPv4'] rule = {'ethertype': 'IPv4', 'direction': 'egress', 'protocol': 'udp', 'source_ip_prefix': prefix} egress = mock.call.add_rule('ofake_dev', '-s %s -p udp -j RETURN' % prefix, comment=None) ingress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv4_egress_udp_port(self): rule = {'ethertype': 'IPv4', 'direction': 'egress', 'protocol': 'udp', 'port_range_min': 10, 'port_range_max': 10} egress = mock.call.add_rule('ofake_dev', '-p udp -m udp --dport 10 -j RETURN', comment=None) ingress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv4_egress_udp_mport(self): rule = {'ethertype': 'IPv4', 'direction': 'egress', 'protocol': 'udp', 'port_range_min': 10, 'port_range_max': 100} egress = mock.call.add_rule( 'ofake_dev', '-p udp -m udp -m multiport --dports 10:100 -j RETURN', comment=None) ingress = None self._test_prepare_port_filter(rule, 
ingress, egress) def test_filter_ipv4_egress_udp_mport_prefix(self): prefix = FAKE_PREFIX['IPv4'] rule = {'ethertype': 'IPv4', 'direction': 'egress', 'protocol': 'udp', 'port_range_min': 10, 'port_range_max': 100, 'source_ip_prefix': prefix} egress = mock.call.add_rule( 'ofake_dev', '-s %s -p udp -m udp -m multiport --dports 10:100 ' '-j RETURN' % prefix, comment=None) ingress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv6_ingress(self): rule = {'ethertype': 'IPv6', 'direction': 'ingress'} ingress = mock.call.add_rule('ifake_dev', '-j RETURN', comment=None) egress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv6_ingress_prefix(self): prefix = FAKE_PREFIX['IPv6'] rule = {'ethertype': 'IPv6', 'direction': 'ingress', 'source_ip_prefix': prefix} ingress = mock.call.add_rule( 'ifake_dev', '-s %s -j RETURN' % prefix, comment=None) egress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv6_ingress_tcp(self): rule = {'ethertype': 'IPv6', 'direction': 'ingress', 'protocol': 'tcp'} ingress = mock.call.add_rule( 'ifake_dev', '-p tcp -j RETURN', comment=None) egress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv6_ingress_tcp_prefix(self): prefix = FAKE_PREFIX['IPv6'] rule = {'ethertype': 'IPv6', 'direction': 'ingress', 'protocol': 'tcp', 'source_ip_prefix': prefix} ingress = mock.call.add_rule('ifake_dev', '-s %s -p tcp -j RETURN' % prefix, comment=None) egress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv6_ingress_tcp_port(self): rule = {'ethertype': 'IPv6', 'direction': 'ingress', 'protocol': 'tcp', 'port_range_min': 10, 'port_range_max': 10} ingress = mock.call.add_rule('ifake_dev', '-p tcp -m tcp --dport 10 -j RETURN', comment=None) egress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv6_ingress_icmp(self): rule = {'ethertype': 'IPv6', 'direction': 'ingress', 'protocol': 
'icmp'} ingress = mock.call.add_rule( 'ifake_dev', '-p ipv6-icmp -j RETURN', comment=None) egress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv6_ingress_icmp_prefix(self): prefix = FAKE_PREFIX['IPv6'] rule = {'ethertype': 'IPv6', 'direction': 'ingress', 'protocol': 'icmp', 'source_ip_prefix': prefix} ingress = mock.call.add_rule( 'ifake_dev', '-s %s -p ipv6-icmp -j RETURN' % prefix, comment=None) egress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv6_ingress_tcp_mport(self): rule = {'ethertype': 'IPv6', 'direction': 'ingress', 'protocol': 'tcp', 'port_range_min': 10, 'port_range_max': 100} ingress = mock.call.add_rule( 'ifake_dev', '-p tcp -m tcp -m multiport --dports 10:100 -j RETURN', comment=None) egress = None self._test_prepare_port_filter(rule, ingress, egress) def _test_filter_ingress_tcp_min_port_0(self, ethertype): rule = {'ethertype': ethertype, 'direction': 'ingress', 'protocol': 'tcp', 'port_range_min': 0, 'port_range_max': 100} ingress = mock.call.add_rule( 'ifake_dev', '-p tcp -m tcp -m multiport --dports 0:100 -j RETURN', comment=None) egress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ingress_tcp_min_port_0_for_ipv4(self): self._test_filter_ingress_tcp_min_port_0('IPv4') def test_filter_ingress_tcp_min_port_0_for_ipv6(self): self._test_filter_ingress_tcp_min_port_0('IPv6') def test_filter_ipv6_ingress_tcp_mport_prefix(self): prefix = FAKE_PREFIX['IPv6'] rule = {'ethertype': 'IPv6', 'direction': 'ingress', 'protocol': 'tcp', 'port_range_min': 10, 'port_range_max': 100, 'source_ip_prefix': prefix} ingress = mock.call.add_rule( 'ifake_dev', '-s %s -p tcp -m tcp -m multiport --dports 10:100 ' '-j RETURN' % prefix, comment=None) egress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv6_ingress_udp(self): rule = {'ethertype': 'IPv6', 'direction': 'ingress', 'protocol': 'udp'} ingress = mock.call.add_rule( 'ifake_dev', '-p 
udp -j RETURN', comment=None) egress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv6_ingress_udp_prefix(self): prefix = FAKE_PREFIX['IPv6'] rule = {'ethertype': 'IPv6', 'direction': 'ingress', 'protocol': 'udp', 'source_ip_prefix': prefix} ingress = mock.call.add_rule('ifake_dev', '-s %s -p udp -j RETURN' % prefix, comment=None) egress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv6_ingress_udp_port(self): rule = {'ethertype': 'IPv6', 'direction': 'ingress', 'protocol': 'udp', 'port_range_min': 10, 'port_range_max': 10} ingress = mock.call.add_rule('ifake_dev', '-p udp -m udp --dport 10 -j RETURN', comment=None)<|fim▁hole|> def test_filter_ipv6_ingress_udp_mport(self): rule = {'ethertype': 'IPv6', 'direction': 'ingress', 'protocol': 'udp', 'port_range_min': 10, 'port_range_max': 100} ingress = mock.call.add_rule( 'ifake_dev', '-p udp -m udp -m multiport --dports 10:100 -j RETURN', comment=None) egress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv6_ingress_udp_mport_prefix(self): prefix = FAKE_PREFIX['IPv6'] rule = {'ethertype': 'IPv6', 'direction': 'ingress', 'protocol': 'udp', 'port_range_min': 10, 'port_range_max': 100, 'source_ip_prefix': prefix} ingress = mock.call.add_rule( 'ifake_dev', '-s %s -p udp -m udp -m multiport --dports 10:100 ' '-j RETURN' % prefix, comment=None) egress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv6_egress(self): rule = {'ethertype': 'IPv6', 'direction': 'egress'} egress = mock.call.add_rule('ofake_dev', '-j RETURN', comment=None) ingress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv6_egress_prefix(self): prefix = FAKE_PREFIX['IPv6'] rule = {'ethertype': 'IPv6', 'direction': 'egress', 'source_ip_prefix': prefix} egress = mock.call.add_rule( 'ofake_dev', '-s %s -j RETURN' % prefix, comment=None) ingress = None self._test_prepare_port_filter(rule, ingress, egress) 
def test_filter_ipv6_egress_tcp(self): rule = {'ethertype': 'IPv6', 'direction': 'egress', 'protocol': 'tcp'} egress = mock.call.add_rule( 'ofake_dev', '-p tcp -j RETURN', comment=None) ingress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv6_egress_tcp_prefix(self): prefix = FAKE_PREFIX['IPv6'] rule = {'ethertype': 'IPv6', 'direction': 'egress', 'protocol': 'tcp', 'source_ip_prefix': prefix} egress = mock.call.add_rule('ofake_dev', '-s %s -p tcp -j RETURN' % prefix, comment=None) ingress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv6_egress_icmp(self): rule = {'ethertype': 'IPv6', 'direction': 'egress', 'protocol': 'icmp'} egress = mock.call.add_rule( 'ofake_dev', '-p ipv6-icmp -j RETURN', comment=None) ingress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv6_egress_icmp_prefix(self): prefix = FAKE_PREFIX['IPv6'] rule = {'ethertype': 'IPv6', 'direction': 'egress', 'protocol': 'icmp', 'source_ip_prefix': prefix} egress = mock.call.add_rule( 'ofake_dev', '-s %s -p ipv6-icmp -j RETURN' % prefix, comment=None) ingress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv6_egress_icmp_type(self): prefix = FAKE_PREFIX['IPv6'] rule = {'ethertype': 'IPv6', 'direction': 'egress', 'protocol': 'icmp', 'source_port_range_min': 8, 'source_ip_prefix': prefix} egress = mock.call.add_rule( 'ofake_dev', '-s %s -p ipv6-icmp -m icmp6 --icmpv6-type 8 -j RETURN' % prefix, comment=None) ingress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv6_egress_icmp_type_name(self): prefix = FAKE_PREFIX['IPv6'] rule = {'ethertype': 'IPv6', 'direction': 'egress', 'protocol': 'icmp', 'source_port_range_min': 'echo-request', 'source_ip_prefix': prefix} egress = mock.call.add_rule( 'ofake_dev', '-s %s -p ipv6-icmp -m icmp6 --icmpv6-type echo-request ' '-j RETURN' % prefix, comment=None) ingress = None self._test_prepare_port_filter(rule, 
ingress, egress) def test_filter_ipv6_egress_icmp_type_code(self): prefix = FAKE_PREFIX['IPv6'] rule = {'ethertype': 'IPv6', 'direction': 'egress', 'protocol': 'icmp', 'source_port_range_min': 8, 'source_port_range_max': 0, 'source_ip_prefix': prefix} egress = mock.call.add_rule( 'ofake_dev', '-s %s -p ipv6-icmp -m icmp6 --icmpv6-type 8/0 -j RETURN' % prefix, comment=None) ingress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv6_egress_tcp_port(self): rule = {'ethertype': 'IPv6', 'direction': 'egress', 'protocol': 'tcp', 'port_range_min': 10, 'port_range_max': 10} egress = mock.call.add_rule('ofake_dev', '-p tcp -m tcp --dport 10 -j RETURN', comment=None) ingress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv6_egress_tcp_mport(self): rule = {'ethertype': 'IPv6', 'direction': 'egress', 'protocol': 'tcp', 'port_range_min': 10, 'port_range_max': 100} egress = mock.call.add_rule( 'ofake_dev', '-p tcp -m tcp -m multiport --dports 10:100 -j RETURN', comment=None) ingress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv6_egress_tcp_mport_prefix(self): prefix = FAKE_PREFIX['IPv6'] rule = {'ethertype': 'IPv6', 'direction': 'egress', 'protocol': 'tcp', 'port_range_min': 10, 'port_range_max': 100, 'source_ip_prefix': prefix} egress = mock.call.add_rule( 'ofake_dev', '-s %s -p tcp -m tcp -m multiport --dports 10:100 ' '-j RETURN' % prefix, comment=None) ingress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv6_egress_udp(self): rule = {'ethertype': 'IPv6', 'direction': 'egress', 'protocol': 'udp'} egress = mock.call.add_rule( 'ofake_dev', '-p udp -j RETURN', comment=None) ingress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv6_egress_udp_prefix(self): prefix = FAKE_PREFIX['IPv6'] rule = {'ethertype': 'IPv6', 'direction': 'egress', 'protocol': 'udp', 'source_ip_prefix': prefix} egress = 
mock.call.add_rule('ofake_dev', '-s %s -p udp -j RETURN' % prefix, comment=None) ingress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv6_egress_udp_port(self): rule = {'ethertype': 'IPv6', 'direction': 'egress', 'protocol': 'udp', 'port_range_min': 10, 'port_range_max': 10} egress = mock.call.add_rule('ofake_dev', '-p udp -m udp --dport 10 -j RETURN', comment=None) ingress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv6_egress_udp_mport(self): rule = {'ethertype': 'IPv6', 'direction': 'egress', 'protocol': 'udp', 'port_range_min': 10, 'port_range_max': 100} egress = mock.call.add_rule( 'ofake_dev', '-p udp -m udp -m multiport --dports 10:100 -j RETURN', comment=None) ingress = None self._test_prepare_port_filter(rule, ingress, egress) def test_filter_ipv6_egress_udp_mport_prefix(self): prefix = FAKE_PREFIX['IPv6'] rule = {'ethertype': 'IPv6', 'direction': 'egress', 'protocol': 'udp', 'port_range_min': 10, 'port_range_max': 100, 'source_ip_prefix': prefix} egress = mock.call.add_rule( 'ofake_dev', '-s %s -p udp -m udp -m multiport --dports 10:100 ' '-j RETURN' % prefix, comment=None) ingress = None self._test_prepare_port_filter(rule, ingress, egress) def _test_prepare_port_filter(self, rule, ingress_expected_call=None, egress_expected_call=None): port = self._fake_port() ethertype = rule['ethertype'] prefix = utils.ip_to_cidr(FAKE_IP[ethertype]) filter_inst = self.v4filter_inst dhcp_rule = [mock.call.add_rule( 'ofake_dev', '-s 0.0.0.0/32 -d 255.255.255.255/32 -p udp -m udp ' '--sport 68 --dport 67 -j RETURN', comment=None)] if ethertype == 'IPv6': filter_inst = self.v6filter_inst dhcp_rule = [mock.call.add_rule('ofake_dev', '-s ::/128 -d ff02::/16 ' '-p ipv6-icmp -m icmp6 ' '--icmpv6-type %s -j RETURN' % icmp6_type, comment=None) for icmp6_type in constants.ICMPV6_ALLOWED_UNSPEC_ADDR_TYPES] sg = [rule] port['security_group_rules'] = sg self.firewall.prepare_port_filter(port) calls = 
[mock.call.add_chain('sg-fallback'), mock.call.add_rule( 'sg-fallback', '-j DROP', comment=ic.UNMATCH_DROP), mock.call.remove_chain('sg-chain'), mock.call.add_chain('sg-chain'), mock.call.add_chain('ifake_dev'), mock.call.add_rule('FORWARD', '-m physdev --physdev-out tapfake_dev ' '--physdev-is-bridged ' '-j $sg-chain', comment=ic.VM_INT_SG), mock.call.add_rule('sg-chain', '-m physdev --physdev-out tapfake_dev ' '--physdev-is-bridged ' '-j $ifake_dev', comment=ic.SG_TO_VM_SG) ] if ethertype == 'IPv6': for icmp6_type in firewall.ICMPV6_ALLOWED_TYPES: calls.append( mock.call.add_rule('ifake_dev', '-p ipv6-icmp -m icmp6 --icmpv6-type ' '%s -j RETURN' % icmp6_type, comment=None)) calls += [ mock.call.add_rule( 'ifake_dev', '-m state --state RELATED,ESTABLISHED -j RETURN', comment=None ) ] if ingress_expected_call: calls.append(ingress_expected_call) calls += [mock.call.add_rule( 'ifake_dev', '-m state --state INVALID -j DROP', comment=None), mock.call.add_rule('ifake_dev', '-j $sg-fallback', comment=None), mock.call.add_chain('ofake_dev'), mock.call.add_rule('FORWARD', '-m physdev --physdev-in tapfake_dev ' '--physdev-is-bridged ' '-j $sg-chain', comment=ic.VM_INT_SG), mock.call.add_rule('sg-chain', '-m physdev --physdev-in tapfake_dev ' '--physdev-is-bridged -j $ofake_dev', comment=ic.SG_TO_VM_SG), mock.call.add_rule('INPUT', '-m physdev --physdev-in tapfake_dev ' '--physdev-is-bridged -j $ofake_dev', comment=ic.INPUT_TO_SG), mock.call.add_chain('sfake_dev'), mock.call.add_rule( 'sfake_dev', '-s %s -m mac --mac-source FF:FF:FF:FF:FF:FF -j RETURN' % prefix, comment=ic.PAIR_ALLOW)] if ethertype == 'IPv6': calls.append(mock.call.add_rule('sfake_dev', '-s fe80::fdff:ffff:feff:ffff/128 -m mac ' '--mac-source FF:FF:FF:FF:FF:FF -j RETURN', comment=ic.PAIR_ALLOW)) calls.append(mock.call.add_rule('sfake_dev', '-j DROP', comment=ic.PAIR_DROP)) calls += dhcp_rule calls.append(mock.call.add_rule('ofake_dev', '-j $sfake_dev', comment=None)) if ethertype == 'IPv4': 
calls.append(mock.call.add_rule( 'ofake_dev', '-p udp -m udp --sport 68 --dport 67 -j RETURN', comment=None)) calls.append(mock.call.add_rule( 'ofake_dev', '-p udp -m udp --sport 67 -m udp --dport 68 -j DROP', comment=None)) if ethertype == 'IPv6': calls.append(mock.call.add_rule('ofake_dev', '-p ipv6-icmp -m icmp6 ' '--icmpv6-type %s -j DROP' % constants.ICMPV6_TYPE_RA, comment=None)) calls.append(mock.call.add_rule('ofake_dev', '-p ipv6-icmp -j RETURN', comment=None)) calls.append(mock.call.add_rule('ofake_dev', '-p udp -m udp ' '--sport 546 -m udp --dport 547 ' '-j RETURN', comment=None)) calls.append(mock.call.add_rule( 'ofake_dev', '-p udp -m udp --sport 547 -m udp --dport 546 -j DROP', comment=None)) calls += [ mock.call.add_rule( 'ofake_dev', '-m state --state RELATED,ESTABLISHED -j RETURN', comment=None), ] if egress_expected_call: calls.append(egress_expected_call) calls += [mock.call.add_rule( 'ofake_dev', '-m state --state INVALID -j DROP', comment=None), mock.call.add_rule('ofake_dev', '-j $sg-fallback', comment=None), mock.call.add_rule('sg-chain', '-j ACCEPT')] comb = zip(calls, filter_inst.mock_calls) for (l, r) in comb: self.assertEqual(l, r) filter_inst.assert_has_calls(calls) def _test_remove_conntrack_entries(self, ethertype, protocol, direction): port = self._fake_port() port['security_groups'] = 'fake_sg_id' self.firewall.filtered_ports[port['device']] = port self.firewall.updated_rule_sg_ids = set(['fake_sg_id']) self.firewall.sg_rules['fake_sg_id'] = [ {'direction': direction, 'ethertype': ethertype, 'protocol': protocol}] self.firewall.filter_defer_apply_on() self.firewall.sg_rules['fake_sg_id'] = [] self.firewall.filter_defer_apply_off() cmd = ['conntrack', '-D'] if protocol: cmd.extend(['-p', protocol]) if ethertype == 'IPv4': cmd.extend(['-f', 'ipv4']) if direction == 'ingress': cmd.extend(['-d', '10.0.0.1']) else: cmd.extend(['-s', '10.0.0.1']) else: cmd.extend(['-f', 'ipv6']) if direction == 'ingress': cmd.extend(['-d', 'fe80::1']) 
else: cmd.extend(['-s', 'fe80::1']) # initial data has 1, 2, and 9 in use, CT zone will start at 10. cmd.extend(['-w', 10]) calls = [ mock.call(cmd, run_as_root=True, check_exit_code=True, extra_ok_codes=[1])] self.utils_exec.assert_has_calls(calls) def test_remove_conntrack_entries_for_delete_rule_ipv4(self): for direction in ['ingress', 'egress']: for pro in [None, 'tcp', 'icmp', 'udp']: self._test_remove_conntrack_entries( 'IPv4', pro, direction) def test_remove_conntrack_entries_for_delete_rule_ipv6(self): for direction in ['ingress', 'egress']: for pro in [None, 'tcp', 'icmp', 'udp']: self._test_remove_conntrack_entries( 'IPv6', pro, direction) def test_remove_conntrack_entries_for_port_sec_group_change(self): port = self._fake_port() port['security_groups'] = ['fake_sg_id'] self.firewall.filtered_ports[port['device']] = port self.firewall.updated_sg_members = set(['tapfake_dev']) self.firewall.filter_defer_apply_on() new_port = copy.deepcopy(port) new_port['security_groups'] = ['fake_sg_id2'] self.firewall.filtered_ports[port['device']] = new_port self.firewall.filter_defer_apply_off() calls = [ # initial data has 1, 2, and 9 in use, CT zone will start at 10. 
mock.call(['conntrack', '-D', '-f', 'ipv4', '-d', '10.0.0.1', '-w', 10], run_as_root=True, check_exit_code=True, extra_ok_codes=[1]), mock.call(['conntrack', '-D', '-f', 'ipv4', '-s', '10.0.0.1', '-w', 10], run_as_root=True, check_exit_code=True, extra_ok_codes=[1]), mock.call(['conntrack', '-D', '-f', 'ipv6', '-d', 'fe80::1', '-w', 10], run_as_root=True, check_exit_code=True, extra_ok_codes=[1]), mock.call(['conntrack', '-D', '-f', 'ipv6', '-s', 'fe80::1', '-w', 10], run_as_root=True, check_exit_code=True, extra_ok_codes=[1])] self.utils_exec.assert_has_calls(calls) def test_remove_conntrack_entries_for_sg_member_changed_ipv4(self): for direction in ['ingress', 'egress']: for protocol in [None, 'tcp', 'icmp', 'udp']: self._test_remove_conntrack_entries_sg_member_changed( 'IPv4', protocol, direction) def test_remove_conntrack_entries_for_sg_member_changed_ipv6(self): for direction in ['ingress', 'egress']: for protocol in [None, 'tcp', 'icmp', 'udp']: self._test_remove_conntrack_entries_sg_member_changed( 'IPv6', protocol, direction) def _test_remove_conntrack_entries_sg_member_changed(self, ethertype, protocol, direction): port = self._fake_port() port['security_groups'] = ['fake_sg_id'] self.firewall.sg_rules.setdefault('fake_sg_id', []) self.firewall.sg_rules['fake_sg_id'].append( {'direction': direction, 'remote_group_id': 'fake_sg_id2', 'ethertype': ethertype}) self.firewall.filter_defer_apply_on() self.firewall.devices_with_updated_sg_members['fake_sg_id2'] = [port] if ethertype == "IPv4": self.firewall.pre_sg_members = {'fake_sg_id2': { 'IPv4': ['10.0.0.2', '10.0.0.3']}} self.firewall.sg_members = {'fake_sg_id2': { 'IPv4': ['10.0.0.3']}} ethertype = "ipv4" else: self.firewall.pre_sg_members = {'fake_sg_id2': { 'IPv6': ['fe80::2', 'fe80::3']}} self.firewall.sg_members = {'fake_sg_id2': { 'IPv6': ['fe80::3']}} ethertype = "ipv6" self.firewall.filter_defer_apply_off() direction = '-d' if direction == 'ingress' else '-s' remote_ip_direction = '-s' if direction 
== '-d' else '-d' ips = {"ipv4": ['10.0.0.1', '10.0.0.2'], "ipv6": ['fe80::1', 'fe80::2']} calls = [ # initial data has 1, 2, and 9 in use, CT zone will start # at 10. mock.call(['conntrack', '-D', '-f', ethertype, direction, ips[ethertype][0], '-w', 10, remote_ip_direction, ips[ethertype][1]], run_as_root=True, check_exit_code=True, extra_ok_codes=[1])] self.utils_exec.assert_has_calls(calls) def test_user_sg_rules_deduped_before_call_to_iptables_manager(self): port = self._fake_port() port['security_group_rules'] = [{'ethertype': 'IPv4', 'direction': 'ingress'}] * 2 self.firewall.prepare_port_filter(port) rules = [''.join(c[1]) for c in self.v4filter_inst.add_rule.mock_calls] self.assertEqual(len(set(rules)), len(rules)) def test_update_delete_port_filter(self): port = self._fake_port() port['security_group_rules'] = [{'ethertype': 'IPv4', 'direction': 'ingress'}] self.firewall.prepare_port_filter(port) port['security_group_rules'] = [{'ethertype': 'IPv4', 'direction': 'egress'}] self.firewall.update_port_filter(port) self.firewall.update_port_filter({'device': 'no-exist-device'}) self.firewall.remove_port_filter(port) self.firewall.remove_port_filter({'device': 'no-exist-device'}) calls = [mock.call.add_chain('sg-fallback'), mock.call.add_rule( 'sg-fallback', '-j DROP', comment=ic.UNMATCH_DROP), mock.call.remove_chain('sg-chain'), mock.call.add_chain('sg-chain'), mock.call.add_chain('ifake_dev'), mock.call.add_rule( 'FORWARD', '-m physdev --physdev-out tapfake_dev ' '--physdev-is-bridged -j $sg-chain', comment=ic.VM_INT_SG), mock.call.add_rule( 'sg-chain', '-m physdev --physdev-out tapfake_dev ' '--physdev-is-bridged -j $ifake_dev', comment=ic.SG_TO_VM_SG), mock.call.add_rule( 'ifake_dev', '-m state --state RELATED,ESTABLISHED -j RETURN', comment=None), mock.call.add_rule('ifake_dev', '-j RETURN', comment=None), mock.call.add_rule( 'ifake_dev', '-m state --state INVALID -j DROP', comment=None), mock.call.add_rule( 'ifake_dev', '-j $sg-fallback', comment=None), 
mock.call.add_chain('ofake_dev'), mock.call.add_rule( 'FORWARD', '-m physdev --physdev-in tapfake_dev ' '--physdev-is-bridged -j $sg-chain', comment=ic.VM_INT_SG), mock.call.add_rule( 'sg-chain', '-m physdev --physdev-in tapfake_dev ' '--physdev-is-bridged -j $ofake_dev', comment=ic.SG_TO_VM_SG), mock.call.add_rule( 'INPUT', '-m physdev --physdev-in tapfake_dev ' '--physdev-is-bridged -j $ofake_dev', comment=ic.INPUT_TO_SG), mock.call.add_chain('sfake_dev'), mock.call.add_rule( 'sfake_dev', '-s 10.0.0.1/32 -m mac --mac-source FF:FF:FF:FF:FF:FF ' '-j RETURN', comment=ic.PAIR_ALLOW), mock.call.add_rule( 'sfake_dev', '-j DROP', comment=ic.PAIR_DROP), mock.call.add_rule( 'ofake_dev', '-s 0.0.0.0/32 -d 255.255.255.255/32 -p udp -m udp ' '--sport 68 --dport 67 -j RETURN', comment=None), mock.call.add_rule('ofake_dev', '-j $sfake_dev', comment=None), mock.call.add_rule( 'ofake_dev', '-p udp -m udp --sport 68 --dport 67 -j RETURN', comment=None), mock.call.add_rule( 'ofake_dev', '-p udp -m udp --sport 67 -m udp --dport 68 -j DROP', comment=None), mock.call.add_rule( 'ofake_dev', '-m state --state RELATED,ESTABLISHED -j RETURN', comment=None), mock.call.add_rule( 'ofake_dev', '-m state --state INVALID -j DROP', comment=None), mock.call.add_rule( 'ofake_dev', '-j $sg-fallback', comment=None), mock.call.add_rule('sg-chain', '-j ACCEPT'), mock.call.remove_chain('ifake_dev'), mock.call.remove_chain('ofake_dev'), mock.call.remove_chain('sfake_dev'), mock.call.remove_chain('sg-chain'), mock.call.add_chain('sg-chain'), mock.call.add_chain('ifake_dev'), mock.call.add_rule( 'FORWARD', '-m physdev --physdev-out tapfake_dev ' '--physdev-is-bridged -j $sg-chain', comment=ic.VM_INT_SG), mock.call.add_rule( 'sg-chain', '-m physdev --physdev-out tapfake_dev ' '--physdev-is-bridged -j $ifake_dev', comment=ic.SG_TO_VM_SG), mock.call.add_rule( 'ifake_dev', '-m state --state RELATED,ESTABLISHED -j RETURN', comment=None), mock.call.add_rule( 'ifake_dev', '-m state --state INVALID -j DROP', 
comment=None), mock.call.add_rule( 'ifake_dev', '-j $sg-fallback', comment=None), mock.call.add_chain('ofake_dev'), mock.call.add_rule( 'FORWARD', '-m physdev --physdev-in tapfake_dev ' '--physdev-is-bridged -j $sg-chain', comment=ic.VM_INT_SG), mock.call.add_rule( 'sg-chain', '-m physdev --physdev-in tapfake_dev ' '--physdev-is-bridged -j $ofake_dev', comment=ic.SG_TO_VM_SG), mock.call.add_rule( 'INPUT', '-m physdev --physdev-in tapfake_dev ' '--physdev-is-bridged -j $ofake_dev', comment=ic.INPUT_TO_SG), mock.call.add_chain('sfake_dev'), mock.call.add_rule( 'sfake_dev', '-s 10.0.0.1/32 -m mac --mac-source FF:FF:FF:FF:FF:FF ' '-j RETURN', comment=ic.PAIR_ALLOW), mock.call.add_rule( 'sfake_dev', '-j DROP', comment=ic.PAIR_DROP), mock.call.add_rule( 'ofake_dev', '-s 0.0.0.0/32 -d 255.255.255.255/32 -p udp -m udp ' '--sport 68 --dport 67 -j RETURN', comment=None), mock.call.add_rule('ofake_dev', '-j $sfake_dev', comment=None), mock.call.add_rule( 'ofake_dev', '-p udp -m udp --sport 68 --dport 67 -j RETURN', comment=None), mock.call.add_rule( 'ofake_dev', '-p udp -m udp --sport 67 -m udp --dport 68 -j DROP', comment=None), mock.call.add_rule( 'ofake_dev', '-m state --state RELATED,ESTABLISHED -j RETURN', comment=None), mock.call.add_rule('ofake_dev', '-j RETURN', comment=None), mock.call.add_rule( 'ofake_dev', '-m state --state INVALID -j DROP', comment=None), mock.call.add_rule('ofake_dev', '-j $sg-fallback', comment=None), mock.call.add_rule('sg-chain', '-j ACCEPT'), mock.call.remove_chain('ifake_dev'), mock.call.remove_chain('ofake_dev'), mock.call.remove_chain('sfake_dev'), mock.call.remove_chain('sg-chain'), mock.call.add_chain('sg-chain')] self.v4filter_inst.assert_has_calls(calls) def test_remove_unknown_port(self): port = self._fake_port() self.firewall.remove_port_filter(port) # checking no exception occurs self.assertFalse(self.v4filter_inst.called) def test_defer_apply(self): with self.firewall.defer_apply(): pass 
self.iptables_inst.assert_has_calls([mock.call.defer_apply_on(), mock.call.defer_apply_off()]) def test_filter_defer_with_exception(self): try: with self.firewall.defer_apply(): raise Exception("same exception") except Exception: pass self.iptables_inst.assert_has_calls([mock.call.defer_apply_on(), mock.call.defer_apply_off()]) def _mock_chain_applies(self): class CopyingMock(mock.MagicMock): """Copies arguments so mutable arguments can be asserted on. Copied verbatim from unittest.mock documentation. """ def __call__(self, *args, **kwargs): args = copy.deepcopy(args) kwargs = copy.deepcopy(kwargs) return super(CopyingMock, self).__call__(*args, **kwargs) # Need to use CopyingMock because _{setup,remove}_chains_apply are # usually called with that's modified between calls (i.e., # self.firewall.filtered_ports). chain_applies = CopyingMock() self.firewall._setup_chains_apply = chain_applies.setup self.firewall._remove_chains_apply = chain_applies.remove return chain_applies def test_mock_chain_applies(self): chain_applies = self._mock_chain_applies() port_prepare = {'device': 'd1', 'mac_address': 'prepare'} port_update = {'device': 'd1', 'mac_address': 'update'} self.firewall.prepare_port_filter(port_prepare) self.firewall.update_port_filter(port_update) self.firewall.remove_port_filter(port_update) chain_applies.assert_has_calls([mock.call.remove({}, {}), mock.call.setup({'d1': port_prepare}, {}), mock.call.remove({'d1': port_prepare}, {}), mock.call.setup({'d1': port_update}, {}), mock.call.remove({'d1': port_update}, {}), mock.call.setup({}, {})]) def test_defer_chain_apply_need_pre_defer_copy(self): chain_applies = self._mock_chain_applies() port = self._fake_port() device2port = {port['device']: port} self.firewall.prepare_port_filter(port) with self.firewall.defer_apply(): self.firewall.remove_port_filter(port) chain_applies.assert_has_calls([mock.call.remove({}, {}), mock.call.setup(device2port, {}), mock.call.remove(device2port, {}), mock.call.setup({}, 
{})]) def test_defer_chain_apply_coalesce_simple(self): chain_applies = self._mock_chain_applies() port = self._fake_port() with self.firewall.defer_apply(): self.firewall.prepare_port_filter(port) self.firewall.update_port_filter(port) self.firewall.remove_port_filter(port) chain_applies.assert_has_calls([mock.call.remove({}, {}), mock.call.setup({}, {})]) def test_defer_chain_apply_coalesce_multiple_ports(self): chain_applies = self._mock_chain_applies() port1 = {'device': 'd1', 'mac_address': 'mac1', 'network_id': 'net1'} port2 = {'device': 'd2', 'mac_address': 'mac2', 'network_id': 'net1'} device2port = {'d1': port1, 'd2': port2} with self.firewall.defer_apply(): self.firewall.prepare_port_filter(port1) self.firewall.prepare_port_filter(port2) chain_applies.assert_has_calls([mock.call.remove({}, {}), mock.call.setup(device2port, {})]) def test_ip_spoofing_filter_with_multiple_ips(self): port = {'device': 'tapfake_dev', 'mac_address': 'ff:ff:ff:ff:ff:ff', 'network_id': 'fake_net', 'fixed_ips': ['10.0.0.1', 'fe80::1', '10.0.0.2']} self.firewall.prepare_port_filter(port) calls = [mock.call.add_chain('sg-fallback'), mock.call.add_rule( 'sg-fallback', '-j DROP', comment=ic.UNMATCH_DROP), mock.call.remove_chain('sg-chain'), mock.call.add_chain('sg-chain'), mock.call.add_chain('ifake_dev'), mock.call.add_rule('FORWARD', '-m physdev --physdev-out tapfake_dev ' '--physdev-is-bridged ' '-j $sg-chain', comment=ic.VM_INT_SG), mock.call.add_rule('sg-chain', '-m physdev --physdev-out tapfake_dev ' '--physdev-is-bridged ' '-j $ifake_dev', comment=ic.SG_TO_VM_SG), mock.call.add_rule( 'ifake_dev', '-m state --state RELATED,ESTABLISHED -j RETURN', comment=None), mock.call.add_rule( 'ifake_dev', '-m state --state INVALID -j DROP', comment=None), mock.call.add_rule('ifake_dev', '-j $sg-fallback', comment=None), mock.call.add_chain('ofake_dev'), mock.call.add_rule('FORWARD', '-m physdev --physdev-in tapfake_dev ' '--physdev-is-bridged ' '-j $sg-chain', comment=ic.VM_INT_SG), 
mock.call.add_rule('sg-chain', '-m physdev --physdev-in tapfake_dev ' '--physdev-is-bridged -j $ofake_dev', comment=ic.SG_TO_VM_SG), mock.call.add_rule('INPUT', '-m physdev --physdev-in tapfake_dev ' '--physdev-is-bridged -j $ofake_dev', comment=ic.INPUT_TO_SG), mock.call.add_chain('sfake_dev'), mock.call.add_rule( 'sfake_dev', '-s 10.0.0.1/32 -m mac --mac-source FF:FF:FF:FF:FF:FF ' '-j RETURN', comment=ic.PAIR_ALLOW), mock.call.add_rule( 'sfake_dev', '-s 10.0.0.2/32 -m mac --mac-source FF:FF:FF:FF:FF:FF ' '-j RETURN', comment=ic.PAIR_ALLOW), mock.call.add_rule( 'sfake_dev', '-j DROP', comment=ic.PAIR_DROP), mock.call.add_rule( 'ofake_dev', '-s 0.0.0.0/32 -d 255.255.255.255/32 -p udp -m udp ' '--sport 68 --dport 67 -j RETURN', comment=None), mock.call.add_rule('ofake_dev', '-j $sfake_dev', comment=None), mock.call.add_rule( 'ofake_dev', '-p udp -m udp --sport 68 --dport 67 -j RETURN', comment=None), mock.call.add_rule( 'ofake_dev', '-p udp -m udp --sport 67 -m udp --dport 68 -j DROP', comment=None), mock.call.add_rule( 'ofake_dev', '-m state --state RELATED,ESTABLISHED -j RETURN', comment=None), mock.call.add_rule( 'ofake_dev', '-m state --state INVALID -j DROP', comment=None), mock.call.add_rule('ofake_dev', '-j $sg-fallback', comment=None), mock.call.add_rule('sg-chain', '-j ACCEPT')] self.v4filter_inst.assert_has_calls(calls) def test_ip_spoofing_no_fixed_ips(self): port = {'device': 'tapfake_dev', 'mac_address': 'ff:ff:ff:ff:ff:ff', 'network_id': 'fake_net', 'fixed_ips': []} self.firewall.prepare_port_filter(port) calls = [mock.call.add_chain('sg-fallback'), mock.call.add_rule( 'sg-fallback', '-j DROP', comment=ic.UNMATCH_DROP), mock.call.remove_chain('sg-chain'), mock.call.add_chain('sg-chain'), mock.call.add_chain('ifake_dev'), mock.call.add_rule('FORWARD', '-m physdev --physdev-out tapfake_dev ' '--physdev-is-bridged ' '-j $sg-chain', comment=ic.VM_INT_SG), mock.call.add_rule('sg-chain', '-m physdev --physdev-out tapfake_dev ' '--physdev-is-bridged ' '-j 
$ifake_dev', comment=ic.SG_TO_VM_SG), mock.call.add_rule( 'ifake_dev', '-m state --state RELATED,ESTABLISHED -j RETURN', comment=None), mock.call.add_rule( 'ifake_dev', '-m state --state INVALID -j DROP', comment=None), mock.call.add_rule('ifake_dev', '-j $sg-fallback', comment=None), mock.call.add_chain('ofake_dev'), mock.call.add_rule('FORWARD', '-m physdev --physdev-in tapfake_dev ' '--physdev-is-bridged ' '-j $sg-chain', comment=ic.VM_INT_SG), mock.call.add_rule('sg-chain', '-m physdev --physdev-in tapfake_dev ' '--physdev-is-bridged -j $ofake_dev', comment=ic.SG_TO_VM_SG), mock.call.add_rule('INPUT', '-m physdev --physdev-in tapfake_dev ' '--physdev-is-bridged -j $ofake_dev', comment=ic.INPUT_TO_SG), mock.call.add_chain('sfake_dev'), mock.call.add_rule( 'sfake_dev', '-m mac --mac-source FF:FF:FF:FF:FF:FF -j RETURN', comment=ic.PAIR_ALLOW), mock.call.add_rule( 'sfake_dev', '-j DROP', comment=ic.PAIR_DROP), mock.call.add_rule( 'ofake_dev', '-s 0.0.0.0/32 -d 255.255.255.255/32 -p udp -m udp ' '--sport 68 --dport 67 -j RETURN', comment=None), mock.call.add_rule('ofake_dev', '-j $sfake_dev', comment=None), mock.call.add_rule( 'ofake_dev', '-p udp -m udp --sport 68 --dport 67 -j RETURN', comment=None), mock.call.add_rule( 'ofake_dev', '-p udp -m udp --sport 67 -m udp --dport 68 -j DROP', comment=None), mock.call.add_rule( 'ofake_dev', '-m state --state RELATED,ESTABLISHED -j RETURN', comment=None), mock.call.add_rule( 'ofake_dev', '-m state --state INVALID -j DROP', comment=None), mock.call.add_rule('ofake_dev', '-j $sg-fallback', comment=None), mock.call.add_rule('sg-chain', '-j ACCEPT')] self.v4filter_inst.assert_has_calls(calls) class IptablesFirewallEnhancedIpsetTestCase(BaseIptablesFirewallTestCase): def setUp(self): super(IptablesFirewallEnhancedIpsetTestCase, self).setUp() self.firewall.ipset = mock.Mock() self.firewall.ipset.get_name.side_effect = ( ipset_manager.IpsetManager.get_name) self.firewall.ipset.set_name_exists.return_value = True def 
_fake_port(self, sg_id=FAKE_SGID): return {'device': 'tapfake_dev', 'mac_address': 'ff:ff:ff:ff:ff:ff', 'network_id': 'fake_net', 'fixed_ips': [FAKE_IP['IPv4'], FAKE_IP['IPv6']], 'security_groups': [sg_id], 'security_group_source_groups': [sg_id]} def _fake_sg_rule_for_ethertype(self, ethertype, remote_group): return {'direction': 'ingress', 'remote_group_id': remote_group, 'ethertype': ethertype} def _fake_sg_rules(self, sg_id=FAKE_SGID, remote_groups=None): remote_groups = remote_groups or {_IPv4: [FAKE_SGID], _IPv6: [FAKE_SGID]} rules = [] for ip_version, remote_group_list in six.iteritems(remote_groups): for remote_group in remote_group_list: rules.append(self._fake_sg_rule_for_ethertype(ip_version, remote_group)) return {sg_id: rules} def _fake_sg_members(self, sg_ids=None): return {sg_id: copy.copy(FAKE_IP) for sg_id in (sg_ids or [FAKE_SGID])} def test_update_security_group_members(self): sg_members = {'IPv4': ['10.0.0.1', '10.0.0.2'], 'IPv6': ['fe80::1']} self.firewall.update_security_group_members('fake_sgid', sg_members) calls = [ mock.call.set_members('fake_sgid', 'IPv4', ['10.0.0.1', '10.0.0.2']), mock.call.set_members('fake_sgid', 'IPv6', ['fe80::1']) ] self.firewall.ipset.assert_has_calls(calls, any_order=True) def _setup_fake_firewall_members_and_rules(self, firewall): firewall.sg_rules = self._fake_sg_rules() firewall.pre_sg_rules = self._fake_sg_rules() firewall.sg_members = self._fake_sg_members() firewall.pre_sg_members = firewall.sg_members def _prepare_rules_and_members_for_removal(self): self._setup_fake_firewall_members_and_rules(self.firewall) self.firewall.pre_sg_members[OTHER_SGID] = ( self.firewall.pre_sg_members[FAKE_SGID]) def test_determine_remote_sgs_to_remove(self): self._prepare_rules_and_members_for_removal() ports = [self._fake_port()] self.assertEqual( {_IPv4: set([OTHER_SGID]), _IPv6: set([OTHER_SGID])}, self.firewall._determine_remote_sgs_to_remove(ports)) def test_determine_remote_sgs_to_remove_ipv6_unreferenced(self): 
self._prepare_rules_and_members_for_removal() ports = [self._fake_port()] self.firewall.sg_rules = self._fake_sg_rules( remote_groups={_IPv4: [OTHER_SGID, FAKE_SGID], _IPv6: [FAKE_SGID]}) self.assertEqual( {_IPv4: set(), _IPv6: set([OTHER_SGID])}, self.firewall._determine_remote_sgs_to_remove(ports)) def test_get_remote_sg_ids_by_ipversion(self): self.firewall.sg_rules = self._fake_sg_rules( remote_groups={_IPv4: [FAKE_SGID], _IPv6: [OTHER_SGID]}) ports = [self._fake_port()] self.assertEqual( {_IPv4: set([FAKE_SGID]), _IPv6: set([OTHER_SGID])}, self.firewall._get_remote_sg_ids_sets_by_ipversion(ports)) def test_get_remote_sg_ids(self): self.firewall.sg_rules = self._fake_sg_rules( remote_groups={_IPv4: [FAKE_SGID, FAKE_SGID, FAKE_SGID], _IPv6: [OTHER_SGID, OTHER_SGID, OTHER_SGID]}) port = self._fake_port() self.assertEqual( {_IPv4: set([FAKE_SGID]), _IPv6: set([OTHER_SGID])}, self.firewall._get_remote_sg_ids(port)) def test_determine_sg_rules_to_remove(self): self.firewall.pre_sg_rules = self._fake_sg_rules(sg_id=OTHER_SGID) ports = [self._fake_port()] self.assertEqual(set([OTHER_SGID]), self.firewall._determine_sg_rules_to_remove(ports)) def test_get_sg_ids_set_for_ports(self): sg_ids = set([FAKE_SGID, OTHER_SGID]) ports = [self._fake_port(sg_id) for sg_id in sg_ids] self.assertEqual(sg_ids, self.firewall._get_sg_ids_set_for_ports(ports)) def test_remove_sg_members(self): self.firewall.sg_members = self._fake_sg_members([FAKE_SGID, OTHER_SGID]) remote_sgs_to_remove = {_IPv4: set([FAKE_SGID]), _IPv6: set([FAKE_SGID, OTHER_SGID])} self.firewall._remove_sg_members(remote_sgs_to_remove) self.assertIn(OTHER_SGID, self.firewall.sg_members) self.assertNotIn(FAKE_SGID, self.firewall.sg_members) def test_remove_unused_security_group_info_clears_unused_rules(self): self._setup_fake_firewall_members_and_rules(self.firewall) self.firewall.prepare_port_filter(self._fake_port()) # create another SG which won't be referenced by any filtered port fake_sg_rules = 
self.firewall.sg_rules['fake_sgid'] self.firewall.pre_sg_rules[OTHER_SGID] = fake_sg_rules self.firewall.sg_rules[OTHER_SGID] = fake_sg_rules # call the cleanup function, and check the unused sg_rules are out self.firewall._remove_unused_security_group_info() self.assertNotIn(OTHER_SGID, self.firewall.sg_rules) def test_remove_unused_security_group_info(self): self.firewall.sg_members = {OTHER_SGID: {_IPv4: [], _IPv6: []}} self.firewall.pre_sg_members = self.firewall.sg_members self.firewall.sg_rules = self._fake_sg_rules( remote_groups={_IPv4: [FAKE_SGID], _IPv6: [FAKE_SGID]}) self.firewall.pre_sg_rules = self.firewall.sg_rules port = self._fake_port() self.firewall.filtered_ports['tapfake_dev'] = port self.firewall._remove_unused_security_group_info() self.assertNotIn(OTHER_SGID, self.firewall.sg_members) def test_not_remove_used_security_group_info(self): self.firewall.sg_members = {OTHER_SGID: {_IPv4: [], _IPv6: []}} self.firewall.pre_sg_members = self.firewall.sg_members self.firewall.sg_rules = self._fake_sg_rules( remote_groups={_IPv4: [OTHER_SGID], _IPv6: [OTHER_SGID]}) self.firewall.pre_sg_rules = self.firewall.sg_rules port = self._fake_port() self.firewall.filtered_ports['tapfake_dev'] = port self.firewall._remove_unused_security_group_info() self.assertIn(OTHER_SGID, self.firewall.sg_members) def test_remove_all_unused_info(self): self._setup_fake_firewall_members_and_rules(self.firewall) self.firewall.filtered_ports = {} self.firewall._remove_unused_security_group_info() self.assertFalse(self.firewall.sg_members) self.assertFalse(self.firewall.sg_rules) def test_single_fallback_accept_rule(self): p1, p2 = self._fake_port(), self._fake_port() self.firewall._setup_chains_apply(dict(p1=p1, p2=p2), {}) v4_adds = self.firewall.iptables.ipv4['filter'].add_rule.mock_calls v6_adds = self.firewall.iptables.ipv6['filter'].add_rule.mock_calls sg_chain_v4_accept = [call for call in v4_adds if call == mock.call('sg-chain', '-j ACCEPT')] sg_chain_v6_accept = [call 
for call in v6_adds if call == mock.call('sg-chain', '-j ACCEPT')] self.assertEqual(1, len(sg_chain_v4_accept)) self.assertEqual(1, len(sg_chain_v6_accept)) def test_remove_port_filter_with_destroy_ipset_chain(self): self.firewall.sg_rules = self._fake_sg_rules() port = self._fake_port() self.firewall.pre_sg_members = {'fake_sgid': { 'IPv4': [], 'IPv6': []}} sg_members = {'IPv4': ['10.0.0.1'], 'IPv6': ['fe80::1']} self.firewall.update_security_group_members('fake_sgid', sg_members) self.firewall.prepare_port_filter(port) self.firewall.filter_defer_apply_on() self.firewall.sg_members = {'fake_sgid': { 'IPv4': [], 'IPv6': []}} self.firewall.pre_sg_members = {'fake_sgid': { 'IPv4': ['10.0.0.1'], 'IPv6': ['fe80::1']}} self.firewall.remove_port_filter(port) self.firewall.filter_defer_apply_off() calls = [ mock.call.set_members('fake_sgid', 'IPv4', ['10.0.0.1']), mock.call.set_members('fake_sgid', 'IPv6', ['fe80::1']), mock.call.get_name('fake_sgid', 'IPv4'), mock.call.set_name_exists('NIPv4fake_sgid'), mock.call.get_name('fake_sgid', 'IPv6'), mock.call.set_name_exists('NIPv6fake_sgid'), mock.call.destroy('fake_sgid', 'IPv4'), mock.call.destroy('fake_sgid', 'IPv6')] self.firewall.ipset.assert_has_calls(calls, any_order=True) def test_filter_defer_apply_off_with_sg_only_ipv6_rule(self): self.firewall.sg_rules = self._fake_sg_rules() self.firewall.pre_sg_rules = self._fake_sg_rules() self.firewall.ipset_chains = {'IPv4fake_sgid': ['10.0.0.2'], 'IPv6fake_sgid': ['fe80::1']} self.firewall.sg_members = {'fake_sgid': { 'IPv4': ['10.0.0.2'], 'IPv6': ['fe80::1']}} self.firewall.pre_sg_members = {'fake_sgid': { 'IPv4': ['10.0.0.2'], 'IPv6': ['fe80::1']}} self.firewall.sg_rules['fake_sgid'].remove( {'direction': 'ingress', 'remote_group_id': 'fake_sgid', 'ethertype': 'IPv4'}) self.firewall.sg_rules.update() self.firewall._defer_apply = True port = self._fake_port() self.firewall.filtered_ports['tapfake_dev'] = port self.firewall._pre_defer_filtered_ports = {} 
self.firewall._pre_defer_unfiltered_ports = {} self.firewall.filter_defer_apply_off() calls = [mock.call.destroy('fake_sgid', 'IPv4')] self.firewall.ipset.assert_has_calls(calls, True) def test_sg_rule_expansion_with_remote_ips(self): other_ips = ['10.0.0.2', '10.0.0.3', '10.0.0.4'] self.firewall.sg_members = {'fake_sgid': { 'IPv4': [FAKE_IP['IPv4']] + other_ips, 'IPv6': [FAKE_IP['IPv6']]}} port = self._fake_port() rule = self._fake_sg_rule_for_ethertype(_IPv4, FAKE_SGID) rules = self.firewall._expand_sg_rule_with_remote_ips( rule, port, 'ingress') self.assertEqual(list(rules), [dict(list(rule.items()) + [('source_ip_prefix', '%s/32' % ip)]) for ip in other_ips]) def test_build_ipv4v6_mac_ip_list(self): mac_oth = 'ffff-ff0f-ffff' mac_unix = 'FF:FF:FF:0F:FF:FF' ipv4 = FAKE_IP['IPv4'] ipv6 = FAKE_IP['IPv6'] fake_ipv4_pair = [] fake_ipv4_pair.append((mac_unix, ipv4)) fake_ipv6_pair = [] fake_ipv6_pair.append((mac_unix, ipv6)) fake_ipv6_pair.append((mac_unix, 'fe80::fdff:ffff:fe0f:ffff')) mac_ipv4_pairs = [] mac_ipv6_pairs = [] self.firewall._build_ipv4v6_mac_ip_list(mac_oth, ipv4, mac_ipv4_pairs, mac_ipv6_pairs) self.assertEqual(fake_ipv4_pair, mac_ipv4_pairs) self.firewall._build_ipv4v6_mac_ip_list(mac_oth, ipv6, mac_ipv4_pairs, mac_ipv6_pairs) self.assertEqual(fake_ipv6_pair, mac_ipv6_pairs) class OVSHybridIptablesFirewallTestCase(BaseIptablesFirewallTestCase): def setUp(self): super(OVSHybridIptablesFirewallTestCase, self).setUp() self.firewall = iptables_firewall.OVSHybridIptablesFirewallDriver() # initial data has 1, 2, and 9 in use, see RAW_TABLE_OUTPUT above. self._dev_zone_map = {'61634509-31': 2, '8f46cf18-12': 9, '95c24827-02': 2, 'e804433b-61': 1} def test__populate_initial_zone_map(self): self.assertEqual(self._dev_zone_map, self.firewall._device_zone_map) def test__generate_device_zone(self): # initial data has 1, 2, and 9 in use. # we fill from top up first. 
self.assertEqual(10, self.firewall._generate_device_zone('test')) # once it's maxed out, it scans for gaps self.firewall._device_zone_map['someport'] = ( iptables_firewall.MAX_CONNTRACK_ZONES) for i in range(3, 9): self.assertEqual(i, self.firewall._generate_device_zone(i)) # 9 and 10 are taken so next should be 11 self.assertEqual(11, self.firewall._generate_device_zone('p11')) # take out zone 1 and make sure it's selected self.firewall._device_zone_map.pop('e804433b-61') self.assertEqual(1, self.firewall._generate_device_zone('p1')) # fill it up and then make sure an extra throws an error for i in range(1, 65536): self.firewall._device_zone_map['dev-%s' % i] = i with testtools.ExpectedException(n_exc.CTZoneExhaustedError): self.firewall._find_open_zone() # with it full, try again, this should trigger a cleanup and return 1 self.assertEqual(1, self.firewall._generate_device_zone('p12')) self.assertEqual({'p12': 1}, self.firewall._device_zone_map) def test_get_device_zone(self): # initial data has 1, 2, and 9 in use. self.assertEqual(10, self.firewall.get_device_zone('12345678901234567')) # should have been truncated to 11 chars self._dev_zone_map.update({'12345678901': 10}) self.assertEqual(self._dev_zone_map, self.firewall._device_zone_map)<|fim▁end|>
egress = None self._test_prepare_port_filter(rule, ingress, egress)
<|file_name|>Phoenician-symbols.js<|end_file_name|><|fim▁begin|>// All symbols in the Phoenician block as per Unicode v6.1.0: [ '\uD802\uDD00', '\uD802\uDD01', '\uD802\uDD02', '\uD802\uDD03', '\uD802\uDD04', '\uD802\uDD05', '\uD802\uDD06', '\uD802\uDD07', '\uD802\uDD08', '\uD802\uDD09', '\uD802\uDD0A', '\uD802\uDD0B', '\uD802\uDD0C', '\uD802\uDD0D', '\uD802\uDD0E', '\uD802\uDD0F', '\uD802\uDD10', '\uD802\uDD11', '\uD802\uDD12', '\uD802\uDD13', '\uD802\uDD14', '\uD802\uDD15', '\uD802\uDD16', '\uD802\uDD17', '\uD802\uDD18', '\uD802\uDD19', '\uD802\uDD1A', '\uD802\uDD1B',<|fim▁hole|> '\uD802\uDD1C', '\uD802\uDD1D', '\uD802\uDD1E', '\uD802\uDD1F' ];<|fim▁end|>
<|file_name|>bifibits.py<|end_file_name|><|fim▁begin|>#vim import sqlite3 from flask import Flask, request, g, redirect, url_for, abort, \ render_template, flash, session from wtforms import Form, TextField, validators<|fim▁hole|> #configuration DATABASE = 'bifi.db' DEBUG = True SECRET_KEY = "CHANGEME" app = Flask(__name__) app.config.from_object(__name__) db_filename = os.path.abspath(app.config['DATABASE']) connection_string = 'sqlite:' + db_filename connection = connectionForURI(connection_string) sqlhub.processConnection = connection class SubmitForm(Form): link = TextField('Torrent- or Magnet-URL') def connect_db(): return sqlite3.connect(app.config['DATABASE']) @app.before_request def before_request(): g.db = connect_db() @app.teardown_request def teardown_request(exception): g.db.close() @app.route('/') def index(): torrents = QueueEntry.select().reversed() return render_template('index.html', torrents=torrents) @app.route('/add', methods=['GET', 'POST']) def add(): form = SubmitForm(request.form) if request.method == "POST" and form.validate(): print form.link entry = QueueEntry(torrent=form.link.data) flash('Wurde ja Zeit') return render_template('add.html', form=form) if __name__ == '__main__': app.run()<|fim▁end|>
from model import QueueEntry import os from sqlobject import connectionForURI, sqlhub
<|file_name|>app-routing.module.ts<|end_file_name|><|fim▁begin|>import { NgModule } from '@angular/core'; import { RouterModule, Routes } from '@angular/router'; import { DashboardComponent } from './components/dashboard/dashboard.component'; import { CityComponent } from './components/city/city.component'; import { TrailListComponent } from './components/trail/trail-list.component'; import { TrailDetailComponent } from './components/trail/trail-detail.component'; import { NewTrailComponent } from './components/trail/new-trail.component'; import { AuthGuard } from './services/auth-guard.service'; const routes: Routes = [<|fim▁hole|> { path: 'trail/:id/:title', component: TrailDetailComponent }, { path: 'trail/new', component: NewTrailComponent, canActivate: [AuthGuard] } ]; @NgModule({ imports: [ RouterModule.forRoot(routes) ], exports: [ RouterModule ] }) export class AppRoutingModule {}<|fim▁end|>
{ path: '', redirectTo: '/dashboard', pathMatch: 'full' }, { path: 'dashboard', component: DashboardComponent }, { path: 'area/:id/:title', component: CityComponent }, { path: 'area/:id/trails', component: TrailListComponent },
<|file_name|>enum.py<|end_file_name|><|fim▁begin|># Copyright 2016 Quora, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Enum implementation. """ __all__ = ["Enum", "EnumType", "EnumValueGenerator", "Flags", "IntEnum"] import inspect import sys from . import helpers from . import inspection _no_default = helpers.MarkerObject("no_default @ enums") class EnumType(type): """Metaclass for all enum types.""" def __init__(cls, what, bases=None, dict=None): super().__init__(what, bases, dict) cls.process() def __contains__(self, k): return k in self._value_to_name def __len__(self): return len(self._members) def __iter__(self): return iter(self._members) def __call__(self, value, default=_no_default): """Instantiating an Enum always produces an existing value or throws an exception.""" return self.parse(value, default=default) def process(self): name_to_member = {} value_to_member = {} value_to_name = {} flag_values = [] members = [] for k, v in list(inspect.getmembers(self)): # ensure that names are unicode, even in py2 if isinstance(k, bytes): k = k.decode("ascii") if isinstance(type(v), EnumType): v = v.value # For inherited members if isinstance(v, int): assert ( v not in value_to_member ), "Duplicate enum value: %s (class: %s)." 
% ( v, inspection.get_full_name(self), ) member = self._make_value(v) name_to_member[k] = member value_to_member[v] = member value_to_name[v] = k if v != 0: flag_values.append(v)<|fim▁hole|> members.append(member) self._name_to_member = name_to_member self._value_to_member = value_to_member self._value_to_name = value_to_name self._flag_values = list(reversed(sorted(flag_values))) self._members = sorted(members, key=lambda m: m.value) for m in members: setattr(self, m.short_name, m) def _make_value(self, value): """Instantiates an enum with an arbitrary value.""" member = self.__new__(self, value) member.__init__(value) return member # Needed bcz of a six bug: https://github.com/benjaminp/six/issues/252 @classmethod def __prepare__(cls, name, bases, **kwargs): return {} class EnumBase(metaclass=EnumType): _name_to_member = {} _value_to_member = {} _value_to_name = {} _flag_values = [] _members = [] def __init__(self, value): self.value = int(value) @property def short_name(self): """Returns the enum member's name, like "foo".""" raise NotImplementedError @property def long_name(self): """Returns the enum member's name including the class name, like "MyEnum.foo".""" return "%s.%s" % (self.__class__.__name__, self.short_name) @property def title(self): """Returns the enum member's name in title case, like "FooBar" for MyEnum.foo_bar.""" return self.short_name.replace("_", " ").title() @property def full_name(self): """Returns the enum meber's name including the module, like "mymodule.MyEnum.foo".""" return "%s.%s" % (self.__class__.__module__, self.long_name) def is_valid(self): raise NotImplementedError def assert_valid(self): if not self.is_valid(): raise _create_invalid_value_error(self.__class__, self.value) def __int__(self): return self.value def __call__(self): return self.value def __eq__(self, other): return self.value == other def __ne__(self, other): return self.value != other def __hash__(self): return hash(self.value) def __str__(self): if 
self.is_valid(): return self.short_name else: return "%s(%s)" % (self.__class__.__name__, self.value) def __repr__(self): if self.is_valid(): return self.__class__.__name__ + "." + self.short_name else: return "%s(%s)" % (self.__class__.__name__, self.value) @classmethod def get_names(cls): """Returns the names of all members of this enum.""" return [m.short_name for m in cls._members] @classmethod def get_members(cls): return cls._members @classmethod def create(cls, name, members): """Creates a new enum type based on this one (cls) and adds newly passed members to the newly created subclass of cls. This method helps to create enums having the same member values as values of other enum(s). :param name: name of the newly created type :param members: 1) a dict or 2) a list of (name, value) tuples and/or EnumBase instances describing new members :return: newly created enum type. """ NewEnum = type(name, (cls,), {}) if isinstance(members, dict): members = members.items() for member in members: if isinstance(member, tuple): name, value = member setattr(NewEnum, name, value) elif isinstance(member, EnumBase): setattr(NewEnum, member.short_name, member.value) else: assert False, ( "members must be either a dict, " + "a list of (name, value) tuples, " + "or a list of EnumBase instances." 
) NewEnum.process() # needed for pickling to work (hopefully); taken from the namedtuple implementation in the # standard library try: NewEnum.__module__ = sys._getframe(1).f_globals.get("__name__", "__main__") except (AttributeError, ValueError): pass return NewEnum @classmethod def parse(cls, value, default=_no_default): """Parses a value into a member of this enum.""" raise NotImplementedError def __reduce_ex__(self, proto): return self.__class__, (self.value,) class Enum(EnumBase): def is_valid(self): return self.value in self._value_to_member @property def short_name(self): self.assert_valid() return self._value_to_name[self.value] @classmethod def parse(cls, value, default=_no_default): """Parses an enum member name or value into an enum member. Accepts the following types: - Members of this enum class. These are returned directly. - Integers. If there is an enum member with the integer as a value, that member is returned. - Strings. If there is an enum member with the string as its name, that member is returned. For integers and strings that don't correspond to an enum member, default is returned; if no default is given the function raises KeyError instead. Examples: >>> class Color(Enum): ... red = 1 ... 
blue = 2 >>> Color.parse(Color.red) Color.red >>> Color.parse(1) Color.red >>> Color.parse('blue') Color.blue """ if isinstance(value, cls): return value elif isinstance(value, int) and not isinstance(value, EnumBase): e = cls._value_to_member.get(value, _no_default) else: e = cls._name_to_member.get(value, _no_default) if e is _no_default or not e.is_valid(): if default is _no_default: raise _create_invalid_value_error(cls, value) return default return e class Flags(EnumBase): def is_valid(self): value = self.value for v in self._flag_values: if (v | value) == value: value ^= v return value == 0 @property def short_name(self): self.assert_valid() result = [] l = self.value for v in self._flag_values: if (v | l) == l: l ^= v result.append(self._value_to_name[v]) if not result: if 0 in self._value_to_name: return self._value_to_name[0] else: return "" return ",".join(result) @classmethod def parse(cls, value, default=_no_default): """Parses a flag integer or string into a Flags instance. Accepts the following types: - Members of this enum class. These are returned directly. - Integers. These are converted directly into a Flags instance with the given name. - Strings. The function accepts a comma-delimited list of flag names, corresponding to members of the enum. These are all ORed together. Examples: >>> class Car(Flags): ... is_big = 1 ... 
has_wheels = 2 >>> Car.parse(1) Car.is_big >>> Car.parse(3) Car.parse('has_wheels,is_big') >>> Car.parse('is_big,has_wheels') Car.parse('has_wheels,is_big') """ if isinstance(value, cls): return value elif isinstance(value, int): e = cls._make_value(value) else: if not value: e = cls._make_value(0) else: r = 0 for k in value.split(","): v = cls._name_to_member.get(k, _no_default) if v is _no_default: if default is _no_default: raise _create_invalid_value_error(cls, value) else: return default r |= v.value e = cls._make_value(r) if not e.is_valid(): if default is _no_default: raise _create_invalid_value_error(cls, value) return default return e def __contains__(self, item): item = int(item) if item == 0: return True return item == (self.value & item) def __or__(self, other): return self.__class__(self.value | int(other)) def __and__(self, other): return self.__class__(self.value & int(other)) def __xor__(self, other): return self.__class__(self.value ^ int(other)) def __repr__(self): if self.is_valid(): name = self.short_name if "," in name: return "%s.parse('%s')" % (self.__class__.__name__, self.short_name) else: return self.__class__.__name__ + "." + self.short_name else: return "%s(%s)" % (self.__class__.__name__, self.value) class IntEnum(int, Enum): """Enum subclass that offers more compatibility with int.""" def __repr__(self): return Enum.__repr__(self) class EnumValueGenerator(object): def __init__(self, start=1): self._next_value = start def reset(self, start=1): self._next_value = start def next(self): result = self._next_value self._next_value += 1 return result def __call__(self): return self.next() def __repr__(self): return "%s(%r)" % (self.__class__.__name__, self._next_value) # Private part def _create_invalid_value_error(cls, value): return KeyError("Invalid %s value: %r" % (inspection.get_full_name(cls), value))<|fim▁end|>
/*
 * Copyright 2015 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.cloud.bigtable.hbase;

import static com.google.cloud.bigtable.hbase.test_env.SharedTestEnvRule.COLUMN_FAMILY;

import java.io.IOException;
import java.util.List;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.junit.Assert;
import org.junit.Test;

/**
 * Synchronous {@code Admin}-based implementation of {@link AbstractTestCreateTable}.
 *
 * <p>Every abstract hook delegates to {@code getConnection().getAdmin()}, exercising the
 * blocking HBase admin API against the shared test environment. Uses the deprecated
 * {@link HTableDescriptor}/{@link HColumnDescriptor} builders, hence the class-level
 * {@code @SuppressWarnings("deprecation")}.
 */
@SuppressWarnings("deprecation")
public class TestCreateTable extends AbstractTestCreateTable {

  /** Creates {@code tableName} with a single default column family. */
  @Override
  protected void createTable(TableName tableName) throws IOException {
    getConnection().getAdmin().createTable(createDescriptor(tableName));
  }

  /** Creates {@code tableName} pre-split into {@code splitCount} regions between {@code start} and {@code end}. */
  @Override
  protected void createTable(TableName tableName, byte[] start, byte[] end, int splitCount)
      throws IOException {
    getConnection().getAdmin().createTable(createDescriptor(tableName), start, end, splitCount);
  }

  /** Creates {@code tableName} pre-split at each of the given split keys. */
  @Override
  protected void createTable(TableName tableName, byte[][] ranges) throws IOException {
    getConnection().getAdmin().createTable(createDescriptor(tableName), ranges);
  }

  /** Builds a one-family descriptor using the shared test environment's column family. */
  private HTableDescriptor createDescriptor(TableName tableName) {
    return new HTableDescriptor(tableName).addFamily(new HColumnDescriptor(COLUMN_FAMILY));
  }

  @Override
  protected List<HRegionLocation> getRegions(TableName tableName) throws Exception {
    return getConnection().getRegionLocator(tableName).getAllRegionLocations();
  }

  /** A freshly created (unsplit) table must report exactly one region. */
  @Test
  public void testGetRegions() throws Exception {
    TableName tableName = sharedTestEnv.newTestTableName();
    getConnection().getAdmin().createTable(createDescriptor(tableName));
    List<RegionInfo> regions = getConnection().getAdmin().getRegions(tableName);
    Assert.assertEquals(1, regions.size());
  }

  /** Returns whether the table reports exactly one region via the admin API. */
  @Override
  protected boolean asyncGetRegions(TableName tableName) throws Exception {
    // The boolean comparison already yields the result; the former
    // "== 1 ? true : false" ternary was redundant.
    return getConnection().getAdmin().getRegions(tableName).size() == 1;
  }

  @Override
  protected boolean isTableEnabled(TableName tableName) throws Exception {
    return getConnection().getAdmin().isTableEnabled(tableName);
  }

  @Override
  protected void disableTable(TableName tableName) throws Exception {
    getConnection().getAdmin().disableTable(tableName);
  }

  @Override
  protected void adminDeleteTable(TableName tableName) throws Exception {
    getConnection().getAdmin().deleteTable(tableName);
  }

  @Override
  protected boolean tableExists(TableName tableName) throws Exception {
    return getConnection().getAdmin().tableExists(tableName);
  }
}
<|file_name|>TestXMLNode.java<|end_file_name|><|fim▁begin|>/* * @file TestXMLNode.java * @brief XMLNode unit tests * * @author Akiya Jouraku (Java conversion) * @author Michael Hucka <[email protected]> * * $Id: TestXMLNode.java 11442 2010-07-09 02:23:35Z mhucka $ * $HeadURL: https://sbml.svn.sourceforge.net/svnroot/sbml/trunk/libsbml/src/bindings/java/test/org/sbml/libsbml/test/xml/TestXMLNode.java $ * * ====== WARNING ===== WARNING ===== WARNING ===== WARNING ===== WARNING ====== * * DO NOT EDIT THIS FILE. * * This file was generated automatically by converting the file located at * src/xml/test/TestXMLNode.c * using the conversion program dev/utilities/translateTests/translateTests.pl. * Any changes made here will be lost the next time the file is regenerated. * * ----------------------------------------------------------------------------- * This file is part of libSBML. Please visit http://sbml.org for more * information about SBML, and the latest version of libSBML. * * Copyright 2005-2010 California Institute of Technology. * Copyright 2002-2005 California Institute of Technology and * Japan Science and Technology Corporation. * * This library is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation. 
A copy of the license agreement is provided * in the file named "LICENSE.txt" included with this software distribution * and also available online as http://sbml.org/software/libsbml/license.html * ----------------------------------------------------------------------------- */ package org.sbml.libsbml.test.xml; import org.sbml.libsbml.*; import java.io.File; import java.lang.AssertionError; public class TestXMLNode { static void assertTrue(boolean condition) throws AssertionError { if (condition == true) { return; } throw new AssertionError(); } static void assertEquals(Object a, Object b) throws AssertionError { if ( (a == null) && (b == null) ) { return; } else if ( (a == null) || (b == null) ) { throw new AssertionError(); } else if (a.equals(b)) { return; } throw new AssertionError(); } static void assertNotEquals(Object a, Object b) throws AssertionError { if ( (a == null) && (b == null) ) { throw new AssertionError(); } else if ( (a == null) || (b == null) ) { return; } else if (a.equals(b)) { throw new AssertionError(); } } static void assertEquals(boolean a, boolean b) throws AssertionError { if ( a == b ) { return; } throw new AssertionError(); } static void assertNotEquals(boolean a, boolean b) throws AssertionError { if ( a != b ) { return; } throw new AssertionError(); } static void assertEquals(int a, int b) throws AssertionError { if ( a == b ) { return; } throw new AssertionError(); } static void assertNotEquals(int a, int b) throws AssertionError { if ( a != b ) { return; } throw new AssertionError(); } public void test_XMLNode_attribute_add_remove() { XMLTriple triple = new XMLTriple("test","",""); XMLAttributes attr = new XMLAttributes(); XMLNode node = new XMLNode(triple,attr); XMLTriple xt1 = new XMLTriple("name1", "http://name1.org/", "p1"); XMLTriple xt2 = new XMLTriple("name2", "http://name2.org/", "p2"); XMLTriple xt3 = new XMLTriple("name3", "http://name3.org/", "p3"); XMLTriple xt1a = new XMLTriple("name1", "http://name1a.org/", "p1a"); 
XMLTriple xt2a = new XMLTriple("name2", "http://name2a.org/", "p2a"); node.addAttr( "name1", "val1", "http://name1.org/", "p1"); node.addAttr(xt2, "val2"); assertTrue( node.getAttributesLength() == 2 ); assertTrue( node.isAttributesEmpty() == false ); assertTrue( !node.getAttrName(0).equals( "name1") == false ); assertTrue( !node.getAttrValue(0).equals( "val1" ) == false ); assertTrue( !node.getAttrURI(0).equals( "http://name1.org/") == false ); assertTrue( !node.getAttrPrefix(0).equals( "p1" ) == false ); assertTrue( !node.getAttrName(1).equals( "name2") == false ); assertTrue( !node.getAttrValue(1).equals( "val2" ) == false ); assertTrue( !node.getAttrURI(1).equals( "http://name2.org/") == false ); assertTrue( !node.getAttrPrefix(1).equals( "p2" ) == false ); assertTrue( node.getAttrValue( "name1").equals("") == true ); assertTrue( node.getAttrValue( "name2").equals("") == true ); assertTrue( !node.getAttrValue( "name1", "http://name1.org/").equals( "val1" ) == false ); assertTrue( !node.getAttrValue( "name2", "http://name2.org/").equals( "val2" ) == false ); assertTrue( !node.getAttrValue(xt1).equals( "val1" ) == false ); assertTrue( !node.getAttrValue(xt2).equals( "val2" ) == false ); assertTrue( node.hasAttr(-1) == false ); assertTrue( node.hasAttr(2) == false ); assertTrue( node.hasAttr(0) == true ); assertTrue( node.hasAttr( "name1", "http://name1.org/") == true ); assertTrue( node.hasAttr( "name2", "http://name2.org/") == true ); assertTrue( node.hasAttr( "name3", "http://name3.org/") == false ); assertTrue( node.hasAttr(xt1) == true ); assertTrue( node.hasAttr(xt2) == true ); assertTrue( node.hasAttr(xt3) == false ); node.addAttr( "noprefix", "val3"); assertTrue( node.getAttributesLength() == 3 ); assertTrue( node.isAttributesEmpty() == false ); assertTrue( !node.getAttrName(2).equals( "noprefix") == false ); assertTrue( !node.getAttrValue(2).equals( "val3" ) == false ); assertTrue( node.getAttrURI(2).equals("") == true ); assertTrue( 
node.getAttrPrefix(2).equals("") == true ); assertTrue( !node.getAttrValue( "noprefix").equals( "val3" ) == false ); assertTrue( !node.getAttrValue( "noprefix", "").equals( "val3" ) == false ); assertTrue( node.hasAttr( "noprefix" ) == true ); assertTrue( node.hasAttr( "noprefix", "") == true ); node.addAttr(xt1, "mval1"); node.addAttr( "name2", "mval2", "http://name2.org/", "p2"); assertTrue( node.getAttributesLength() == 3 ); assertTrue( node.isAttributesEmpty() == false ); assertTrue( !node.getAttrName(0).equals( "name1") == false ); assertTrue( !node.getAttrValue(0).equals( "mval1") == false ); assertTrue( !node.getAttrURI(0).equals( "http://name1.org/") == false ); assertTrue( !node.getAttrPrefix(0).equals( "p1" ) == false ); assertTrue( !node.getAttrName(1).equals( "name2" ) == false ); assertTrue( !node.getAttrValue(1).equals( "mval2" ) == false ); assertTrue( !node.getAttrURI(1).equals( "http://name2.org/") == false ); assertTrue( !node.getAttrPrefix(1).equals( "p2" ) == false ); assertTrue( node.hasAttr(xt1) == true ); assertTrue( node.hasAttr( "name1", "http://name1.org/") == true ); node.addAttr( "noprefix", "mval3"); assertTrue( node.getAttributesLength() == 3 ); assertTrue( node.isAttributesEmpty() == false ); assertTrue( !node.getAttrName(2).equals( "noprefix") == false ); assertTrue( !node.getAttrValue(2).equals( "mval3" ) == false ); assertTrue( node.getAttrURI(2).equals("") == true ); assertTrue( node.getAttrPrefix(2).equals("") == true ); assertTrue( node.hasAttr( "noprefix") == true ); assertTrue( node.hasAttr( "noprefix", "") == true ); node.addAttr(xt1a, "val1a"); node.addAttr(xt2a, "val2a"); assertTrue( node.getAttributesLength() == 5 ); assertTrue( !node.getAttrName(3).equals( "name1") == false ); assertTrue( !node.getAttrValue(3).equals( "val1a") == false ); assertTrue( !node.getAttrURI(3).equals( "http://name1a.org/") == false ); assertTrue( !node.getAttrPrefix(3).equals( "p1a") == false ); assertTrue( !node.getAttrName(4).equals( "name2") 
== false ); assertTrue( !node.getAttrValue(4).equals( "val2a") == false ); assertTrue( !node.getAttrURI(4).equals( "http://name2a.org/") == false ); assertTrue( !node.getAttrPrefix(4).equals( "p2a") == false ); assertTrue( !node.getAttrValue( "name1", "http://name1a.org/").equals( "val1a" ) == false ); assertTrue( !node.getAttrValue( "name2", "http://name2a.org/").equals( "val2a" ) == false ); assertTrue( !node.getAttrValue(xt1a).equals( "val1a" ) == false ); assertTrue( !node.getAttrValue(xt2a).equals( "val2a" ) == false ); node.removeAttr(xt1a); node.removeAttr(xt2a); assertTrue( node.getAttributesLength() == 3 ); node.removeAttr( "name1", "http://name1.org/"); assertTrue( node.getAttributesLength() == 2 ); assertTrue( node.isAttributesEmpty() == false ); assertTrue( !node.getAttrName(0).equals( "name2") == false ); assertTrue( !node.getAttrValue(0).equals( "mval2") == false ); assertTrue( !node.getAttrURI(0).equals( "http://name2.org/") == false ); assertTrue( !node.getAttrPrefix(0).equals( "p2") == false ); assertTrue( !node.getAttrName(1).equals( "noprefix") == false ); assertTrue( !node.getAttrValue(1).equals( "mval3") == false ); assertTrue( node.getAttrURI(1).equals("") == true ); assertTrue( node.getAttrPrefix(1).equals("") == true ); assertTrue( node.hasAttr( "name1", "http://name1.org/") == false ); node.removeAttr(xt2); assertTrue( node.getAttributesLength() == 1 ); assertTrue( node.isAttributesEmpty() == false ); assertTrue( !node.getAttrName(0).equals( "noprefix") == false ); assertTrue( !node.getAttrValue(0).equals( "mval3") == false ); assertTrue( node.getAttrURI(0).equals("") == true ); assertTrue( node.getAttrPrefix(0).equals("") == true ); assertTrue( node.hasAttr(xt2) == false ); assertTrue( node.hasAttr( "name2", "http://name2.org/") == false ); node.removeAttr( "noprefix"); assertTrue( node.getAttributesLength() == 0 ); assertTrue( node.isAttributesEmpty() == true ); assertTrue( node.hasAttr( "noprefix" ) == false ); assertTrue( node.hasAttr( 
"noprefix", "") == false ); node = null; xt1 = null; xt2 = null; xt3 = null; xt1a = null; xt2a = null; triple = null; attr = null; } public void test_XMLNode_attribute_set_clear() { XMLTriple triple = new XMLTriple("test","",""); XMLAttributes attr = new XMLAttributes(); XMLNode node = new XMLNode(triple,attr); XMLAttributes nattr = new XMLAttributes(); XMLTriple xt1 = new XMLTriple("name1", "http://name1.org/", "p1"); XMLTriple xt2 = new XMLTriple("name2", "http://name2.org/", "p2"); XMLTriple xt3 = new XMLTriple("name3", "http://name3.org/", "p3"); XMLTriple xt4 = new XMLTriple("name4", "http://name4.org/", "p4"); XMLTriple xt5 = new XMLTriple("name5", "http://name5.org/", "p5"); nattr.add(xt1, "val1"); nattr.add(xt2, "val2"); nattr.add(xt3, "val3"); nattr.add(xt4, "val4"); nattr.add(xt5, "val5"); node.setAttributes(nattr); assertTrue( node.getAttributesLength() == 5 ); assertTrue( node.isAttributesEmpty() == false ); assertTrue( !node.getAttrName(0).equals( "name1") == false ); assertTrue( !node.getAttrValue(0).equals( "val1" ) == false ); assertTrue( !node.getAttrURI(0).equals( "http://name1.org/") == false ); assertTrue( !node.getAttrPrefix(0).equals( "p1" ) == false ); assertTrue( !node.getAttrName(1).equals( "name2") == false ); assertTrue( !node.getAttrValue(1).equals( "val2" ) == false ); assertTrue( !node.getAttrURI(1).equals( "http://name2.org/") == false ); assertTrue( !node.getAttrPrefix(1).equals( "p2" ) == false ); assertTrue( !node.getAttrName(2).equals( "name3") == false ); assertTrue( !node.getAttrValue(2).equals( "val3" ) == false ); assertTrue( !node.getAttrURI(2).equals( "http://name3.org/") == false ); assertTrue( !node.getAttrPrefix(2).equals( "p3" ) == false ); assertTrue( !node.getAttrName(3).equals( "name4") == false ); assertTrue( !node.getAttrValue(3).equals( "val4" ) == false ); assertTrue( !node.getAttrURI(3).equals( "http://name4.org/") == false ); assertTrue( !node.getAttrPrefix(3).equals( "p4" ) == false ); assertTrue( 
!node.getAttrName(4).equals( "name5") == false ); assertTrue( !node.getAttrValue(4).equals( "val5" ) == false ); assertTrue( !node.getAttrURI(4).equals( "http://name5.org/") == false ); assertTrue( !node.getAttrPrefix(4).equals( "p5" ) == false ); XMLTriple ntriple = new XMLTriple("test2","http://test2.org/","p2"); node.setTriple(ntriple); assertTrue( !node.getName().equals( "test2") == false ); assertTrue( !node.getURI().equals( "http://test2.org/") == false ); assertTrue( !node.getPrefix().equals( "p2") == false ); node.clearAttributes(); assertTrue( node.getAttributesLength() == 0 ); assertTrue( node.isAttributesEmpty() != false ); triple = null; ntriple = null; node = null; attr = null; nattr = null; xt1 = null; xt2 = null; xt3 = null; xt4 = null; xt5 = null; } public void test_XMLNode_convert() { String xmlstr = "<annotation>\n" + " <test xmlns=\"http://test.org/\" id=\"test\">test</test>\n" + "</annotation>"; XMLNode node; XMLNode child, gchild; XMLAttributes attr; XMLNamespaces ns; node = XMLNode.convertStringToXMLNode(xmlstr,null); child = node.getChild(0); gchild = child.getChild(0); attr = child.getAttributes(); ns = child.getNamespaces(); assertTrue( !node.getName().equals( "annotation") == false ); assertTrue( !child.getName().equals("test" ) == false ); assertTrue( !gchild.getCharacters().equals("test" ) == false ); assertTrue( !attr.getName(0).equals( "id" ) == false ); assertTrue( !attr.getValue(0).equals( "test" ) == false ); assertTrue( !ns.getURI(0).equals( "http://test.org/" ) == false ); assertTrue( ns.getPrefix(0).equals("") == true ); String toxmlstring = node.toXMLString(); assertTrue( !toxmlstring.equals(xmlstr) == false ); node = null; } public void test_XMLNode_convert_dummyroot() { String xmlstr_nodummy1 = "<notes>\n" + " <p>test</p>\n" + "</notes>"; String xmlstr_nodummy2 = "<html>\n" + " <p>test</p>\n" + "</html>";<|fim▁hole|> String xmlstr_nodummy3 = "<body>\n" + " <p>test</p>\n" + "</body>"; String xmlstr_nodummy4 = "<p>test</p>"; 
String xmlstr_nodummy5 = "<test1>\n" + " <test2>test</test2>\n" + "</test1>"; String xmlstr_dummy1 = "<p>test1</p><p>test2</p>"; String xmlstr_dummy2 = "<test1>test1</test1><test2>test2</test2>"; XMLNode rootnode; XMLNode child, gchild; XMLAttributes attr; XMLNamespaces ns; String toxmlstring; rootnode = XMLNode.convertStringToXMLNode(xmlstr_nodummy1,null); assertTrue( rootnode.getNumChildren() == 1 ); child = rootnode.getChild(0); gchild = child.getChild(0); assertTrue( !rootnode.getName().equals( "notes") == false ); assertTrue( !child.getName().equals("p" ) == false ); assertTrue( !gchild.getCharacters().equals("test" ) == false ); toxmlstring = rootnode.toXMLString(); assertTrue( !toxmlstring.equals(xmlstr_nodummy1) == false ); rootnode = null; rootnode = XMLNode.convertStringToXMLNode(xmlstr_nodummy2,null); assertTrue( rootnode.getNumChildren() == 1 ); child = rootnode.getChild(0); gchild = child.getChild(0); assertTrue( !rootnode.getName().equals( "html") == false ); assertTrue( !child.getName().equals("p" ) == false ); assertTrue( !gchild.getCharacters().equals("test" ) == false ); toxmlstring = rootnode.toXMLString(); assertTrue( !toxmlstring.equals(xmlstr_nodummy2) == false ); rootnode = null; rootnode = XMLNode.convertStringToXMLNode(xmlstr_nodummy3,null); assertTrue( rootnode.getNumChildren() == 1 ); child = rootnode.getChild(0); gchild = child.getChild(0); assertTrue( !rootnode.getName().equals( "body") == false ); assertTrue( !child.getName().equals("p" ) == false ); assertTrue( !gchild.getCharacters().equals("test" ) == false ); toxmlstring = rootnode.toXMLString(); assertTrue( !toxmlstring.equals(xmlstr_nodummy3) == false ); rootnode = null; rootnode = XMLNode.convertStringToXMLNode(xmlstr_nodummy4,null); assertTrue( rootnode.getNumChildren() == 1 ); child = rootnode.getChild(0); assertTrue( !rootnode.getName().equals( "p") == false ); assertTrue( !child.getCharacters().equals("test" ) == false ); toxmlstring = rootnode.toXMLString(); assertTrue( 
!toxmlstring.equals(xmlstr_nodummy4) == false ); rootnode = null; rootnode = XMLNode.convertStringToXMLNode(xmlstr_nodummy5,null); assertTrue( rootnode.getNumChildren() == 1 ); child = rootnode.getChild(0); gchild = child.getChild(0); assertTrue( !rootnode.getName().equals( "test1") == false ); assertTrue( !child.getName().equals("test2" ) == false ); assertTrue( !gchild.getCharacters().equals("test" ) == false ); toxmlstring = rootnode.toXMLString(); assertTrue( !toxmlstring.equals(xmlstr_nodummy5) == false ); rootnode = null; rootnode = XMLNode.convertStringToXMLNode(xmlstr_dummy1,null); assertTrue( rootnode.isEOF() == true ); assertTrue( rootnode.getNumChildren() == 2 ); child = rootnode.getChild(0); gchild = child.getChild(0); assertTrue( !child.getName().equals( "p") == false ); assertTrue( !gchild.getCharacters().equals("test1" ) == false ); child = rootnode.getChild(1); gchild = child.getChild(0); assertTrue( !child.getName().equals( "p") == false ); assertTrue( !gchild.getCharacters().equals("test2" ) == false ); toxmlstring = rootnode.toXMLString(); assertTrue( !toxmlstring.equals(xmlstr_dummy1) == false ); rootnode = null; rootnode = XMLNode.convertStringToXMLNode(xmlstr_dummy2,null); assertTrue( rootnode.isEOF() == true ); assertTrue( rootnode.getNumChildren() == 2 ); child = rootnode.getChild(0); gchild = child.getChild(0); assertTrue( !child.getName().equals( "test1") == false ); assertTrue( !gchild.getCharacters().equals("test1" ) == false ); child = rootnode.getChild(1); gchild = child.getChild(0); assertTrue( !child.getName().equals( "test2") == false ); assertTrue( !gchild.getCharacters().equals("test2" ) == false ); toxmlstring = rootnode.toXMLString(); assertTrue( !toxmlstring.equals(xmlstr_dummy2) == false ); rootnode = null; } public void test_XMLNode_create() { XMLNode node = new XMLNode(); assertTrue( node != null ); assertTrue( node.getNumChildren() == 0 ); node = null; node = new XMLNode(); assertTrue( node != null ); XMLNode node2 = new 
XMLNode(); assertTrue( node2 != null ); node.addChild(node2); assertTrue( node.getNumChildren() == 1 ); XMLNode node3 = new XMLNode(); assertTrue( node3 != null ); node.addChild(node3); assertTrue( node.getNumChildren() == 2 ); node = null; node2 = null; node3 = null; } public void test_XMLNode_createElement() { XMLTriple triple; XMLAttributes attr; XMLNamespaces ns; XMLNode snode, enode, tnode; XMLAttributes cattr; String name = "test"; String uri = "http://test.org/"; String prefix = "p"; String text = "text node"; triple = new XMLTriple(name,uri,prefix); ns = new XMLNamespaces(); attr = new XMLAttributes(); ns.add(uri,prefix); attr.add("id", "value",uri,prefix); snode = new XMLNode(triple,attr,ns); assertTrue( snode != null ); assertTrue( snode.getNumChildren() == 0 ); assertTrue( !snode.getName().equals(name) == false ); assertTrue( !snode.getPrefix().equals(prefix) == false ); assertTrue( !snode.getURI().equals(uri) == false ); assertTrue( snode.isElement() == true ); assertTrue( snode.isStart() == true ); assertTrue( snode.isEnd() == false ); assertTrue( snode.isText() == false ); snode.setEnd(); assertTrue( snode.isEnd() == true ); snode.unsetEnd(); assertTrue( snode.isEnd() == false ); cattr = snode.getAttributes(); assertTrue( cattr != null ); assertTrue( !cattr.getName(0).equals( "id" ) == false ); assertTrue( !cattr.getValue(0).equals( "value") == false ); assertTrue( !cattr.getPrefix(0).equals(prefix) == false ); assertTrue( !cattr.getURI(0).equals(uri) == false ); triple = null; attr = null; ns = null; snode = null; attr = new XMLAttributes(); attr.add("id", "value"); triple = new XMLTriple(name, "", ""); snode = new XMLNode(triple,attr); assertTrue( snode != null ); assertTrue( snode.getNumChildren() == 0 ); assertTrue( !snode.getName().equals( "test") == false ); assertTrue( snode.getPrefix().equals("") == true ); assertTrue( snode.getURI().equals("") == true ); assertTrue( snode.isElement() == true ); assertTrue( snode.isStart() == true ); 
assertTrue( snode.isEnd() == false ); assertTrue( snode.isText() == false ); cattr = snode.getAttributes(); assertTrue( cattr != null ); assertTrue( !cattr.getName(0).equals( "id" ) == false ); assertTrue( !cattr.getValue(0).equals( "value") == false ); assertTrue( cattr.getPrefix(0).equals("") == true ); assertTrue( cattr.getURI(0).equals("") == true ); enode = new XMLNode(triple); assertTrue( enode != null ); assertTrue( enode.getNumChildren() == 0 ); assertTrue( !enode.getName().equals( "test") == false ); assertTrue( enode.getPrefix().equals("") == true ); assertTrue( enode.getURI().equals("") == true ); assertTrue( enode.isElement() == true ); assertTrue( enode.isStart() == false ); assertTrue( enode.isEnd() == true ); assertTrue( enode.isText() == false ); tnode = new XMLNode(text); assertTrue( tnode != null ); assertTrue( !tnode.getCharacters().equals(text) == false ); assertTrue( tnode.getNumChildren() == 0 ); assertTrue( tnode.getName().equals("") == true ); assertTrue( tnode.getPrefix().equals("") == true ); assertTrue( tnode.getURI().equals("") == true ); assertTrue( tnode.isElement() == false ); assertTrue( tnode.isStart() == false ); assertTrue( tnode.isEnd() == false ); assertTrue( tnode.isText() == true ); triple = null; attr = null; snode = null; enode = null; tnode = null; } public void test_XMLNode_createFromToken() { XMLToken token; XMLTriple triple; XMLNode node; triple = new XMLTriple("attr", "uri", "prefix"); token = new XMLToken(triple); node = new XMLNode(token); assertTrue( node != null ); assertTrue( node.getNumChildren() == 0 ); assertTrue( !node.getName().equals( "attr") == false ); assertTrue( !node.getPrefix().equals( "prefix") == false ); assertTrue( !node.getURI().equals( "uri") == false ); assertTrue( node.getChild(1) != null ); token = null; triple = null; node = null; } public void test_XMLNode_getters() { XMLToken token; XMLNode node; XMLTriple triple; XMLAttributes attr; XMLNamespaces NS; NS = new XMLNamespaces(); NS.add( 
"http://test1.org/", "test1"); token = new XMLToken("This is a test"); node = new XMLNode(token); assertTrue( node != null ); assertTrue( node.getNumChildren() == 0 ); assertTrue( !node.getCharacters().equals( "This is a test") == false ); assertTrue( node.getChild(1) != null ); attr = new XMLAttributes(); assertTrue( attr != null ); attr.add( "attr2", "value"); triple = new XMLTriple("attr", "uri", "prefix"); token = new XMLToken(triple,attr); assertTrue( token != null ); node = new XMLNode(token); assertTrue( !node.getName().equals( "attr") == false ); assertTrue( !node.getURI().equals( "uri") == false ); assertTrue( !node.getPrefix().equals( "prefix") == false ); XMLAttributes returnattr = node.getAttributes(); assertTrue( !returnattr.getName(0).equals( "attr2") == false ); assertTrue( !returnattr.getValue(0).equals( "value") == false ); token = new XMLToken(triple,attr,NS); node = new XMLNode(token); XMLNamespaces returnNS = node.getNamespaces(); assertTrue( returnNS.getLength() == 1 ); assertTrue( returnNS.isEmpty() == false ); triple = null; token = null; node = null; } public void test_XMLNode_insert() { XMLAttributes attr = new XMLAttributes(); XMLTriple trp_p = new XMLTriple("parent","",""); XMLTriple trp_c1 = new XMLTriple("child1","",""); XMLTriple trp_c2 = new XMLTriple("child2","",""); XMLTriple trp_c3 = new XMLTriple("child3","",""); XMLTriple trp_c4 = new XMLTriple("child4","",""); XMLTriple trp_c5 = new XMLTriple("child5","",""); XMLNode p = new XMLNode(trp_p,attr); XMLNode c1 = new XMLNode(trp_c1,attr); XMLNode c2 = new XMLNode(trp_c2,attr); XMLNode c3 = new XMLNode(trp_c3,attr); XMLNode c4 = new XMLNode(trp_c4,attr); XMLNode c5 = new XMLNode(trp_c5,attr); p.addChild(c2); p.addChild(c4); p.insertChild(0,c1); p.insertChild(2,c3); p.insertChild(4,c5); assertTrue( p.getNumChildren() == 5 ); assertTrue( !p.getChild(0).getName().equals( "child1") == false ); assertTrue( !p.getChild(1).getName().equals( "child2") == false ); assertTrue( 
!p.getChild(2).getName().equals( "child3") == false ); assertTrue( !p.getChild(3).getName().equals( "child4") == false ); assertTrue( !p.getChild(4).getName().equals( "child5") == false ); p.removeChildren(); p.insertChild(0,c1); p.insertChild(0,c2); p.insertChild(0,c3); p.insertChild(0,c4); p.insertChild(0,c5); assertTrue( p.getNumChildren() == 5 ); assertTrue( !p.getChild(0).getName().equals( "child5") == false ); assertTrue( !p.getChild(1).getName().equals( "child4") == false ); assertTrue( !p.getChild(2).getName().equals( "child3") == false ); assertTrue( !p.getChild(3).getName().equals( "child2") == false ); assertTrue( !p.getChild(4).getName().equals( "child1") == false ); p.removeChildren(); p.insertChild(1,c1); p.insertChild(2,c2); p.insertChild(3,c3); p.insertChild(4,c4); p.insertChild(5,c5); assertTrue( p.getNumChildren() == 5 ); assertTrue( !p.getChild(0).getName().equals( "child1") == false ); assertTrue( !p.getChild(1).getName().equals( "child2") == false ); assertTrue( !p.getChild(2).getName().equals( "child3") == false ); assertTrue( !p.getChild(3).getName().equals( "child4") == false ); assertTrue( !p.getChild(4).getName().equals( "child5") == false ); p.removeChildren(); XMLNode tmp; tmp = p.insertChild(0,c1); assertTrue( !tmp.getName().equals("child1") == false ); tmp = p.insertChild(0,c2); assertTrue( !tmp.getName().equals("child2") == false ); tmp = p.insertChild(0,c3); assertTrue( !tmp.getName().equals("child3") == false ); tmp = p.insertChild(0,c4); assertTrue( !tmp.getName().equals("child4") == false ); tmp = p.insertChild(0,c5); assertTrue( !tmp.getName().equals("child5") == false ); p.removeChildren(); tmp = p.insertChild(1,c1); assertTrue( !tmp.getName().equals("child1") == false ); tmp = p.insertChild(2,c2); assertTrue( !tmp.getName().equals("child2") == false ); tmp = p.insertChild(3,c3); assertTrue( !tmp.getName().equals("child3") == false ); tmp = p.insertChild(4,c4); assertTrue( !tmp.getName().equals("child4") == false ); tmp = 
p.insertChild(5,c5); assertTrue( !tmp.getName().equals("child5") == false ); p = null; c1 = null; c2 = null; c3 = null; c4 = null; c5 = null; attr = null; trp_p = null; trp_c1 = null; trp_c2 = null; trp_c3 = null; trp_c4 = null; trp_c5 = null; } public void test_XMLNode_namespace_add() { XMLTriple triple = new XMLTriple("test","",""); XMLAttributes attr = new XMLAttributes(); XMLNode node = new XMLNode(triple,attr); assertTrue( node.getNamespacesLength() == 0 ); assertTrue( node.isNamespacesEmpty() == true ); node.addNamespace( "http://test1.org/", "test1"); assertTrue( node.getNamespacesLength() == 1 ); assertTrue( node.isNamespacesEmpty() == false ); node.addNamespace( "http://test2.org/", "test2"); assertTrue( node.getNamespacesLength() == 2 ); assertTrue( node.isNamespacesEmpty() == false ); node.addNamespace( "http://test1.org/", "test1a"); assertTrue( node.getNamespacesLength() == 3 ); assertTrue( node.isNamespacesEmpty() == false ); node.addNamespace( "http://test1.org/", "test1a"); assertTrue( node.getNamespacesLength() == 3 ); assertTrue( node.isNamespacesEmpty() == false ); assertTrue( ! 
(node.getNamespaceIndex( "http://test1.org/") == -1) ); node = null; triple = null; attr = null; } public void test_XMLNode_namespace_get() { XMLTriple triple = new XMLTriple("test","",""); XMLAttributes attr = new XMLAttributes(); XMLNode node = new XMLNode(triple,attr); node.addNamespace( "http://test1.org/", "test1"); node.addNamespace( "http://test2.org/", "test2"); node.addNamespace( "http://test3.org/", "test3"); node.addNamespace( "http://test4.org/", "test4"); node.addNamespace( "http://test5.org/", "test5"); node.addNamespace( "http://test6.org/", "test6"); node.addNamespace( "http://test7.org/", "test7"); node.addNamespace( "http://test8.org/", "test8"); node.addNamespace( "http://test9.org/", "test9"); assertTrue( node.getNamespacesLength() == 9 ); assertTrue( node.getNamespaceIndex( "http://test1.org/") == 0 ); assertTrue( !node.getNamespacePrefix(1).equals( "test2") == false ); assertTrue( !node.getNamespacePrefix( "http://test1.org/").equals( "test1") == false ); assertTrue( !node.getNamespaceURI(1).equals( "http://test2.org/") == false ); assertTrue( !node.getNamespaceURI( "test2").equals( "http://test2.org/") == false ); assertTrue( node.getNamespaceIndex( "http://test1.org/") == 0 ); assertTrue( node.getNamespaceIndex( "http://test2.org/") == 1 ); assertTrue( node.getNamespaceIndex( "http://test5.org/") == 4 ); assertTrue( node.getNamespaceIndex( "http://test9.org/") == 8 ); assertTrue( node.getNamespaceIndex( "http://testX.org/") == -1 ); assertTrue( node.hasNamespaceURI( "http://test1.org/") != false ); assertTrue( node.hasNamespaceURI( "http://test2.org/") != false ); assertTrue( node.hasNamespaceURI( "http://test5.org/") != false ); assertTrue( node.hasNamespaceURI( "http://test9.org/") != false ); assertTrue( node.hasNamespaceURI( "http://testX.org/") == false ); assertTrue( node.getNamespaceIndexByPrefix( "test1") == 0 ); assertTrue( node.getNamespaceIndexByPrefix( "test5") == 4 ); assertTrue( node.getNamespaceIndexByPrefix( "test9") == 8 ); 
assertTrue( node.getNamespaceIndexByPrefix( "testX") == -1 ); assertTrue( node.hasNamespacePrefix( "test1") != false ); assertTrue( node.hasNamespacePrefix( "test5") != false ); assertTrue( node.hasNamespacePrefix( "test9") != false ); assertTrue( node.hasNamespacePrefix( "testX") == false ); assertTrue( node.hasNamespaceNS( "http://test1.org/", "test1") != false ); assertTrue( node.hasNamespaceNS( "http://test5.org/", "test5") != false ); assertTrue( node.hasNamespaceNS( "http://test9.org/", "test9") != false ); assertTrue( node.hasNamespaceNS( "http://testX.org/", "testX") == false ); node = null; triple = null; attr = null; } public void test_XMLNode_namespace_remove() { XMLTriple triple = new XMLTriple("test","",""); XMLAttributes attr = new XMLAttributes(); XMLNode node = new XMLNode(triple,attr); node.addNamespace( "http://test1.org/", "test1"); node.addNamespace( "http://test2.org/", "test2"); node.addNamespace( "http://test3.org/", "test3"); node.addNamespace( "http://test4.org/", "test4"); node.addNamespace( "http://test5.org/", "test5"); assertTrue( node.getNamespacesLength() == 5 ); node.removeNamespace(4); assertTrue( node.getNamespacesLength() == 4 ); node.removeNamespace(3); assertTrue( node.getNamespacesLength() == 3 ); node.removeNamespace(2); assertTrue( node.getNamespacesLength() == 2 ); node.removeNamespace(1); assertTrue( node.getNamespacesLength() == 1 ); node.removeNamespace(0); assertTrue( node.getNamespacesLength() == 0 ); node.addNamespace( "http://test1.org/", "test1"); node.addNamespace( "http://test2.org/", "test2"); node.addNamespace( "http://test3.org/", "test3"); node.addNamespace( "http://test4.org/", "test4"); node.addNamespace( "http://test5.org/", "test5"); assertTrue( node.getNamespacesLength() == 5 ); node.removeNamespace(0); assertTrue( node.getNamespacesLength() == 4 ); node.removeNamespace(0); assertTrue( node.getNamespacesLength() == 3 ); node.removeNamespace(0); assertTrue( node.getNamespacesLength() == 2 ); 
node.removeNamespace(0); assertTrue( node.getNamespacesLength() == 1 ); node.removeNamespace(0); assertTrue( node.getNamespacesLength() == 0 ); node = null; triple = null; attr = null; } public void test_XMLNode_namespace_remove_by_prefix() { XMLTriple triple = new XMLTriple("test","",""); XMLAttributes attr = new XMLAttributes(); XMLNode node = new XMLNode(triple,attr); node.addNamespace( "http://test1.org/", "test1"); node.addNamespace( "http://test2.org/", "test2"); node.addNamespace( "http://test3.org/", "test3"); node.addNamespace( "http://test4.org/", "test4"); node.addNamespace( "http://test5.org/", "test5"); assertTrue( node.getNamespacesLength() == 5 ); node.removeNamespace( "test1"); assertTrue( node.getNamespacesLength() == 4 ); node.removeNamespace( "test2"); assertTrue( node.getNamespacesLength() == 3 ); node.removeNamespace( "test3"); assertTrue( node.getNamespacesLength() == 2 ); node.removeNamespace( "test4"); assertTrue( node.getNamespacesLength() == 1 ); node.removeNamespace( "test5"); assertTrue( node.getNamespacesLength() == 0 ); node.addNamespace( "http://test1.org/", "test1"); node.addNamespace( "http://test2.org/", "test2"); node.addNamespace( "http://test3.org/", "test3"); node.addNamespace( "http://test4.org/", "test4"); node.addNamespace( "http://test5.org/", "test5"); assertTrue( node.getNamespacesLength() == 5 ); node.removeNamespace( "test5"); assertTrue( node.getNamespacesLength() == 4 ); node.removeNamespace( "test4"); assertTrue( node.getNamespacesLength() == 3 ); node.removeNamespace( "test3"); assertTrue( node.getNamespacesLength() == 2 ); node.removeNamespace( "test2"); assertTrue( node.getNamespacesLength() == 1 ); node.removeNamespace( "test1"); assertTrue( node.getNamespacesLength() == 0 ); node.addNamespace( "http://test1.org/", "test1"); node.addNamespace( "http://test2.org/", "test2"); node.addNamespace( "http://test3.org/", "test3"); node.addNamespace( "http://test4.org/", "test4"); node.addNamespace( "http://test5.org/", 
"test5"); assertTrue( node.getNamespacesLength() == 5 ); node.removeNamespace( "test3"); assertTrue( node.getNamespacesLength() == 4 ); node.removeNamespace( "test1"); assertTrue( node.getNamespacesLength() == 3 ); node.removeNamespace( "test4"); assertTrue( node.getNamespacesLength() == 2 ); node.removeNamespace( "test5"); assertTrue( node.getNamespacesLength() == 1 ); node.removeNamespace( "test2"); assertTrue( node.getNamespacesLength() == 0 ); node = null; triple = null; attr = null; } public void test_XMLNode_namespace_set_clear() { XMLTriple triple = new XMLTriple("test","",""); XMLAttributes attr = new XMLAttributes(); XMLNode node = new XMLNode(triple,attr); XMLNamespaces ns = new XMLNamespaces(); assertTrue( node.getNamespacesLength() == 0 ); assertTrue( node.isNamespacesEmpty() == true ); ns.add( "http://test1.org/", "test1"); ns.add( "http://test2.org/", "test2"); ns.add( "http://test3.org/", "test3"); ns.add( "http://test4.org/", "test4"); ns.add( "http://test5.org/", "test5"); node.setNamespaces(ns); assertTrue( node.getNamespacesLength() == 5 ); assertTrue( node.isNamespacesEmpty() == false ); assertTrue( !node.getNamespacePrefix(0).equals( "test1") == false ); assertTrue( !node.getNamespacePrefix(1).equals( "test2") == false ); assertTrue( !node.getNamespacePrefix(2).equals( "test3") == false ); assertTrue( !node.getNamespacePrefix(3).equals( "test4") == false ); assertTrue( !node.getNamespacePrefix(4).equals( "test5") == false ); assertTrue( !node.getNamespaceURI(0).equals( "http://test1.org/") == false ); assertTrue( !node.getNamespaceURI(1).equals( "http://test2.org/") == false ); assertTrue( !node.getNamespaceURI(2).equals( "http://test3.org/") == false ); assertTrue( !node.getNamespaceURI(3).equals( "http://test4.org/") == false ); assertTrue( !node.getNamespaceURI(4).equals( "http://test5.org/") == false ); node.clearNamespaces(); assertTrue( node.getNamespacesLength() == 0 ); assertTrue( node.isAttributesEmpty() != false ); ns = null; node = 
null; triple = null; attr = null; } public void test_XMLNode_remove() { XMLAttributes attr = new XMLAttributes(); XMLTriple trp_p = new XMLTriple("parent","",""); XMLTriple trp_c1 = new XMLTriple("child1","",""); XMLTriple trp_c2 = new XMLTriple("child2","",""); XMLTriple trp_c3 = new XMLTriple("child3","",""); XMLTriple trp_c4 = new XMLTriple("child4","",""); XMLTriple trp_c5 = new XMLTriple("child5","",""); XMLNode p = new XMLNode(trp_p,attr); XMLNode c1 = new XMLNode(trp_c1,attr); XMLNode c2 = new XMLNode(trp_c2,attr); XMLNode c3 = new XMLNode(trp_c3,attr); XMLNode c4 = new XMLNode(trp_c4,attr); XMLNode c5 = new XMLNode(trp_c5,attr); XMLNode r; p.addChild(c1); p.addChild(c2); p.addChild(c3); p.addChild(c4); p.addChild(c5); r = p.removeChild(5); assertTrue( r == null ); r = p.removeChild(1); assertTrue( p.getNumChildren() == 4 ); assertTrue( !r.getName().equals("child2") == false ); r = null; r = p.removeChild(3); assertTrue( p.getNumChildren() == 3 ); assertTrue( !r.getName().equals("child5") == false ); r = null; r = p.removeChild(0); assertTrue( p.getNumChildren() == 2 ); assertTrue( !r.getName().equals("child1") == false ); r = null; r = p.removeChild(1); assertTrue( p.getNumChildren() == 1 ); assertTrue( !r.getName().equals("child4") == false ); r = null; r = p.removeChild(0); assertTrue( p.getNumChildren() == 0 ); assertTrue( !r.getName().equals("child3") == false ); r = null; p.addChild(c1); p.addChild(c2); p.addChild(c3); p.addChild(c4); p.addChild(c5); r = p.removeChild(4); assertTrue( p.getNumChildren() == 4 ); assertTrue( !r.getName().equals("child5") == false ); r = null; r = p.removeChild(3); assertTrue( p.getNumChildren() == 3 ); assertTrue( !r.getName().equals("child4") == false ); r = null; r = p.removeChild(2); assertTrue( p.getNumChildren() == 2 ); assertTrue( !r.getName().equals("child3") == false ); r = null; r = p.removeChild(1); assertTrue( p.getNumChildren() == 1 ); assertTrue( !r.getName().equals("child2") == false ); r = null; r = 
p.removeChild(0); assertTrue( p.getNumChildren() == 0 ); assertTrue( !r.getName().equals("child1") == false ); r = null; p.addChild(c1); p.addChild(c2); p.addChild(c3); p.addChild(c4); p.addChild(c5); r = p.removeChild(0); assertTrue( p.getNumChildren() == 4 ); assertTrue( !r.getName().equals("child1") == false ); r = null; r = p.removeChild(0); assertTrue( p.getNumChildren() == 3 ); assertTrue( !r.getName().equals("child2") == false ); r = null; r = p.removeChild(0); assertTrue( p.getNumChildren() == 2 ); assertTrue( !r.getName().equals("child3") == false ); r = null; r = p.removeChild(0); assertTrue( p.getNumChildren() == 1 ); assertTrue( !r.getName().equals("child4") == false ); r = null; r = p.removeChild(0); assertTrue( p.getNumChildren() == 0 ); assertTrue( !r.getName().equals("child5") == false ); r = null; p.addChild(c1); p.addChild(c2); p.addChild(c3); p.addChild(c4); p.addChild(c5); r = p.removeChild(0); assertTrue( !r.getName().equals("child1") == false ); p.insertChild(0,r); assertTrue( p.getNumChildren() == 5 ); assertTrue( !p.getChild(0).getName().equals("child1") == false ); r = null; r = p.removeChild(1); assertTrue( !r.getName().equals("child2") == false ); p.insertChild(1,r); assertTrue( p.getNumChildren() == 5 ); assertTrue( !p.getChild(1).getName().equals("child2") == false ); r = null; r = p.removeChild(2); assertTrue( !r.getName().equals("child3") == false ); p.insertChild(2,r); assertTrue( p.getNumChildren() == 5 ); assertTrue( !p.getChild(2).getName().equals("child3") == false ); r = null; r = p.removeChild(3); assertTrue( !r.getName().equals("child4") == false ); p.insertChild(3,r); assertTrue( p.getNumChildren() == 5 ); assertTrue( !p.getChild(3).getName().equals("child4") == false ); r = null; r = p.removeChild(4); assertTrue( !r.getName().equals("child5") == false ); p.insertChild(4,r); assertTrue( p.getNumChildren() == 5 ); assertTrue( !p.getChild(4).getName().equals("child5") == false ); r = null; p = null; c1 = null; c2 = null; c3 = 
null; c4 = null; c5 = null; attr = null; trp_p = null; trp_c1 = null; trp_c2 = null; trp_c3 = null; trp_c4 = null; trp_c5 = null; } /** * Loads the SWIG-generated libSBML Java module when this class is * loaded, or reports a sensible diagnostic message about why it failed. */ static { String varname; String shlibname; if (System.getProperty("mrj.version") != null) { varname = "DYLD_LIBRARY_PATH"; // We're on a Mac. shlibname = "libsbmlj.jnilib and/or libsbml.dylib"; } else { varname = "LD_LIBRARY_PATH"; // We're not on a Mac. shlibname = "libsbmlj.so and/or libsbml.so"; } try { System.loadLibrary("sbmlj"); // For extra safety, check that the jar file is in the classpath. Class.forName("org.sbml.libsbml.libsbml"); } catch (SecurityException e) { e.printStackTrace(); System.err.println("Could not load the libSBML library files due to a"+ " security exception.\n"); System.exit(1); } catch (UnsatisfiedLinkError e) { e.printStackTrace(); System.err.println("Error: could not link with the libSBML library files."+ " It is likely\nyour " + varname + " environment variable does not include the directories\n"+ "containing the " + shlibname + " library files.\n"); System.exit(1); } catch (ClassNotFoundException e) { e.printStackTrace(); System.err.println("Error: unable to load the file libsbmlj.jar."+ " It is likely\nyour -classpath option and CLASSPATH" + " environment variable\n"+ "do not include the path to libsbmlj.jar.\n"); System.exit(1); } } }<|fim▁end|>
<|file_name|>cwEdgeTile.cpp<|end_file_name|><|fim▁begin|>/************************************************************************** ** ** Copyright (C) 2013 by Philip Schuchardt ** www.cavewhere.com ** **************************************************************************/ #include "cwEdgeTile.h" //Qt includes #include <QDebug> cwEdgeTile::cwEdgeTile() { } void cwEdgeTile::generate() { generateVertex(); generateIndexes(); } void cwEdgeTile::generateIndexes() { QVector<unsigned int> tempIndexes; //Create all the geometry for normal geometry int numVertices = numVerticesOnADimension(); for(int row = 0; row < numVertices - 1; row++) { for(int column = 1; column < numVertices - 1; column++) { //Triangle 1 tempIndexes.append(indexOf(column, row)); tempIndexes.append(indexOf(column + 1, row + 1)); tempIndexes.append(indexOf(column, row + 1)); //Triangle 2 tempIndexes.append(indexOf(column, row)); tempIndexes.append(indexOf(column + 1, row)); tempIndexes.append(indexOf(column + 1, row + 1)); } } unsigned int largestInt = indexOf(numVertices - 1, numVertices - 1) + 1; unsigned int halfIndex1 = largestInt; unsigned int halfIndex2 = halfIndex1 + 1; //Create the geometry for the first column for(int row = 0; row < tileSize(); row++) { unsigned int bottomLeft = indexOf(0, row); unsigned int bottomRight = indexOf(1, row); unsigned int topLeft = indexOf(0, row + 1); unsigned int topRight = indexOf(1, row + 1); //Triangle 1 tempIndexes.append(bottomLeft); tempIndexes.append(bottomRight); tempIndexes.append(halfIndex2); //Triangle 2 tempIndexes.append(bottomRight); tempIndexes.append(topRight); tempIndexes.append(halfIndex2); //Triangle 3 tempIndexes.append(topRight); tempIndexes.append(topLeft); tempIndexes.append(halfIndex2); //Triangle 4 tempIndexes.append(halfIndex1); tempIndexes.append(halfIndex2); tempIndexes.append(topLeft); //Triangle 5 tempIndexes.append(bottomLeft); tempIndexes.append(halfIndex2); tempIndexes.append(halfIndex1); halfIndex1 += 2; halfIndex2 += 2; } 
Indexes.clear(); Indexes.reserve(tempIndexes.size()); Indexes.resize(tempIndexes.size()); //Can't get optimize faces working on windows Indexes = tempIndexes; // Forsyth::OptimizeFaces(tempIndexes.data(), tempIndexes.size(), // halfIndex2 - 1, // Indexes.data(), // 24); } void cwEdgeTile::generateVertex() { Vertices.clear(); //vertex spacing double spacing = 1.0 / (double)tileSize(); int numVertexes = numVerticesOnADimension(); int totalSize = numVertexes * numVertexes; Vertices.reserve(totalSize); //Create the regualar mesh points for(int y = 0; y < numVertexes; y++) { for(int x = 0; x < numVertexes; x++) { float xPos = x * spacing; float yPos = y * spacing; QVector2D vertex(xPos, yPos); Vertices.append(vertex); } } //Add the points for the half spacing double halfSpacing = spacing / 2.0; for(int row = 0; row < tileSize(); row++) {<|fim▁hole|> Vertices.append(QVector2D(halfSpacing, yPos)); } }<|fim▁end|>
float yPos = row * spacing + halfSpacing; Vertices.append(QVector2D(0.0, yPos));
<|file_name|>test_draft_registration_institutions_list.py<|end_file_name|><|fim▁begin|>import pytest from api.base.settings.defaults import API_BASE from api_tests.nodes.views.test_node_institutions_list import TestNodeInstitutionList from osf_tests.factories import DraftRegistrationFactory, AuthUserFactory @pytest.fixture() def user(): return AuthUserFactory() @pytest.fixture() def user_two(): return AuthUserFactory() @pytest.mark.django_db class TestDraftRegistrationInstitutionList(TestNodeInstitutionList): @pytest.fixture() def node_one(self, institution, user): # Overrides TestNodeInstitutionList draft = DraftRegistrationFactory(initiator=user) draft.affiliated_institutions.add(institution) draft.save() return draft @pytest.fixture() def node_two(self, user): # Overrides TestNodeInstitutionList return DraftRegistrationFactory(initiator=user) <|fim▁hole|> return '/{}draft_registrations/{}/institutions/'.format(API_BASE, node_one._id) @pytest.fixture() def node_two_url(self, node_two): # Overrides TestNodeInstitutionList return '/{}draft_registrations/{}/institutions/'.format(API_BASE, node_two._id) # Overrides TestNodeInstitutionList def test_node_institution_detail( self, app, user, user_two, institution, node_one, node_two, node_one_url, node_two_url, ): # test_return_institution_unauthenticated res = app.get(node_one_url, expect_errors=True) assert res.status_code == 401 # test_return institution_contrib res = app.get(node_one_url, auth=user.auth) assert res.status_code == 200 assert res.json['data'][0]['attributes']['name'] == institution.name assert res.json['data'][0]['id'] == institution._id # test_return_no_institution res = app.get( node_two_url, auth=user.auth, ) assert res.status_code == 200 assert len(res.json['data']) == 0 # test non contrib res = app.get( node_one_url, auth=user_two.auth, expect_errors=True ) assert res.status_code == 403<|fim▁end|>
@pytest.fixture() def node_one_url(self, node_one): # Overrides TestNodeInstitutionList
<|file_name|>learn_svd.py<|end_file_name|><|fim▁begin|>import os,sys,cv2,pdb from sklearn.decomposition import TruncatedSVD from pylab import * def get_feature(imgpath): img = cv2.imread(imgpath,0) img = cv2.resize(img,(32,64)) img = np.float32(img) img = img / 255 img = np.reshape(img, (1,32*64)) return img def extract_sample_from_image(imgdir): feats = [] for rdir, pdir, names in os.walk(imgdir+'pos'): for name in names: sname,ext = os.path.splitext(name) if 0 == cmp(ext, '.jpg'): fname = os.path.join(rdir, name) feats.append(get_feature(fname)) for rdir, pdir, names in os.walk(imgdir+'neg'): for name in names: sname,ext = os.path.splitext(name) if 0 == cmp(ext, '.jpg'): fname = os.path.join(rdir, name) feats.append(get_feature(fname)) sample_num = len(feats) sample_size = feats[0].shape[1] samples = np.zeros((sample_num, sample_size)) for k in range(sample_num): samples[k,:] = feats[k] print 'samples ', samples.shape[0], samples.shape[1] return samples def run_svd(samples): svd = TruncatedSVD(2) svd.fit(samples) newsamples = svd.transform(samples) return (svd, newsamples) def show_svd(transformed): sample_num = transformed.shape[0] for k in range(sample_num): if k*2<sample_num:<|fim▁hole|> else: mark = 'bo' x,y = (transformed[k,0], transformed[k,1]) plot(x,y,mark) show() if __name__=="__main__": samples = extract_sample_from_image('img/') svd, transformed = run_svd(samples) show_svd(transformed)<|fim▁end|>
mark = 'rx'
<|file_name|>strings.js<|end_file_name|><|fim▁begin|>/////////////////////////////////////////////////////////////////////////// // Copyright © Esri. All Rights Reserved. // // Licensed under the Apache License Version 2.0 (the 'License'); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an 'AS IS' BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /////////////////////////////////////////////////////////////////////////// define({ "configText": "Ustaw tekst konfiguracyjny:", "generalSettings": { "tabTitle": "Ustawienia ogólne", "measurementUnitLabel": "Jednostka miary", "currencyLabel": "Symbol miary", "roundCostLabel": "Zaokrąglaj koszt", "projectOutputSettings": "Ustawienia wynikowe projektu", "typeOfProjectAreaLabel": "Typ obszaru projektu", "bufferDistanceLabel": "Odległość buforowania", "roundCostValues": { "twoDecimalPoint": "Dwa miejsca po przecinku", "nearestWholeNumber": "Najbliższa liczba całkowita", "nearestTen": "Najbliższa dziesiątka", "nearestHundred": "Najbliższa setka", "nearestThousand": "Najbliższe tysiące", "nearestTenThousands": "Najbliższe dziesiątki tysięcy" }, "projectAreaType": { "outline": "Obrys", "buffer": "Bufor" }, "errorMessages": { "currency": "Nieprawidłowa jednostka waluty", "bufferDistance": "Nieprawidłowa odległość buforowania", "outOfRangebufferDistance": "Wartość powinna być większa niż 0 i mniejsza niż lub równa 100" } }, "projectSettings": { "tabTitle": "Ustawienia projektu", "costingGeometrySectionTitle": "Zdefiniuj obszar geograficzny na potrzeby kalkulacji kosztów (opcjonalnie)", "costingGeometrySectionNote": "Uwaga: skonfigurowanie tej warstwy umożliwi 
użytkownikowi konfigurowanie równań kosztów szablonów obiektów na podstawie obszarów geograficznych.", "projectTableSectionTitle": "Możliwość zapisania/wczytania ustawień projektu (opcjonalnie)", "projectTableSectionNote": "Uwaga: skonfigurowanie wszystkich tabel i warstw umożliwi użytkownikowi zapisanie/wczytanie projektu w celu ponownego wykorzystania.", "costingGeometryLayerLabel": "Warstwa geometrii kalkulacji kosztów", "fieldLabelGeography": "Pole do oznaczenia etykietą obszaru geograficznego", "projectAssetsTableLabel": "Tabela zasobów projektu", "projectMultiplierTableLabel": "Tabela kosztów dodatkowych mnożnika projektu", "projectLayerLabel": "Warstwa projektu", "configureFieldsLabel": "Skonfiguruj pola", "fieldDescriptionHeaderTitle": "Opis pola", "layerFieldsHeaderTitle": "Pole warstwy", "selectLabel": "Zaznacz", "errorMessages": { "duplicateLayerSelection": "Warstwa ${layerName} jest już wybrana", "invalidConfiguration": "Należy wybrać wartość ${fieldsString}" }, "costingGeometryHelp": "<p>Zostaną wyświetlone warstwy poligonowe z następującymi warunkami: <br/> <li>\tWarstwa musi mieć możliwość wykonywania zapytania</li><li>\tWarstwa musi zawierać pole GlobalID</li></p>", "fieldToLabelGeographyHelp": "<p>Pola znakowe i liczbowe wybranej warstwy geometrii kalkulacji kosztów zostaną wyświetlone w menu rozwijanym Pole do oznaczenia etykietą obszaru geograficznego.</p>", "projectAssetsTableHelp": "<p>Zostaną wyświetlone tabele z następującymi warunkami: <br/> <li>Tabela musi mieć możliwości edycji, czyli tworzenia, usuwania i aktualizacji</li> <li>Tabela musi zawierać sześć pól o dokładnie takich nazwach i typach danych:</li><ul><li>\tAssetGUID (pole typu GUID)</li><li>\tCostEquation (pole typu String)</li><li>\tScenario (pole typu String)</li><li>\tTemplateName (pole typu String)</li><li> GeographyGUID (pole typu GUID)</li><li>\tProjectGUID (pole typu GUID)</li></ul> </p>", "projectMultiplierTableHelp": "<p>Zostaną wyświetlone tabele z następującymi 
warunkami: <br/> <li>Tabela musi mieć możliwości edycji, czyli tworzenia, usuwania i aktualizacji</li> <li>Tabela musi zawierać pięć pól o dokładnie takich nazwach i typach danych:</li><ul><li>\tDescription (pole typu String)</li><li>\tType (pole typu String)</li><li>\tValue (pole typu Float/Double)</li><li>\tCostindex (pole typu Integer)</li><li> \tProjectGUID (pole typu GUID))</li></ul> </p>", "projectLayerHelp": "<p>Zostaną wyświetlone warstwy poligonowe z następującymi warunkami: <br/> <li>Warstwa musi mieć możliwości edycji, czyli tworzenia, usuwania i aktualizacji</li> <li>Warstwa musi zawierać pięć pól o dokładnie takich nazwach i typach danych:</li><ul><li>ProjectName (pole typu String)</li><li>Description (pole typu String)</li><li>Totalassetcost (pole typu Float/Double)</li><li>Grossprojectcost (pole typu Float/Double)</li><li>GlobalID (pole typu GlobalID)</li></ul> </p>", "pointLayerCentroidLabel": "Centroid warstwy punktowej", "selectRelatedPointLayerDefaultOption": "Zaznacz", "pointLayerHintText": "<p>Zostaną wyświetlone warstwy punktowe z następującymi warunkami: <br/> <li>\tWarstwa musi mieć pole 'Projectid' (typ GUID)</li><li>\tWarstwa musi mieć możliwość edycji, a więc atrybut 'Tworzenie', 'Usuwanie' i 'Aktualizacja'</li></p>" }, "layerSettings": { "tabTitle": "Ustawienia warstwy", "layerNameHeaderTitle": "Nazwa warstwy", "layerNameHeaderTooltip": "Lista warstw na mapie", "EditableLayerHeaderTitle": "Edytowalne", "EditableLayerHeaderTooltip": "Dołącz warstwę i jej szablony w widżecie kalkulacji kosztów", "SelectableLayerHeaderTitle": "Podlegające selekcji", "SelectableLayerHeaderTooltip": "Geometria z obiektu może zostać użyta do wygenerowania nowego elementu kosztu", "fieldPickerHeaderTitle": "ID projektu (opcjonalnie)", "fieldPickerHeaderTooltip": "Pole opcjonalne (typu znakowego), w którym będzie przechowywany identyfikator projektu", "selectLabel": "Zaznacz", "noAssetLayersAvailable": "Nie znaleziono warstwy zasobów na wybranej mapie 
internetowej", "disableEditableCheckboxTooltip": "Ta warstwa nie ma możliwości edycji", "missingCapabilitiesMsg": "Dla tej warstwy brak następujących funkcji:", "missingGlobalIdMsg": "Ta warstwa nie ma pola GlobalId", "create": "Tworzenie", "update": "Aktualizuj", "delete": "Usuwanie", "attributeSettingHeaderTitle": "Ustawienia atrybutów", "addFieldLabelTitle": "Dodaj atrybuty", "layerAttributesHeaderTitle": "Atrybuty warstwy", "projectLayerAttributesHeaderTitle": "Atrybuty warstwy projektu",<|fim▁hole|> "costingInfo": { "tabTitle": "Informacje o kalkulacji kosztów", "proposedMainsLabel": "Proponowane elementy główne", "addCostingTemplateLabel": "Dodaj szablon kalkulacji kosztów", "manageScenariosTitle": "Zarządzaj scenariuszami", "featureTemplateTitle": "Szablon obiektu", "costEquationTitle": "Równanie kosztów", "geographyTitle": "Obszar geograficzny", "scenarioTitle": "Scenariusz", "actionTitle": "Działania", "scenarioNameLabel": "Nazwa scenariusza", "addBtnLabel": "Dodaj", "srNoLabel": "Nie.", "deleteLabel": "Usuwanie", "duplicateScenarioName": "Duplikuj nazwę scenariusza", "hintText": "<div>Wskazówka: użyj następujących słów kluczowych</div><ul><li><b>{TOTALCOUNT}</b>: używa łącznej liczby zasobów tego samego typu w obszarze geograficznym</li><li><b>{MEASURE}</b>: używa długości dla zasobu liniowego i pola powierzchni dla zasobu poligonowego</li><li><b>{TOTALMEASURE}</b>: używa łącznej długości dla zasobu liniowego i łącznego pola powierzchni dla zasobu poligonowego tego samego typu w obszarze geograficznym</li></ul> Możesz użyć funkcji, takich jak:<ul><li>Math.abs(-100)</li><li>Math.floor({TOTALMEASURE})</li></ul>Należy zmodyfikować równanie kosztów zgodnie z wymaganiami projektu.", "noneValue": "Brak", "requiredCostEquation": "Niepoprawne równanie kosztów dla warstwy ${layerName} : ${templateName}", "duplicateTemplateMessage": "Istnieje podwójny wpis szablonu dla warstwy ${layerName} : ${templateName}", "defaultEquationRequired": "Wymagane jest domyślne 
równanie dla warstwy ${layerName} : ${templateName}", "validCostEquationMessage": "Wprowadź prawidłowe równanie kosztów", "costEquationHelpText": "Edytuj równanie kosztów zgodnie z wymaganiami projektu", "scenarioHelpText": "Wybierz scenariusz zgodnie z wymaganiami projektu", "copyRowTitle": "Kopiuj wiersz", "noTemplateAvailable": "Dodaj co najmniej jeden szablon dla warstwy ${layerName}", "manageScenarioLabel": "Zarządzaj scenariuszem", "noLayerMessage": "Wprowadź co najmniej jedną warstwę w ${tabName}", "noEditableLayersAvailable": "Warstwy, które należy oznaczyć jako możliwe do edycji na karcie ustawień warstwy", "updateProjectCostCheckboxLabel": "Aktualizuj równania projektu", "updateProjectCostEquationHint": "Wskazówka: Umożliwia użytkownikowi aktualizowanie równań kosztów zasobów, które zostały już dodane do istniejących projektów, za pomocą nowych równań zdefiniowanych niżej na podstawie szablonu obiektu, geografii i scenariusza. Jeśli brak określonej kombinacji, zostanie przyjęty koszt domyślny, tzn. geografia i scenariusz ma wartość 'None' (brak). W przypadku usunięcia szablonu obiektu ustawiony zostanie koszt równy 0." 
}, "statisticsSettings": { "tabTitle": "Dodatkowe ustawienia", "addStatisticsLabel": "Dodaj statystykę", "fieldNameTitle": "Pole", "statisticsTitle": "Etykieta", "addNewStatisticsText": "Dodaj nową statystykę", "deleteStatisticsText": "Usuń statystykę", "moveStatisticsUpText": "Przesuń statystykę w górę", "moveStatisticsDownText": "Przesuń statystykę w dół", "selectDeselectAllTitle": "Zaznacz wszystkie" }, "projectCostSettings": { "addProjectCostLabel": "Dodaj koszt dodatkowy projektu", "additionalCostValueColumnHeader": "Wartość", "invalidProjectCostMessage": "Nieprawidłowa wartość kosztu projektu", "additionalCostLabelColumnHeader": "Etykieta", "additionalCostTypeColumnHeader": "Typ" }, "statisticsType": { "countLabel": "Liczba", "averageLabel": "Średnia", "maxLabel": "Maksimum", "minLabel": "Minimum", "summationLabel": "Zsumowanie", "areaLabel": "Pole powierzchni", "lengthLabel": "Długość" } });<|fim▁end|>
"attributeSettingsPopupTitle": "Ustawienia atrybutów warstwy" },
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ use egl::egl::EGLContext; use egl::egl::EGLDisplay; use egl::egl::EGLSurface; use egl::egl::MakeCurrent; use egl::egl::SwapBuffers; use libc::{dup2, pipe, read}; use log::info; use log::warn; use rust_webvr::api::MagicLeapVRService; use servo::euclid::Scale; use servo::keyboard_types::Key; use servo::servo_url::ServoUrl; use servo::webrender_api::units::{DeviceIntRect, DevicePixel, DevicePoint, LayoutPixel}; use simpleservo::{self, deinit, gl_glue, MouseButton, ServoGlue, SERVO}; use simpleservo::{ Coordinates, EventLoopWaker, HostTrait, InitOptions, InputMethodType, PromptResult, VRInitOptions, }; use smallvec::SmallVec; use std::cell::Cell; use std::ffi::CStr; use std::ffi::CString; use std::io::Write; use std::os::raw::c_char; use std::os::raw::c_int; use std::os::raw::c_void; use std::rc::Rc; use std::thread; use std::time::Duration; use std::time::Instant; use webxr::magicleap::MagicLeapDiscovery; #[repr(u32)] pub enum MLLogLevel { Fatal = 0, Error = 1, Warning = 2, Info = 3, Debug = 4, Verbose = 5, } #[repr(C)] #[allow(non_camel_case_types)] pub enum MLKeyType { kNone, kCharacter, kBackspace, kShift, kSpeechToText, kPageEmoji, kPageLowerLetters, kPageNumericSymbols, kCancel, kSubmit, kPrevious, kNext, kClear, kClose, kEnter, kCustom1, kCustom2, kCustom3, kCustom4, kCustom5, } #[repr(transparent)] #[derive(Clone, Copy)] pub struct MLLogger(Option<extern "C" fn(MLLogLevel, *const c_char)>); #[repr(transparent)] pub struct MLHistoryUpdate(Option<extern "C" fn(MLApp, bool, bool)>); #[repr(transparent)] pub struct MLURLUpdate(Option<extern "C" fn(MLApp, *const c_char)>); #[repr(transparent)] pub struct MLKeyboard(Option<extern "C" fn(MLApp, bool)>); #[repr(transparent)] #[derive(Clone, Copy)] pub struct MLApp(*mut 
c_void); const LOG_LEVEL: log::LevelFilter = log::LevelFilter::Info; fn call<F, T>(f: F) -> Result<T, &'static str> where F: FnOnce(&mut ServoGlue) -> Result<T, &'static str>, { SERVO.with(|s| match s.borrow_mut().as_mut() { Some(ref mut s) => (f)(s), None => Err("Servo is not available in this thread"), }) } #[no_mangle] pub unsafe extern "C" fn init_servo( ctxt: EGLContext, surf: EGLSurface, disp: EGLDisplay, landscape: bool, app: MLApp, logger: MLLogger, history_update: MLHistoryUpdate, url_update: MLURLUpdate, keyboard: MLKeyboard, url: *const c_char, default_args: *const c_char, width: u32, height: u32, hidpi: f32, ) -> *mut ServoInstance { redirect_stdout_to_log(logger); let _ = log::set_boxed_logger(Box::new(logger)); log::set_max_level(LOG_LEVEL); let gl = gl_glue::egl::init().expect("EGL initialization failure"); let coordinates = Coordinates::new( 0, 0, width as i32, height as i32, width as i32, height as i32, ); let mut url = CStr::from_ptr(url).to_str().unwrap_or("about:blank"); // If the URL has a space in it, then treat everything before the space as arguments let args = if let Some(i) = url.rfind(' ') { let (front, back) = url.split_at(i); url = back; front.split(' ').map(|s| s.to_owned()).collect() } else if !default_args.is_null() { CStr::from_ptr(default_args) .to_str() .unwrap_or("") .split(' ') .map(|s| s.to_owned()) .collect() } else { Vec::new() }; info!("got args: {:?}", args); let vr_init = if !landscape { let name = String::from("Magic Leap VR Display"); let (service, heartbeat) = MagicLeapVRService::new(name, ctxt, gl.gl_wrapper.clone()) .expect("Failed to create VR service"); let service = Box::new(service); let heartbeat = Box::new(heartbeat); VRInitOptions::VRService(service, heartbeat) } else { VRInitOptions::None }; let xr_discovery: Option<Box<dyn webxr_api::Discovery>> = if !landscape { let discovery = MagicLeapDiscovery::new(ctxt, gl.gl_wrapper.clone()); Some(Box::new(discovery)) } else { None }; let opts = InitOptions { args, url: 
Some(url.to_string()), density: hidpi, enable_subpixel_text_antialiasing: false, vr_init, xr_discovery, coordinates, gl_context_pointer: Some(ctxt),<|fim▁hole|> let wakeup = Box::new(EventLoopWakerInstance); let shut_down_complete = Rc::new(Cell::new(false)); let callbacks = Box::new(HostCallbacks { app, ctxt, surf, disp, landscape, shut_down_complete: shut_down_complete.clone(), history_update, url_update, keyboard, }); info!("Starting servo"); simpleservo::init(opts, gl.gl_wrapper, wakeup, callbacks).expect("error initializing Servo"); let result = Box::new(ServoInstance { scroll_state: ScrollState::TriggerUp, scroll_scale: Scale::new(SCROLL_SCALE / hidpi), shut_down_complete, }); Box::into_raw(result) } #[no_mangle] pub unsafe extern "C" fn heartbeat_servo(_servo: *mut ServoInstance) { let _ = call(|s| s.perform_updates()); } #[no_mangle] pub unsafe extern "C" fn keyboard_servo( _servo: *mut ServoInstance, key_code: char, key_type: MLKeyType, ) { let key = match key_type { MLKeyType::kCharacter => Key::Character([key_code].iter().collect()), MLKeyType::kBackspace => Key::Backspace, MLKeyType::kEnter => Key::Enter, _ => return, }; // TODO: can the ML1 generate separate press and release events? let key2 = key.clone(); let _ = call(move |s| s.key_down(key2)); let _ = call(move |s| s.key_up(key)); } // Some magic numbers. // How far does the cursor have to move for it to count as a drag rather than a click? // (In device pixels squared, to avoid taking a sqrt when calculating move distance.) const DRAG_CUTOFF_SQUARED: f32 = 900.0; // How much should we scale scrolling by? 
const SCROLL_SCALE: f32 = 3.0;

/// C entry point: the Magic Leap cursor moved to device-pixel position (x, y).
///
/// Drives the click-vs-drag state machine: while the trigger is down, small
/// movements (under DRAG_CUTOFF_SQUARED) are ignored so a twitchy trigger pull
/// still counts as a click; larger movements start or continue a scroll.
#[no_mangle]
pub unsafe extern "C" fn move_servo(servo: *mut ServoInstance, x: f32, y: f32) {
    // Servo's cursor was moved
    if let Some(servo) = servo.as_mut() {
        let point = DevicePoint::new(x, y);
        match servo.scroll_state {
            ScrollState::TriggerUp => {
                // Trigger not held: plain mouse move, state unchanged.
                servo.scroll_state = ScrollState::TriggerUp;
                let _ = call(|s| s.mouse_move(x, y));
            },
            // Still within the click dead-zone: swallow the move entirely.
            ScrollState::TriggerDown(start)
                if (start - point).square_length() < DRAG_CUTOFF_SQUARED =>
            {
                return;
            }
            ScrollState::TriggerDown(start) => {
                // Crossed the drag threshold: begin scrolling from `start`.
                servo.scroll_state = ScrollState::TriggerDragging(start, point);
                let _ = call(|s| s.mouse_move(x, y));
                let delta = (point - start) * servo.scroll_scale;
                let start = start.to_i32();
                let _ = call(|s| s.scroll_start(delta.x, delta.y, start.x, start.y));
            },
            ScrollState::TriggerDragging(start, prev) => {
                // Ongoing drag: scroll by the delta since the previous sample.
                servo.scroll_state = ScrollState::TriggerDragging(start, point);
                let _ = call(|s| s.mouse_move(x, y));
                let delta = (point - prev) * servo.scroll_scale;
                let start = start.to_i32();
                let _ = call(|s| s.scroll(delta.x, delta.y, start.x, start.y));
            },
        }
    }
}

/// C entry point: the Magic Leap trigger was pressed (`down == true`) or
/// released (`down == false`) at device-pixel position (x, y).
///
/// A press begins the click-vs-drag state machine; a release either completes
/// a click (if the cursor never left the dead-zone) or ends a scroll gesture.
#[no_mangle]
pub unsafe extern "C" fn trigger_servo(servo: *mut ServoInstance, x: f32, y: f32, down: bool) {
    // Servo was triggered
    if let Some(servo) = servo.as_mut() {
        let point = DevicePoint::new(x, y);
        match servo.scroll_state {
            ScrollState::TriggerUp if down => {
                servo.scroll_state = ScrollState::TriggerDown(point);
                let _ = call(|s| s.mouse_down(x, y, MouseButton::Left));
            },
            ScrollState::TriggerDown(start) if !down => {
                // Released without dragging: synthesize a full click at the
                // original press position.
                servo.scroll_state = ScrollState::TriggerUp;
                let _ = call(|s| s.mouse_up(start.x, start.y, MouseButton::Left));
                let _ = call(|s| s.click(start.x as f32, start.y as f32));
                let _ = call(|s| s.mouse_move(start.x, start.y));
            },
            ScrollState::TriggerDragging(start, prev) if !down => {
                // Released mid-drag: finish the scroll, then release the button.
                servo.scroll_state = ScrollState::TriggerUp;
                let delta = (point - prev) * servo.scroll_scale;
                let start = start.to_i32();
                let _ = call(|s| s.scroll_end(delta.x, delta.y, start.x, start.y));
                let _ = call(|s| s.mouse_up(x, y, MouseButton::Left));
            },
            // Redundant press/release events (e.g. repeated `down`) are ignored.
            _ => return,
        }
    }
}

/// C entry point: traverses session history.
///
/// `delta == 0` reloads, negative goes back, positive goes forward.
/// NOTE(review): only the sign is used — `delta == -3` goes back one entry,
/// not three; confirm that is the intended contract.
#[no_mangle]
pub unsafe extern "C" fn traverse_servo(_servo: *mut ServoInstance, delta: i32) {
    // Traverse the session history
    if delta == 0 {
        let _ = call(|s| s.reload());
    } else if delta < 0 {
        let _ = call(|s| s.go_back());
    } else {
        let _ = call(|s| s.go_forward());
    }
}

/// C entry point: navigates to `text`, treating it as a URL if it parses and
/// otherwise as a DuckDuckGo search query.
///
/// Safety: `text` must be a valid NUL-terminated C string; panics if it is
/// not valid UTF-8.
#[no_mangle]
pub unsafe extern "C" fn navigate_servo(_servo: *mut ServoInstance, text: *const c_char) {
    let text = CStr::from_ptr(text)
        .to_str()
        .expect("Failed to convert text to UTF-8");
    let url = ServoUrl::parse(text).unwrap_or_else(|_| {
        // Not a URL: build a search query instead.
        let mut search = ServoUrl::parse("https://duckduckgo.com")
            .expect("Failed to parse search URL")
            .into_url();
        search.query_pairs_mut().append_pair("q", text);
        ServoUrl::from_url(search)
    });
    let _ = call(|s| s.load_uri(url.as_str()));
}

// Some magic numbers for shutdown
const SHUTDOWN_DURATION: Duration = Duration::from_secs(10);
const SHUTDOWN_POLL_INTERVAL: Duration = Duration::from_millis(100);

/// C entry point: requests Servo shutdown and reclaims the `ServoInstance`.
///
/// Polls `perform_updates` until `on_shutdown_complete` flips the shared flag.
/// Safety: `servo` must be the pointer returned by `init_servo`; it is freed
/// here (via `Box::from_raw`) and must not be used afterwards.
#[no_mangle]
pub unsafe extern "C" fn discard_servo(servo: *mut ServoInstance) {
    if let Some(servo) = servo.as_mut() {
        // Take back ownership so the instance is dropped when we return.
        let servo = Box::from_raw(servo);
        let finish = Instant::now() + SHUTDOWN_DURATION;
        let _ = call(|s| s.request_shutdown());
        while !servo.shut_down_complete.get() {
            let _ = call(|s| s.perform_updates());
            if Instant::now() > finish {
                // NOTE(review): the deadline only logs — the loop keeps
                // polling (and warning) forever if shutdown never completes;
                // consider breaking out after the deadline.
                warn!("Incomplete shutdown.");
            }
            thread::sleep(SHUTDOWN_POLL_INTERVAL);
        }
        deinit();
    }
}

/// Host-side state and callbacks handed to simpleservo at init time.
/// Holds the EGL handles for flush/make_current plus the Magic Leap
/// callback function pointers for history, URL, and keyboard events.
struct HostCallbacks {
    ctxt: EGLContext,
    surf: EGLSurface,
    disp: EGLDisplay,
    // True when running as a landscape (non-immersive) app.
    landscape: bool,
    // Set to true by on_shutdown_complete; polled in discard_servo.
    shut_down_complete: Rc<Cell<bool>>,
    history_update: MLHistoryUpdate,
    url_update: MLURLUpdate,
    app: MLApp,
    keyboard: MLKeyboard,
}

impl HostTrait for HostCallbacks {
    fn flush(&self) {
        // Immersive and landscape apps have different requirements for who calls SwapBuffers.
        // Only landscape apps swap here; immersive rendering is driven by the VR service.
        if self.landscape {
            SwapBuffers(self.disp, self.surf);
        }
    }

    fn make_current(&self) {
        MakeCurrent(self.disp, self.surf, self.surf, self.ctxt);
    }

    // Page-script prompts are not implemented on this platform: alerts are
    // logged, yes/no and ok/cancel prompts are auto-cancelled, and input
    // prompts return their default value.
    fn prompt_alert(&self, message: String, _trusted: bool) {
        warn!("Prompt Alert: {}", message);
    }

    fn prompt_ok_cancel(&self, message: String, _trusted: bool) -> PromptResult {
        warn!("Prompt not implemented. Cancelled. {}", message);
        PromptResult::Secondary
    }

    fn prompt_yes_no(&self, message: String, _trusted: bool) -> PromptResult {
        warn!("Prompt not implemented. Cancelled. {}", message);
        PromptResult::Secondary
    }

    fn prompt_input(&self, message: String, default: String, _trusted: bool) -> Option<String> {
        warn!("Input prompt not implemented. {}", message);
        Some(default)
    }

    fn on_load_started(&self) {}
    fn on_load_ended(&self) {}
    fn on_title_changed(&self, _title: String) {}
    fn on_allow_navigation(&self, _url: String) -> bool {
        true
    }

    // Forwards the new URL to the app's C callback (if one was registered).
    fn on_url_changed(&self, url: String) {
        if let Ok(cstr) = CString::new(url.as_str()) {
            if let Some(url_update) = self.url_update.0 {
                url_update(self.app, cstr.as_ptr());
            }
        }
    }

    // Forwards back/forward availability to the app's C callback.
    fn on_history_changed(&self, can_go_back: bool, can_go_forward: bool) {
        if let Some(history_update) = self.history_update.0 {
            history_update(self.app, can_go_back, can_go_forward);
        }
    }

    fn on_animating_changed(&self, _animating: bool) {}

    fn on_shutdown_complete(&self) {
        self.shut_down_complete.set(true);
    }

    // Asks the app to show/hide its virtual keyboard; input type, text, and
    // bounds hints are ignored on this platform.
    fn on_ime_show(
        &self,
        _input_type: InputMethodType,
        _text: Option<String>,
        _bounds: DeviceIntRect,
    ) {
        if let Some(keyboard) = self.keyboard.0 {
            keyboard(self.app, true)
        }
    }

    fn on_ime_hide(&self) {
        if let Some(keyboard) = self.keyboard.0 {
            keyboard(self.app, false)
        }
    }

    // Clipboard is not supported on this platform.
    fn get_clipboard_contents(&self) -> Option<String> {
        None
    }

    fn set_clipboard_contents(&self, _contents: String) {}

    fn on_devtools_started(&self, port: Result<u16, ()>) {
        match port {
            Ok(p) => info!("Devtools Server running on port {}", p),
            Err(()) => error!("Error running Devtools server"),
        }
    }
}

/// Per-instance state returned to C by `init_servo` as an opaque pointer.
pub struct ServoInstance {
    // Click-vs-drag state machine driven by move_servo/trigger_servo.
    scroll_state: ScrollState,
    // Converts device-pixel deltas to layout-pixel scroll amounts.
    scroll_scale: Scale<f32, DevicePixel, LayoutPixel>,
    shut_down_complete: Rc<Cell<bool>>,
}

// Trigger/cursor gesture state:
// TriggerUp              — trigger not held.
// TriggerDown(start)     — held, cursor still within the click dead-zone.
// TriggerDragging(start, prev) — held and scrolling; `prev` is the last sample.
#[derive(Clone, Copy)]
enum ScrollState {
    TriggerUp,
    TriggerDown(DevicePoint),
    TriggerDragging(DevicePoint, DevicePoint),
}

// No-op waker: the app drives Servo via heartbeat_servo, so wake() does nothing.
struct EventLoopWakerInstance;

impl EventLoopWaker for EventLoopWakerInstance {
    fn clone_box(&self) -> Box<dyn EventLoopWaker> {
        Box::new(EventLoopWakerInstance)
    }

    fn wake(&self) {}
}

// Adapts the Rust `log` crate to the Magic Leap C logging callback.
impl log::Log for MLLogger {
    fn enabled(&self, metadata: &log::Metadata) -> bool {
        metadata.level() <= LOG_LEVEL
    }

    fn log(&self, record: &log::Record) {
        if let Some(log) = self.0 {
            let lvl = match record.level() {
                log::Level::Error => MLLogLevel::Error,
                log::Level::Warn => MLLogLevel::Warning,
                log::Level::Info => MLLogLevel::Info,
                log::Level::Debug => MLLogLevel::Debug,
                log::Level::Trace => MLLogLevel::Verbose,
            };
            // Format into a stack-friendly buffer with a trailing NUL, since
            // the C callback expects a NUL-terminated string.
            let mut msg = SmallVec::<[u8; 128]>::new();
            write!(msg, "{}\0", record.args()).unwrap();
            log(lvl, &msg[0] as *const _ as *const _);
        }
    }

    fn flush(&self) {}
}

/// Redirects the process's stdout/stderr (fds 1 and 2) into the Magic Leap
/// logger via a pipe drained by a background thread. No-op if the logger
/// callback is absent.
fn redirect_stdout_to_log(logger: MLLogger) {
    let log = match logger.0 {
        None => return,
        Some(log) => log,
    };

    // The first step is to redirect stdout and stderr to the logs.
    // We redirect stdout and stderr to a custom descriptor.
    let mut pfd: [c_int; 2] = [0, 0];
    unsafe {
        pipe(pfd.as_mut_ptr());
        dup2(pfd[1], 1);
        dup2(pfd[1], 2);
    }

    let descriptor = pfd[0];

    // Then we spawn a thread whose only job is to read from the other side of the
    // pipe and redirect to the logs.
    let _detached = thread::spawn(move || {
        const BUF_LENGTH: usize = 512;
        let mut buf = vec![b'\0' as c_char; BUF_LENGTH];

        // Always keep at least one null terminator
        const BUF_AVAILABLE: usize = BUF_LENGTH - 1;
        let buf = &mut buf[..BUF_AVAILABLE];

        // `cursor` is the number of bytes carried over from the previous read
        // (a partial line waiting for its newline).
        let mut cursor = 0_usize;
        loop {
            let result = {
                let read_into = &mut buf[cursor..];
                unsafe {
                    read(
                        descriptor,
                        read_into.as_mut_ptr() as *mut _,
                        read_into.len(),
                    )
                }
            };

            let end = if result == 0 {
                // EOF: the write side was closed; stop the thread.
                return;
            } else if result < 0 {
                log(
                    MLLogLevel::Error,
                    b"error in log thread; closing\0".as_ptr() as *const _,
                );
                return;
            } else {
                result as usize + cursor
            };

            // Only modify the portion of the buffer that contains real data.
            let buf = &mut buf[0..end];

            if let Some(last_newline_pos) = buf.iter().rposition(|&c| c == b'\n' as c_char) {
                // Log everything up to the last newline in one call, then slide
                // any trailing partial line to the front of the buffer.
                buf[last_newline_pos] = b'\0' as c_char;
                log(MLLogLevel::Info, buf.as_ptr());
                if last_newline_pos < buf.len() - 1 {
                    let pos_after_newline = last_newline_pos + 1;
                    let len_not_logged_yet = buf[pos_after_newline..].len();
                    for j in 0..len_not_logged_yet as usize {
                        buf[j] = buf[pos_after_newline + j];
                    }
                    cursor = len_not_logged_yet;
                } else {
                    cursor = 0;
                }
            } else if end == BUF_AVAILABLE {
                // No newline found but the buffer is full, flush it anyway.
                // `buf.as_ptr()` is null-terminated because BUF_AVAILABLE is 1 less
                // than BUF_LENGTH, so the allocation's final byte stays '\0'.
                log(MLLogLevel::Info, buf.as_ptr());
                cursor = 0;
            } else {
                // Partial line with room to spare: keep accumulating.
                cursor = end;
            }
        }
    });
}
native_display_pointer: Some(disp), };