repo_name | path | size | content | license |
---|---|---|---|---|
duncte123/Dunctebot-website | includes/db.php | 911 | <?php
/**
* Copyright 2017-2018 Duncan "duncte123" Sterken
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
require('config.php');
$db = new PDO("mysql:host={$config['db']['host']};port=3306;dbname={$config['db']['dbname']}"
, $config['db']['user'], $config['db']['pass'],
array(PDO::MYSQL_ATTR_INIT_COMMAND => "SET NAMES utf8") );
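// Example usage (a minimal sketch, not part of the original file): scripts that
// require this file would typically issue parameterized queries through the
// shared $db handle, e.g.:
//
//   $stmt = $db->prepare('SELECT prefix FROM guild_settings WHERE guild_id = :id');
//   $stmt->execute([':id' => $guildId]);
//   $row = $stmt->fetch(PDO::FETCH_ASSOC);
//
// The table/column names and $guildId above are hypothetical; only the PDO
// prepare/execute/fetch calls are standard API.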
| apache-2.0 |
RepIR/RepIR | src/main/java/io/github/repir/Repository/ModelParameters.java | 4324 | package io.github.repir.Repository;
import java.util.Map;
import java.util.TreeMap;
import io.github.repir.Repository.ModelParameters.File;
import io.github.repir.Repository.ModelParameters.Record;
import io.github.htools.io.Datafile;
import io.github.htools.io.struct.StructuredFileKeyValue;
import io.github.htools.io.struct.StructuredFileKeyValueRecord;
import io.github.htools.lib.Log;
import io.github.htools.lib.MathTools;
import java.util.HashMap;
/**
* For tuning RetrievalModel, this Feature can be used to store mean
* average precisions for each combination of parameter settings.
* @author jer
*/
public class ModelParameters extends StoredDynamicFeature<File, Record> {
public static Log log = new Log(ModelParameters.class);
private ModelParameters(Repository repository, String model) {
super(repository, model);
}
public static ModelParameters get(Repository repository, String model) {
String label = canonicalName(ModelParameters.class, model);
ModelParameters termid = (ModelParameters)repository.getStoredFeature(label);
if (termid == null) {
termid = new ModelParameters(repository, model);
repository.storeFeature(label, termid);
}
return termid;
}
@Override
public File createFile(Datafile df) {
return new File(df);
}
public Record newRecord( String settings[] ) {
return getFile().newRecord(settings);
}
public class File extends StructuredFileKeyValue<Record> {
public StringArrayField parameter = this.addStringArray("parameter");
public StringArrayField value = this.addStringArray("value");
public DoubleField map = this.addDouble("map");
public File(Datafile df) {
super(df);
}
@Override
public Record newRecord() {
return new Record();
}
public Record newRecord( String settings[] ) {
return new Record( settings );
}
@Override
public Record closingRecord() {
return new Record();
}
}
public class Record implements StructuredFileKeyValueRecord<File> {
public TreeMap<String, String> parameters = new TreeMap<String,String>();
public double map = -1;
public Record() {}
public Record( String settings[] ) {
for (String s : settings) {
parameters.put(s, repository.configuredString(s));
}
}
@Override
public int hashCode() {
int hash = 31;
for (Map.Entry<String, String> e : parameters.entrySet()) {
hash = MathTools.combineHash(hash, e.getKey().hashCode());
hash = MathTools.combineHash(hash, e.getValue().hashCode());
}
return MathTools.finishHash(hash);
}
@Override
public boolean equals(Object r) {
if (r instanceof Record) {
Record record = (Record)r;
return parameters.equals(record.parameters);
}
return false;
}
public void write(File file) {
file.parameter.write( parameters.keySet().toArray(new String[ parameters.size() ]));
file.value.write( parameters.values().toArray(new String[ parameters.size() ]));
file.map.write(map);
}
public void read(File file) {
for (int i = 0; i < file.parameter.value.length; i++) {
parameters.put( file.parameter.value[i], file.value.value[i]);
}
map = file.map.value;
}
public void convert(StructuredFileKeyValueRecord record) {
Record r = (Record)record;
r.parameters = (TreeMap<String, String>)parameters.clone();
r.map = map;
}
}
public Record read( String settings[] ) {
this.openRead();
Record s = (Record)newRecord( settings );
Record r = (Record) find(s);
return r;
}
public Record read( Record record ) {
this.openRead();
Record found = (Record) find(record);
return (found == null)?record:found;
}
public HashMap<Record, Record> load() {
openRead();
HashMap<Record, Record> map = new HashMap<Record, Record>();
for (Record r : getKeys()) {
map.put(r, r);
}
closeRead();
return map;
}
}
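// Usage sketch (illustrative, not from the original source): a tuning run could
// look up the stored mean average precision for the current parameter settings
// roughly like this, assuming a configured Repository named "repository" and a
// model label "mymodel" (both hypothetical):
//
//   ModelParameters params = ModelParameters.get(repository, "mymodel");
//   ModelParameters.Record setting = params.newRecord(new String[]{"retrievalmodel.lambda"});
//   ModelParameters.Record stored = params.read(setting);
//   if (stored.map < 0) {
//       // not evaluated yet: run the retrieval experiment, set stored.map, then persist it
//   }
//
// The parameter key "retrievalmodel.lambda" is hypothetical; the calls mirror
// the public methods defined above.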
| apache-2.0 |
cmbasnett/mandala | engine/src/input_event_flags.hpp | 734 | // naga
#include "types.hpp"
// stl
#include <iostream>
namespace naga
{
typedef u8 InputEventFlagsType;
enum : InputEventFlagsType
{
// TODO: this got messed up, they map directly to GLFW flags
INPUT_EVENT_FLAG_SHIFT = (1 << 0),
INPUT_EVENT_FLAG_CTRL = (1 << 1),
INPUT_EVENT_FLAG_ALT = (1 << 2),
INPUT_EVENT_FLAG_SUPER = (1 << 3)
};
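// Usage sketch (not part of the original header): event handlers normally test
// these bits with bitwise AND on the flags value carried by an input event.
// The handler name below is hypothetical; the flag constants and the
// InputEventFlagsType alias are the ones defined above.
//
//   void on_keyboard_key(InputEventFlagsType flags)
//   {
//       if (flags & INPUT_EVENT_FLAG_SHIFT) { /* shift held */ }
//       if (flags & (INPUT_EVENT_FLAG_CTRL | INPUT_EVENT_FLAG_ALT)) { /* ctrl or alt held */ }
//   }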
//struct InputEventFlags
//{
// InputEventFlagsType value;
// inline InputEventFlagsType operator()() { return value; }
// friend inline std::ostream& operator<<(std::ostream& ostream, InputEventFlags& flags)
// {
// return ostream;
// }
// friend inline std::istream& operator>>(std::istream& istream, InputEventFlags& flags)
// {
// return istream;
// }
//};
}
| apache-2.0 |
citlab/Intercloud | xmpp-client/src/main/java/de/tu_berlin/cit/intercloud/xmpp/client/service/impl/XmppService.java | 5191 | package de.tu_berlin.cit.intercloud.xmpp.client.service.impl;
import de.tu_berlin.cit.intercloud.xmpp.client.service.IXmppConnectionManager;
import de.tu_berlin.cit.intercloud.xmpp.client.extension.GetXwadlIQ;
import de.tu_berlin.cit.intercloud.xmpp.client.extension.RestIQ;
import de.tu_berlin.cit.intercloud.xmpp.client.extension.XwadlIQ;
import de.tu_berlin.cit.intercloud.xmpp.client.service.IXmppService;
import de.tu_berlin.cit.intercloud.xmpp.rest.XmppURI;
import de.tu_berlin.cit.intercloud.xmpp.rest.xml.ResourceDocument;
import de.tu_berlin.cit.intercloud.xmpp.rest.xwadl.ResourceTypeDocument;
import org.jivesoftware.smack.AbstractXMPPConnection;
import org.jivesoftware.smack.PacketCollector;
import org.jivesoftware.smack.SmackException;
import org.jivesoftware.smack.XMPPException;
import org.jivesoftware.smack.filter.AndFilter;
import org.jivesoftware.smack.filter.IQReplyFilter;
import org.jivesoftware.smack.filter.StanzaFilter;
import org.jivesoftware.smack.packet.IQ;
import org.jivesoftware.smackx.disco.ServiceDiscoveryManager;
import org.jivesoftware.smackx.disco.packet.DiscoverInfo;
import org.jivesoftware.smackx.disco.packet.DiscoverItems;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
public class XmppService implements IXmppService {
private static final Logger logger = LoggerFactory.getLogger(XmppService.class);
private final IXmppConnectionManager connectionManager;
public XmppService(XmppURI uri, String password) throws XMPPException, IOException, SmackException {
this.connectionManager = new XmppConnectionManager(uri, password);
}
@Override
public void connect(XmppURI uri, String password) throws XMPPException, IOException, SmackException {
this.connectionManager.connect(uri, password);
}
@Override
public void disconnect() {
this.connectionManager.disconnect();
}
@Override
public List<String> discoverRestfulItems(XmppURI uri) throws XMPPException, IOException, SmackException {
return discoverItemsByFeature(uri, Arrays.asList(XwadlIQ.NAMESPACE, RestIQ.NAMESPACE));
}
private List<String> discoverItemsByFeature(XmppURI uri, List<String> features) throws XMPPException, IOException, SmackException {
// discover items
ServiceDiscoveryManager discoveryManager = ServiceDiscoveryManager.getInstanceFor(this.connectionManager.getConnection());
DiscoverItems discoverItems = discoveryManager.discoverItems(uri.getDomain());
List<DiscoverItems.Item> items = discoverItems.getItems();
List<String> result = new ArrayList<>();
// discover infos per item and check if specified feature set is supported
for (DiscoverItems.Item item : items) {
DiscoverInfo discoverInfo = discoveryManager.discoverInfo(item.getEntityID());
boolean containsAllFeatures = true;
for (String feature : features) {
if (!discoverInfo.containsFeature(feature)) {
containsAllFeatures = false;
break;
}
}
if (containsAllFeatures) {
result.add(item.getEntityID());
} else if (logger.isDebugEnabled()) {
logger.debug("Entity {} does not support the specified features.", item.getEntityID());
}
}
return result;
}
@Override
public ResourceDocument sendRestDocument(XmppURI uri, ResourceDocument document) throws XMPPException, IOException, SmackException {
AbstractXMPPConnection connection = this.connectionManager.getConnection();
// create a set IQ stanza addressed to the uri
RestIQ setIQ = new RestIQ(uri, document);
// send stanza
connection.sendStanza(setIQ);
// wait for response
StanzaFilter filter = new AndFilter(new IQReplyFilter(setIQ, connection));
PacketCollector collector = connection.createPacketCollector(filter);
IQ resultIQ = collector.nextResultOrThrow();
if(resultIQ instanceof RestIQ) {
// create rest doc
return ((RestIQ) resultIQ).getResourceDocument();
} else {
throw new SmackException("Wrong RestIQ has been passed");
}
}
@Override
public ResourceTypeDocument getXwadlDocument(XmppURI uri) throws XMPPException, IOException, SmackException {
AbstractXMPPConnection connection = this.connectionManager.getConnection();
// create a get IQ stanza addressed to the uri
IQ getIQ = new GetXwadlIQ(uri);
// send stanza
connection.sendStanza(getIQ);
// wait for response
StanzaFilter filter = new AndFilter(new IQReplyFilter(getIQ, connection));
PacketCollector collector = connection.createPacketCollector(filter);
IQ resultIQ = collector.nextResultOrThrow();
if (resultIQ instanceof XwadlIQ) {
// create xwadl
return ((XwadlIQ) resultIQ).getXwadl();
} else {
throw new SmackException("Wrong IQ has been passed");
}
}
}
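// Illustrative usage (a sketch, not part of the original project): a client
// might discover RESTful XMPP entities and fetch their XWADL descriptions like
// this; clientUri and password are hypothetical placeholders, the service
// methods are the ones defined above.
//
//   IXmppService service = new XmppService(clientUri, password);
//   for (String entity : service.discoverRestfulItems(clientUri)) {
//       XmppURI entityUri = /* build an XmppURI for the discovered entity */;
//       ResourceTypeDocument xwadl = service.getXwadlDocument(entityUri);
//       // inspect xwadl, then exchange ResourceDocuments via sendRestDocument(entityUri, doc)
//   }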
| apache-2.0 |
scottlepp/navigo | app/common/security/auth-service.js | 7716 | 'use strict';
angular.module('voyager.security').
factory('authService', function ($http, config, $q) {
var observers = [];
var errorCallback;
var authCallback;
var loggedIn = false;
var permissions = {};
var _user;
var _groups = [];
var _sso = true;
var _isAnonymous = true;
var _PERMISSIONS_LIST = 'check=manage&check=download&check=process&check=configure_view&check=save_search&check=share_saved_search&check=view&check=tag&check=edit_fields&check=flag';
var _methods;
function _setLoginState(response) {
if (response) {
_isAnonymous = angular.isUndefined(response.data.state) || response.data.state === 'anonymous';
_user = response.data.user;
permissions = response.data.permissions;
//console.log(response);
if(response.data.user) {
loggedIn = true;
_sso = true;
_groups = response.data.user.groups;
} else {
loggedIn = false; //logout success
}
if(response.data.methods) {
_methods = response.data.methods;
}
observers.forEach(function (entry) {
entry(response);
});
if (authCallback) {
authCallback(response);
authCallback = null;
}
}
return response;
}
var defaultErrorCallback = function (response) {
if (response.error) {
console.log(response.error);
} else {
console.log('failed');
}
};
var doPost = function (request, action) {
return $http({
method: 'POST',
url: config.root + 'api/rest/auth/' + action + '.json',
data: request,
headers: {'Content-Type': 'application/x-www-form-urlencoded'}
}).then(_setLoginState, errorCallback);
};
function _getInfoUrl() {
var url = config.root + 'api/rest/auth/info.json?' + _PERMISSIONS_LIST + '&r=' + new Date().getTime();
if(_sso === false) {
url += '&sso=false';
}
return url;
}
function _getPrivileges() {
return $http.get(_getInfoUrl(),{cache: false, headers: {'Cache-Control' : 'no-cache'}}).then(_setLoginState, defaultErrorCallback);
}
_getPrivileges();
return {
doLogin: function ($scope, successHandler, errorHandler) {
errorCallback = errorHandler;
authCallback = successHandler;
var request = 'user=' + encodeURIComponent($scope.user) + '&pass=' + encodeURIComponent($scope.pass) + '&' + _PERMISSIONS_LIST;
if($scope.keepLoggedIn === true) {
request += '&rememberMe=true';
}
return doPost(request, 'login');
},
doLogout: function () {
//observers = [];
authCallback = null;
var request = 'check=manage';
doPost(request, 'logout');
loggedIn = false;
_sso = false;
},
getPrivileges: function () {
return _getPrivileges();
// possibly make jsonp call here for IE 8 issue but server not set up
},
loadPrivileges: function() {
if(_.isEmpty(permissions)) {
return _getPrivileges();
} else {
return $q.when();
}
},
addObserver: function (obs) {
var exists = false;
observers.forEach(function (entry) {
if (entry === obs) {
exists = true;
}
});
if (!exists) {
observers.push(obs);
}
},
isLoggedIn: function () {
return loggedIn;
},
isAnonymous: function () {
return _isAnonymous;
},
hasPermission: function (type) {
return permissions[type] && permissions[type] === true;
},
getUser: function() {
return _user;
},
getGroups: function() {
return _groups;
},
getGroupsJoined: function() {
return _groups.join();
},
getUserInfo: function() {
return $http.get(_getInfoUrl()).then(function(res) {
return res.data.user;
});
},
fetchGroups: function() {
return $http.get(config.root + 'api/rest/auth/info/groups').then(function(res) {
return res.data.groups;
});
},
checkAccess: function() {
_sso = true; //if they manually logged out
return $http.get(_getInfoUrl()).then(function(res) {
var hasAccess = res.data.permissions.use_voyager;
if (hasAccess) {
_setLoginState(res);
}
return hasAccess;
});
},
getMethods: function() {
// test external
// _methods = []; // uncomment to simulate external-only methods
// _methods.push({name:'test',url:'http://www.google.com'});
var methods = {all:_methods};
methods.all = _methods.filter(function(method) {
return method.enabled === true;
});
methods.external = _.filter(_methods, function(method) {
method.displayName = _.classify(method.name);
return angular.isDefined(method.url) && method.enabled === true;
});
if(methods.external.length === 0) {
delete methods.external;
}
return methods;
},
showLogout: function() {
var show = true;
var methods = this.getMethods().all;
// only enforce if windows is the only method enabled
if (methods.length === 1 && methods[0].name === 'windows') {
var windowsAuth = methods[0];
// only enforce if some sso is enabled
if (windowsAuth.enableNtlm === true || windowsAuth.enableNegotiate === true) {
show = !windowsAuth.hideLogout;
}
} else if (methods.length > 1) { // if there are multiple methods and sso, show logout for admin only
var self = this;
_.each(methods, function(method) {
// hide if they aren't an admin (let admin see multiple methods)
if(method.name === 'windows' && !self.hasPermission('manage')) {
// only enforce if some sso is enabled
if (method.enableNtlm === true || method.enableNegotiate === true) {
show = !method.hideLogout;
}
}
});
}
return show;
}
};
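// Usage sketch (illustrative, not part of the original service): a controller
// consuming this factory might look roughly like the following; the controller
// name is hypothetical, the injected API is the object returned above.
//
//   angular.module('voyager.security').controller('LoginCtrl',
//     function ($scope, authService) {
//       authService.doLogin($scope,
//         function () { /* success: authService.isLoggedIn() is now true */ },
//         function (response) { /* show response.error to the user */ });
//       var canManage = authService.hasPermission('manage');
//     });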
}); | apache-2.0 |
OpenHFT/Chronicle-Queue | src/test/java/net/openhft/chronicle/queue/WriteBytesTest.java | 60431 | /*
* Copyright 2016-2020 https://chronicle.software
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package net.openhft.chronicle.queue;
import net.openhft.chronicle.bytes.Bytes;
import net.openhft.chronicle.core.annotation.RequiredForClient;
import net.openhft.chronicle.core.io.IORuntimeException;
import net.openhft.chronicle.core.io.IOTools;
import net.openhft.chronicle.core.time.SetTimeProvider;
import net.openhft.chronicle.queue.impl.single.SingleChronicleQueueBuilder;
import net.openhft.chronicle.wire.DocumentContext;
import org.jetbrains.annotations.NotNull;
import org.junit.Test;
import java.io.File;
import static net.openhft.chronicle.queue.RollCycles.TEST4_DAILY;
import static net.openhft.chronicle.queue.impl.single.SingleChronicleQueueBuilder.binary;
import static org.junit.Assert.assertEquals;
@RequiredForClient
public class WriteBytesTest extends ChronicleQueueTestBase {
final Bytes<?> outgoingBytes = Bytes.elasticByteBuffer();
private final byte[] incomingMsgBytes = new byte[100];
private final byte[] outgoingMsgBytes = new byte[100];
@Test
public void testWriteBytes() {
File dir = getTmpDir();
try (ChronicleQueue queue = binary(dir)
.testBlockSize()
.build()) {
ExcerptAppender appender = queue.acquireAppender();
ExcerptTailer tailer = queue.createTailer();
outgoingMsgBytes[0] = 'A';
outgoingBytes.write(outgoingMsgBytes);
postOneMessage(appender);
fetchOneMessage(tailer, incomingMsgBytes);
// System.out.println(new String(incomingMsgBytes));
outgoingBytes.clear();
outgoingMsgBytes[0] = 'A';
outgoingMsgBytes[1] = 'B';
outgoingBytes.write(outgoingMsgBytes);
postOneMessage(appender);
fetchOneMessage(tailer, incomingMsgBytes);
// System.out.println(new String(incomingMsgBytes));
} finally {
try {
IOTools.deleteDirWithFiles(dir, 2);
} catch (IORuntimeException e) {
// ignored
}
}
}
@Test
public void testWriteBytesAndDump() {
File dir = getTmpDir();
final SingleChronicleQueueBuilder builder = binary(dir)
.testBlockSize()
.rollCycle(TEST4_DAILY)
.timeProvider(new SetTimeProvider("2020/10/19T01:01:01"));
try (ChronicleQueue queue = builder
.build()) {
final boolean useSparseFiles = builder.useSparseFiles();
ExcerptAppender appender = queue.acquireAppender();
for (int i = Byte.MIN_VALUE; i <= Byte.MAX_VALUE; i++) {
byte finalI = (byte) i;
appender.writeBytes(b ->
b.writeLong(finalI * 0x0101010101010101L));
}
assertEquals("--- !!meta-data #binary\n" +
"header: !STStore {\n" +
" wireType: !WireType BINARY_LIGHT,\n" +
" metadata: !SCQMeta {\n" +
" roll: !SCQSRoll { length: !int 86400000, format: yyyyMMdd'T4', epoch: 0 },\n" +
" deltaCheckpointInterval: 64,\n" +
" sourceId: 0\n" +
" }\n" +
"}\n" +
"# position: 176, header: 0\n" +
"--- !!data #binary\n" +
"listing.highestCycle: 18554\n" +
"# position: 216, header: 1\n" +
"--- !!data #binary\n" +
"listing.lowestCycle: 18554\n" +
"# position: 256, header: 2\n" +
"--- !!data #binary\n" +
"listing.modCount: 3\n" +
"# position: 288, header: 3\n" +
"--- !!data #binary\n" +
"chronicle.write.lock: -9223372036854775808\n" +
"# position: 328, header: 4\n" +
"--- !!data #binary\n" +
"chronicle.append.lock: -9223372036854775808\n" +
"# position: 368, header: 5\n" +
"--- !!data #binary\n" +
"chronicle.lastIndexReplicated: -1\n" +
"# position: 416, header: 6\n" +
"--- !!data #binary\n" +
"chronicle.lastAcknowledgedIndexReplicated: -1\n" +
"...\n" +
"# 130596 bytes remaining\n" +
"--- !!meta-data #binary\n" +
"header: !SCQStore {\n" +
" writePosition: [\n" +
" 4120,\n" +
" 17695265259775\n" +
" ],\n" +
" indexing: !SCQSIndexing {\n" +
" indexCount: 32,\n" +
" indexSpacing: 4,\n" +
" index2Index: 196,\n" +
" lastIndex: 256\n" +
" },\n" +
" dataFormat: 1\n" +
"}\n" +
"# position: 196, header: -1\n" +
"--- !!meta-data #binary\n" +
"index2index: [\n" +
" # length: 32, used: 2\n" +
" 488,\n" +
" 2324,\n" +
" 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0\n" +
"]\n" +
"# position: 488, header: -1\n" +
"--- !!meta-data #binary\n" +
"index: [\n" +
" # length: 32, used: 32\n" +
" 776,\n" +
" 824,\n" +
" 872,\n" +
" 920,\n" +
" 968,\n" +
" 1016,\n" +
" 1064,\n" +
" 1112,\n" +
" 1160,\n" +
" 1208,\n" +
" 1256,\n" +
" 1304,\n" +
" 1352,\n" +
" 1400,\n" +
" 1448,\n" +
" 1496,\n" +
" 1544,\n" +
" 1592,\n" +
" 1640,\n" +
" 1688,\n" +
" 1736,\n" +
" 1784,\n" +
" 1832,\n" +
" 1880,\n" +
" 1928,\n" +
" 1976,\n" +
" 2024,\n" +
" 2072,\n" +
" 2120,\n" +
" 2168,\n" +
" 2216,\n" +
" 2264\n" +
"]\n" +
"# position: 776, header: 0\n" +
"--- !!data #binary\n" +
"00000300 80 7f 7f 7f ····\n" +
"00000310 7f 7f 7f 7f ···· \n" +
"# position: 788, header: 1\n" +
"--- !!data #binary\n" +
"00000310 81 80 80 80 80 80 80 80 ········\n" +
"# position: 800, header: 2\n" +
"--- !!data #binary\n" +
"00000320 82 81 81 81 81 81 81 81 ···· ···· \n" +
"# position: 812, header: 3\n" +
"--- !!data #binary\n" +
"00000330 83 82 82 82 82 82 82 82 ········ \n" +
"# position: 824, header: 4\n" +
"--- !!data #binary\n" +
"# # Unknown_0x84\n" +
"# # Unknown_0x83\n" +
"# # Unknown_0x83\n" +
"# # Unknown_0x83\n" +
"# # Unknown_0x83\n" +
"# # Unknown_0x83\n" +
"# # Unknown_0x83\n" +
"# # Unknown_0x83\n" +
"# position: 836, header: 5\n" +
"--- !!data #binary\n" +
"# # Unknown_0x85\n" +
"# # Unknown_0x84\n" +
"# # Unknown_0x84\n" +
"# # Unknown_0x84\n" +
"# # Unknown_0x84\n" +
"# # Unknown_0x84\n" +
"# # Unknown_0x84\n" +
"# # Unknown_0x84\n" +
"# position: 848, header: 6\n" +
"--- !!data #binary\n" +
"# # BYTES_MARSHALLABLE\n" +
"# # Unknown_0x85\n" +
"# # Unknown_0x85\n" +
"# # Unknown_0x85\n" +
"# # Unknown_0x85\n" +
"# # Unknown_0x85\n" +
"# # Unknown_0x85\n" +
"# # Unknown_0x85\n" +
"# position: 860, header: 7\n" +
"--- !!data #binary\n" +
"00000360 87 86 86 86 86 86 86 86 ········ \n" +
"# position: 872, header: 8\n" +
"--- !!data #binary\n" +
"00000360 88 87 87 87 ····\n" +
"00000370 87 87 87 87 ···· \n" +
"# position: 884, header: 9\n" +
"--- !!data #binary\n" +
"00000370 89 88 88 88 88 88 88 88 ········\n" +
"# position: 896, header: 10\n" +
"--- !!data #binary\n" +
"\"\\x89\\x89\\x89\\x89\\x89\\x89\\x89\"\n" +
"# position: 908, header: 11\n" +
"--- !!data #binary\n" +
"# # Unknown_0x8B\n" +
"\"\\x8A\\x8A\\x8A\\x8A\\x8A\\x8A\"\n" +
"# position: 920, header: 12\n" +
"--- !!data #binary\n" +
"# # Unknown_0x8C\n" +
"# # Unknown_0x8B\n" +
"# # Unknown_0x8B\n" +
"# # Unknown_0x8B\n" +
"# # Unknown_0x8B\n" +
"# # Unknown_0x8B\n" +
"# # Unknown_0x8B\n" +
"# # Unknown_0x8B\n" +
"# position: 932, header: 13\n" +
"--- !!data #binary\n" +
"000003a0 8d 8c 8c 8c 8c 8c 8c 8c ········\n" +
"# position: 944, header: 14\n" +
"--- !!data #binary\n" +
"000003b0 8e 8d 8d 8d 8d 8d 8d 8d ···· ···· \n" +
"# position: 956, header: 15\n" +
"--- !!data #binary\n" +
"000003c0 8f 8e 8e 8e 8e 8e 8e 8e ········ \n" +
"# position: 968, header: 16\n" +
"--- !!data #binary\n" +
"-1.4156185439721035E-29\n" +
"# position: 980, header: 17\n" +
"--- !!data #binary\n" +
"# # FLOAT32\n" +
"-5.702071897398123E-29\n" +
"# # EndOfFile\n" +
"# position: 992, header: 18\n" +
"--- !!data #binary\n" +
"-753555055760.82\n" +
"# position: 1004, header: 19\n" +
"--- !!data #binary\n" +
"# # FLOAT_STOP_2\n" +
"-48698841.79\n" +
"# position: 1016, header: 20\n" +
"--- !!data #binary\n" +
"-8422085917.3268\n" +
"# position: 1028, header: 21\n" +
"--- !!data #binary\n" +
"# # FLOAT_STOP_4\n" +
"-541098.2421\n" +
"# position: 1040, header: 22\n" +
"--- !!data #binary\n" +
"-93086212.770454\n" +
"# position: 1052, header: 23\n" +
"--- !!data #binary\n" +
"# # FLOAT_STOP_6\n" +
"-5952.080663\n" +
"# position: 1064, header: 24\n" +
"--- !!data #binary\n" +
"# # Unknown_0x97\n" +
"# # Unknown_0x97\n" +
"# # Unknown_0x97\n" +
"# # Unknown_0x97\n" +
"# position: 1076, header: 25\n" +
"--- !!data #binary\n" +
"# # Unknown_0x98\n" +
"# # Unknown_0x98\n" +
"# # Unknown_0x98\n" +
"# # Unknown_0x98\n" +
"# position: 1088, header: 26\n" +
"--- !!data #binary\n" +
"# # Unknown_0x99\n" +
"# # Unknown_0x99\n" +
"# # Unknown_0x99\n" +
"# # Unknown_0x99\n" +
"# position: 1100, header: 27\n" +
"--- !!data #binary\n" +
"# # FLOAT_SET_LOW_0\n" +
"# # FLOAT_SET_LOW_0\n" +
"# # FLOAT_SET_LOW_0\n" +
"# # FLOAT_SET_LOW_0\n" +
"# position: 1112, header: 28\n" +
"--- !!data #binary\n" +
"# # FLOAT_SET_LOW_2\n" +
"# # FLOAT_SET_LOW_2\n" +
"# # FLOAT_SET_LOW_2\n" +
"# # FLOAT_SET_LOW_2\n" +
"# position: 1124, header: 29\n" +
"--- !!data #binary\n" +
"# # FLOAT_SET_LOW_4\n" +
"# # FLOAT_SET_LOW_4\n" +
"# # FLOAT_SET_LOW_4\n" +
"# # FLOAT_SET_LOW_4\n" +
"# position: 1136, header: 30\n" +
"--- !!data #binary\n" +
"# # Unknown_0x9D\n" +
"# # Unknown_0x9D\n" +
"# # Unknown_0x9D\n" +
"# # Unknown_0x9D\n" +
"# position: 1148, header: 31\n" +
"--- !!data #binary\n" +
"# # Unknown_0x9E\n" +
"# # Unknown_0x9E\n" +
"# # Unknown_0x9E\n" +
"# # Unknown_0x9E\n" +
"# position: 1160, header: 32\n" +
"--- !!data #binary\n" +
"# # Unknown_0x9F\n" +
"# # Unknown_0x9F\n" +
"# # Unknown_0x9F\n" +
"# # Unknown_0x9F\n" +
"# position: 1172, header: 33\n" +
"--- !!data #binary\n" +
"!int 160\n" +
"# # UUID\n" +
"# # UUID\n" +
"# # UUID\n" +
"# position: 1184, header: 34\n" +
"--- !!data #binary\n" +
"!int 41377\n" +
"!int 161\n" +
"!int 161\n" +
"!int -1\n" +
"# position: 1196, header: 35\n" +
"--- !!data #binary\n" +
"2728567458\n" +
"!int 41634\n" +
"# position: 1208, header: 36\n" +
"--- !!data #binary\n" +
"!byte -93\n" +
"2745410467\n" +
"# # EndOfFile\n" +
"# position: 1220, header: 37\n" +
"--- !!data #binary\n" +
"!short -23388\n" +
"!byte -92\n" +
"!byte -92\n" +
"!byte 0\n" +
"# position: 1232, header: 38\n" +
"--- !!data #binary\n" +
"!int -1515870811\n" +
"!short -23131\n" +
"# position: 1244, header: 39\n" +
"--- !!data #binary\n" +
"# # INT32\n" +
"!int -1499027802\n" +
"# # EndOfFile\n" +
"# position: 1256, header: 40\n" +
"--- !!data #binary\n" +
"!int 167\n" +
"# # INT64\n" +
"# # INT64\n" +
"# # INT64\n" +
"# position: 1268, header: 41\n" +
"--- !!data #binary\n" +
"!int 43176\n" +
"!int 168\n" +
"!int 168\n" +
"!int -1\n" +
"# position: 1280, header: 42\n" +
"--- !!data #binary\n" +
"# # SET_LOW_INT16\n" +
"!int 43433\n" +
"!int 43433\n" +
"# position: 1292, header: 43\n" +
"--- !!data #binary\n" +
"# # Unknown_0xAA\n" +
"# # Unknown_0xAA\n" +
"# # Unknown_0xAA\n" +
"# # Unknown_0xAA\n" +
"# position: 1304, header: 44\n" +
"--- !!data #binary\n" +
"# # Unknown_0xAB\n" +
"# # Unknown_0xAB\n" +
"# # Unknown_0xAB\n" +
"# # Unknown_0xAB\n" +
"# position: 1316, header: 45\n" +
"--- !!data #binary\n" +
"# # Unknown_0xAC\n" +
"# # Unknown_0xAC\n" +
"# # Unknown_0xAC\n" +
"# # Unknown_0xAC\n" +
"# position: 1328, header: 46\n" +
"--- !!data #binary\n" +
"# # Unknown_0xAD\n" +
"# # Unknown_0xAD\n" +
"# # Unknown_0xAD\n" +
"# # Unknown_0xAD\n" +
"# position: 1340, header: 47\n" +
"--- !!data #binary\n" +
"# # Unknown_0xAE\n" +
"# # Unknown_0xAE\n" +
"# # Unknown_0xAE\n" +
"# # Unknown_0xAE\n" +
"# position: 1352, header: 48\n" +
"--- !!data #binary\n" +
"false\n" +
"# # INT64_0x\n" +
"# # INT64_0x\n" +
"# # INT64_0x\n" +
"# # EndOfFile\n" +
"# position: 1364, header: 49\n" +
"--- !!data #binary\n" +
"true\n" +
"false\n" +
"false\n" +
"false\n" +
"false\n" +
"false\n" +
"false\n" +
"false\n" +
"# position: 1376, header: 50\n" +
"--- !!data #binary\n" +
"00000560 b2 b1 b1 b1 b1 b1 b1 b1 ···· ···· \n" +
"# position: 1388, header: 51\n" +
"--- !!data #binary\n" +
"00000570 b3 b2 b2 b2 b2 b2 b2 b2 ········ \n" +
"# position: 1400, header: 52\n" +
"--- !!data #binary\n" +
"00000570 b4 b3 b3 b3 ····\n" +
"00000580 b3 b3 b3 b3 ···· \n" +
"# position: 1412, header: 53\n" +
"--- !!data #binary\n" +
"00000580 b5 b4 b4 b4 b4 b4 b4 b4 ········\n" +
"# position: 1424, header: 54\n" +
"--- !!data #binary\n" +
"00000590 b6 b5 b5 b5 b5 b5 b5 b5 ···· ···· \n" +
"# position: 1436, header: 55\n" +
"--- !!data #binary\n" +
"000005a0 b7 b6 b6 b6 b6 b6 b6 b6 ········ \n" +
"# position: 1448, header: 56\n" +
"--- !!data #binary\n" +
"000005a0 b8 b7 b7 b7 ····\n" +
"000005b0 b7 b7 b7 b7 ···· \n" +
"# position: 1460, header: 57\n" +
"--- !!data #binary\n" +
"000005b0 b9 b8 b8 b8 b8 b8 b8 b8 ········\n" +
"# position: 1472, header: 58\n" +
"--- !!data #binary\n" +
"\"-252662577519802\": \n" +
"# position: 1484, header: 59\n" +
"--- !!data #binary\n" +
"!!null \"\"\n" +
"\"-2008556674363\": \n" +
"# position: 1496, header: 60\n" +
"--- !!data #binary\n" +
"000005d0 bc bb bb bb ····\n" +
"000005e0 bb bb bb bb ···· \n" +
"# position: 1508, header: 61\n" +
"--- !!data #binary\n" +
"000005e0 bd bc bc bc bc bc bc bc ········\n" +
"# position: 1520, header: 62\n" +
"--- !!data #binary\n" +
"000005f0 be bd bd bd bd bd bd bd ···· ···· \n" +
"# position: 1532, header: 63\n" +
"--- !!data #binary\n" +
"00000600 bf be be be be be be be ········ \n" +
"# position: 1544, header: 64\n" +
"--- !!data #binary\n" +
"\"\": # # HINT\n" +
"# # HINT\n" +
"# # HINT\n" +
"# # HINT\n" +
"# # HINT\n" +
"# # HINT\n" +
"# # HINT\n" +
"# position: 1556, header: 65\n" +
"--- !!data #binary\n" +
"\"\\xC0\": \"\": \"\": \"\": \"\": \"\": \"\": \n" +
"# position: 1568, header: 66\n" +
"--- !!data #binary\n" +
"00000620 c2 c1 c1 c1 c1 c1 c1 c1 ···· ···· \n" +
"# position: 1580, header: 67\n" +
"--- !!data #binary\n" +
"00000630 c3 c2 c2 c2 c2 c2 c2 c2 ········ \n" +
"# position: 1592, header: 68\n" +
"--- !!data #binary\n" +
"00000630 c4 c3 c3 c3 ····\n" +
"00000640 c3 c3 c3 c3 ···· \n" +
"# position: 1604, header: 69\n" +
"--- !!data #binary\n" +
"00000640 c5 c4 c4 c4 c4 c4 c4 c4 ········\n" +
"# position: 1616, header: 70\n" +
"--- !!data #binary\n" +
"00000650 c6 c5 c5 c5 c5 c5 c5 c5 ···· ···· \n" +
"# position: 1628, header: 71\n" +
"--- !!data #binary\n" +
"\"\\xC6\\xC6\\xC6\\xC6\\xC6\\xC6\\xC6\": \n" +
"# position: 1640, header: 72\n" +
"--- !!data #binary\n" +
"00000660 c8 c7 c7 c7 ····\n" +
"00000670 c7 c7 c7 c7 ···· \n" +
"# position: 1652, header: 73\n" +
"--- !!data #binary\n" +
"00000670 c9 c8 c8 c8 c8 c8 c8 c8 ········\n" +
"# position: 1664, header: 74\n" +
"--- !!data #binary\n" +
"00000680 ca c9 c9 c9 c9 c9 c9 c9 ···· ···· \n" +
"# position: 1676, header: 75\n" +
"--- !!data #binary\n" +
"00000690 cb ca ca ca ca ca ca ca ········ \n" +
"# position: 1688, header: 76\n" +
"--- !!data #binary\n" +
"00000690 cc cb cb cb ····\n" +
"000006a0 cb cb cb cb ···· \n" +
"# position: 1700, header: 77\n" +
"--- !!data #binary\n" +
"000006a0 cd cc cc cc cc cc cc cc ········\n" +
"# position: 1712, header: 78\n" +
"--- !!data #binary\n" +
"000006b0 ce cd cd cd cd cd cd cd ···· ···· \n" +
"# position: 1724, header: 79\n" +
"--- !!data #binary\n" +
"000006c0 cf ce ce ce ce ce ce ce ········ \n" +
"# position: 1736, header: 80\n" +
"--- !!data #binary\n" +
"000006c0 d0 cf cf cf ····\n" +
"000006d0 cf cf cf cf ···· \n" +
"# position: 1748, header: 81\n" +
"--- !!data #binary\n" +
"000006d0 d1 d0 d0 d0 d0 d0 d0 d0 ········\n" +
"# position: 1760, header: 82\n" +
"--- !!data #binary\n" +
"000006e0 d2 d1 d1 d1 d1 d1 d1 d1 ···· ···· \n" +
"# position: 1772, header: 83\n" +
"--- !!data #binary\n" +
"000006f0 d3 d2 d2 d2 d2 d2 d2 d2 ········ \n" +
"# position: 1784, header: 84\n" +
"--- !!data #binary\n" +
"000006f0 d4 d3 d3 d3 ····\n" +
"00000700 d3 d3 d3 d3 ···· \n" +
"# position: 1796, header: 85\n" +
"--- !!data #binary\n" +
"00000700 d5 d4 d4 d4 d4 d4 d4 d4 ········\n" +
"# position: 1808, header: 86\n" +
"--- !!data #binary\n" +
"00000710 d6 d5 d5 d5 d5 d5 d5 d5 ···· ···· \n" +
"# position: 1820, header: 87\n" +
"--- !!data #binary\n" +
"00000720 d7 d6 d6 d6 d6 d6 d6 d6 ········ \n" +
"# position: 1832, header: 88\n" +
"--- !!data #binary\n" +
"00000720 d8 d7 d7 d7 ····\n" +
"00000730 d7 d7 d7 d7 ···· \n" +
"# position: 1844, header: 89\n" +
"--- !!data #binary\n" +
"00000730 d9 d8 d8 d8 d8 d8 d8 d8 ········\n" +
"# position: 1856, header: 90\n" +
"--- !!data #binary\n" +
"00000740 da d9 d9 d9 d9 d9 d9 d9 ···· ···· \n" +
"# position: 1868, header: 91\n" +
"--- !!data #binary\n" +
"00000750 db da da da da da da da ········ \n" +
"# position: 1880, header: 92\n" +
"--- !!data #binary\n" +
"00000750 dc db db db ····\n" +
"00000760 db db db db ···· \n" +
"# position: 1892, header: 93\n" +
"--- !!data #binary\n" +
"00000760 dd dc dc dc dc dc dc dc ········\n" +
"# position: 1904, header: 94\n" +
"--- !!data #binary\n" +
"00000770 de dd dd dd dd dd dd dd ···· ···· \n" +
"# position: 1916, header: 95\n" +
"--- !!data #binary\n" +
"00000780 df de de de de de de de ········ \n" +
"# position: 1928, header: 96\n" +
"--- !!data #binary\n" +
"00000780 e0 df df df ····\n" +
"00000790 df df df df ···· \n" +
"# position: 1940, header: 97\n" +
"--- !!data #binary\n" +
"00000790 e1 e0 e0 e0 e0 e0 e0 e0 ········\n" +
"# position: 1952, header: 98\n" +
"--- !!data #binary\n" +
"000007a0 e2 e1 e1 e1 e1 e1 e1 e1 ···· ···· \n" +
"# position: 1964, header: 99\n" +
"--- !!data #binary\n" +
"000007b0 e3 e2 e2 e2 e2 e2 e2 e2 ········ \n" +
"# position: 1976, header: 100\n" +
"--- !!data #binary\n" +
"000007b0 e4 e3 e3 e3 ····\n" +
"000007c0 e3 e3 e3 e3 ···· \n" +
"# position: 1988, header: 101\n" +
"--- !!data #binary\n" +
"000007c0 e5 e4 e4 e4 e4 e4 e4 e4 ········\n" +
"# position: 2000, header: 102\n" +
"--- !!data #binary\n" +
"000007d0 e6 e5 e5 e5 e5 e5 e5 e5 ···· ···· \n" +
"# position: 2012, header: 103\n" +
"--- !!data #binary\n" +
"000007e0 e7 e6 e6 e6 e6 e6 e6 e6 ········ \n" +
"# position: 2024, header: 104\n" +
"--- !!data #binary\n" +
"000007e0 e8 e7 e7 e7 ····\n" +
"000007f0 e7 e7 e7 e7 ···· \n" +
"# position: 2036, header: 105\n" +
"--- !!data #binary\n" +
"000007f0 e9 e8 e8 e8 e8 e8 e8 e8 ········\n" +
"# position: 2048, header: 106\n" +
"--- !!data #binary\n" +
"00000800 ea e9 e9 e9 e9 e9 e9 e9 ···· ···· \n" +
"# position: 2060, header: 107\n" +
"--- !!data #binary\n" +
"00000810 eb ea ea ea ea ea ea ea ········ \n" +
"# position: 2072, header: 108\n" +
"--- !!data #binary\n" +
"00000810 ec eb eb eb ····\n" +
"00000820 eb eb eb eb ···· \n" +
"# position: 2084, header: 109\n" +
"--- !!data #binary\n" +
"00000820 ed ec ec ec ec ec ec ec ········\n" +
"# position: 2096, header: 110\n" +
"--- !!data #binary\n" +
"00000830 ee ed ed ed ed ed ed ed ···· ···· \n" +
"# position: 2108, header: 111\n" +
"--- !!data #binary\n" +
"00000840 ef ee ee ee ee ee ee ee ········ \n" +
"# position: 2120, header: 112\n" +
"--- !!data #binary\n" +
"00000840 f0 ef ef ef ····\n" +
"00000850 ef ef ef ef ···· \n" +
"# position: 2132, header: 113\n" +
"--- !!data #binary\n" +
"00000850 f1 f0 f0 f0 f0 f0 f0 f0 ········\n" +
"# position: 2144, header: 114\n" +
"--- !!data #binary\n" +
"00000860 f2 f1 f1 f1 f1 f1 f1 f1 ···· ···· \n" +
"# position: 2156, header: 115\n" +
"--- !!data #binary\n" +
"00000870 f3 f2 f2 f2 f2 f2 f2 f2 ········ \n" +
"# position: 2168, header: 116\n" +
"--- !!data #binary\n" +
"00000870 f4 f3 f3 f3 ····\n" +
"00000880 f3 f3 f3 f3 ···· \n" +
"# position: 2180, header: 117\n" +
"--- !!data #binary\n" +
"00000880 f5 f4 f4 f4 f4 f4 f4 f4 ········\n" +
"# position: 2192, header: 118\n" +
"--- !!data #binary\n" +
"00000890 f6 f5 f5 f5 f5 f5 f5 f5 ···· ···· \n" +
"# position: 2204, header: 119\n" +
"--- !!data #binary\n" +
"000008a0 f7 f6 f6 f6 f6 f6 f6 f6 ········ \n" +
"# position: 2216, header: 120\n" +
"--- !!data #binary\n" +
"000008a0 f8 f7 f7 f7 ····\n" +
"000008b0 f7 f7 f7 f7 ···· \n" +
"# position: 2228, header: 121\n" +
"--- !!data #binary\n" +
"000008b0 f9 f8 f8 f8 f8 f8 f8 f8 ········\n" +
"# position: 2240, header: 122\n" +
"--- !!data #binary\n" +
"000008c0 fa f9 f9 f9 f9 f9 f9 f9 ···· ···· \n" +
"# position: 2252, header: 123\n" +
"--- !!data #binary\n" +
"000008d0 fb fa fa fa fa fa fa fa ········ \n" +
"# position: 2264, header: 124\n" +
"--- !!data #binary\n" +
"000008d0 fc fb fb fb ····\n" +
"000008e0 fb fb fb fb ···· \n" +
"# position: 2276, header: 125\n" +
"--- !!data #binary\n" +
"000008e0 fd fc fc fc fc fc fc fc ········\n" +
"# position: 2288, header: 126\n" +
"--- !!data #binary\n" +
"000008f0 fe fd fd fd fd fd fd fd ···· ···· \n" +
"# position: 2300, header: 127\n" +
"--- !!data #binary\n" +
"00000900 ff fe fe fe fe fe fe fe ········ \n" +
"# position: 2312, header: 128\n" +
"--- !!data #binary\n" +
"00000900 00 00 00 00 ····\n" +
"00000910 00 00 00 00 ···· \n" +
"# position: 2324, header: 128\n" +
"--- !!meta-data #binary\n" +
"index: [\n" +
" # length: 32, used: 32\n" +
" 2312,\n" +
" 2644,\n" +
" 2692,\n" +
" 2740,\n" +
" 2788,\n" +
" 2836,\n" +
" 2884,\n" +
" 2932,\n" +
" 2980,\n" +
" 3028,\n" +
" 3076,\n" +
" 3124,\n" +
" 3172,\n" +
" 3220,\n" +
" 3268,\n" +
" 3316,\n" +
" 3364,\n" +
" 3412,\n" +
" 3460,\n" +
" 3508,\n" +
" 3556,\n" +
" 3604,\n" +
" 3652,\n" +
" 3700,\n" +
" 3748,\n" +
" 3796,\n" +
" 3844,\n" +
" 3892,\n" +
" 3940,\n" +
" 3988,\n" +
" 4036,\n" +
" 4084\n" +
"]\n" +
"# position: 2608, header: 129\n" +
"--- !!data #binary\n" +
"00000a30 01 01 01 01 01 01 01 01 ···· ···· \n" +
"# position: 2620, header: 130\n" +
"--- !!data #binary\n" +
"00000a40 02 02 02 02 02 02 02 02 ········ \n" +
"# position: 2632, header: 131\n" +
"--- !!data #binary\n" +
"00000a40 03 03 03 03 ····\n" +
"00000a50 03 03 03 03 ···· \n" +
"# position: 2644, header: 132\n" +
"--- !!data #binary\n" +
"00000a50 04 04 04 04 04 04 04 04 ········\n" +
"# position: 2656, header: 133\n" +
"--- !!data #binary\n" +
"00000a60 05 05 05 05 05 05 05 05 ···· ···· \n" +
"# position: 2668, header: 134\n" +
"--- !!data #binary\n" +
"00000a70 06 06 06 06 06 06 06 06 ········ \n" +
"# position: 2680, header: 135\n" +
"--- !!data #binary\n" +
"00000a70 07 07 07 07 ····\n" +
"00000a80 07 07 07 07 ···· \n" +
"# position: 2692, header: 136\n" +
"--- !!data #binary\n" +
"00000a80 08 08 08 08 08 08 08 08 ········\n" +
"# position: 2704, header: 137\n" +
"--- !!data #binary\n" +
"00000a90 09 09 09 09 09 09 09 09 ···· ···· \n" +
"# position: 2716, header: 138\n" +
"--- !!data\n" +
"\n" +
"\n" +
"\n" +
"\n" +
"\n" +
"\n" +
"\n" +
"\n" +
"# position: 2728, header: 139\n" +
"--- !!data #binary\n" +
"00000aa0 0b 0b 0b 0b ····\n" +
"00000ab0 0b 0b 0b 0b ···· \n" +
"# position: 2740, header: 140\n" +
"--- !!data #binary\n" +
"00000ab0 0c 0c 0c 0c 0c 0c 0c 0c ········\n" +
"# position: 2752, header: 141\n" +
"--- !!data #binary\n" +
"00000ac0 0d 0d 0d 0d 0d 0d 0d 0d ···· ···· \n" +
"# position: 2764, header: 142\n" +
"--- !!data #binary\n" +
"00000ad0 0e 0e 0e 0e 0e 0e 0e 0e ········ \n" +
"# position: 2776, header: 143\n" +
"--- !!data #binary\n" +
"00000ad0 0f 0f 0f 0f ····\n" +
"00000ae0 0f 0f 0f 0f ···· \n" +
"# position: 2788, header: 144\n" +
"--- !!data #binary\n" +
"00000ae0 10 10 10 10 10 10 10 10 ········\n" +
"# position: 2800, header: 145\n" +
"--- !!data #binary\n" +
"00000af0 11 11 11 11 11 11 11 11 ···· ···· \n" +
"# position: 2812, header: 146\n" +
"--- !!data #binary\n" +
"00000b00 12 12 12 12 12 12 12 12 ········ \n" +
"# position: 2824, header: 147\n" +
"--- !!data #binary\n" +
"00000b00 13 13 13 13 ····\n" +
"00000b10 13 13 13 13 ···· \n" +
"# position: 2836, header: 148\n" +
"--- !!data #binary\n" +
"00000b10 14 14 14 14 14 14 14 14 ········\n" +
"# position: 2848, header: 149\n" +
"--- !!data #binary\n" +
"00000b20 15 15 15 15 15 15 15 15 ···· ···· \n" +
"# position: 2860, header: 150\n" +
"--- !!data #binary\n" +
"00000b30 16 16 16 16 16 16 16 16 ········ \n" +
"# position: 2872, header: 151\n" +
"--- !!data #binary\n" +
"00000b30 17 17 17 17 ····\n" +
"00000b40 17 17 17 17 ···· \n" +
"# position: 2884, header: 152\n" +
"--- !!data #binary\n" +
"00000b40 18 18 18 18 18 18 18 18 ········\n" +
"# position: 2896, header: 153\n" +
"--- !!data #binary\n" +
"00000b50 19 19 19 19 19 19 19 19 ···· ···· \n" +
"# position: 2908, header: 154\n" +
"--- !!data #binary\n" +
"00000b60 1a 1a 1a 1a 1a 1a 1a 1a ········ \n" +
"# position: 2920, header: 155\n" +
"--- !!data #binary\n" +
"00000b60 1b 1b 1b 1b ····\n" +
"00000b70 1b 1b 1b 1b ···· \n" +
"# position: 2932, header: 156\n" +
"--- !!data #binary\n" +
"00000b70 1c 1c 1c 1c 1c 1c 1c 1c ········\n" +
"# position: 2944, header: 157\n" +
"--- !!data #binary\n" +
"00000b80 1d 1d 1d 1d 1d 1d 1d 1d ···· ···· \n" +
"# position: 2956, header: 158\n" +
"--- !!data #binary\n" +
"00000b90 1e 1e 1e 1e 1e 1e 1e 1e ········ \n" +
"# position: 2968, header: 159\n" +
"--- !!data #binary\n" +
"00000b90 1f 1f 1f 1f ····\n" +
"00000ba0 1f 1f 1f 1f ···· \n" +
"# position: 2980, header: 160\n" +
"--- !!data\n" +
" \n" +
"# position: 2992, header: 161\n" +
"--- !!data\n" +
"!!!!!!!!\n" +
"# position: 3004, header: 162\n" +
"--- !!data\n" +
"\"\"\"\"\"\"\"\"\n" +
"# position: 3016, header: 163\n" +
"--- !!data\n" +
"########\n" +
"# position: 3028, header: 164\n" +
"--- !!data\n" +
"$$$$$$$$\n" +
"# position: 3040, header: 165\n" +
"--- !!data\n" +
"%%%%%%%%\n" +
"# position: 3052, header: 166\n" +
"--- !!data\n" +
"&&&&&&&&\n" +
"# position: 3064, header: 167\n" +
"--- !!data\n" +
"''''''''\n" +
"# position: 3076, header: 168\n" +
"--- !!data\n" +
"((((((((\n" +
"# position: 3088, header: 169\n" +
"--- !!data\n" +
"))))))))\n" +
"# position: 3100, header: 170\n" +
"--- !!data\n" +
"********\n" +
"# position: 3112, header: 171\n" +
"--- !!data\n" +
"++++++++\n" +
"# position: 3124, header: 172\n" +
"--- !!data\n" +
",,,,,,,,\n" +
"# position: 3136, header: 173\n" +
"--- !!data\n" +
"--------\n" +
"# position: 3148, header: 174\n" +
"--- !!data\n" +
"........\n" +
"# position: 3160, header: 175\n" +
"--- !!data\n" +
"////////\n" +
"# position: 3172, header: 176\n" +
"--- !!data\n" +
"00000000\n" +
"# position: 3184, header: 177\n" +
"--- !!data\n" +
"11111111\n" +
"# position: 3196, header: 178\n" +
"--- !!data\n" +
"22222222\n" +
"# position: 3208, header: 179\n" +
"--- !!data\n" +
"33333333\n" +
"# position: 3220, header: 180\n" +
"--- !!data\n" +
"44444444\n" +
"# position: 3232, header: 181\n" +
"--- !!data\n" +
"55555555\n" +
"# position: 3244, header: 182\n" +
"--- !!data\n" +
"66666666\n" +
"# position: 3256, header: 183\n" +
"--- !!data\n" +
"77777777\n" +
"# position: 3268, header: 184\n" +
"--- !!data\n" +
"88888888\n" +
"# position: 3280, header: 185\n" +
"--- !!data\n" +
"99999999\n" +
"# position: 3292, header: 186\n" +
"--- !!data\n" +
"::::::::\n" +
"# position: 3304, header: 187\n" +
"--- !!data\n" +
";;;;;;;;\n" +
"# position: 3316, header: 188\n" +
"--- !!data\n" +
"<<<<<<<<\n" +
"# position: 3328, header: 189\n" +
"--- !!data\n" +
"========\n" +
"# position: 3340, header: 190\n" +
"--- !!data\n" +
">>>>>>>>\n" +
"# position: 3352, header: 191\n" +
"--- !!data\n" +
"????????\n" +
"# position: 3364, header: 192\n" +
"--- !!data\n" +
"@@@@@@@@\n" +
"# position: 3376, header: 193\n" +
"--- !!data\n" +
"AAAAAAAA\n" +
"# position: 3388, header: 194\n" +
"--- !!data\n" +
"BBBBBBBB\n" +
"# position: 3400, header: 195\n" +
"--- !!data\n" +
"CCCCCCCC\n" +
"# position: 3412, header: 196\n" +
"--- !!data\n" +
"DDDDDDDD\n" +
"# position: 3424, header: 197\n" +
"--- !!data\n" +
"EEEEEEEE\n" +
"# position: 3436, header: 198\n" +
"--- !!data\n" +
"FFFFFFFF\n" +
"# position: 3448, header: 199\n" +
"--- !!data\n" +
"GGGGGGGG\n" +
"# position: 3460, header: 200\n" +
"--- !!data\n" +
"HHHHHHHH\n" +
"# position: 3472, header: 201\n" +
"--- !!data\n" +
"IIIIIIII\n" +
"# position: 3484, header: 202\n" +
"--- !!data\n" +
"JJJJJJJJ\n" +
"# position: 3496, header: 203\n" +
"--- !!data\n" +
"KKKKKKKK\n" +
"# position: 3508, header: 204\n" +
"--- !!data\n" +
"LLLLLLLL\n" +
"# position: 3520, header: 205\n" +
"--- !!data\n" +
"MMMMMMMM\n" +
"# position: 3532, header: 206\n" +
"--- !!data\n" +
"NNNNNNNN\n" +
"# position: 3544, header: 207\n" +
"--- !!data\n" +
"OOOOOOOO\n" +
"# position: 3556, header: 208\n" +
"--- !!data\n" +
"PPPPPPPP\n" +
"# position: 3568, header: 209\n" +
"--- !!data\n" +
"QQQQQQQQ\n" +
"# position: 3580, header: 210\n" +
"--- !!data\n" +
"RRRRRRRR\n" +
"# position: 3592, header: 211\n" +
"--- !!data\n" +
"SSSSSSSS\n" +
"# position: 3604, header: 212\n" +
"--- !!data\n" +
"TTTTTTTT\n" +
"# position: 3616, header: 213\n" +
"--- !!data\n" +
"UUUUUUUU\n" +
"# position: 3628, header: 214\n" +
"--- !!data\n" +
"VVVVVVVV\n" +
"# position: 3640, header: 215\n" +
"--- !!data\n" +
"WWWWWWWW\n" +
"# position: 3652, header: 216\n" +
"--- !!data\n" +
"XXXXXXXX\n" +
"# position: 3664, header: 217\n" +
"--- !!data\n" +
"YYYYYYYY\n" +
"# position: 3676, header: 218\n" +
"--- !!data\n" +
"ZZZZZZZZ\n" +
"# position: 3688, header: 219\n" +
"--- !!data\n" +
"[[[[[[[[\n" +
"# position: 3700, header: 220\n" +
"--- !!data\n" +
"\\\\\\\\\\\\\\\\\n" +
"# position: 3712, header: 221\n" +
"--- !!data\n" +
"]]]]]]]]\n" +
"# position: 3724, header: 222\n" +
"--- !!data\n" +
"^^^^^^^^\n" +
"# position: 3736, header: 223\n" +
"--- !!data\n" +
"________\n" +
"# position: 3748, header: 224\n" +
"--- !!data\n" +
"````````\n" +
"# position: 3760, header: 225\n" +
"--- !!data\n" +
"aaaaaaaa\n" +
"# position: 3772, header: 226\n" +
"--- !!data\n" +
"bbbbbbbb\n" +
"# position: 3784, header: 227\n" +
"--- !!data\n" +
"cccccccc\n" +
"# position: 3796, header: 228\n" +
"--- !!data\n" +
"dddddddd\n" +
"# position: 3808, header: 229\n" +
"--- !!data\n" +
"eeeeeeee\n" +
"# position: 3820, header: 230\n" +
"--- !!data\n" +
"ffffffff\n" +
"# position: 3832, header: 231\n" +
"--- !!data\n" +
"gggggggg\n" +
"# position: 3844, header: 232\n" +
"--- !!data\n" +
"hhhhhhhh\n" +
"# position: 3856, header: 233\n" +
"--- !!data\n" +
"iiiiiiii\n" +
"# position: 3868, header: 234\n" +
"--- !!data\n" +
"jjjjjjjj\n" +
"# position: 3880, header: 235\n" +
"--- !!data\n" +
"kkkkkkkk\n" +
"# position: 3892, header: 236\n" +
"--- !!data\n" +
"llllllll\n" +
"# position: 3904, header: 237\n" +
"--- !!data\n" +
"mmmmmmmm\n" +
"# position: 3916, header: 238\n" +
"--- !!data\n" +
"nnnnnnnn\n" +
"# position: 3928, header: 239\n" +
"--- !!data\n" +
"oooooooo\n" +
"# position: 3940, header: 240\n" +
"--- !!data\n" +
"pppppppp\n" +
"# position: 3952, header: 241\n" +
"--- !!data\n" +
"qqqqqqqq\n" +
"# position: 3964, header: 242\n" +
"--- !!data\n" +
"rrrrrrrr\n" +
"# position: 3976, header: 243\n" +
"--- !!data\n" +
"ssssssss\n" +
"# position: 3988, header: 244\n" +
"--- !!data\n" +
"tttttttt\n" +
"# position: 4000, header: 245\n" +
"--- !!data\n" +
"uuuuuuuu\n" +
"# position: 4012, header: 246\n" +
"--- !!data\n" +
"vvvvvvvv\n" +
"# position: 4024, header: 247\n" +
"--- !!data\n" +
"wwwwwwww\n" +
"# position: 4036, header: 248\n" +
"--- !!data\n" +
"xxxxxxxx\n" +
"# position: 4048, header: 249\n" +
"--- !!data\n" +
"yyyyyyyy\n" +
"# position: 4060, header: 250\n" +
"--- !!data\n" +
"zzzzzzzz\n" +
"# position: 4072, header: 251\n" +
"--- !!data\n" +
"{{{{{{{{\n" +
"# position: 4084, header: 252\n" +
"--- !!data\n" +
"||||||||\n" +
"# position: 4096, header: 253\n" +
"--- !!data\n" +
"}}}}}}}}\n" +
"# position: 4108, header: 254\n" +
"--- !!data\n" +
"~~~~~~~~\n" +
"# position: 4120, header: 255\n" +
"--- !!data\n" +
"\u007F\u007F\u007F\u007F\u007F\u007F\u007F\u007F\n" +
"...\n" +
(useSparseFiles
? "# 4294963160 bytes remaining\n"
: "# 126936 bytes remaining\n"), queue.dump());
} finally {
try {
IOTools.deleteDirWithFiles(dir, 2);
} catch (IORuntimeException e) {
// ignored
}
}
}
public boolean postOneMessage(@NotNull ExcerptAppender appender) {
appender.writeBytes(outgoingBytes);
return true;
}
public int fetchOneMessage(@NotNull ExcerptTailer tailer, @NotNull byte[] using) {
try (DocumentContext dc = tailer.readingDocument()) {
return !dc.isPresent() ? -1 : dc.wire().bytes().read(using);
}
}
@Override
public void assertReferencesReleased(){
outgoingBytes.releaseLast();
super.assertReferencesReleased();
}
}
| apache-2.0 |
kfmaster/cicdlab | modules/docker-roundcubemail/program/js/app.js | 258944 | /**
* Roundcube Webmail Client Script
*
* This file is part of the Roundcube Webmail client
*
* @licstart The following is the entire license notice for the
* JavaScript code in this file.
*
* Copyright (C) 2005-2014, The Roundcube Dev Team
* Copyright (C) 2011-2014, Kolab Systems AG
*
* The JavaScript code in this page is free software: you can
* redistribute it and/or modify it under the terms of the GNU
* General Public License (GNU GPL) as published by the Free Software
* Foundation, either version 3 of the License, or (at your option)
* any later version. The code is distributed WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU GPL for more details.
*
* As additional permission under GNU GPL version 3 section 7, you
* may distribute non-source (e.g., minimized or compacted) forms of
* that code without the copy of the GNU GPL normally required by
* section 4, provided you include this license notice and a URL
* through which recipients can access the Corresponding Source.
*
* @licend The above is the entire license notice
* for the JavaScript code in this file.
*
* @author Thomas Bruederli <[email protected]>
* @author Aleksander 'A.L.E.C' Machniak <[email protected]>
* @author Charles McNulty <[email protected]>
*
* @requires jquery.js, common.js, list.js
*/
function rcube_webmail()
{
this.labels = {};
this.buttons = {};
this.buttons_sel = {};
this.gui_objects = {};
this.gui_containers = {};
this.commands = {};
this.command_handlers = {};
this.onloads = [];
this.messages = {};
this.group2expand = {};
this.http_request_jobs = {};
this.menu_stack = [];
// webmail client settings
this.dblclick_time = 500;
this.message_time = 5000;
this.identifier_expr = /[^0-9a-z_-]/gi;
// environment defaults
this.env = {
request_timeout: 180, // seconds
draft_autosave: 0, // seconds
comm_path: './',
recipients_separator: ',',
recipients_delimiter: ', ',
popup_width: 1150,
popup_width_small: 900
};
// create protected reference to myself
this.ref = 'rcmail';
var ref = this;
// set jQuery ajax options
$.ajaxSetup({
cache: false,
timeout: this.env.request_timeout * 1000,
error: function(request, status, err){ ref.http_error(request, status, err); },
beforeSend: function(xmlhttp){ xmlhttp.setRequestHeader('X-Roundcube-Request', ref.env.request_token); }
});
// unload fix
$(window).bind('beforeunload', function() { ref.unload = true; });
// set environment variable(s)
this.set_env = function(p, value)
{
if (p != null && typeof p === 'object' && !value)
for (var n in p)
this.env[n] = p[n];
else
this.env[p] = value;
};
// add a localized label to the client environment
this.add_label = function(p, value)
{
if (typeof p == 'string')
this.labels[p] = value;
else if (typeof p == 'object')
$.extend(this.labels, p);
};
// add a button to the button list
this.register_button = function(command, id, type, act, sel, over)
{
var button_prop = {id:id, type:type};
if (act) button_prop.act = act;
if (sel) button_prop.sel = sel;
if (over) button_prop.over = over;
if (!this.buttons[command])
this.buttons[command] = [];
this.buttons[command].push(button_prop);
if (this.loaded)
init_button(command, button_prop);
};
// register a specific gui object
this.gui_object = function(name, id)
{
this.gui_objects[name] = this.loaded ? rcube_find_object(id) : id;
};
// register a container object
this.gui_container = function(name, id)
{
this.gui_containers[name] = id;
};
// add a GUI element (html node) to a specified container
this.add_element = function(elm, container)
{
if (this.gui_containers[container] && this.gui_containers[container].jquery)
this.gui_containers[container].append(elm);
};
// register an external handler for a certain command
this.register_command = function(command, callback, enable)
{
this.command_handlers[command] = callback;
if (enable)
this.enable_command(command, true);
};
// execute the given script on load
this.add_onload = function(f)
{
this.onloads.push(f);
};
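// Usage sketch (illustrative only): Roundcube plugins typically drive the
// registration helpers above from their own scripts, roughly as below. The
// command name 'myplugin.hello' and its label are hypothetical; register_command
// and add_onload are defined above, display_message/get_label appear later in
// this file, and addEventListener comes from the shared event engine this file
// requires (common.js).
//
//   if (window.rcmail) {
//     rcmail.addEventListener('init', function() {
//       rcmail.register_command('myplugin.hello', function() {
//         rcmail.display_message(rcmail.get_label('hello'), 'confirmation');
//       }, true);
//     });
//   }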
// initialize webmail client
this.init = function()
{
var n;
this.task = this.env.task;
// check browser
if (this.env.server_error != 409 && (!bw.dom || !bw.xmlhttp_test() || (bw.mz && bw.vendver < 1.9) || (bw.ie && bw.vendver < 7))) {
this.goto_url('error', '_code=0x199');
return;
}
if (!this.env.blankpage)
this.env.blankpage = this.assets_path('program/resources/blank.gif');
// find all registered gui containers
for (n in this.gui_containers)
this.gui_containers[n] = $('#'+this.gui_containers[n]);
// find all registered gui objects
for (n in this.gui_objects)
this.gui_objects[n] = rcube_find_object(this.gui_objects[n]);
// clickjacking protection
if (this.env.x_frame_options) {
try {
// bust frame if not allowed
if (this.env.x_frame_options == 'deny' && top.location.href != self.location.href)
top.location.href = self.location.href;
else if (top.location.hostname != self.location.hostname)
throw 1;
} catch (e) {
// possible clickjacking attack: disable all form elements
$('form').each(function(){ ref.lock_form(this, true); });
this.display_message("Blocked: possible clickjacking attack!", 'error');
return;
}
}
// init registered buttons
this.init_buttons();
// tell parent window that this frame is loaded
if (this.is_framed()) {
parent.rcmail.set_busy(false, null, parent.rcmail.env.frame_lock);
parent.rcmail.env.frame_lock = null;
}
// enable general commands
this.enable_command('close', 'logout', 'mail', 'addressbook', 'settings', 'save-pref',
'compose', 'undo', 'about', 'switch-task', 'menu-open', 'menu-close', 'menu-save', true);
// set active task button
this.set_button(this.task, 'sel');
if (this.env.permaurl)
this.enable_command('permaurl', 'extwin', true);
switch (this.task) {
case 'mail':
// enable mail commands
this.enable_command('list', 'checkmail', 'add-contact', 'search', 'reset-search', 'collapse-folder', 'import-messages', true);
if (this.gui_objects.messagelist) {
this.message_list = new rcube_list_widget(this.gui_objects.messagelist, {
multiselect:true, multiexpand:true, draggable:true, keyboard:true,
column_movable:this.env.col_movable, dblclick_time:this.dblclick_time
});
this.message_list
.addEventListener('initrow', function(o) { ref.init_message_row(o); })
.addEventListener('dblclick', function(o) { ref.msglist_dbl_click(o); })
.addEventListener('click', function(o) { ref.msglist_click(o); })
.addEventListener('keypress', function(o) { ref.msglist_keypress(o); })
.addEventListener('select', function(o) { ref.msglist_select(o); })
.addEventListener('dragstart', function(o) { ref.drag_start(o); })
.addEventListener('dragmove', function(e) { ref.drag_move(e); })
.addEventListener('dragend', function(e) { ref.drag_end(e); })
.addEventListener('expandcollapse', function(o) { ref.msglist_expand(o); })
.addEventListener('column_replace', function(o) { ref.msglist_set_coltypes(o); })
.addEventListener('listupdate', function(o) { ref.triggerEvent('listupdate', o); })
.init();
// TODO: this should go into the list-widget code
$(this.message_list.thead).on('click', 'a.sortcol', function(e){
return ref.command('sort', $(this).attr('rel'), this);
});
this.enable_command('toggle_status', 'toggle_flag', 'sort', true);
this.enable_command('set-listmode', this.env.threads && !this.is_multifolder_listing());
// load messages
this.command('list');
$(this.gui_objects.qsearchbox).val(this.env.search_text).focusin(function() { ref.message_list.blur(); });
}
this.set_button_titles();
this.env.message_commands = ['show', 'reply', 'reply-all', 'reply-list',
'move', 'copy', 'delete', 'open', 'mark', 'edit', 'viewsource',
'print', 'load-attachment', 'download-attachment', 'show-headers', 'hide-headers', 'download',
'forward', 'forward-inline', 'forward-attachment', 'change-format'];
if (this.env.action == 'show' || this.env.action == 'preview') {
this.enable_command(this.env.message_commands, this.env.uid);
this.enable_command('reply-list', this.env.list_post);
if (this.env.action == 'show') {
this.http_request('pagenav', {_uid: this.env.uid, _mbox: this.env.mailbox, _search: this.env.search_request},
this.display_message('', 'loading'));
}
if (this.env.blockedobjects) {
if (this.gui_objects.remoteobjectsmsg)
this.gui_objects.remoteobjectsmsg.style.display = 'block';
this.enable_command('load-images', 'always-load', true);
}
// make preview/message frame visible
if (this.env.action == 'preview' && this.is_framed()) {
this.enable_command('compose', 'add-contact', false);
parent.rcmail.show_contentframe(true);
}
}
else if (this.env.action == 'compose') {
this.env.address_group_stack = [];
this.env.compose_commands = ['send-attachment', 'remove-attachment', 'send', 'cancel',
'toggle-editor', 'list-adresses', 'pushgroup', 'search', 'reset-search', 'extwin',
'insert-response', 'save-response', 'menu-open', 'menu-close'];
if (this.env.drafts_mailbox)
this.env.compose_commands.push('savedraft')
this.enable_command(this.env.compose_commands, 'identities', 'responses', true);
// add more commands (not enabled)
$.merge(this.env.compose_commands, ['add-recipient', 'firstpage', 'previouspage', 'nextpage', 'lastpage']);
if (window.googie) {
this.env.editor_config.spellchecker = googie;
this.env.editor_config.spellcheck_observer = function(s) { ref.spellcheck_state(); };
this.env.compose_commands.push('spellcheck')
this.enable_command('spellcheck', true);
}
// initialize HTML editor
this.editor_init(this.env.editor_config, this.env.composebody);
// init canned response functions
if (this.gui_objects.responseslist) {
$('a.insertresponse', this.gui_objects.responseslist)
.attr('unselectable', 'on')
.mousedown(function(e){ return rcube_event.cancel(e); })
.bind('mouseup keypress', function(e){
if (e.type == 'mouseup' || rcube_event.get_keycode(e) == 13) {
ref.command('insert-response', $(this).attr('rel'));
$(document.body).trigger('mouseup'); // hides the menu
return rcube_event.cancel(e);
}
});
// avoid the textarea losing focus when hitting the save-response button/link
$.each(this.buttons['save-response'] || [], function (i, v) {
$('#' + v.id).mousedown(function(e){ return rcube_event.cancel(e); })
});
}
// init message compose form
this.init_messageform();
}
else if (this.env.action == 'get')
this.enable_command('download', 'print', true);
// show printing dialog
else if (this.env.action == 'print' && this.env.uid) {
this.print_dialog();
}
// get unread count for each mailbox
if (this.gui_objects.mailboxlist) {
this.env.unread_counts = {};
this.gui_objects.folderlist = this.gui_objects.mailboxlist;
this.http_request('getunread');
}
// init address book widget
if (this.gui_objects.contactslist) {
this.contact_list = new rcube_list_widget(this.gui_objects.contactslist,
{ multiselect:true, draggable:false, keyboard:true });
this.contact_list
.addEventListener('initrow', function(o) { ref.triggerEvent('insertrow', { cid:o.uid, row:o }); })
.addEventListener('select', function(o) { ref.compose_recipient_select(o); })
.addEventListener('dblclick', function(o) { ref.compose_add_recipient(); })
.addEventListener('keypress', function(o) {
if (o.key_pressed == o.ENTER_KEY) {
if (!ref.compose_add_recipient()) {
// execute link action on <enter> if not a recipient entry
if (o.last_selected && String(o.last_selected).charAt(0) == 'G') {
$(o.rows[o.last_selected].obj).find('a').first().click();
}
}
}
})
.init();
// remember last focused address field
$('#_to,#_cc,#_bcc').focus(function() { ref.env.focused_field = this; });
}
if (this.gui_objects.addressbookslist) {
this.gui_objects.folderlist = this.gui_objects.addressbookslist;
this.enable_command('list-adresses', true);
}
// ask user to send MDN
if (this.env.mdn_request && this.env.uid) {
var postact = 'sendmdn',
postdata = {_uid: this.env.uid, _mbox: this.env.mailbox};
if (!confirm(this.get_label('mdnrequest'))) {
postdata._flag = 'mdnsent';
postact = 'mark';
}
this.http_post(postact, postdata);
}
// detect browser capabilities
if (!this.is_framed() && !this.env.extwin)
this.browser_capabilities_check();
break;
case 'addressbook':
this.env.address_group_stack = [];
if (this.gui_objects.folderlist)
this.env.contactfolders = $.extend($.extend({}, this.env.address_sources), this.env.contactgroups);
this.enable_command('add', 'import', this.env.writable_source);
this.enable_command('list', 'listgroup', 'pushgroup', 'popgroup', 'listsearch', 'search', 'reset-search', 'advanced-search', true);
if (this.gui_objects.contactslist) {
this.contact_list = new rcube_list_widget(this.gui_objects.contactslist,
{multiselect:true, draggable:this.gui_objects.folderlist?true:false, keyboard:true});
this.contact_list
.addEventListener('initrow', function(o) { ref.triggerEvent('insertrow', { cid:o.uid, row:o }); })
.addEventListener('keypress', function(o) { ref.contactlist_keypress(o); })
.addEventListener('select', function(o) { ref.contactlist_select(o); })
.addEventListener('dragstart', function(o) { ref.drag_start(o); })
.addEventListener('dragmove', function(e) { ref.drag_move(e); })
.addEventListener('dragend', function(e) { ref.drag_end(e); })
.init();
$(this.gui_objects.qsearchbox).focusin(function() { ref.contact_list.blur(); });
this.update_group_commands();
this.command('list');
}
if (this.gui_objects.savedsearchlist) {
this.savedsearchlist = new rcube_treelist_widget(this.gui_objects.savedsearchlist, {
id_prefix: 'rcmli',
id_encode: this.html_identifier_encode,
id_decode: this.html_identifier_decode
});
this.savedsearchlist.addEventListener('select', function(node) {
ref.triggerEvent('selectfolder', { folder:node.id, prefix:'rcmli' }); });
}
this.set_page_buttons();
if (this.env.cid) {
this.enable_command('show', 'edit', true);
// register handlers for group assignment via checkboxes
if (this.gui_objects.editform) {
$('input.groupmember').change(function() {
ref.group_member_change(this.checked ? 'add' : 'del', ref.env.cid, ref.env.source, this.value);
});
}
}
if (this.gui_objects.editform) {
this.enable_command('save', true);
if (this.env.action == 'add' || this.env.action == 'edit' || this.env.action == 'search')
this.init_contact_form();
}
else if (this.env.action == 'print') {
this.print_dialog();
}
break;
case 'settings':
this.enable_command('preferences', 'identities', 'responses', 'save', 'folders', true);
if (this.env.action == 'identities') {
this.enable_command('add', this.env.identities_level < 2);
}
else if (this.env.action == 'edit-identity' || this.env.action == 'add-identity') {
this.enable_command('save', 'edit', 'toggle-editor', true);
this.enable_command('delete', this.env.identities_level < 2);
// initialize HTML editor
this.editor_init(this.env.editor_config, 'rcmfd_signature');
}
else if (this.env.action == 'folders') {
this.enable_command('subscribe', 'unsubscribe', 'create-folder', 'rename-folder', true);
}
else if (this.env.action == 'edit-folder' && this.gui_objects.editform) {
this.enable_command('save', 'folder-size', true);
parent.rcmail.env.exists = this.env.messagecount;
parent.rcmail.enable_command('purge', this.env.messagecount);
}
else if (this.env.action == 'responses') {
this.enable_command('add', true);
}
if (this.gui_objects.identitieslist) {
this.identity_list = new rcube_list_widget(this.gui_objects.identitieslist,
{multiselect:false, draggable:false, keyboard:true});
this.identity_list
.addEventListener('select', function(o) { ref.identity_select(o); })
.addEventListener('keypress', function(o) {
if (o.key_pressed == o.ENTER_KEY) {
ref.identity_select(o);
}
})
.init()
.focus();
}
else if (this.gui_objects.sectionslist) {
this.sections_list = new rcube_list_widget(this.gui_objects.sectionslist, {multiselect:false, draggable:false, keyboard:true});
this.sections_list
.addEventListener('select', function(o) { ref.section_select(o); })
.addEventListener('keypress', function(o) { if (o.key_pressed == o.ENTER_KEY) ref.section_select(o); })
.init()
.focus();
}
else if (this.gui_objects.subscriptionlist) {
this.init_subscription_list();
}
else if (this.gui_objects.responseslist) {
this.responses_list = new rcube_list_widget(this.gui_objects.responseslist, {multiselect:false, draggable:false, keyboard:true});
this.responses_list
.addEventListener('select', function(list) {
var win, id = list.get_single_selection();
ref.enable_command('delete', !!id && $.inArray(id, ref.env.readonly_responses) < 0);
if (id && (win = ref.get_frame_window(ref.env.contentframe))) {
ref.set_busy(true);
ref.location_href({ _action:'edit-response', _key:id, _framed:1 }, win);
}
})
.init()
.focus();
}
break;
case 'login':
var input_user = $('#rcmloginuser');
input_user.bind('keyup', function(e){ return ref.login_user_keyup(e); });
if (input_user.val() == '')
input_user.focus();
else
$('#rcmloginpwd').focus();
// detect client timezone
if (window.jstz) {
var timezone = jstz.determine();
if (timezone.name())
$('#rcmlogintz').val(timezone.name());
}
else {
$('#rcmlogintz').val(new Date().getStdTimezoneOffset() / -60);
}
// display 'loading' message on form submit, lock submit button
$('form').submit(function () {
$('input[type=submit]', this).prop('disabled', true);
ref.clear_messages();
ref.display_message('', 'loading');
});
this.enable_command('login', true);
break;
}
// select first input field in an edit form
if (this.gui_objects.editform)
$("input,select,textarea", this.gui_objects.editform)
.not(':hidden').not(':disabled').first().select().focus();
// unset contentframe variable if the preview frame element is not visible (e.g. preview pane disabled)
if (this.env.contentframe && !$('#' + this.env.contentframe).is(':visible'))
this.env.contentframe = null;
// prevent from form submit with Enter key in file input fields
if (bw.ie)
$('input[type=file]').keydown(function(e) { if (e.keyCode == '13') e.preventDefault(); });
// flag object as complete
this.loaded = true;
this.env.lastrefresh = new Date();
// show message
if (this.pending_message)
this.display_message.apply(this, this.pending_message);
// init treelist widget
if (this.gui_objects.folderlist && window.rcube_treelist_widget) {
this.treelist = new rcube_treelist_widget(this.gui_objects.folderlist, {
selectable: true,
id_prefix: 'rcmli',
parent_focus: true,
id_encode: this.html_identifier_encode,
id_decode: this.html_identifier_decode,
check_droptarget: function(node) { return !node.virtual && ref.check_droptarget(node.id) }
});
this.treelist
.addEventListener('collapse', function(node) { ref.folder_collapsed(node) })
.addEventListener('expand', function(node) { ref.folder_collapsed(node) })
.addEventListener('beforeselect', function(node) { return !ref.busy; })
.addEventListener('select', function(node) { ref.triggerEvent('selectfolder', { folder:node.id, prefix:'rcmli' }) });
}
// activate html5 file drop feature (if browser supports it and if configured)
if (this.gui_objects.filedrop && this.env.filedrop && ((window.XMLHttpRequest && XMLHttpRequest.prototype && XMLHttpRequest.prototype.sendAsBinary) || window.FormData)) {
$(document.body).bind('dragover dragleave drop', function(e){ return ref.document_drag_hover(e, e.type == 'dragover'); });
$(this.gui_objects.filedrop).addClass('droptarget')
.bind('dragover dragleave', function(e){ return ref.file_drag_hover(e, e.type == 'dragover'); })
.get(0).addEventListener('drop', function(e){ return ref.file_dropped(e); }, false);
}
// catch document (and iframe) mouse clicks
var body_mouseup = function(e){ return ref.doc_mouse_up(e); };
$(document.body)
.bind('mouseup', body_mouseup)
.bind('keydown', function(e){ return ref.doc_keypress(e); });
$('iframe').load(function(e) {
try { $(this.contentDocument || this.contentWindow).on('mouseup', body_mouseup); }
catch (e) {/* catch possible "Permission denied" error in IE */ }
})
.contents().on('mouseup', body_mouseup);
// trigger init event hook
this.triggerEvent('init', { task:this.task, action:this.env.action });
// execute all foreign onload scripts
// @deprecated
for (n in this.onloads) {
if (typeof this.onloads[n] === 'string')
eval(this.onloads[n]);
else if (typeof this.onloads[n] === 'function')
this.onloads[n]();
}
// start keep-alive and refresh intervals
this.start_refresh();
this.start_keepalive();
};
this.log = function(msg)
{
if (window.console && console.log)
console.log(msg);
};
/*********************************************************/
/********* client command interface *********/
/*********************************************************/
// execute a specific command on the web client
this.command = function(command, props, obj, event)
{
var ret, uid, cid, url, flag, aborted = false;
if (obj && obj.blur && !(event && rcube_event.is_keyboard(event)))
obj.blur();
// do nothing if interface is locked by another command
// with exception for searching reset and menu
if (this.busy && !(command == 'reset-search' && this.last_command == 'search') && !command.match(/^menu-/))
return false;
// let the browser handle this click (shift/ctrl usually opens the link in a new window/tab)
if ((obj && obj.href && String(obj.href).indexOf('#') < 0) && rcube_event.get_modifier(event)) {
return true;
}
// command not supported or allowed
if (!this.commands[command]) {
// pass command to parent window
if (this.is_framed())
parent.rcmail.command(command, props);
return false;
}
// check input before leaving compose step
if (this.task == 'mail' && this.env.action == 'compose' && $.inArray(command, this.env.compose_commands) < 0 && !this.env.server_error) {
if (!this.env.is_sent && this.cmp_hash != this.compose_field_hash() && !confirm(this.get_label('notsentwarning')))
return false;
// remove copy from local storage if compose screen is left intentionally
this.remove_compose_data(this.env.compose_id);
this.compose_skip_unsavedcheck = true;
}
this.last_command = command;
// process external commands
if (typeof this.command_handlers[command] === 'function') {
ret = this.command_handlers[command](props, obj, event);
return ret !== undefined ? ret : (obj ? false : true);
}
else if (typeof this.command_handlers[command] === 'string') {
ret = window[this.command_handlers[command]](props, obj, event);
return ret !== undefined ? ret : (obj ? false : true);
}
// trigger plugin hooks
this.triggerEvent('actionbefore', {props:props, action:command, originalEvent:event});
ret = this.triggerEvent('before'+command, props || event);
if (ret !== undefined) {
// abort if one of the handlers returned false
if (ret === false)
return false;
else
props = ret;
}
ret = undefined;
// process internal command
switch (command) {
case 'login':
if (this.gui_objects.loginform)
this.gui_objects.loginform.submit();
break;
// commands to switch task
case 'logout':
case 'mail':
case 'addressbook':
case 'settings':
this.switch_task(command);
break;
case 'about':
this.redirect('?_task=settings&_action=about', false);
break;
case 'permaurl':
if (obj && obj.href && obj.target)
return true;
else if (this.env.permaurl)
parent.location.href = this.env.permaurl;
break;
case 'extwin':
if (this.env.action == 'compose') {
var form = this.gui_objects.messageform,
win = this.open_window('');
if (win) {
this.save_compose_form_local();
this.compose_skip_unsavedcheck = true;
$("input[name='_action']", form).val('compose');
form.action = this.url('mail/compose', { _id: this.env.compose_id, _extwin: 1 });
form.target = win.name;
form.submit();
}
}
else {
this.open_window(this.env.permaurl, true);
}
break;
case 'change-format':
url = this.env.permaurl + '&_format=' + props;
if (this.env.action == 'preview')
url = url.replace(/_action=show/, '_action=preview') + '&_framed=1';
if (this.env.extwin)
url += '&_extwin=1';
location.href = url;
break;
case 'menu-open':
if (props && props.menu == 'attachmentmenu') {
var mimetype = this.env.attachments[props.id];
this.enable_command('open-attachment', mimetype && this.env.mimetypes && $.inArray(mimetype, this.env.mimetypes) >= 0);
}
this.show_menu(props, props.show || undefined, event);
break;
case 'menu-close':
this.hide_menu(props, event);
break;
case 'menu-save':
this.triggerEvent(command, {props:props, originalEvent:event});
return false;
case 'open':
if (uid = this.get_single_uid()) {
obj.href = this.url('show', {_mbox: this.get_message_mailbox(uid), _uid: uid});
return true;
}
break;
case 'close':
if (this.env.extwin)
window.close();
break;
case 'list':
if (props && props != '') {
this.reset_qsearch(true);
}
if (this.env.action == 'compose' && this.env.extwin) {
window.close();
}
else if (this.task == 'mail') {
this.list_mailbox(props);
this.set_button_titles();
}
else if (this.task == 'addressbook')
this.list_contacts(props);
break;
case 'set-listmode':
this.set_list_options(null, undefined, undefined, props == 'threads' ? 1 : 0);
break;
case 'sort':
var sort_order = this.env.sort_order,
sort_col = !this.env.disabled_sort_col ? props : this.env.sort_col;
if (!this.env.disabled_sort_order)
sort_order = this.env.sort_col == sort_col && sort_order == 'ASC' ? 'DESC' : 'ASC';
// set table header and update env
this.set_list_sorting(sort_col, sort_order);
// reload message list
this.list_mailbox('', '', sort_col+'_'+sort_order);
break;
case 'nextpage':
this.list_page('next');
break;
case 'lastpage':
this.list_page('last');
break;
case 'previouspage':
this.list_page('prev');
break;
case 'firstpage':
this.list_page('first');
break;
case 'expunge':
if (this.env.exists)
this.expunge_mailbox(this.env.mailbox);
break;
case 'purge':
case 'empty-mailbox':
if (this.env.exists)
this.purge_mailbox(this.env.mailbox);
break;
// common commands used in multiple tasks
case 'show':
if (this.task == 'mail') {
uid = this.get_single_uid();
if (uid && (!this.env.uid || uid != this.env.uid)) {
if (this.env.mailbox == this.env.drafts_mailbox)
this.open_compose_step({ _draft_uid: uid, _mbox: this.env.mailbox });
else
this.show_message(uid);
}
}
else if (this.task == 'addressbook') {
cid = props ? props : this.get_single_cid();
if (cid && !(this.env.action == 'show' && cid == this.env.cid))
this.load_contact(cid, 'show');
}
break;
case 'add':
if (this.task == 'addressbook')
this.load_contact(0, 'add');
else if (this.task == 'settings' && this.env.action == 'responses') {
var frame;
if ((frame = this.get_frame_window(this.env.contentframe))) {
this.set_busy(true);
this.location_href({ _action:'add-response', _framed:1 }, frame);
}
}
else if (this.task == 'settings') {
this.identity_list.clear_selection();
this.load_identity(0, 'add-identity');
}
break;
case 'edit':
if (this.task == 'addressbook' && (cid = this.get_single_cid()))
this.load_contact(cid, 'edit');
else if (this.task == 'settings' && props)
this.load_identity(props, 'edit-identity');
else if (this.task == 'mail' && (uid = this.get_single_uid())) {
url = { _mbox: this.get_message_mailbox(uid) };
url[this.env.mailbox == this.env.drafts_mailbox && props != 'new' ? '_draft_uid' : '_uid'] = uid;
this.open_compose_step(url);
}
break;
case 'save':
var input, form = this.gui_objects.editform;
if (form) {
// adv. search
if (this.env.action == 'search') {
}
// user prefs
else if ((input = $("input[name='_pagesize']", form)) && input.length && isNaN(parseInt(input.val()))) {
alert(this.get_label('nopagesizewarning'));
input.focus();
break;
}
// contacts/identities
else {
// reload form
if (props == 'reload') {
form.action += '&_reload=1';
}
else if (this.task == 'settings' && (this.env.identities_level % 2) == 0 &&
(input = $("input[name='_email']", form)) && input.length && !rcube_check_email(input.val())
) {
alert(this.get_label('noemailwarning'));
input.focus();
break;
}
// clear empty input fields
$('input.placeholder').each(function(){ if (this.value == this._placeholder) this.value = ''; });
}
// add selected source (on the list)
if (parent.rcmail && parent.rcmail.env.source)
form.action = this.add_url(form.action, '_orig_source', parent.rcmail.env.source);
form.submit();
}
break;
case 'delete':
// mail task
if (this.task == 'mail')
this.delete_messages(event);
// addressbook task
else if (this.task == 'addressbook')
this.delete_contacts();
// settings: canned response
else if (this.task == 'settings' && this.env.action == 'responses')
this.delete_response();
// settings: user identities
else if (this.task == 'settings')
this.delete_identity();
break;
// mail task commands
case 'move':
case 'moveto': // deprecated
if (this.task == 'mail')
this.move_messages(props, event);
else if (this.task == 'addressbook')
this.move_contacts(props);
break;
case 'copy':
if (this.task == 'mail')
this.copy_messages(props, event);
else if (this.task == 'addressbook')
this.copy_contacts(props);
break;
case 'mark':
if (props)
this.mark_message(props);
break;
case 'toggle_status':
case 'toggle_flag':
flag = command == 'toggle_flag' ? 'flagged' : 'read';
if (uid = props) {
// toggle flagged/unflagged
if (flag == 'flagged') {
if (this.message_list.rows[uid].flagged)
flag = 'unflagged';
}
// toggle read/unread
else if (this.message_list.rows[uid].deleted)
flag = 'undelete';
else if (!this.message_list.rows[uid].unread)
flag = 'unread';
this.mark_message(flag, uid);
}
break;
case 'always-load':
if (this.env.uid && this.env.sender) {
this.add_contact(this.env.sender);
setTimeout(function(){ ref.command('load-images'); }, 300);
break;
}
case 'load-images':
if (this.env.uid)
this.show_message(this.env.uid, true, this.env.action=='preview');
break;
case 'load-attachment':
case 'open-attachment':
case 'download-attachment':
var qstring = '_mbox='+urlencode(this.env.mailbox)+'&_uid='+this.env.uid+'&_part='+props,
mimetype = this.env.attachments[props];
// open attachment in frame if it's of a supported mimetype
if (command != 'download-attachment' && mimetype && this.env.mimetypes && $.inArray(mimetype, this.env.mimetypes) >= 0) {
if (this.open_window(this.env.comm_path+'&_action=get&'+qstring+'&_frame=1'))
break;
}
this.goto_url('get', qstring+'&_download=1', false);
break;
case 'select-all':
this.select_all_mode = props ? false : true;
this.dummy_select = true; // prevent msg opening if there's only one msg on the list
if (props == 'invert')
this.message_list.invert_selection();
else
this.message_list.select_all(props == 'page' ? '' : props);
this.dummy_select = null;
break;
case 'select-none':
this.select_all_mode = false;
this.message_list.clear_selection();
break;
case 'expand-all':
this.env.autoexpand_threads = 1;
this.message_list.expand_all();
break;
case 'expand-unread':
this.env.autoexpand_threads = 2;
this.message_list.collapse_all();
this.expand_unread();
break;
case 'collapse-all':
this.env.autoexpand_threads = 0;
this.message_list.collapse_all();
break;
case 'nextmessage':
if (this.env.next_uid)
this.show_message(this.env.next_uid, false, this.env.action == 'preview');
break;
case 'lastmessage':
if (this.env.last_uid)
this.show_message(this.env.last_uid);
break;
case 'previousmessage':
if (this.env.prev_uid)
this.show_message(this.env.prev_uid, false, this.env.action == 'preview');
break;
case 'firstmessage':
if (this.env.first_uid)
this.show_message(this.env.first_uid);
break;
case 'compose':
url = {};
if (this.task == 'mail') {
url = {_mbox: this.env.mailbox, _search: this.env.search_request};
if (props)
url._to = props;
}
// modify url if we're in addressbook
else if (this.task == 'addressbook') {
// switch to mail compose step directly
if (props && props.indexOf('@') > 0) {
url._to = props;
}
else {
var a_cids = [];
// use contact id passed as command parameter
if (props)
a_cids.push(props);
// get selected contacts
else if (this.contact_list)
a_cids = this.contact_list.get_selection();
if (a_cids.length)
this.http_post('mailto', { _cid: a_cids.join(','), _source: this.env.source }, true);
else if (this.env.group)
this.http_post('mailto', { _gid: this.env.group, _source: this.env.source }, true);
break;
}
}
else if (props && typeof props == 'string') {
url._to = props;
}
else if (props && typeof props == 'object') {
$.extend(url, props);
}
this.open_compose_step(url);
break;
case 'spellcheck':
if (this.spellcheck_state()) {
this.editor.spellcheck_stop();
}
else {
this.editor.spellcheck_start();
}
break;
case 'savedraft':
// Reset the auto-save timer
clearTimeout(this.save_timer);
// compose form did not change (and draft wasn't saved already)
if (this.env.draft_id && this.cmp_hash == this.compose_field_hash()) {
this.auto_save_start();
break;
}
this.submit_messageform(true);
break;
case 'send':
if (!props.nocheck && !this.env.is_sent && !this.check_compose_input(command))
break;
// Reset the auto-save timer
clearTimeout(this.save_timer);
this.submit_messageform();
break;
case 'send-attachment':
// Reset the auto-save timer
clearTimeout(this.save_timer);
if (!(flag = this.upload_file(props || this.gui_objects.uploadform, 'upload'))) {
if (flag !== false)
alert(this.get_label('selectimportfile'));
aborted = true;
}
break;
case 'insert-sig':
this.change_identity($("[name='_from']")[0], true);
break;
case 'list-adresses':
this.list_contacts(props);
this.enable_command('add-recipient', false);
break;
case 'add-recipient':
this.compose_add_recipient(props);
break;
case 'reply-all':
case 'reply-list':
case 'reply':
if (uid = this.get_single_uid()) {
url = {_reply_uid: uid, _mbox: this.get_message_mailbox(uid), _search: this.env.search_request};
if (command == 'reply-all')
// do reply-list when a mailing list is detected and the popup menu wasn't used
url._all = (!props && this.env.reply_all_mode == 1 && this.commands['reply-list'] ? 'list' : 'all');
else if (command == 'reply-list')
url._all = 'list';
this.open_compose_step(url);
}
break;
case 'forward-attachment':
case 'forward-inline':
case 'forward':
var uids = this.env.uid ? [this.env.uid] : (this.message_list ? this.message_list.get_selection() : []);
if (uids.length) {
url = { _forward_uid: this.uids_to_list(uids), _mbox: this.env.mailbox, _search: this.env.search_request };
if (command == 'forward-attachment' || (!props && this.env.forward_attachment) || uids.length > 1)
url._attachment = 1;
this.open_compose_step(url);
}
break;
case 'print':
if (this.task == 'addressbook') {
if (uid = this.contact_list.get_single_selection()) {
url = '&_action=print&_cid=' + uid;
if (this.env.source)
url += '&_source=' + urlencode(this.env.source);
this.open_window(this.env.comm_path + url, true, true);
}
}
else if (this.env.action == 'get') {
this.gui_objects.messagepartframe.contentWindow.print();
}
else if (uid = this.get_single_uid()) {
url = '&_action=print&_uid='+uid+'&_mbox='+urlencode(this.get_message_mailbox(uid))+(this.env.safemode ? '&_safe=1' : '');
if (this.open_window(this.env.comm_path + url, true, true)) {
if (this.env.action != 'show')
this.mark_message('read', uid);
}
}
break;
case 'viewsource':
if (uid = this.get_single_uid())
this.open_window(this.env.comm_path+'&_action=viewsource&_uid='+uid+'&_mbox='+urlencode(this.env.mailbox), true, true);
break;
case 'download':
if (this.env.action == 'get') {
location.href = location.href.replace(/_frame=/, '_download=');
}
else if (uid = this.get_single_uid()) {
this.goto_url('viewsource', { _uid: uid, _mbox: this.get_message_mailbox(uid), _save: 1 });
}
break;
// quicksearch
case 'search':
if (!props && this.gui_objects.qsearchbox)
props = this.gui_objects.qsearchbox.value;
if (props) {
this.qsearch(props);
break;
}
// reset quicksearch
case 'reset-search':
var n, s = this.env.search_request || this.env.qsearch;
this.reset_qsearch(true);
this.select_all_mode = false;
if (s && this.env.action == 'compose') {
if (this.contact_list)
this.list_contacts_clear();
}
else if (s && this.env.mailbox) {
this.list_mailbox(this.env.mailbox, 1);
}
else if (s && this.task == 'addressbook') {
if (this.env.source == '') {
for (n in this.env.address_sources) break;
this.env.source = n;
this.env.group = '';
}
this.list_contacts(this.env.source, this.env.group, 1);
}
break;
case 'pushgroup':
// add group ID to stack
this.env.address_group_stack.push(props.id);
if (obj && event)
rcube_event.cancel(event);
case 'listgroup':
this.reset_qsearch();
this.list_contacts(props.source, props.id);
break;
case 'popgroup':
if (this.env.address_group_stack.length > 1) {
this.env.address_group_stack.pop();
this.reset_qsearch();
this.list_contacts(props.source, this.env.address_group_stack[this.env.address_group_stack.length-1]);
}
break;
case 'import-messages':
var form = props || this.gui_objects.importform,
importlock = this.set_busy(true, 'importwait');
$('input[name="_unlock"]', form).val(importlock);
if (!(flag = this.upload_file(form, 'import'))) {
this.set_busy(false, null, importlock);
if (flag !== false)
alert(this.get_label('selectimportfile'));
aborted = true;
}
break;
case 'import':
if (this.env.action == 'import' && this.gui_objects.importform) {
var file = document.getElementById('rcmimportfile');
if (file && !file.value) {
alert(this.get_label('selectimportfile'));
aborted = true;
break;
}
this.gui_objects.importform.submit();
this.set_busy(true, 'importwait');
this.lock_form(this.gui_objects.importform, true);
}
else
this.goto_url('import', (this.env.source ? '_target='+urlencode(this.env.source)+'&' : ''));
break;
case 'export':
if (this.contact_list.rowcount > 0) {
this.goto_url('export', { _source: this.env.source, _gid: this.env.group, _search: this.env.search_request });
}
break;
case 'export-selected':
if (this.contact_list.rowcount > 0) {
this.goto_url('export', { _source: this.env.source, _gid: this.env.group, _cid: this.contact_list.get_selection().join(',') });
}
break;
case 'upload-photo':
this.upload_contact_photo(props || this.gui_objects.uploadform);
break;
case 'delete-photo':
this.replace_contact_photo('-del-');
break;
// user settings commands
case 'preferences':
case 'identities':
case 'responses':
case 'folders':
this.goto_url('settings/' + command);
break;
case 'undo':
this.http_request('undo', '', this.display_message('', 'loading'));
break;
// unified command call (command name == function name)
default:
var func = command.replace(/-/g, '_');
if (this[func] && typeof this[func] === 'function') {
ret = this[func](props, obj, event);
}
break;
}
if (!aborted && this.triggerEvent('after'+command, props) === false)
ret = false;
this.triggerEvent('actionafter', { props:props, action:command, aborted:aborted });
return ret === false ? false : obj ? false : true;
};
// set command(s) enabled or disabled
this.enable_command = function()
{
var i, n, args = Array.prototype.slice.call(arguments),
enable = args.pop(), cmd;
for (n=0; n<args.length; n++) {
cmd = args[n];
// argument of type string: set the enable state of a single command
if (typeof cmd === 'string') {
this.commands[cmd] = enable;
this.set_button(cmd, (enable ? 'act' : 'pas'));
this.triggerEvent('enable-command', {command: cmd, status: enable});
}
// argument of type array: push its elements onto the argument list for processing
else {
for (i in cmd)
args.push(cmd[i]);
}
}
};
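// check whether the given command is currently enabled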
this.command_enabled = function(cmd)
{
return this.commands[cmd];
};
// lock/unlock interface
this.set_busy = function(a, message, id)
{
if (a && message) {
var msg = this.get_label(message);
if (msg == message)
msg = 'Loading...';
id = this.display_message(msg, 'loading');
}
else if (!a && id) {
this.hide_message(id);
}
this.busy = a;
//document.body.style.cursor = a ? 'wait' : 'default';
if (this.gui_objects.editform)
this.lock_form(this.gui_objects.editform, a);
return id;
};
// return a localized string
this.get_label = function(name, domain)
{
if (domain && this.labels[domain+'.'+name])
return this.labels[domain+'.'+name];
else if (this.labels[name])
return this.labels[name];
else
return name;
};
// alias for convenience reasons
this.gettext = this.get_label;
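// e.g. this.get_label('loading'); with a domain argument the label is looked up as '<domain>.<name>' first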
// switch to another application task
this.switch_task = function(task)
{
if (this.task === task && task != 'mail')
return;
var url = this.get_task_url(task);
if (task == 'mail')
url += '&_mbox=INBOX';
else if (task == 'logout' && !this.env.server_error) {
url += '&_token=' + this.env.request_token;
this.clear_compose_data();
}
this.redirect(url);
};
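// build the URL for the given task, based on the current comm_path or the passed base URL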
this.get_task_url = function(task, url)
{
if (!url)
url = this.env.comm_path;
if (url.match(/[?&]_task=[a-zA-Z0-9_-]+/))
return url.replace(/_task=[a-zA-Z0-9_-]+/, '_task=' + task);
else
return url.replace(/\?.*$/, '') + '?_task=' + task;
};
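// reload the application page; delegates to the parent window when framed, optional delay in ms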
this.reload = function(delay)
{
if (this.is_framed())
parent.rcmail.reload(delay);
else if (delay)
setTimeout(function() { ref.reload(); }, delay);
else if (window.location)
location.href = this.env.comm_path + (this.env.action ? '&_action='+this.env.action : '');
};
// Add a variable to the GET string, replacing the old value if it exists
this.add_url = function(url, name, value)
{
value = urlencode(value);
if (/(\?.*)$/.test(url)) {
var urldata = RegExp.$1,
datax = RegExp('((\\?|&)'+RegExp.escape(name)+'=[^&]*)');
if (datax.test(urldata)) {
urldata = urldata.replace(datax, RegExp.$2 + name + '=' + value);
}
else
urldata += '&' + name + '=' + value;
return url.replace(/(\?.*)$/, urldata);
}
return url + '?' + name + '=' + value;
};
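// check if this instance runs inside a frame of a parent rcmail window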
this.is_framed = function()
{
return this.env.framed && parent.rcmail && parent.rcmail != this && typeof parent.rcmail.command == 'function';
};
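// save a user preference on the server; optionally also store it in the session and mirror it in this.env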
this.save_pref = function(prop)
{
var request = {_name: prop.name, _value: prop.value};
if (prop.session)
request._session = prop.session;
if (prop.env)
this.env[prop.env] = prop.value;
this.http_post('save-pref', request);
};
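// convert a string into an identifier that is safe to use in HTML element IDs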
this.html_identifier = function(str, encode)
{
return encode ? this.html_identifier_encode(str) : String(str).replace(this.identifier_expr, '_');
};
this.html_identifier_encode = function(str)
{
return Base64.encode(String(str)).replace(/=+$/, '').replace(/\+/g, '-').replace(/\//g, '_');
};
this.html_identifier_decode = function(str)
{
str = String(str).replace(/-/g, '+').replace(/_/g, '/');
while (str.length % 4) str += '=';
return Base64.decode(str);
};
/*********************************************************/
/********* event handling methods *********/
/*********************************************************/
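// show the drag-and-drop context menu (move/copy) when the Shift key is held and copying is allowed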
this.drag_menu = function(e, target)
{
var modkey = rcube_event.get_modifier(e),
menu = this.gui_objects.dragmenu;
if (menu && modkey == SHIFT_KEY && this.commands['copy']) {
var pos = rcube_event.get_mouse_pos(e);
this.env.drag_target = target;
this.show_menu(this.gui_objects.dragmenu.id, true, e);
$(menu).css({top: (pos.y-10)+'px', left: (pos.x-10)+'px'});
return true;
}
return false;
};
this.drag_menu_action = function(action)
{
var menu = this.gui_objects.dragmenu;
if (menu) {
$(menu).hide();
}
this.command(action, this.env.drag_target);
this.env.drag_target = null;
};
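// list drag start handler: cancel pending preview timers and prepare the treelist for drop-target detection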
this.drag_start = function(list)
{
this.drag_active = true;
if (this.preview_timer)
clearTimeout(this.preview_timer);
if (this.preview_read_timer)
clearTimeout(this.preview_read_timer);
// prepare treelist widget for dragging interactions
if (this.treelist)
this.treelist.drag_start();
};
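// list drag end handler: execute the move (or open the drag menu) on the folder/group that was last hovered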
this.drag_end = function(e)
{
var list, model;
if (this.treelist)
this.treelist.drag_end();
// execute drag & drop action when mouse was released
if (list = this.message_list)
model = this.env.mailboxes;
else if (list = this.contact_list)
model = this.env.contactfolders;
if (this.drag_active && model && this.env.last_folder_target) {
var target = model[this.env.last_folder_target];
list.draglayer.hide();
if (this.contact_list) {
if (!this.contacts_drag_menu(e, target))
this.command('move', target);
}
else if (!this.drag_menu(e, target))
this.command('move', target);
}
this.drag_active = false;
this.env.last_folder_target = null;
};
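// while dragging over the folder list, remember the folder under the mouse and update the drag layer (move vs. copy)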
this.drag_move = function(e)
{
if (this.gui_objects.folderlist) {
var drag_target, oldclass,
layerclass = 'draglayernormal',
mouse = rcube_event.get_mouse_pos(e);
if (this.contact_list && this.contact_list.draglayer)
oldclass = this.contact_list.draglayer.attr('class');
// mouse intersects a valid drop target on the treelist
if (this.treelist && (drag_target = this.treelist.intersects(mouse, true))) {
this.env.last_folder_target = drag_target;
layerclass = 'draglayer' + (this.check_droptarget(drag_target) > 1 ? 'copy' : 'normal');
}
else {
// Clear target, otherwise drag end will trigger move into last valid droptarget
this.env.last_folder_target = null;
}
if (layerclass != oldclass && this.contact_list && this.contact_list.draglayer)
this.contact_list.draglayer.attr('class', layerclass);
}
};
this.collapse_folder = function(name)
{
if (this.treelist)
this.treelist.toggle(name);
};
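// treelist collapse/expand handler: update and save the collapsed-folders preference and refresh the unread count display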
this.folder_collapsed = function(node)
{
var prefname = this.env.task == 'addressbook' ? 'collapsed_abooks' : 'collapsed_folders',
old = this.env[prefname];
if (node.collapsed) {
this.env[prefname] = this.env[prefname] + '&'+urlencode(node.id)+'&';
// select the folder if one of its children is currently selected
// don't select if it's virtual (#1488346)
if (!node.virtual && this.env.mailbox && this.env.mailbox.startsWith(node.id + this.env.delimiter))
  this.command('list', node.id);
}
else {
var reg = new RegExp('&'+urlencode(node.id)+'&');
this.env[prefname] = this.env[prefname].replace(reg, '');
}
if (!this.drag_active) {
if (old !== this.env[prefname])
this.command('save-pref', { name: prefname, value: this.env[prefname] });
if (this.env.unread_counts)
this.set_unread_count_display(node.id, false);
}
};
// global mouse-click handler to cleanup some UI elements
this.doc_mouse_up = function(e)
{
var list, id, target = rcube_event.get_target(e);
// ignore event if jquery UI dialog is open
if ($(target).closest('.ui-dialog, .ui-widget-overlay').length)
return;
// remove focus from list widgets
if (window.rcube_list_widget && rcube_list_widget._instances.length) {
$.each(rcube_list_widget._instances, function(i,list){
if (list && !rcube_mouse_is_over(e, list.list.parentNode))
list.blur();
});
}
// reset 'pressed' buttons
if (this.buttons_sel) {
for (id in this.buttons_sel)
if (typeof id !== 'function')
this.button_out(this.buttons_sel[id], id);
this.buttons_sel = {};
}
// reset popup menus; delayed to have updated menu_stack data
setTimeout(function(e){
var obj, skip, config, id, i, parents = $(target).parents();
for (i = ref.menu_stack.length - 1; i >= 0; i--) {
id = ref.menu_stack[i];
obj = $('#' + id);
if (obj.is(':visible')
&& target != obj.data('opener')
&& target != obj.get(0) // check if scroll bar was clicked (#1489832)
&& !parents.is(obj.data('opener'))
&& id != skip
&& (obj.attr('data-editable') != 'true' || !$(target).parents('#' + id).length)
&& (obj.attr('data-sticky') != 'true' || !rcube_mouse_is_over(e, obj.get(0)))
) {
ref.hide_menu(id, e);
}
skip = obj.data('parent');
}
}, 10, e);
};
// global keypress event handler
this.doc_keypress = function(e)
{
// Helper method to move focus to the next/prev active menu item
var focus_menu_item = function(dir) {
var obj, item, mod = dir < 0 ? 'prevAll' : 'nextAll', limit = dir < 0 ? 'last' : 'first';
if (ref.focused_menu && (obj = $('#'+ref.focused_menu))) {
item = obj.find(':focus').closest('li')[mod](':has(:not([aria-disabled=true]))').find('a,input')[limit]();
if (!item.length)
item = obj.find(':focus').closest('ul')[mod](':has(:not([aria-disabled=true]))').find('a,input')[limit]();
return item.focus().length;
}
return 0;
};
var target = e.target || {},
keyCode = rcube_event.get_keycode(e);
// save global reference for keyboard detection on click events in IE
rcube_event._last_keyboard_event = e;
if (e.keyCode != 27 && (!this.menu_keyboard_active || target.nodeName == 'TEXTAREA' || target.nodeName == 'SELECT')) {
return true;
}
switch (keyCode) {
case 38:
case 40:
case 63232: // "up", in safari keypress
case 63233: // "down", in safari keypress
focus_menu_item(keyCode == 38 || keyCode == 63232 ? -1 : 1);
return rcube_event.cancel(e);
case 9: // tab
if (this.focused_menu) {
var mod = rcube_event.get_modifier(e);
if (!focus_menu_item(mod == SHIFT_KEY ? -1 : 1)) {
this.hide_menu(this.focused_menu, e);
}
}
return rcube_event.cancel(e);
case 27: // esc
if (this.menu_stack.length)
this.hide_menu(this.menu_stack[this.menu_stack.length-1], e);
break;
}
return true;
};
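// message list 'select' handler: toggle enabled commands based on the current selection and schedule the preview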
this.msglist_select = function(list)
{
if (this.preview_timer)
clearTimeout(this.preview_timer);
if (this.preview_read_timer)
clearTimeout(this.preview_read_timer);
var selected = list.get_single_selection();
this.enable_command(this.env.message_commands, selected != null);
if (selected) {
// Hide certain command buttons when Drafts folder is selected
if (this.env.mailbox == this.env.drafts_mailbox)
this.enable_command('reply', 'reply-all', 'reply-list', 'forward', 'forward-attachment', 'forward-inline', false);
// Disable reply-list when List-Post header is not set
else {
var msg = this.env.messages[selected];
if (!msg.ml)
this.enable_command('reply-list', false);
}
}
// Multi-message commands
this.enable_command('delete', 'move', 'copy', 'mark', 'forward', 'forward-attachment', list.selection.length > 0);
// reset all-pages-selection
if (selected || (list.selection.length && list.selection.length != list.rowcount))
this.select_all_mode = false;
// start timer for message preview (wait for double click)
if (selected && this.env.contentframe && !list.multi_selecting && !this.dummy_select)
this.preview_timer = setTimeout(function() { ref.msglist_get_preview(); }, this.dblclick_time);
else if (this.env.contentframe)
this.show_contentframe(false);
};
// This allows us to re-select the selected message and display it in the preview frame
this.msglist_click = function(list)
{
if (list.multi_selecting || !this.env.contentframe)
return;
if (list.get_single_selection())
return;
var win = this.get_frame_window(this.env.contentframe);
if (win && win.location.href.indexOf(this.env.blankpage) >= 0) {
if (this.preview_timer)
clearTimeout(this.preview_timer);
if (this.preview_read_timer)
clearTimeout(this.preview_read_timer);
this.preview_timer = setTimeout(function() { ref.msglist_get_preview(); }, this.dblclick_time);
}
};
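// double-click on a list row: open the draft in compose mode or show the full message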
this.msglist_dbl_click = function(list)
{
if (this.preview_timer)
clearTimeout(this.preview_timer);
if (this.preview_read_timer)
clearTimeout(this.preview_read_timer);
var uid = list.get_single_selection();
if (uid && (this.env.messages[uid].mbox || this.env.mailbox) == this.env.drafts_mailbox)
this.open_compose_step({ _draft_uid: uid, _mbox: this.env.mailbox });
else if (uid)
this.show_message(uid, false, false);
};
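// keyboard shortcuts on the message list (Enter = open, Del/Backspace = delete, PgUp/PgDn = paging)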
this.msglist_keypress = function(list)
{
if (list.modkey == CONTROL_KEY)
return;
if (list.key_pressed == list.ENTER_KEY)
this.command('show');
else if (list.key_pressed == list.DELETE_KEY || list.key_pressed == list.BACKSPACE_KEY)
this.command('delete');
else if (list.key_pressed == 33)
this.command('previouspage');
else if (list.key_pressed == 34)
this.command('nextpage');
};
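// load the currently selected message into the preview frame (unless a drag operation is active)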
this.msglist_get_preview = function()
{
var uid = this.get_single_uid();
if (uid && this.env.contentframe && !this.drag_active)
this.show_message(uid, false, true);
else if (this.env.contentframe)
this.show_contentframe(false);
};
this.msglist_expand = function(row)
{
if (this.env.messages[row.uid])
this.env.messages[row.uid].expanded = row.expanded;
$(row.obj)[row.expanded?'addClass':'removeClass']('expanded');
};
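// rebuild the column list from the table header after columns were replaced and save it as a preference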
this.msglist_set_coltypes = function(list)
{
var i, found, name, cols = list.thead.rows[0].cells;
this.env.listcols = [];
for (i=0; i<cols.length; i++)
if (cols[i].id && cols[i].id.startsWith('rcm')) {
name = cols[i].id.slice(3);
this.env.listcols.push(name);
}
if ((found = $.inArray('flag', this.env.listcols)) >= 0)
this.env.flagged_col = found;
if ((found = $.inArray('subject', this.env.listcols)) >= 0)
this.env.subject_col = found;
this.command('save-pref', { name: 'list_cols', value: this.env.listcols, session: 'list_attrib/columns' });
};
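// check if the given folder/addressbook/group can accept the dragged items (0 = no, 1 = move allowed, 2 = copy only)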
this.check_droptarget = function(id)
{
switch (this.task) {
case 'mail':
return (this.env.mailboxes[id]
&& !this.env.mailboxes[id].virtual
&& (this.env.mailboxes[id].id != this.env.mailbox || this.is_multifolder_listing())) ? 1 : 0;
case 'addressbook':
var target;
if (id != this.env.source && (target = this.env.contactfolders[id])) {
// droptarget is a group
if (target.type == 'group') {
if (target.id != this.env.group && !this.env.contactfolders[target.source].readonly) {
var is_other = this.env.selection_sources.length > 1 || $.inArray(target.source, this.env.selection_sources) == -1;
return !is_other || this.commands.move ? 1 : 2;
}
}
// droptarget is a (writable) addressbook and it's not the source
else if (!target.readonly && (this.env.selection_sources.length > 1 || $.inArray(id, this.env.selection_sources) == -1)) {
return this.commands.move ? 1 : 2;
}
}
}
return 0;
};
// open popup window
this.open_window = function(url, small, toolbar)
{
var wname = 'rcmextwin' + new Date().getTime();
url += (url.match(/\?/) ? '&' : '?') + '_extwin=1';
if (this.env.standard_windows)
var extwin = window.open(url, wname);
else {
var win = this.is_framed() ? parent.window : window,
page = $(win),
page_width = page.width(),
page_height = bw.mz ? $('body', win).height() : page.height(),
w = Math.min(small ? this.env.popup_width_small : this.env.popup_width, page_width),
h = page_height, // always use same height
l = (win.screenLeft || win.screenX) + 20,
t = (win.screenTop || win.screenY) + 20,
extwin = window.open(url, wname,
'width='+w+',height='+h+',top='+t+',left='+l+',resizable=yes,location=no,scrollbars=yes'
+(toolbar ? ',toolbar=yes,menubar=yes,status=yes' : ',toolbar=no,menubar=no,status=no'));
}
// detect popup blocker (#1489618)
// note: this detection might not work with all browsers
if (!extwin || extwin.closed) {
this.display_message(this.get_label('windowopenerror'), 'warning');
return;
}
// write loading... message to empty windows
if (!url && extwin.document) {
extwin.document.write('<html><body>' + this.get_label('loading') + '</body></html>');
}
// allow plugins to grab the window reference (#1489413)
this.triggerEvent('openwindow', { url:url, handle:extwin });
// focus window, delayed to bring to front
setTimeout(function() { extwin && extwin.focus(); }, 10);
return extwin;
};
/*********************************************************/
/********* (message) list functionality *********/
/*********************************************************/
this.init_message_row = function(row)
{
var i, fn = {}, uid = row.uid,
status_icon = (this.env.status_col != null ? 'status' : 'msg') + 'icn' + row.id;
if (uid && this.env.messages[uid])
$.extend(row, this.env.messages[uid]);
// set eventhandler to status icon
if (row.icon = document.getElementById(status_icon)) {
fn.icon = function(e) { ref.command('toggle_status', uid); };
}
// save message icon position too
if (this.env.status_col != null)
row.msgicon = document.getElementById('msgicn'+row.id);
else
row.msgicon = row.icon;
// set eventhandler to flag icon
if (this.env.flagged_col != null && (row.flagicon = document.getElementById('flagicn'+row.id))) {
fn.flagicon = function(e) { ref.command('toggle_flag', uid); };
}
// set event handler to thread expand/collapse icon
if (!row.depth && row.has_children && (row.expando = document.getElementById('rcmexpando'+row.id))) {
fn.expando = function(e) { ref.expand_message_row(e, uid); };
}
// attach events
$.each(fn, function(i, f) {
row[i].onclick = function(e) { f(e); return rcube_event.cancel(e); };
if (bw.touch && row[i].addEventListener) {
row[i].addEventListener('touchend', function(e) {
if (e.changedTouches.length == 1) {
f(e);
return rcube_event.cancel(e);
}
}, false);
}
});
this.triggerEvent('insertrow', { uid:uid, row:row });
};
// create a table row in the message list
this.add_message_row = function(uid, cols, flags, attop)
{
if (!this.gui_objects.messagelist || !this.message_list)
return false;
// Prevent adding messages from a different folder (#1487752)
if (flags.mbox != this.env.mailbox && !flags.skip_mbox_check)
return false;
if (!this.env.messages[uid])
this.env.messages[uid] = {};
// merge flags over local message object
$.extend(this.env.messages[uid], {
deleted: flags.deleted?1:0,
replied: flags.answered?1:0,
unread: !flags.seen?1:0,
forwarded: flags.forwarded?1:0,
flagged: flags.flagged?1:0,
has_children: flags.has_children?1:0,
depth: flags.depth?flags.depth:0,
unread_children: flags.unread_children?flags.unread_children:0,
parent_uid: flags.parent_uid?flags.parent_uid:0,
selected: this.select_all_mode || this.message_list.in_selection(uid),
ml: flags.ml?1:0,
ctype: flags.ctype,
mbox: flags.mbox,
// flags from plugins
flags: flags.extra_flags
});
var c, n, col, html, css_class, label, status_class = '', status_label = '',
tree = '', expando = '',
list = this.message_list,
rows = list.rows,
message = this.env.messages[uid],
msg_id = this.html_identifier(uid,true),
row_class = 'message'
+ (!flags.seen ? ' unread' : '')
+ (flags.deleted ? ' deleted' : '')
+ (flags.flagged ? ' flagged' : '')
+ (message.selected ? ' selected' : ''),
row = { cols:[], style:{}, id:'rcmrow'+msg_id, uid:uid };
// message status icons
css_class = 'msgicon';
if (this.env.status_col === null) {
css_class += ' status';
if (flags.deleted) {
status_class += ' deleted';
status_label += this.get_label('deleted') + ' ';
}
else if (!flags.seen) {
status_class += ' unread';
status_label += this.get_label('unread') + ' ';
}
else if (flags.unread_children > 0) {
status_class += ' unreadchildren';
}
}
if (flags.answered) {
status_class += ' replied';
status_label += this.get_label('replied') + ' ';
}
if (flags.forwarded) {
status_class += ' forwarded';
status_label += this.get_label('forwarded') + ' ';
}
// update selection
if (message.selected && !list.in_selection(uid))
list.selection.push(uid);
// threads
if (this.env.threading) {
if (message.depth) {
// This assumes that the branch element width is hardcoded to 15px
tree += '<span id="rcmtab' + msg_id + '" class="branch" style="width:' + (message.depth * 15) + 'px;"> </span>';
if ((rows[message.parent_uid] && rows[message.parent_uid].expanded === false)
|| ((this.env.autoexpand_threads == 0 || this.env.autoexpand_threads == 2) &&
(!rows[message.parent_uid] || !rows[message.parent_uid].expanded))
) {
row.style.display = 'none';
message.expanded = false;
}
else
message.expanded = true;
row_class += ' thread expanded';
}
else if (message.has_children) {
if (message.expanded === undefined && (this.env.autoexpand_threads == 1 || (this.env.autoexpand_threads == 2 && message.unread_children))) {
message.expanded = true;
}
expando = '<div id="rcmexpando' + row.id + '" class="' + (message.expanded ? 'expanded' : 'collapsed') + '"> </div>';
row_class += ' thread' + (message.expanded? ' expanded' : '');
}
if (flags.unread_children && flags.seen && !message.expanded)
row_class += ' unroot';
}
tree += '<span id="msgicn'+row.id+'" class="'+css_class+status_class+'" title="'+status_label+'"></span>';
row.className = row_class;
// build subject link
if (cols.subject) {
var action = flags.mbox == this.env.drafts_mailbox ? 'compose' : 'show',
uid_param = flags.mbox == this.env.drafts_mailbox ? '_draft_uid' : '_uid',
query = { _mbox: flags.mbox };
query[uid_param] = uid;
cols.subject = '<a href="' + this.url(action, query) + '" onclick="return rcube_event.keyboard_only(event)"' +
' onmouseover="rcube_webmail.long_subject_title(this,'+(message.depth+1)+')" tabindex="-1"><span>'+cols.subject+'</span></a>';
}
// add each submitted col
for (n in this.env.listcols) {
c = this.env.listcols[n];
col = {className: String(c).toLowerCase(), events:{}};
if (this.env.coltypes[c] && this.env.coltypes[c].hidden) {
col.className += ' hidden';
}
if (c == 'flag') {
css_class = (flags.flagged ? 'flagged' : 'unflagged');
label = this.get_label(css_class);
html = '<span id="flagicn'+row.id+'" class="'+css_class+'" title="'+label+'"></span>';
}
else if (c == 'attachment') {
label = this.get_label('withattachment');
if (flags.attachmentClass)
html = '<span class="'+flags.attachmentClass+'" title="'+label+'"></span>';
else if (/application\/|multipart\/(m|signed)/.test(flags.ctype))
html = '<span class="attachment" title="'+label+'"></span>';
else if (/multipart\/report/.test(flags.ctype))
html = '<span class="report"></span>';
else
html = ' ';
}
else if (c == 'status') {
label = '';
if (flags.deleted) {
css_class = 'deleted';
label = this.get_label('deleted');
}
else if (!flags.seen) {
css_class = 'unread';
label = this.get_label('unread');
}
else if (flags.unread_children > 0) {
css_class = 'unreadchildren';
}
else
css_class = 'msgicon';
html = '<span id="statusicn'+row.id+'" class="'+css_class+status_class+'" title="'+label+'"></span>';
}
else if (c == 'threads')
html = expando;
else if (c == 'subject') {
if (bw.ie)
col.events.mouseover = function() { rcube_webmail.long_subject_title_ex(this); };
html = tree + cols[c];
}
else if (c == 'priority') {
if (flags.prio > 0 && flags.prio < 6) {
label = this.get_label('priority') + ' ' + flags.prio;
html = '<span class="prio'+flags.prio+'" title="'+label+'"></span>';
}
else
html = ' ';
}
else if (c == 'folder') {
html = '<span onmouseover="rcube_webmail.long_subject_title(this)">' + cols[c] + '</span>';
}
else
html = cols[c];
col.innerHTML = html;
row.cols.push(col);
}
list.insert_row(row, attop);
// remove 'old' row
if (attop && this.env.pagesize && list.rowcount > this.env.pagesize) {
var uid = list.get_last_row();
list.remove_row(uid);
list.clear_selection(uid);
}
};
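// update the sort column/order in the table header classes and in env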
this.set_list_sorting = function(sort_col, sort_order)
{
// set table header class
$('#rcm'+this.env.sort_col).removeClass('sorted'+(this.env.sort_order.toUpperCase()));
if (sort_col)
$('#rcm'+sort_col).addClass('sorted'+sort_order);
this.env.sort_col = sort_col;
this.env.sort_order = sort_order;
};
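// apply changed list settings (columns, sorting, threading) and reload the message list if anything changed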
this.set_list_options = function(cols, sort_col, sort_order, threads)
{
var update, post_data = {};
if (sort_col === undefined)
sort_col = this.env.sort_col;
if (!sort_order)
sort_order = this.env.sort_order;
if (this.env.sort_col != sort_col || this.env.sort_order != sort_order) {
update = 1;
this.set_list_sorting(sort_col, sort_order);
}
if (this.env.threading != threads) {
update = 1;
post_data._threads = threads;
}
if (cols && cols.length) {
// make sure new columns are added at the end of the list
var i, idx, name, newcols = [], oldcols = this.env.listcols;
for (i=0; i<oldcols.length; i++) {
name = oldcols[i];
idx = $.inArray(name, cols);
if (idx != -1) {
newcols.push(name);
delete cols[idx];
}
}
for (i=0; i<cols.length; i++)
if (cols[i])
newcols.push(cols[i]);
if (newcols.join() != oldcols.join()) {
update = 1;
post_data._cols = newcols.join(',');
}
}
if (update)
this.list_mailbox('', '', sort_col+'_'+sort_order, post_data);
};
// display a message in the preview frame, the main window or an external window
this.show_message = function(id, safe, preview)
{
if (!id)
return;
var win, target = window,
action = preview ? 'preview': 'show',
url = '&_action='+action+'&_uid='+id+'&_mbox='+urlencode(this.get_message_mailbox(id));
if (preview && (win = this.get_frame_window(this.env.contentframe))) {
target = win;
url += '&_framed=1';
}
if (safe)
url += '&_safe=1';
// also send search request to get the right messages
if (this.env.search_request)
url += '&_search='+this.env.search_request;
// add browser capabilities, so we can properly handle attachments
url += '&_caps='+urlencode(this.browser_capabilities());
if (this.env.extwin)
url += '&_extwin=1';
if (preview && String(target.location.href).indexOf(url) >= 0) {
this.show_contentframe(true);
}
else {
if (!preview && this.env.message_extwin && !this.env.extwin)
this.open_window(this.env.comm_path+url, true);
else
this.location_href(this.env.comm_path+url, target, true);
// mark as read and change mbox unread counter
if (preview && this.message_list && this.message_list.rows[id] && this.message_list.rows[id].unread && this.env.preview_pane_mark_read > 0) {
this.preview_read_timer = setTimeout(function() {
ref.set_unread_message(id, ref.env.mailbox);
ref.http_post('mark', {_uid: id, _flag: 'read', _mbox: ref.env.mailbox, _quiet: 1});
}, this.env.preview_pane_mark_read * 1000);
}
}
};
// update message status and unread counter after marking a message as read
this.set_unread_message = function(id, folder)
{
var self = this;
// find window with messages list
if (!self.message_list)
self = self.opener();
if (!self && window.parent)
self = parent.rcmail;
if (!self || !self.message_list)
return;
// this may fail in multifolder mode
if (self.set_message(id, 'unread', false) === false)
self.set_message(id + '-' + folder, 'unread', false);
if (self.env.unread_counts[folder] > 0) {
self.env.unread_counts[folder] -= 1;
self.set_unread_count(folder, self.env.unread_counts[folder], folder == 'INBOX' && !self.is_multifolder_listing());
}
};
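// show or hide the preview frame; when hiding, the frame is reset to the configured blank page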
this.show_contentframe = function(show)
{
var frame, win, name = this.env.contentframe;
if (name && (frame = this.get_frame_element(name))) {
if (!show && (win = this.get_frame_window(name))) {
if (win.location.href.indexOf(this.env.blankpage) < 0) {
if (win.stop)
win.stop();
else // IE
win.document.execCommand('Stop');
win.location.href = this.env.blankpage;
}
}
else if (!bw.safari && !bw.konq)
$(frame)[show ? 'show' : 'hide']();
}
if (!show && this.env.frame_lock)
this.set_busy(false, null, this.env.frame_lock);
};
this.get_frame_element = function(id)
{
var frame;
if (id && (frame = document.getElementById(id)))
return frame;
};
this.get_frame_window = function(id)
{
var frame = this.get_frame_element(id);
if (frame && frame.name && window.frames)
return window.frames[frame.name];
};
this.lock_frame = function()
{
if (!this.env.frame_lock)
(this.is_framed() ? parent.rcmail : this).env.frame_lock = this.set_busy(true, 'loading');
};
// list a specific page
this.list_page = function(page)
{
if (page == 'next')
page = this.env.current_page+1;
else if (page == 'last')
page = this.env.pagecount;
else if (page == 'prev' && this.env.current_page > 1)
page = this.env.current_page-1;
else if (page == 'first' && this.env.current_page > 1)
page = 1;
if (page > 0 && page <= this.env.pagecount) {
this.env.current_page = page;
if (this.task == 'addressbook' || this.contact_list)
this.list_contacts(this.env.source, this.env.group, page);
else if (this.task == 'mail')
this.list_mailbox(this.env.mailbox, page);
}
};
// sends request to check for recent messages
this.checkmail = function()
{
var lock = this.set_busy(true, 'checkingmail'),
params = this.check_recent_params();
this.http_post('check-recent', params, lock);
};
// list messages of a specific mailbox using filter
this.filter_mailbox = function(filter)
{
if (this.filter_disabled)
return;
var lock = this.set_busy(true, 'searching');
this.clear_message_list();
// reset vars
this.env.current_page = 1;
this.env.search_filter = filter;
this.http_request('search', this.search_params(false, filter), lock);
};
// reload the current message listing
this.refresh_list = function()
{
this.list_mailbox(this.env.mailbox, this.env.current_page || 1, null, { _clear:1 }, true);
if (this.message_list)
this.message_list.clear_selection();
};
// list messages of a specific mailbox
this.list_mailbox = function(mbox, page, sort, url, update_only)
{
var win, target = window;
if (typeof url != 'object')
url = {};
if (!mbox)
mbox = this.env.mailbox ? this.env.mailbox : 'INBOX';
// add sort to url if set
if (sort)
url._sort = sort;
// folder change, reset page, search scope, etc.
if (this.env.mailbox != mbox) {
page = 1;
this.env.current_page = page;
this.env.search_scope = 'base';
this.select_all_mode = false;
this.reset_search_filter();
}
// also send search request to get the right messages
else if (this.env.search_request)
url._search = this.env.search_request;
if (!update_only) {
// unselect selected messages and clear the list and message data
this.clear_message_list();
if (mbox != this.env.mailbox || (mbox == this.env.mailbox && !page && !sort))
url._refresh = 1;
this.select_folder(mbox, '', true);
this.unmark_folder(mbox, 'recent', '', true);
this.env.mailbox = mbox;
}
// load message list remotely
if (this.gui_objects.messagelist) {
this.list_mailbox_remote(mbox, page, url);
return;
}
if (win = this.get_frame_window(this.env.contentframe)) {
target = win;
url._framed = 1;
}
if (this.env.uid)
url._uid = this.env.uid;
// load message list to target frame/window
if (mbox) {
this.set_busy(true, 'loading');
url._mbox = mbox;
if (page)
url._page = page;
this.location_href(url, target);
}
};
this.clear_message_list = function()
{
this.env.messages = {};
this.show_contentframe(false);
if (this.message_list)
this.message_list.clear(true);
};
// send remote request to load message list
this.list_mailbox_remote = function(mbox, page, url)
{
var lock = this.set_busy(true, 'loading');
if (typeof url != 'object')
url = {};
url._mbox = mbox;
if (page)
url._page = page;
this.http_request('list', url, lock);
this.update_state({ _mbox: mbox, _page: (page && page > 1 ? page : null) });
};
// removes messages that don't exist from the list selection array
this.update_selection = function()
{
var selected = this.message_list.selection,
rows = this.message_list.rows,
i, selection = [];
for (i in selected)
if (rows[selected[i]])
selection.push(selected[i]);
this.message_list.selection = selection;
// reset preview frame, if currently previewed message is not selected (has been removed)
try {
var win = this.get_frame_window(this.env.contentframe),
id = win.rcmail.env.uid;
if (id && $.inArray(id, selection) < 0)
this.show_contentframe(false);
}
catch (e) {};
};
// expand all threads with unread children
this.expand_unread = function()
{
var r, tbody = this.message_list.tbody,
new_row = tbody.firstChild;
while (new_row) {
if (new_row.nodeType == 1 && (r = this.message_list.rows[new_row.uid]) && r.unread_children) {
this.message_list.expand_all(r);
this.set_unread_children(r.uid);
}
new_row = new_row.nextSibling;
}
return false;
};
// thread expanding/collapsing handler
this.expand_message_row = function(e, uid)
{
var row = this.message_list.rows[uid];
// handle unread_children mark
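// Note: row.expanded is flipped temporarily so set_unread_children() evaluates the
// state the row is about to enter; expand_row() below then performs the actual toggle.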
row.expanded = !row.expanded;
this.set_unread_children(uid);
row.expanded = !row.expanded;
this.message_list.expand_row(e, uid);
};
// expand threads in the message list according to the autoexpand_threads setting
this.expand_threads = function()
{
if (!this.env.threading || !this.env.autoexpand_threads || !this.message_list)
return;
switch (this.env.autoexpand_threads) {
case 2: this.expand_unread(); break;
case 1: this.message_list.expand_all(); break;
}
};
// Initializes thread indicators/expanders after a list update
this.init_threads = function(roots, mbox)
{
// #1487752
if (mbox && mbox != this.env.mailbox)
return false;
for (var n=0, len=roots.length; n<len; n++)
this.add_tree_icons(roots[n]);
this.expand_threads();
};
// adds thread tree icons to the list (or to the specified thread)
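// For every thread child row an array of numeric codes (one entry per depth level) is
// collected in tmp[]; set_tree_icons() below maps these codes to the l1/l2/l3 CSS
// classes that draw the connecting tree lines.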
this.add_tree_icons = function(root)
{
var i, l, r, n, len, pos, tmp = [], uid = [],
row, rows = this.message_list.rows;
if (root)
row = rows[root] ? rows[root].obj : null;
else
row = this.message_list.tbody.firstChild;
while (row) {
if (row.nodeType == 1 && (r = rows[row.uid])) {
if (r.depth) {
for (i=tmp.length-1; i>=0; i--) {
len = tmp[i].length;
if (len > r.depth) {
pos = len - r.depth;
if (!(tmp[i][pos] & 2))
tmp[i][pos] = tmp[i][pos] ? tmp[i][pos]+2 : 2;
}
else if (len == r.depth) {
if (!(tmp[i][0] & 2))
tmp[i][0] += 2;
}
if (r.depth > len)
break;
}
tmp.push(new Array(r.depth));
tmp[tmp.length-1][0] = 1;
uid.push(r.uid);
}
else {
if (tmp.length) {
for (i in tmp) {
this.set_tree_icons(uid[i], tmp[i]);
}
tmp = [];
uid = [];
}
if (root && row != rows[root].obj)
break;
}
}
row = row.nextSibling;
}
if (tmp.length) {
for (i in tmp) {
this.set_tree_icons(uid[i], tmp[i]);
}
}
};
// adds tree icons to specified message row
this.set_tree_icons = function(uid, tree)
{
var i, divs = [], html = '', len = tree.length;
for (i=0; i<len; i++) {
if (tree[i] > 2)
divs.push({'class': 'l3', width: 15});
else if (tree[i] > 1)
divs.push({'class': 'l2', width: 15});
else if (tree[i] > 0)
divs.push({'class': 'l1', width: 15});
// separator div
else if (divs.length && !divs[divs.length-1]['class'])
divs[divs.length-1].width += 15;
else
divs.push({'class': null, width: 15});
}
for (i=divs.length-1; i>=0; i--) {
if (divs[i]['class'])
html += '<div class="tree '+divs[i]['class']+'" />';
else
html += '<div style="width:'+divs[i].width+'px" />';
}
if (html)
$('#rcmtab'+this.html_identifier(uid, true)).html(html);
};
// update parent in a thread
this.update_thread_root = function(uid, flag)
{
if (!this.env.threading)
return;
var root = this.message_list.find_root(uid);
if (uid == root)
return;
var p = this.message_list.rows[root];
if (flag == 'read' && p.unread_children) {
p.unread_children--;
}
else if (flag == 'unread' && p.has_children) {
// unread_children may be undefined
p.unread_children = p.unread_children ? p.unread_children + 1 : 1;
}
else {
return;
}
this.set_message_icon(root);
this.set_unread_children(root);
};
// update thread indicators for all messages in a thread below the specified message
// return number of removed/added root level messages
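// (negative when a root message was removed from the list, positive when child rows
// were promoted to new thread roots)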
this.update_thread = function (uid)
{
if (!this.env.threading)
return 0;
var r, parent, count = 0,
rows = this.message_list.rows,
row = rows[uid],
depth = rows[uid].depth,
roots = [];
if (!row.depth) // root message: decrease roots count
count--;
else if (row.unread) {
// update unread_children for thread root
parent = this.message_list.find_root(uid);
rows[parent].unread_children--;
this.set_unread_children(parent);
}
parent = row.parent_uid;
// process child rows
row = row.obj.nextSibling;
while (row) {
if (row.nodeType == 1 && (r = rows[row.uid])) {
if (!r.depth || r.depth <= depth)
break;
r.depth--; // move left
// reset width and clear the content of a tab, icons will be added later
$('#rcmtab'+r.id).width(r.depth * 15).html('');
if (!r.depth) { // a new root
count++; // increase roots count
r.parent_uid = 0;
if (r.has_children) {
// replace 'leaf' with 'collapsed'
$('#'+r.id+' .leaf:first')
.attr('id', 'rcmexpando' + r.id)
.attr('class', (r.obj.style.display != 'none' ? 'expanded' : 'collapsed'))
.bind('mousedown', {uid: r.uid},
function(e) { return ref.expand_message_row(e, e.data.uid); });
r.unread_children = 0;
roots.push(r);
}
// show if it was hidden
if (r.obj.style.display == 'none')
$(r.obj).show();
}
else {
if (r.depth == depth)
r.parent_uid = parent;
if (r.unread && roots.length)
roots[roots.length-1].unread_children++;
}
}
row = row.nextSibling;
}
// update unread_children for roots
for (r=0; r<roots.length; r++)
this.set_unread_children(roots[r].uid);
return count;
};
this.delete_excessive_thread_rows = function()
{
var r, rows = this.message_list.rows,
tbody = this.message_list.tbody,
row = tbody.firstChild,
cnt = this.env.pagesize + 1;
while (row) {
if (row.nodeType == 1 && (r = rows[row.uid])) {
if (!r.depth && cnt)
cnt--;
if (!cnt)
this.message_list.remove_row(row.uid);
}
row = row.nextSibling;
}
};
// set message icon
this.set_message_icon = function(uid)
{
var css_class, label = '',
row = this.message_list.rows[uid];
if (!row)
return false;
if (row.icon) {
css_class = 'msgicon';
if (row.deleted) {
css_class += ' deleted';
label += this.get_label('deleted') + ' ';
}
else if (row.unread) {
css_class += ' unread';
label += this.get_label('unread') + ' ';
}
else if (row.unread_children)
css_class += ' unreadchildren';
if (row.msgicon == row.icon) {
if (row.replied) {
css_class += ' replied';
label += this.get_label('replied') + ' ';
}
if (row.forwarded) {
css_class += ' forwarded';
label += this.get_label('forwarded') + ' ';
}
css_class += ' status';
}
$(row.icon).attr('class', css_class).attr('title', label);
}
if (row.msgicon && row.msgicon != row.icon) {
label = '';
css_class = 'msgicon';
if (!row.unread && row.unread_children) {
css_class += ' unreadchildren';
}
if (row.replied) {
css_class += ' replied';
label += this.get_label('replied') + ' ';
}
if (row.forwarded) {
css_class += ' forwarded';
label += this.get_label('forwarded') + ' ';
}
$(row.msgicon).attr('class', css_class).attr('title', label);
}
if (row.flagicon) {
css_class = (row.flagged ? 'flagged' : 'unflagged');
label = this.get_label(css_class);
$(row.flagicon).attr('class', css_class)
.attr('aria-label', label)
.attr('title', label);
}
};
// set message status
this.set_message_status = function(uid, flag, status)
{
var row = this.message_list.rows[uid];
if (!row)
return false;
if (flag == 'unread') {
if (row.unread != status)
this.update_thread_root(uid, status ? 'unread' : 'read');
}
if ($.inArray(flag, ['unread', 'deleted', 'replied', 'forwarded', 'flagged']) > -1)
row[flag] = status;
};
// set message row status, class and icon
this.set_message = function(uid, flag, status)
{
var row = this.message_list && this.message_list.rows[uid];
if (!row)
return false;
if (flag)
this.set_message_status(uid, flag, status);
if ($.inArray(flag, ['unread', 'deleted', 'flagged']) > -1)
$(row.obj)[row[flag] ? 'addClass' : 'removeClass'](flag);
this.set_unread_children(uid);
this.set_message_icon(uid);
};
// sets unroot (unread_children) class of parent row
this.set_unread_children = function(uid)
{
var row = this.message_list.rows[uid];
if (row.parent_uid)
return;
if (!row.unread && row.unread_children && !row.expanded)
$(row.obj).addClass('unroot');
else
$(row.obj).removeClass('unroot');
};
// copy selected messages to the specified mailbox
this.copy_messages = function(mbox, event)
{
if (mbox && typeof mbox === 'object')
mbox = mbox.id;
else if (!mbox)
return this.folder_selector(event, function(folder) { ref.command('copy', folder); });
// exit if current or no mailbox specified
if (!mbox || mbox == this.env.mailbox)
return;
var post_data = this.selection_post_data({_target_mbox: mbox});
// exit if selection is empty
if (!post_data._uid)
return;
// send request to server
this.http_post('copy', post_data, this.display_message(this.get_label('copyingmessage'), 'loading'));
};
// move selected messages to the specified mailbox
this.move_messages = function(mbox, event)
{
if (mbox && typeof mbox === 'object')
mbox = mbox.id;
else if (!mbox)
return this.folder_selector(event, function(folder) { ref.command('move', folder); });
// exit if current or no mailbox specified
if (!mbox || (mbox == this.env.mailbox && !this.is_multifolder_listing()))
return;
var lock = false, post_data = this.selection_post_data({_target_mbox: mbox});
// exit if selection is empty
if (!post_data._uid)
return;
// show wait message
if (this.env.action == 'show')
lock = this.set_busy(true, 'movingmessage');
else
this.show_contentframe(false);
// Hide message command buttons until a message is selected
this.enable_command(this.env.message_commands, false);
this._with_selected_messages('move', post_data, lock);
};
// delete selected messages from the current mailbox
this.delete_messages = function(event)
{
var list = this.message_list, trash = this.env.trash_mailbox;
// if config is set to flag for deletion
if (this.env.flag_for_deletion) {
this.mark_message('delete');
return false;
}
// if there isn't a defined trash mailbox or we are in it
else if (!trash || this.env.mailbox == trash)
this.permanently_remove_messages();
// we're in Junk folder and delete_junk is enabled
else if (this.env.delete_junk && this.env.junk_mailbox && this.env.mailbox == this.env.junk_mailbox)
this.permanently_remove_messages();
// if there is a trash mailbox defined and we're not currently in it
else {
// if shift was pressed delete it immediately
if ((list && list.modkey == SHIFT_KEY) || (event && rcube_event.get_modifier(event) == SHIFT_KEY)) {
if (confirm(this.get_label('deletemessagesconfirm')))
this.permanently_remove_messages();
}
else
this.move_messages(trash);
}
return true;
};
// delete the selected messages permanently
this.permanently_remove_messages = function()
{
var post_data = this.selection_post_data();
// exit if selection is empty
if (!post_data._uid)
return;
this.show_contentframe(false);
this._with_selected_messages('delete', post_data);
};
// Send a specific move/delete request with UIDs of all selected messages
// @private
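// A negative thread-roots delta is sent as _count, presumably so the backend can
// return enough replacement rows to keep the current page filled.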
this._with_selected_messages = function(action, post_data, lock)
{
var count = 0, msg,
remove = (action == 'delete' || !this.is_multifolder_listing());
// update the list (remove rows, clear selection)
if (this.message_list) {
var n, id, len, root, roots = [],
selection = this.message_list.get_selection();
for (n=0, len=selection.length; n<len; n++) {
id = selection[n];
if (this.env.threading) {
count += this.update_thread(id);
root = this.message_list.find_root(id);
if (root != id && $.inArray(root, roots) < 0) {
roots.push(root);
}
}
if (remove)
this.message_list.remove_row(id, (this.env.display_next && n == selection.length-1));
}
// make sure there are no selected rows
if (!this.env.display_next && remove)
this.message_list.clear_selection();
// update thread tree icons
for (n=0, len=roots.length; n<len; n++) {
this.add_tree_icons(roots[n]);
}
}
if (count < 0)
post_data._count = (count*-1);
// remove threads from the end of the list
else if (count > 0 && remove)
this.delete_excessive_thread_rows();
if (!remove)
post_data._refresh = 1;
if (!lock) {
msg = action == 'move' ? 'movingmessage' : 'deletingmessage';
lock = this.display_message(this.get_label(msg), 'loading');
}
// send request to server
this.http_post(action, post_data, lock);
};
// build post data for message delete/move/copy/flag requests
this.selection_post_data = function(data)
{
if (typeof(data) != 'object')
data = {};
data._mbox = this.env.mailbox;
if (!data._uid) {
var uids = this.env.uid ? [this.env.uid] : this.message_list.get_selection();
data._uid = this.uids_to_list(uids);
}
if (this.env.action)
data._from = this.env.action;
// also send search request to get the right messages
if (this.env.search_request)
data._search = this.env.search_request;
if (this.env.display_next && this.env.next_uid)
data._next_uid = this.env.next_uid;
return data;
};
// set a specific flag to one or more messages
this.mark_message = function(flag, uid)
{
var a_uids = [], r_uids = [], len, n, id,
list = this.message_list;
if (uid)
a_uids[0] = uid;
else if (this.env.uid)
a_uids[0] = this.env.uid;
else if (list)
a_uids = list.get_selection();
if (!list)
r_uids = a_uids;
else {
list.focus();
for (n=0, len=a_uids.length; n<len; n++) {
id = a_uids[n];
if ((flag == 'read' && list.rows[id].unread)
|| (flag == 'unread' && !list.rows[id].unread)
|| (flag == 'delete' && !list.rows[id].deleted)
|| (flag == 'undelete' && list.rows[id].deleted)
|| (flag == 'flagged' && !list.rows[id].flagged)
|| (flag == 'unflagged' && list.rows[id].flagged))
{
r_uids.push(id);
}
}
}
// nothing to do
if (!r_uids.length && !this.select_all_mode)
return;
switch (flag) {
case 'read':
case 'unread':
this.toggle_read_status(flag, r_uids);
break;
case 'delete':
case 'undelete':
this.toggle_delete_status(r_uids);
break;
case 'flagged':
case 'unflagged':
this.toggle_flagged_status(flag, a_uids);
break;
}
};
// set class to read/unread
this.toggle_read_status = function(flag, a_uids)
{
var i, len = a_uids.length,
post_data = this.selection_post_data({_uid: this.uids_to_list(a_uids), _flag: flag}),
lock = this.display_message(this.get_label('markingmessage'), 'loading');
// mark all message rows as read/unread
for (i=0; i<len; i++)
this.set_message(a_uids[i], 'unread', (flag == 'unread' ? true : false));
this.http_post('mark', post_data, lock);
};
// set image to flagged or unflagged
this.toggle_flagged_status = function(flag, a_uids)
{
var i, len = a_uids.length,
post_data = this.selection_post_data({_uid: this.uids_to_list(a_uids), _flag: flag}),
lock = this.display_message(this.get_label('markingmessage'), 'loading');
// mark all message rows as flagged/unflagged
for (i=0; i<len; i++)
this.set_message(a_uids[i], 'flagged', (flag == 'flagged' ? true : false));
this.http_post('mark', post_data, lock);
};
// mark all message rows as deleted/undeleted
this.toggle_delete_status = function(a_uids)
{
var len = a_uids.length,
i, uid, all_deleted = true,
rows = this.message_list ? this.message_list.rows : {};
if (len == 1) {
if (!this.message_list || (rows[a_uids[0]] && !rows[a_uids[0]].deleted))
this.flag_as_deleted(a_uids);
else
this.flag_as_undeleted(a_uids);
return true;
}
for (i=0; i<len; i++) {
uid = a_uids[i];
if (rows[uid] && !rows[uid].deleted) {
all_deleted = false;
break;
}
}
if (all_deleted)
this.flag_as_undeleted(a_uids);
else
this.flag_as_deleted(a_uids);
return true;
};
this.flag_as_undeleted = function(a_uids)
{
var i, len = a_uids.length,
post_data = this.selection_post_data({_uid: this.uids_to_list(a_uids), _flag: 'undelete'}),
lock = this.display_message(this.get_label('markingmessage'), 'loading');
for (i=0; i<len; i++)
this.set_message(a_uids[i], 'deleted', false);
this.http_post('mark', post_data, lock);
};
this.flag_as_deleted = function(a_uids)
{
var uid, r_uids = [],
post_data = this.selection_post_data({_uid: this.uids_to_list(a_uids), _flag: 'delete'}),
lock = this.display_message(this.get_label('markingmessage'), 'loading'),
rows = this.message_list ? this.message_list.rows : {},
count = 0;
for (var i=0, len=a_uids.length; i<len; i++) {
uid = a_uids[i];
if (rows[uid]) {
if (rows[uid].unread)
r_uids[r_uids.length] = uid;
if (this.env.skip_deleted) {
count += this.update_thread(uid);
this.message_list.remove_row(uid, (this.env.display_next && i == this.message_list.selection.length-1));
}
else
this.set_message(uid, 'deleted', true);
}
}
// make sure there are no selected rows
if (this.env.skip_deleted && this.message_list) {
if (!this.env.display_next)
this.message_list.clear_selection();
if (count < 0)
post_data._count = (count*-1);
else if (count > 0)
// remove threads from the end of the list
this.delete_excessive_thread_rows();
}
// set of messages to mark as seen
if (r_uids.length)
post_data._ruid = this.uids_to_list(r_uids);
if (this.env.skip_deleted && this.env.display_next && this.env.next_uid)
post_data._next_uid = this.env.next_uid;
this.http_post('mark', post_data, lock);
};
// flag as read without mark request (called from backend)
// argument should be a comma-separated list of uids
this.flag_deleted_as_read = function(uids)
{
var uid, i, len,
rows = this.message_list ? this.message_list.rows : {};
if (typeof uids == 'string')
uids = uids.split(',');
for (i=0, len=uids.length; i<len; i++) {
uid = uids[i];
if (rows[uid])
this.set_message(uid, 'unread', false);
}
};
// Converts array of message UIDs to comma-separated list for use in URL
// with select_all mode checking
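// In select-all mode a '*' wildcard is sent instead of explicit UIDs; a single UID is
// flattened to a string, larger selections stay an array (presumably serialized as a
// list by the HTTP request layer).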
this.uids_to_list = function(uids)
{
return this.select_all_mode ? '*' : (uids.length <= 1 ? uids.join(',') : uids);
};
// Sets title of the delete button
this.set_button_titles = function()
{
var label = 'deletemessage';
if (!this.env.flag_for_deletion
&& this.env.trash_mailbox && this.env.mailbox != this.env.trash_mailbox
&& (!this.env.delete_junk || !this.env.junk_mailbox || this.env.mailbox != this.env.junk_mailbox)
)
label = 'movemessagetotrash';
this.set_alttext('delete', label);
};
/*********************************************************/
/********* mailbox folders methods *********/
/*********************************************************/
this.expunge_mailbox = function(mbox)
{
var lock, post_data = {_mbox: mbox};
// lock interface if it's the active mailbox
if (mbox == this.env.mailbox) {
lock = this.set_busy(true, 'loading');
post_data._reload = 1;
if (this.env.search_request)
post_data._search = this.env.search_request;
}
// send request to server
this.http_post('expunge', post_data, lock);
};
this.purge_mailbox = function(mbox)
{
var lock, post_data = {_mbox: mbox};
if (!confirm(this.get_label('purgefolderconfirm')))
return false;
// lock interface if it's the active mailbox
if (mbox == this.env.mailbox) {
lock = this.set_busy(true, 'loading');
post_data._reload = 1;
}
// send request to server
this.http_post('purge', post_data, lock);
};
// test if purge command is allowed
this.purge_mailbox_test = function()
{
return (this.env.exists && (
this.env.mailbox == this.env.trash_mailbox
|| this.env.mailbox == this.env.junk_mailbox
|| this.env.mailbox.startsWith(this.env.trash_mailbox + this.env.delimiter)
|| this.env.mailbox.startsWith(this.env.junk_mailbox + this.env.delimiter)
));
};
/*********************************************************/
/********* login form methods *********/
/*********************************************************/
// handler for keyboard events on the _user field
this.login_user_keyup = function(e)
{
var key = rcube_event.get_keycode(e),
passwd = $('#rcmloginpwd');
// enter
if (key == 13 && passwd.length && !passwd.val()) {
passwd.focus();
return rcube_event.cancel(e);
}
return true;
};
/*********************************************************/
/********* message compose methods *********/
/*********************************************************/
this.open_compose_step = function(p)
{
var url = this.url('mail/compose', p);
// open new compose window
if (this.env.compose_extwin && !this.env.extwin) {
this.open_window(url);
}
else {
this.redirect(url);
if (this.env.extwin)
window.resizeTo(Math.max(this.env.popup_width, $(window).width()), $(window).height() + 24);
}
};
// init message compose form: set focus and event handlers
this.init_messageform = function()
{
if (!this.gui_objects.messageform)
return false;
var i, elem, pos, input_from = $("[name='_from']"),
input_to = $("[name='_to']"),
input_subject = $("input[name='_subject']"),
input_message = $("[name='_message']").get(0),
html_mode = $("input[name='_is_html']").val() == '1',
ac_fields = ['cc', 'bcc', 'replyto', 'followupto'],
ac_props, opener_rc = this.opener();
// close compose step in opener
if (opener_rc && opener_rc.env.action == 'compose') {
setTimeout(function(){
if (opener.history.length > 1)
opener.history.back();
else
opener_rc.redirect(opener_rc.get_task_url('mail'));
}, 100);
this.env.opened_extwin = true;
}
// configure parallel autocompletion
if (this.env.autocomplete_threads > 0) {
ac_props = {
threads: this.env.autocomplete_threads,
sources: this.env.autocomplete_sources
};
}
// init live search events
this.init_address_input_events(input_to, ac_props);
for (i in ac_fields) {
this.init_address_input_events($("[name='_"+ac_fields[i]+"']"), ac_props);
}
if (!html_mode) {
pos = this.env.top_posting ? 0 : input_message.value.length;
this.set_caret_pos(input_message, pos);
// add signature according to selected identity
// if we have HTML editor, signature is added in callback
if (input_from.prop('type') == 'select-one') {
this.change_identity(input_from[0]);
}
// scroll to the bottom of the textarea (#1490114)
if (pos) {
$(input_message).scrollTop(input_message.scrollHeight);
}
}
// check for locally stored compose data
if (this.env.save_localstorage)
this.compose_restore_dialog(0, html_mode);
if (input_to.val() == '')
elem = input_to;
else if (input_subject.val() == '')
elem = input_subject;
else if (input_message)
elem = input_message;
// focus first empty element (needs to be visible on IE8)
$(elem).filter(':visible').focus();
this.env.compose_focus_elem = document.activeElement;
// get summary of all field values
this.compose_field_hash(true);
// start the auto-save timer
this.auto_save_start();
};
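// Walks the locally stored compose drafts (starting at index j): a copy matching the
// current compose_id is restored immediately, copies left over from another session
// trigger a restore/delete/ignore dialog, and drafts/replies that don't match the
// current context are skipped.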
this.compose_restore_dialog = function(j, html_mode)
{
var i, key, formdata, index = this.local_storage_get_item('compose.index', []);
var show_next = function(i) {
if (++i < index.length)
ref.compose_restore_dialog(i, html_mode);
};
for (i = j || 0; i < index.length; i++) {
key = index[i];
formdata = this.local_storage_get_item('compose.' + key, null, true);
if (!formdata) {
continue;
}
// restore saved copy of current compose_id
if (formdata.changed && key == this.env.compose_id) {
this.restore_compose_form(key, html_mode);
break;
}
// skip records from 'other' drafts
if (this.env.draft_id && formdata.draft_id && formdata.draft_id != this.env.draft_id) {
continue;
}
// skip records on reply
if (this.env.reply_msgid && formdata.reply_msgid != this.env.reply_msgid) {
continue;
}
// show dialog asking to restore the message
if (formdata.changed && formdata.session != this.env.session_id) {
this.show_popup_dialog(
this.get_label('restoresavedcomposedata')
.replace('$date', new Date(formdata.changed).toLocaleString())
.replace('$subject', formdata._subject)
.replace(/\n/g, '<br/>'),
this.get_label('restoremessage'),
[{
text: this.get_label('restore'),
'class': 'mainaction',
click: function(){
ref.restore_compose_form(key, html_mode);
ref.remove_compose_data(key); // remove old copy
ref.save_compose_form_local(); // save under current compose_id
$(this).dialog('close');
}
},
{
text: this.get_label('delete'),
'class': 'delete',
click: function(){
ref.remove_compose_data(key);
$(this).dialog('close');
show_next(i);
}
},
{
text: this.get_label('ignore'),
click: function(){
$(this).dialog('close');
show_next(i);
}
}]
);
break;
}
}
};
this.init_address_input_events = function(obj, props)
{
this.env.recipients_delimiter = this.env.recipients_separator + ' ';
obj.keydown(function(e) { return ref.ksearch_keydown(e, this, props); })
.attr({ 'autocomplete': 'off', 'aria-autocomplete': 'list', 'aria-expanded': 'false', 'role': 'combobox' });
};
this.submit_messageform = function(draft, saveonly)
{
var form = this.gui_objects.messageform;
if (!form)
return;
// the message has been sent but not saved, ask the user what to do
if (!saveonly && this.env.is_sent) {
return this.show_popup_dialog(this.get_label('messageissent'), '',
[{
text: this.get_label('save'),
'class': 'mainaction',
click: function() {
ref.submit_messageform(false, true);
$(this).dialog('close');
}
},
{
text: this.get_label('cancel'),
click: function() {
$(this).dialog('close');
}
}]
);
}
// all checks passed, send message
var msgid = this.set_busy(true, draft || saveonly ? 'savingmessage' : 'sendingmessage'),
lang = this.spellcheck_lang(),
files = [];
// send files list
$('li', this.gui_objects.attachmentlist).each(function() { files.push(this.id.replace(/^rcmfile/, '')); });
$('input[name="_attachments"]', form).val(files.join());
form.target = 'savetarget';
form._draft.value = draft ? '1' : '';
form.action = this.add_url(form.action, '_unlock', msgid);
form.action = this.add_url(form.action, '_lang', lang);
form.action = this.add_url(form.action, '_framed', 1);
if (saveonly) {
form.action = this.add_url(form.action, '_saveonly', 1);
}
// register timer to notify about connection timeout
this.submit_timer = setTimeout(function(){
ref.set_busy(false, null, msgid);
ref.display_message(ref.get_label('requesttimedout'), 'error');
}, this.env.request_timeout * 1000);
form.submit();
};
this.compose_recipient_select = function(list)
{
var id, n, recipients = 0;
for (n=0; n < list.selection.length; n++) {
id = list.selection[n];
if (this.env.contactdata[id])
recipients++;
}
this.enable_command('add-recipient', recipients);
};
this.compose_add_recipient = function(field)
{
// find last focused field name
if (!field) {
field = $(this.env.focused_field).filter(':visible');
field = field.length ? field.attr('id').replace('_', '') : 'to';
}
var recipients = [], input = $('#_'+field), delim = this.env.recipients_delimiter;
if (this.contact_list && this.contact_list.selection.length) {
for (var id, n=0; n < this.contact_list.selection.length; n++) {
id = this.contact_list.selection[n];
if (id && this.env.contactdata[id]) {
recipients.push(this.env.contactdata[id]);
// group is added, expand it
if (id.charAt(0) == 'E' && this.env.contactdata[id].indexOf('@') < 0 && input.length) {
var gid = id.substr(1);
this.group2expand[gid] = { name:this.env.contactdata[id], input:input.get(0) };
this.http_request('group-expand', {_source: this.env.source, _gid: gid}, false);
}
}
}
}
if (recipients.length && input.length) {
var oldval = input.val(), rx = new RegExp(RegExp.escape(delim) + '\\s*$');
if (oldval && !rx.test(oldval))
oldval += delim + ' ';
input.val(oldval + recipients.join(delim + ' ') + delim + ' ');
this.triggerEvent('add-recipient', { field:field, recipients:recipients });
}
return recipients.length;
};
// checks the input fields before sending a message
this.check_compose_input = function(cmd)
{
// check input fields
var input_to = $("[name='_to']"),
input_cc = $("[name='_cc']"),
input_bcc = $("[name='_bcc']"),
input_from = $("[name='_from']"),
input_subject = $("[name='_subject']");
// check sender (if we have no identities)
if (input_from.prop('type') == 'text' && !rcube_check_email(input_from.val(), true)) {
alert(this.get_label('nosenderwarning'));
input_from.focus();
return false;
}
// check for empty recipient
var recipients = input_to.val() ? input_to.val() : (input_cc.val() ? input_cc.val() : input_bcc.val());
if (!rcube_check_email(recipients.replace(/^\s+/, '').replace(/[\s,;]+$/, ''), true)) {
alert(this.get_label('norecipientwarning'));
input_to.focus();
return false;
}
// check if all files have been uploaded
for (var key in this.env.attachments) {
if (typeof this.env.attachments[key] === 'object' && !this.env.attachments[key].complete) {
alert(this.get_label('notuploadedwarning'));
return false;
}
}
// display localized warning for missing subject
if (input_subject.val() == '') {
var buttons = {},
myprompt = $('<div class="prompt">').html('<div class="message">' + this.get_label('nosubjectwarning') + '</div>')
.appendTo(document.body),
prompt_value = $('<input>').attr({type: 'text', size: 30}).val(this.get_label('nosubject'))
.appendTo(myprompt),
save_func = function() {
input_subject.val(prompt_value.val());
myprompt.dialog('close');
ref.command(cmd, { nocheck:true }); // repeat command which triggered this
};
buttons[this.get_label('sendmessage')] = function() {
save_func($(this));
};
buttons[this.get_label('cancel')] = function() {
input_subject.focus();
$(this).dialog('close');
};
myprompt.dialog({
modal: true,
resizable: false,
buttons: buttons,
close: function(event, ui) { $(this).remove(); }
});
prompt_value.select().keydown(function(e) {
if (e.which == 13) save_func();
});
return false;
}
// check for empty body
if (!this.editor.get_content() && !confirm(this.get_label('nobodywarning'))) {
this.editor.focus();
return false;
}
// move body from html editor to textarea (just to be sure, #1485860)
this.editor.save();
return true;
};
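// Switches the compose body between plain-text and HTML mode via the editor wrapper
// and keeps the hidden _is_html flag in sync with the result.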
this.toggle_editor = function(props, obj, e)
{
// @todo: this should also work with multiple editors on a page
var result = this.editor.toggle(props.html, props.noconvert || false);
// satisfy the expectations of aftertoggle-editor event subscribers
props.mode = props.html ? 'html' : 'plain';
if (!result && e) {
// fix selector value if operation failed
props.mode = props.html ? 'plain' : 'html';
$(e.target).filter('select').val(props.mode);
}
if (result) {
// update internal format flag
$("input[name='_is_html']").val(props.html ? 1 : 0);
}
return result;
};
this.insert_response = function(key)
{
var insert = this.env.textresponses[key] ? this.env.textresponses[key].text : null;
if (!insert)
return false;
this.editor.replace(insert);
};
/**
* Open the dialog to save a new canned response
*/
this.save_response = function()
{
// show dialog to enter a name and to modify the text to be saved
var buttons = {}, text = this.editor.get_content({selection: true, format: 'text', nosig: true}),
html = '<form class="propform">' +
'<div class="prop block"><label>' + this.get_label('responsename') + '</label>' +
'<input type="text" name="name" id="ffresponsename" size="40" /></div>' +
'<div class="prop block"><label>' + this.get_label('responsetext') + '</label>' +
'<textarea name="text" id="ffresponsetext" cols="40" rows="8"></textarea></div>' +
'</form>';
buttons[this.gettext('save')] = function(e) {
var name = $('#ffresponsename').val(),
text = $('#ffresponsetext').val();
if (!text) {
$('#ffresponsetext').select();
return false;
}
if (!name)
name = text.substring(0,40);
var lock = ref.display_message(ref.get_label('savingresponse'), 'loading');
ref.http_post('settings/responses', { _insert:1, _name:name, _text:text }, lock);
$(this).dialog('close');
};
buttons[this.gettext('cancel')] = function() {
$(this).dialog('close');
};
this.show_popup_dialog(html, this.gettext('newresponse'), buttons, {button_classes: ['mainaction']});
$('#ffresponsetext').val(text);
$('#ffresponsename').select();
};
this.add_response_item = function(response)
{
var key = response.key;
this.env.textresponses[key] = response;
// append to responses list
if (this.gui_objects.responseslist) {
var li = $('<li>').appendTo(this.gui_objects.responseslist);
$('<a>').addClass('insertresponse active')
.attr('href', '#')
.attr('rel', key)
.attr('tabindex', '0')
.html(this.quote_html(response.name))
.appendTo(li)
.mousedown(function(e){
return rcube_event.cancel(e);
})
.bind('mouseup keypress', function(e){
if (e.type == 'mouseup' || rcube_event.get_keycode(e) == 13) {
ref.command('insert-response', $(this).attr('rel'));
$(document.body).trigger('mouseup'); // hides the menu
return rcube_event.cancel(e);
}
});
}
};
this.edit_responses = function()
{
// TODO: implement inline editing of responses
};
this.delete_response = function(key)
{
if (!key && this.responses_list) {
var selection = this.responses_list.get_selection();
key = selection[0];
}
// submit delete request
if (key && confirm(this.get_label('deleteresponseconfirm'))) {
this.http_post('settings/delete-response', { _key: key }, false);
}
};
// updates spellchecker buttons on state change
this.spellcheck_state = function()
{
var active = this.editor.spellcheck_state();
$.each(this.buttons.spellcheck || [], function(i, v) {
$('#' + v.id)[active ? 'addClass' : 'removeClass']('selected');
});
return active;
};
// get selected language
this.spellcheck_lang = function()
{
return this.editor.get_language();
};
this.spellcheck_lang_set = function(lang)
{
this.editor.set_language(lang);
};
// resume spellchecking, highlight provided misspellings without a new ajax request
this.spellcheck_resume = function(data)
{
this.editor.spellcheck_resume(data);
};
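// Remembers the UID of the saved draft, triggers a Drafts-folder refresh in the opener
// window and drops the locally stored copy of the compose form.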
this.set_draft_id = function(id)
{
if (id && id != this.env.draft_id) {
var filter = {task: 'mail', action: ''},
rc = this.opener(false, filter) || this.opener(true, filter);
// refresh the drafts folder in the opener window
if (rc && rc.env.mailbox == this.env.drafts_mailbox)
rc.command('checkmail');
this.env.draft_id = id;
$("input[name='_draft_saveid']").val(id);
// reset history of hidden iframe used for saving draft (#1489643)
// but don't do this on timer-triggered draft-autosaving (#1489789)
if (window.frames['savetarget'] && window.frames['savetarget'].history && !this.draft_autosave_submit) {
window.frames['savetarget'].history.back();
}
this.draft_autosave_submit = false;
}
// always remove local copy upon saving as draft
this.remove_compose_data(this.env.compose_id);
this.compose_skip_unsavedcheck = false;
};
this.auto_save_start = function()
{
if (this.env.draft_autosave) {
this.draft_autosave_submit = false;
this.save_timer = setTimeout(function(){
ref.draft_autosave_submit = true; // set auto-saved flag (#1489789)
ref.command("savedraft");
}, this.env.draft_autosave * 1000);
}
// save compose form content to local storage every 5 seconds
if (!this.local_save_timer && window.localStorage && this.env.save_localstorage) {
// track typing activity and only save on changes
this.compose_type_activity = this.compose_type_activity_last = 0;
$(document).bind('keypress', function(e){ ref.compose_type_activity++; });
this.local_save_timer = setInterval(function(){
if (ref.compose_type_activity > ref.compose_type_activity_last) {
ref.save_compose_form_local();
ref.compose_type_activity_last = ref.compose_type_activity;
}
}, 5000);
$(window).unload(function() {
// remove copy from local storage if compose screen is left after warning
if (!ref.env.server_error)
ref.remove_compose_data(ref.env.compose_id);
});
}
// check for unsaved changes before leaving the compose page
if (!window.onbeforeunload) {
window.onbeforeunload = function() {
if (!ref.compose_skip_unsavedcheck && ref.cmp_hash != ref.compose_field_hash()) {
return ref.get_label('notsentwarning');
}
};
}
// Unlock interface now that saving is complete
this.busy = false;
};
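// Builds a fingerprint of the recipient/subject fields, the editor content and the
// attachment ids; compared against this.cmp_hash to detect unsaved changes.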
this.compose_field_hash = function(save)
{
// collect the values of the relevant input fields
var i, id, val, str = '', hash_fields = ['to', 'cc', 'bcc', 'subject'];
for (i=0; i<hash_fields.length; i++)
if (val = $('[name="_' + hash_fields[i] + '"]').val())
str += val + ':';
str += this.editor.get_content({refresh: false});
if (this.env.attachments)
for (id in this.env.attachments)
str += id;
if (save)
this.cmp_hash = str;
return str;
};
// store the contents of the compose form to localstorage
this.save_compose_form_local = function()
{
// feature is disabled
if (!this.env.save_localstorage)
return;
var formdata = { session:this.env.session_id, changed:new Date().getTime() },
ed, empty = true;
// get fresh content from editor
this.editor.save();
if (this.env.draft_id) {
formdata.draft_id = this.env.draft_id;
}
if (this.env.reply_msgid) {
formdata.reply_msgid = this.env.reply_msgid;
}
$('input, select, textarea', this.gui_objects.messageform).each(function(i, elem) {
switch (elem.tagName.toLowerCase()) {
case 'input':
if (elem.type == 'button' || elem.type == 'submit' || (elem.type == 'hidden' && elem.name != '_is_html')) {
break;
}
formdata[elem.name] = elem.type != 'checkbox' || elem.checked ? $(elem).val() : '';
if (formdata[elem.name] != '' && elem.type != 'hidden')
empty = false;
break;
case 'select':
formdata[elem.name] = $('option:checked', elem).val();
break;
default:
formdata[elem.name] = $(elem).val();
if (formdata[elem.name] != '')
empty = false;
}
});
if (!empty) {
var index = this.local_storage_get_item('compose.index', []),
key = this.env.compose_id;
if ($.inArray(key, index) < 0) {
index.push(key);
}
this.local_storage_set_item('compose.' + key, formdata, true);
this.local_storage_set_item('compose.index', index);
}
};
// write stored compose data back to form
this.restore_compose_form = function(key, html_mode)
{
var ed, formdata = this.local_storage_get_item('compose.' + key, true);
if (formdata && typeof formdata == 'object') {
$.each(formdata, function(k, value) {
if (k[0] == '_') {
var elem = $("*[name='"+k+"']");
if (elem[0] && elem[0].type == 'checkbox') {
elem.prop('checked', value != '');
}
else {
elem.val(value);
}
}
});
// initialize HTML editor
if ((formdata._is_html == '1' && !html_mode) || (formdata._is_html != '1' && html_mode)) {
this.command('toggle-editor', {id: this.env.composebody, html: !html_mode, noconvert: true});
}
}
};
// remove stored compose data from localStorage
this.remove_compose_data = function(key)
{
var index = this.local_storage_get_item('compose.index', []);
if ($.inArray(key, index) >= 0) {
this.local_storage_remove_item('compose.' + key);
this.local_storage_set_item('compose.index', $.grep(index, function(val,i) { return val != key; }));
}
};
// clear all stored compose data of this user
this.clear_compose_data = function()
{
var i, index = this.local_storage_get_item('compose.index', []);
for (i=0; i < index.length; i++) {
this.local_storage_remove_item('compose.' + index[i]);
}
this.local_storage_remove_item('compose.index');
};
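// Handler for the identity (From) selector: toggles manual signature insertion and
// rewrites the Reply-To/Bcc fields with the addresses configured for the selected
// identity before asking the editor to swap the signature.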
this.change_identity = function(obj, show_sig)
{
if (!obj || !obj.options)
return false;
if (!show_sig)
show_sig = this.env.show_sig;
var id = obj.options[obj.selectedIndex].value,
sig = this.env.identity,
delim = this.env.recipients_separator,
rx_delim = RegExp.escape(delim);
// enable manual signature insert
if (this.env.signatures && this.env.signatures[id]) {
this.enable_command('insert-sig', true);
this.env.compose_commands.push('insert-sig');
}
else
this.enable_command('insert-sig', false);
// first function execution
if (!this.env.identities_initialized) {
this.env.identities_initialized = true;
if (this.env.show_sig_later)
this.env.show_sig = true;
if (this.env.opened_extwin)
return;
}
// update reply-to/bcc fields with addresses defined in identities
$.each(['replyto', 'bcc'], function() {
var rx, key = this,
old_val = sig && ref.env.identities[sig] ? ref.env.identities[sig][key] : '',
new_val = id && ref.env.identities[id] ? ref.env.identities[id][key] : '',
input = $('[name="_'+key+'"]'), input_val = input.val();
// remove old address(es)
if (old_val && input_val) {
rx = new RegExp('\\s*' + RegExp.escape(old_val) + '\\s*');
input_val = input_val.replace(rx, '');
}
// cleanup
rx = new RegExp(rx_delim + '\\s*' + rx_delim, 'g');
input_val = String(input_val).replace(rx, delim);
rx = new RegExp('^[\\s' + rx_delim + ']+');
input_val = input_val.replace(rx, '');
// add new address(es)
if (new_val && input_val.indexOf(new_val) == -1 && input_val.indexOf(new_val.replace(/"/g, '')) == -1) {
if (input_val) {
rx = new RegExp('[' + rx_delim + '\\s]+$');
input_val = input_val.replace(rx, '') + delim + ' ';
}
input_val += new_val + delim + ' ';
}
if (old_val || new_val)
input.val(input_val).change();
});
this.editor.change_signature(id, show_sig);
this.env.identity = id;
this.triggerEvent('change_identity');
return true;
};
// upload (attachment) file
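// Counts the selected files, checks the configured size limit when the File API is
// available, and posts the form to a hidden iframe whose onload callback reports
// upload errors; a progress indicator and cancel link are added to the attachment list.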
this.upload_file = function(form, action)
{
if (!form)
return;
// count files and size on capable browser
var size = 0, numfiles = 0;
$('input[type=file]', form).each(function(i, field) {
var files = field.files ? field.files.length : (field.value ? 1 : 0);
// check file size
if (field.files) {
for (var i=0; i < files; i++)
size += field.files[i].size;
}
numfiles += files;
});
// create hidden iframe and post upload form
if (numfiles) {
if (this.env.max_filesize && this.env.filesizeerror && size > this.env.max_filesize) {
this.display_message(this.env.filesizeerror, 'error');
return false;
}
var frame_name = this.async_upload_form(form, action || 'upload', function(e) {
var d, content = '';
try {
if (this.contentDocument) {
d = this.contentDocument;
} else if (this.contentWindow) {
d = this.contentWindow.document;
}
content = d.childNodes[1].innerHTML;
} catch (err) {}
if (!content.match(/add2attachment/) && (!bw.opera || (ref.env.uploadframe && ref.env.uploadframe == e.data.ts))) {
if (!content.match(/display_message/))
ref.display_message(ref.get_label('fileuploaderror'), 'error');
ref.remove_from_attachment_list(e.data.ts);
}
// Opera hack: handle double onload
if (bw.opera)
ref.env.uploadframe = e.data.ts;
});
// display upload indicator and cancel button
var content = '<span>' + this.get_label('uploading' + (numfiles > 1 ? 'many' : '')) + '</span>',
ts = frame_name.replace(/^rcmupload/, '');
this.add2attachment_list(ts, { name:'', html:content, classname:'uploading', frame:frame_name, complete:false });
// upload progress support
if (this.env.upload_progress_time) {
this.upload_progress_start('upload', ts);
}
// set reference to the form object
this.gui_objects.attachmentform = form;
return true;
}
};
// add file name to attachment list
// called from upload page
this.add2attachment_list = function(name, att, upload_id)
{
if (upload_id)
this.triggerEvent('fileuploaded', {name: name, attachment: att, id: upload_id});
if (!this.env.attachments)
this.env.attachments = {};
if (upload_id && this.env.attachments[upload_id])
delete this.env.attachments[upload_id];
this.env.attachments[name] = att;
if (!this.gui_objects.attachmentlist)
return false;
if (!att.complete && this.env.loadingicon)
att.html = '<img src="'+this.env.loadingicon+'" alt="" class="uploading" />' + att.html;
if (!att.complete && att.frame)
att.html = '<a title="'+this.get_label('cancel')+'" onclick="return rcmail.cancel_attachment_upload(\''+name+'\', \''+att.frame+'\');" href="#cancelupload" class="cancelupload">'
+ (this.env.cancelicon ? '<img src="'+this.env.cancelicon+'" alt="'+this.get_label('cancel')+'" />' : this.get_label('cancel')) + '</a>' + att.html;
var indicator, li = $('<li>');
li.attr('id', name)
.addClass(att.classname)
.html(att.html)
.on('mouseover', function() { rcube_webmail.long_subject_title_ex(this); });
// replace indicator's li
if (upload_id && (indicator = document.getElementById(upload_id))) {
li.replaceAll(indicator);
}
else { // add new li
li.appendTo(this.gui_objects.attachmentlist);
}
// set tabindex attribute
var tabindex = $(this.gui_objects.attachmentlist).attr('data-tabindex') || '0';
li.find('a').attr('tabindex', tabindex);
return true;
};
this.remove_from_attachment_list = function(name)
{
if (this.env.attachments) {
delete this.env.attachments[name];
$('#'+name).remove();
}
};
this.remove_attachment = function(name)
{
if (name && this.env.attachments[name])
this.http_post('remove-attachment', { _id:this.env.compose_id, _file:name });
return true;
};
this.cancel_attachment_upload = function(name, frame_name)
{
if (!name || !frame_name)
return false;
this.remove_from_attachment_list(name);
$("iframe[name='"+frame_name+"']").remove();
return false;
};
this.upload_progress_start = function(action, name)
{
setTimeout(function() { ref.http_request(action, {_progress: name}); },
this.env.upload_progress_time * 1000);
};
this.upload_progress_update = function(param)
{
var elem = $('#'+param.name + ' > span');
if (!elem.length || !param.text)
return;
elem.text(param.text);
if (!param.done)
this.upload_progress_start(param.action, param.name);
};
// send remote request to add a new contact
this.add_contact = function(value)
{
if (value)
this.http_post('addcontact', {_address: value});
return true;
};
// send remote request to search mail or contacts
this.qsearch = function(value)
{
if (value != '') {
var r, lock = this.set_busy(true, 'searching'),
url = this.search_params(value),
action = this.env.action == 'compose' && this.contact_list ? 'search-contacts' : 'search';
if (this.message_list)
this.clear_message_list();
else if (this.contact_list)
this.list_contacts_clear();
if (this.env.source)
url._source = this.env.source;
if (this.env.group)
url._gid = this.env.group;
// reset vars
this.env.current_page = 1;
r = this.http_request(action, url, lock);
this.env.qsearch = {lock: lock, request: r};
this.enable_command('set-listmode', this.env.threads && (this.env.search_scope || 'base') == 'base');
return true;
}
return false;
};
this.continue_search = function(request_id)
{
var lock = this.set_busy(true, 'stillsearching');
setTimeout(function() {
var url = ref.search_params();
url._continue = request_id;
ref.env.qsearch = { lock: lock, request: ref.http_request('search', url, lock) };
}, 100);
};
// build URL params for search
this.search_params = function(search, filter)
{
var n, url = {}, mods_arr = [],
mods = this.env.search_mods,
scope = this.env.search_scope || 'base',
mbox = scope == 'all' ? '*' : this.env.mailbox;
if (!filter && this.gui_objects.search_filter)
filter = this.gui_objects.search_filter.value;
if (!search && this.gui_objects.qsearchbox)
search = this.gui_objects.qsearchbox.value;
if (filter)
url._filter = filter;
if (search) {
url._q = search;
if (mods && this.message_list)
mods = mods[mbox] || mods['*'];
if (mods) {
for (n in mods)
mods_arr.push(n);
url._headers = mods_arr.join(',');
}
}
if (scope)
url._scope = scope;
if (mbox && scope != 'all')
url._mbox = mbox;
return url;
};
// reset search filter
this.reset_search_filter = function()
{
this.filter_disabled = true;
if (this.gui_objects.search_filter)
$(this.gui_objects.search_filter).val('ALL').change();
this.filter_disabled = false;
};
// reset quick-search form
this.reset_qsearch = function(all)
{
if (this.gui_objects.qsearchbox)
this.gui_objects.qsearchbox.value = '';
if (this.env.qsearch)
this.abort_request(this.env.qsearch);
if (all) {
this.env.search_scope = 'base';
this.reset_search_filter();
}
this.env.qsearch = null;
this.env.search_request = null;
this.env.search_id = null;
this.enable_command('set-listmode', this.env.threads);
};
this.set_searchscope = function(scope)
{
var old = this.env.search_scope;
this.env.search_scope = scope;
// re-send search query with new scope
if (scope != old && this.env.search_request) {
if (!this.qsearch(this.gui_objects.qsearchbox.value) && this.env.search_filter && this.env.search_filter != 'ALL')
this.filter_mailbox(this.env.search_filter);
if (scope != 'all')
this.select_folder(this.env.mailbox, '', true);
}
};
this.set_searchmods = function(mods)
{
var mbox = this.env.mailbox,
scope = this.env.search_scope || 'base';
if (scope == 'all')
mbox = '*';
if (!this.env.search_mods)
this.env.search_mods = {};
if (mbox)
this.env.search_mods[mbox] = mods;
};
this.is_multifolder_listing = function()
{
return this.env.multifolder_listing !== undefined ? this.env.multifolder_listing :
(this.env.search_request && (this.env.search_scope || 'base') != 'base');
};
// action executed after mail is sent
this.sent_successfully = function(type, msg, folders, save_error)
{
this.display_message(msg, type);
this.compose_skip_unsavedcheck = true;
if (this.env.extwin) {
if (!save_error)
this.lock_form(this.gui_objects.messageform);
var filter = {task: 'mail', action: ''},
rc = this.opener(false, filter) || this.opener(true, filter);
if (rc) {
rc.display_message(msg, type);
// refresh the folder where the sent message was saved or where the replied-to message came from
if (folders && $.inArray(rc.env.mailbox, folders) >= 0) {
rc.command('checkmail');
}
}
if (!save_error)
setTimeout(function() { window.close(); }, 1000);
}
else if (!save_error) {
// before redirect we need to wait some time for Chrome (#1486177)
setTimeout(function() { ref.list_mailbox(); }, 500);
}
if (save_error)
this.env.is_sent = true;
};
/*********************************************************/
/********* keyboard live-search methods *********/
/*********************************************************/
// handler for keyboard events on address-fields
this.ksearch_keydown = function(e, obj, props)
{
if (this.ksearch_timer)
clearTimeout(this.ksearch_timer);
var key = rcube_event.get_keycode(e),
mod = rcube_event.get_modifier(e);
switch (key) {
case 38: // arrow up
case 40: // arrow down
if (!this.ksearch_visible())
return;
var dir = key == 38 ? 1 : 0,
highlight = document.getElementById('rcmkSearchItem' + this.ksearch_selected);
if (!highlight)
highlight = this.ksearch_pane.__ul.firstChild;
if (highlight)
this.ksearch_select(dir ? highlight.previousSibling : highlight.nextSibling);
return rcube_event.cancel(e);
case 9: // tab
if (mod == SHIFT_KEY || !this.ksearch_visible()) {
this.ksearch_hide();
return;
}
case 13: // enter
if (!this.ksearch_visible())
return false;
// insert selected address and hide ksearch pane
this.insert_recipient(this.ksearch_selected);
this.ksearch_hide();
return rcube_event.cancel(e);
case 27: // escape
this.ksearch_hide();
return;
case 37: // left
case 39: // right
return;
}
// start timer
this.ksearch_timer = setTimeout(function(){ ref.ksearch_get_results(props); }, 200);
this.ksearch_input = obj;
return true;
};
this.ksearch_visible = function()
{
return this.ksearch_selected !== null && this.ksearch_selected !== undefined && this.ksearch_value;
};
this.ksearch_select = function(node)
{
if (this.ksearch_pane && node) {
this.ksearch_pane.find('li.selected').removeClass('selected').removeAttr('aria-selected');
}
if (node) {
$(node).addClass('selected').attr('aria-selected', 'true');
this.ksearch_selected = node._rcm_id;
$(this.ksearch_input).attr('aria-activedescendant', 'rcmkSearchItem' + this.ksearch_selected);
}
};
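// Replaces the currently typed search term with the selected autocomplete entry; for
// contact groups the group name is inserted first and a group-expand request fetches
// the member addresses asynchronously (see replace_group_recipients()).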
this.insert_recipient = function(id)
{
if (id === null || !this.env.contacts[id] || !this.ksearch_input)
return;
// get cursor pos
var inp_value = this.ksearch_input.value,
cpos = this.get_caret_pos(this.ksearch_input),
p = inp_value.lastIndexOf(this.ksearch_value, cpos),
trigger = false,
insert = '',
// replace search string with full address
pre = inp_value.substring(0, p),
end = inp_value.substring(p+this.ksearch_value.length, inp_value.length);
this.ksearch_destroy();
// insert all members of a group
if (typeof this.env.contacts[id] === 'object' && this.env.contacts[id].type == 'group' && !this.env.contacts[id].email) {
insert += this.env.contacts[id].name + this.env.recipients_delimiter;
this.group2expand[this.env.contacts[id].id] = $.extend({ input: this.ksearch_input }, this.env.contacts[id]);
this.http_request('mail/group-expand', {_source: this.env.contacts[id].source, _gid: this.env.contacts[id].id}, false);
}
else if (typeof this.env.contacts[id] === 'object' && this.env.contacts[id].name) {
insert = this.env.contacts[id].name + this.env.recipients_delimiter;
trigger = true;
}
else if (typeof this.env.contacts[id] === 'string') {
insert = this.env.contacts[id] + this.env.recipients_delimiter;
trigger = true;
}
this.ksearch_input.value = pre + insert + end;
// set caret to insert pos
this.set_caret_pos(this.ksearch_input, p + insert.length);
if (trigger) {
this.triggerEvent('autocomplete_insert', { field:this.ksearch_input, insert:insert, data:this.env.contacts[id] });
this.compose_type_activity++;
}
};
this.replace_group_recipients = function(id, recipients)
{
if (this.group2expand[id]) {
this.group2expand[id].input.value = this.group2expand[id].input.value.replace(this.group2expand[id].name, recipients);
this.triggerEvent('autocomplete_insert', { field:this.group2expand[id].input, insert:recipients });
this.group2expand[id] = null;
this.compose_type_activity++;
}
};
// address search processor
this.ksearch_get_results = function(props)
{
var inp_value = this.ksearch_input ? this.ksearch_input.value : null;
if (inp_value === null)
return;
if (this.ksearch_pane && this.ksearch_pane.is(":visible"))
this.ksearch_pane.hide();
// get string between the last recipient separator and the current cursor pos
var cpos = this.get_caret_pos(this.ksearch_input),
p = inp_value.lastIndexOf(this.env.recipients_separator, cpos-1),
q = inp_value.substring(p+1, cpos),
min = this.env.autocomplete_min_length,
data = this.ksearch_data;
// trim query string
q = $.trim(q);
// Don't (re-)search if the last results are still active
if (q == this.ksearch_value)
return;
this.ksearch_destroy();
if (q.length && q.length < min) {
if (!this.ksearch_info) {
this.ksearch_info = this.display_message(
this.get_label('autocompletechars').replace('$min', min));
}
return;
}
var old_value = this.ksearch_value;
this.ksearch_value = q;
// ...string is empty
if (!q.length)
return;
// ...new search value contains old one and previous search was not finished or its result was empty
if (old_value && old_value.length && q.startsWith(old_value) && (!data || data.num <= 0) && this.env.contacts && !this.env.contacts.length)
return;
var sources = props && props.sources ? props.sources : [''];
var reqid = this.multi_thread_http_request({
items: sources,
threads: props && props.threads ? props.threads : 1,
action: props && props.action ? props.action : 'mail/autocomplete',
postdata: { _search:q, _source:'%s' },
lock: this.display_message(this.get_label('searching'), 'loading')
});
this.ksearch_data = { id:reqid, sources:sources.slice(), num:sources.length };
};
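// Callback for autocomplete results; with parallel (multi-threaded) autocompletion it
// may run once per addressbook source and appends to the already displayed pane
// instead of rebuilding it.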
this.ksearch_query_results = function(results, search, reqid)
{
// trigger multi-thread http response callback
this.multi_thread_http_response(results, reqid);
// search stopped in meantime?
if (!this.ksearch_value)
return;
// ignore this outdated search response
if (this.ksearch_input && search != this.ksearch_value)
return;
// display search results
var i, id, len, ul, text, type, init,
value = this.ksearch_value,
maxlen = this.env.autocomplete_max ? this.env.autocomplete_max : 15;
// create results pane if not present
if (!this.ksearch_pane) {
ul = $('<ul>');
this.ksearch_pane = $('<div>').attr('id', 'rcmKSearchpane').attr('role', 'listbox')
.css({ position:'absolute', 'z-index':30000 }).append(ul).appendTo(document.body);
this.ksearch_pane.__ul = ul[0];
}
ul = this.ksearch_pane.__ul;
// remove all search results or add to existing list if parallel search
if (reqid && this.ksearch_pane.data('reqid') == reqid) {
maxlen -= ul.childNodes.length;
}
else {
this.ksearch_pane.data('reqid', reqid);
init = 1;
// reset content
ul.innerHTML = '';
this.env.contacts = [];
// move the results pane right under the input box
var pos = $(this.ksearch_input).offset();
this.ksearch_pane.css({ left:pos.left+'px', top:(pos.top + this.ksearch_input.offsetHeight)+'px', display: 'none'});
}
// add each result line to list
if (results && (len = results.length)) {
for (i=0; i < len && maxlen > 0; i++) {
text = typeof results[i] === 'object' ? (results[i].display || results[i].name) : results[i];
type = typeof results[i] === 'object' ? results[i].type : '';
id = i + this.env.contacts.length;
$('<li>').attr('id', 'rcmkSearchItem' + id)
.attr('role', 'option')
.html('<i class="icon"></i>' + this.quote_html(text.replace(new RegExp('('+RegExp.escape(value)+')', 'ig'), '##$1%%')).replace(/##([^%]+)%%/g, '<b>$1</b>'))
.addClass(type || '')
.appendTo(ul)
.mouseover(function() { ref.ksearch_select(this); })
.mouseup(function() { ref.ksearch_click(this); })
.get(0)._rcm_id = id;
maxlen -= 1;
}
}
if (ul.childNodes.length) {
// set the right aria-* attributes to the input field
$(this.ksearch_input)
.attr('aria-haspopup', 'true')
.attr('aria-expanded', 'true')
.attr('aria-owns', 'rcmKSearchpane');
this.ksearch_pane.show();
// select the first
if (!this.env.contacts.length) {
this.ksearch_select($('li:first', ul).get(0));
}
}
if (len)
this.env.contacts = this.env.contacts.concat(results);
if (this.ksearch_data.id == reqid)
this.ksearch_data.num--;
};
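// insert the clicked autocomplete suggestion into the recipients field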
this.ksearch_click = function(node)
{
if (this.ksearch_input)
this.ksearch_input.focus();
this.insert_recipient(node._rcm_id);
this.ksearch_hide();
};
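// handle blur on the autocomplete input: cancel pending search and hide the pane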
this.ksearch_blur = function()
{
if (this.ksearch_timer)
clearTimeout(this.ksearch_timer);
this.ksearch_input = null;
this.ksearch_hide();
};
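// hide the autocomplete pane and reset its state and ARIA attributes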
this.ksearch_hide = function()
{
this.ksearch_selected = null;
this.ksearch_value = '';
if (this.ksearch_pane)
this.ksearch_pane.hide();
$(this.ksearch_input)
.attr('aria-haspopup', 'false')
.attr('aria-expanded', 'false')
.removeAttr('aria-activedescendant')
.removeAttr('aria-owns');
this.ksearch_destroy();
};
// Clears autocomplete data/requests
this.ksearch_destroy = function()
{
if (this.ksearch_data)
this.multi_thread_request_abort(this.ksearch_data.id);
if (this.ksearch_info)
this.hide_message(this.ksearch_info);
if (this.ksearch_msg)
this.hide_message(this.ksearch_msg);
this.ksearch_data = null;
this.ksearch_info = null;
this.ksearch_msg = null;
};
/*********************************************************/
/********* address book methods *********/
/*********************************************************/
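// handle keyboard events on the contacts list (DELETE key removes selected contacts)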
this.contactlist_keypress = function(list)
{
if (list.key_pressed == list.DELETE_KEY)
this.command('delete');
};
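// handle selection changes in the contacts list and update the enabled commands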
this.contactlist_select = function(list)
{
if (this.preview_timer)
clearTimeout(this.preview_timer);
var n, id, sid, contact, writable = false,
selected = list.selection.length,
source = this.env.source ? this.env.address_sources[this.env.source] : null;
// we don't have a dblclick handler here, so use 200 instead of this.dblclick_time
if (this.env.contentframe && (id = list.get_single_selection()))
this.preview_timer = setTimeout(function(){ ref.load_contact(id, 'show'); }, 200);
else if (this.env.contentframe)
this.show_contentframe(false);
if (selected) {
list.draggable = false;
// no source = search result, we'll need to detect if any of
// selected contacts are in writable addressbook to enable edit/delete
// we'll also need to know sources used in selection for copy
// and group-addmember operations (drag&drop)
this.env.selection_sources = [];
if (source) {
this.env.selection_sources.push(this.env.source);
}
for (n in list.selection) {
contact = list.data[list.selection[n]];
if (!source) {
sid = String(list.selection[n]).replace(/^[^-]+-/, '');
if (sid && this.env.address_sources[sid]) {
writable = writable || (!this.env.address_sources[sid].readonly && !contact.readonly);
this.env.selection_sources.push(sid);
}
}
else {
writable = writable || (!source.readonly && !contact.readonly);
}
if (contact._type != 'group')
list.draggable = true;
}
this.env.selection_sources = $.unique(this.env.selection_sources);
}
// if a group is currently selected and there is at least one contact selected,
// then we can enable the group-remove-selected command
this.enable_command('group-remove-selected', this.env.group && selected && writable);
this.enable_command('compose', this.env.group || selected);
this.enable_command('print', selected == 1);
this.enable_command('export-selected', 'copy', selected > 0);
this.enable_command('edit', id && writable);
this.enable_command('delete', 'move', selected && writable);
return false;
};
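// list contacts of the given source/group/page (in the list widget or the content frame)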
this.list_contacts = function(src, group, page)
{
var win, folder, url = {},
refresh = src === undefined && group === undefined && page === undefined,
target = window;
if (!src)
src = this.env.source;
if (refresh)
group = this.env.group;
if (page && this.current_page == page && src == this.env.source && group == this.env.group)
return false;
if (src != this.env.source) {
page = this.env.current_page = 1;
this.reset_qsearch();
}
else if (!refresh && group != this.env.group)
page = this.env.current_page = 1;
if (this.env.search_id)
folder = 'S'+this.env.search_id;
else if (!this.env.search_request)
folder = group ? 'G'+src+group : src;
this.env.source = src;
this.env.group = group;
// truncate groups listing stack
var index = $.inArray(this.env.group, this.env.address_group_stack);
if (index < 0)
this.env.address_group_stack = [];
else
this.env.address_group_stack = this.env.address_group_stack.slice(0,index);
// make sure the current group is on top of the stack
if (this.env.group) {
this.env.address_group_stack.push(this.env.group);
// mark the first group on the stack as selected in the directory list
folder = 'G'+src+this.env.address_group_stack[0];
}
else if (this.gui_objects.addresslist_title) {
$(this.gui_objects.addresslist_title).html(this.get_label('contacts'));
}
if (!this.env.search_id)
this.select_folder(folder, '', true);
// load contacts remotely
if (this.gui_objects.contactslist) {
this.list_contacts_remote(src, group, page);
return;
}
if (win = this.get_frame_window(this.env.contentframe)) {
target = win;
url._framed = 1;
}
if (group)
url._gid = group;
if (page)
url._page = page;
if (src)
url._source = src;
// also send search request to get the correct listing
if (this.env.search_request)
url._search = this.env.search_request;
this.set_busy(true, 'loading');
this.location_href(url, target);
};
// send remote request to load contacts list
this.list_contacts_remote = function(src, group, page)
{
// clear message list first
this.list_contacts_clear();
// send request to server
var url = {}, lock = this.set_busy(true, 'loading');
if (src)
url._source = src;
if (page)
url._page = page;
if (group)
url._gid = group;
this.env.source = src;
this.env.group = group;
// also send search request to get the right records
if (this.env.search_request)
url._search = this.env.search_request;
this.http_request(this.env.task == 'mail' ? 'list-contacts' : 'list', url, lock);
};
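// clear the contacts list view and disable list-related commands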
this.list_contacts_clear = function()
{
this.contact_list.data = {};
this.contact_list.clear(true);
this.show_contentframe(false);
this.enable_command('delete', 'move', 'copy', 'print', false);
this.enable_command('compose', this.env.group);
};
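// update the address list title and parent-group link for the currently listed group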
this.set_group_prop = function(prop)
{
if (this.gui_objects.addresslist_title) {
var boxtitle = $(this.gui_objects.addresslist_title).html(''); // clear contents
// add link to pop back to parent group
if (this.env.address_group_stack.length > 1) {
$('<a href="#list">...</a>')
.attr('title', this.gettext('uponelevel'))
.addClass('poplink')
.appendTo(boxtitle)
.click(function(e){ return ref.command('popgroup','',this); });
boxtitle.append(' » ');
}
boxtitle.append($('<span>').text(prop.name));
}
this.triggerEvent('groupupdate', prop);
};
// load contact record
this.load_contact = function(cid, action, framed)
{
var win, url = {}, target = window,
rec = this.contact_list ? this.contact_list.data[cid] : null;
if (win = this.get_frame_window(this.env.contentframe)) {
url._framed = 1;
target = win;
this.show_contentframe(true);
// load dummy content, unselect selected row(s)
if (!cid)
this.contact_list.clear_selection();
this.enable_command('compose', rec && rec.email);
this.enable_command('export-selected', 'print', rec && rec._type != 'group');
}
else if (framed)
return false;
if (action && (cid || action == 'add') && !this.drag_active) {
if (this.env.group)
url._gid = this.env.group;
if (this.env.search_request)
url._search = this.env.search_request;
url._action = action;
url._source = this.env.source;
url._cid = cid;
this.location_href(url, target, true);
}
return true;
};
// add/delete member to/from the group
this.group_member_change = function(what, cid, source, gid)
{
if (what != 'add')
what = 'del';
var label = this.get_label(what == 'add' ? 'addingmember' : 'removingmember'),
lock = this.display_message(label, 'loading'),
post_data = {_cid: cid, _source: source, _gid: gid};
this.http_post('group-'+what+'members', post_data, lock);
};
this.contacts_drag_menu = function(e, to)
{
var dest = to.type == 'group' ? to.source : to.id,
source = this.env.source;
if (!this.env.address_sources[dest] || this.env.address_sources[dest].readonly)
return true;
// search result may contain contacts from many sources, but if there is only one...
if (source == '' && this.env.selection_sources.length == 1)
source = this.env.selection_sources[0];
if (to.type == 'group' && dest == source) {
var cid = this.contact_list.get_selection().join(',');
this.group_member_change('add', cid, dest, to.id);
return true;
}
// move action is not possible, "redirect" to copy if menu wasn't requested
else if (!this.commands.move && rcube_event.get_modifier(e) != SHIFT_KEY) {
this.copy_contacts(to);
return true;
}
return this.drag_menu(e, to);
};
// copy contact(s) to the specified target (group or directory)
this.copy_contacts = function(to)
{
var dest = to.type == 'group' ? to.source : to.id,
source = this.env.source,
group = this.env.group ? this.env.group : '',
cid = this.contact_list.get_selection().join(',');
if (!cid || !this.env.address_sources[dest] || this.env.address_sources[dest].readonly)
return;
// search result may contain contacts from many sources, but if there is only one...
if (source == '' && this.env.selection_sources.length == 1)
source = this.env.selection_sources[0];
// target is a group
if (to.type == 'group') {
if (dest == source)
return;
var lock = this.display_message(this.get_label('copyingcontact'), 'loading'),
post_data = {_cid: cid, _source: this.env.source, _to: dest, _togid: to.id, _gid: group};
this.http_post('copy', post_data, lock);
}
// target is an addressbook
else if (to.id != source) {
var lock = this.display_message(this.get_label('copyingcontact'), 'loading'),
post_data = {_cid: cid, _source: this.env.source, _to: to.id, _gid: group};
this.http_post('copy', post_data, lock);
}
};
// move contact(s) to the specified target (group or directory)
this.move_contacts = function(to)
{
var dest = to.type == 'group' ? to.source : to.id,
source = this.env.source,
group = this.env.group ? this.env.group : '';
if (!this.env.address_sources[dest] || this.env.address_sources[dest].readonly)
return;
// search result may contain contacts from many sources, but if there is only one...
if (source == '' && this.env.selection_sources.length == 1)
source = this.env.selection_sources[0];
if (to.type == 'group') {
if (dest == source)
return;
this._with_selected_contacts('move', {_to: dest, _togid: to.id});
}
// target is an addressbook
else if (to.id != source)
this._with_selected_contacts('move', {_to: to.id});
};
// delete contact(s)
this.delete_contacts = function()
{
var undelete = this.env.source && this.env.address_sources[this.env.source].undelete;
if (!undelete && !confirm(this.get_label('deletecontactconfirm')))
return;
return this._with_selected_contacts('delete');
};
this._with_selected_contacts = function(action, post_data)
{
var selection = this.contact_list ? this.contact_list.get_selection() : [];
// exit if no contact specified or if selection is empty
if (!selection.length && !this.env.cid)
return;
var n, id, a_cids = [],
label = action == 'delete' ? 'contactdeleting' : 'movingcontact',
lock = this.display_message(this.get_label(label), 'loading');
if (this.env.cid)
a_cids.push(this.env.cid);
else {
for (n=0; n<selection.length; n++) {
id = selection[n];
a_cids.push(id);
this.contact_list.remove_row(id, (n == selection.length-1));
}
// hide content frame if we delete the currently displayed contact
if (selection.length == 1)
this.show_contentframe(false);
}
if (!post_data)
post_data = {};
post_data._source = this.env.source;
post_data._from = this.env.action;
post_data._cid = a_cids.join(',');
if (this.env.group)
post_data._gid = this.env.group;
// also send search request to get the right records from the next page
if (this.env.search_request)
post_data._search = this.env.search_request;
// send request to server
this.http_post(action, post_data, lock);
return true;
};
// update a contact record in the list
this.update_contact_row = function(cid, cols_arr, newcid, source, data)
{
var list = this.contact_list;
cid = this.html_identifier(cid);
// when in searching mode, concat cid with the source name
if (!list.rows[cid]) {
cid = cid + '-' + source;
if (newcid)
newcid = newcid + '-' + source;
}
list.update_row(cid, cols_arr, newcid, true);
list.data[cid] = data;
};
// add row to contacts list
this.add_contact_row = function(cid, cols, classes, data)
{
if (!this.gui_objects.contactslist)
return false;
var c, col, list = this.contact_list,
row = { cols:[] };
row.id = 'rcmrow' + this.html_identifier(cid);
row.className = 'contact ' + (classes || '');
if (list.in_selection(cid))
row.className += ' selected';
// add each submitted col
for (c in cols) {
col = {};
col.className = String(c).toLowerCase();
col.innerHTML = cols[c];
row.cols.push(col);
}
// store data in list member
list.data[cid] = data;
list.insert_row(row);
this.enable_command('export', list.rowcount > 0);
};
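// initialize the contact add/edit form (field placeholders, add/delete field menus, date pickers)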
this.init_contact_form = function()
{
var col;
if (this.env.coltypes) {
this.set_photo_actions($('#ff_photo').val());
for (col in this.env.coltypes)
this.init_edit_field(col, null);
}
$('.contactfieldgroup .row a.deletebutton').click(function() {
ref.delete_edit_field(this);
return false;
});
$('select.addfieldmenu').change(function() {
ref.insert_edit_field($(this).val(), $(this).attr('rel'), this);
this.selectedIndex = 0;
});
// enable date pickers on date fields
if ($.datepicker && this.env.date_format) {
$.datepicker.setDefaults({
dateFormat: this.env.date_format,
changeMonth: true,
changeYear: true,
yearRange: '-120:+10',
showOtherMonths: true,
selectOtherMonths: true
// onSelect: function(dateText) { $(this).focus().val(dateText); }
});
$('input.datepicker').datepicker();
}
// Submit search form on Enter
if (this.env.action == 'search')
$(this.gui_objects.editform).append($('<input type="submit">').hide())
.submit(function() { $('input.mainaction').click(); return false; });
};
// group creation dialog
this.group_create = function()
{
var input = $('<input>').attr('type', 'text'),
content = $('<label>').text(this.get_label('namex')).append(input);
this.show_popup_dialog(content, this.get_label('newgroup'),
[{
text: this.get_label('save'),
'class': 'mainaction',
click: function() {
var name;
if (name = input.val()) {
ref.http_post('group-create', {_source: ref.env.source, _name: name},
ref.set_busy(true, 'loading'));
}
$(this).dialog('close');
}
}]
);
};
// group rename dialog
this.group_rename = function()
{
if (!this.env.group)
return;
var group_name = this.env.contactgroups['G' + this.env.source + this.env.group].name,
input = $('<input>').attr('type', 'text').val(group_name),
content = $('<label>').text(this.get_label('namex')).append(input);
this.show_popup_dialog(content, this.get_label('grouprename'),
[{
text: this.get_label('save'),
'class': 'mainaction',
click: function() {
var name;
if ((name = input.val()) && name != group_name) {
ref.http_post('group-rename', {_source: ref.env.source, _gid: ref.env.group, _name: name},
ref.set_busy(true, 'loading'));
}
$(this).dialog('close');
}
}],
{open: function() { input.select(); }}
);
};
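// delete the currently selected contact group (with confirmation)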
this.group_delete = function()
{
if (this.env.group && confirm(this.get_label('deletegroupconfirm'))) {
var lock = this.set_busy(true, 'groupdeleting');
this.http_post('group-delete', {_source: this.env.source, _gid: this.env.group}, lock);
}
};
// callback from server upon group-delete command
this.remove_group_item = function(prop)
{
var key = 'G'+prop.source+prop.id;
if (this.treelist.remove(key)) {
this.triggerEvent('group_delete', { source:prop.source, id:prop.id });
delete this.env.contactfolders[key];
delete this.env.contactgroups[key];
}
this.list_contacts(prop.source, 0);
};
// remove selected contacts from the currently active group
this.group_remove_selected = function()
{
this.http_post('group-delmembers', {_cid: this.contact_list.selection,
_source: this.env.source, _gid: this.env.group});
};
// callback after deleting contact(s) from the current group
this.remove_group_contacts = function(props)
{
if (this.env.group !== undefined && (this.env.group === props.gid)) {
var n, id, selection = this.contact_list.get_selection();
for (n=0; n<selection.length; n++) {
id = selection[n];
this.contact_list.remove_row(id, (n == selection.length-1));
}
}
};
// callback for creating a new contact group
this.insert_contact_group = function(prop)
{
prop.type = 'group';
var key = 'G'+prop.source+prop.id,
link = $('<a>').attr('href', '#')
.attr('rel', prop.source+':'+prop.id)
.click(function() { return ref.command('listgroup', prop, this); })
.html(prop.name);
this.env.contactfolders[key] = this.env.contactgroups[key] = prop;
this.treelist.insert({ id:key, html:link, classes:['contactgroup'] }, prop.source, 'contactgroup');
this.triggerEvent('group_insert', { id:prop.id, source:prop.source, name:prop.name, li:this.treelist.get_item(key) });
};
// callback for renaming a contact group
this.update_contact_group = function(prop)
{
var key = 'G'+prop.source+prop.id,
newnode = {};
// group ID has changed, replace link node and identifiers
if (prop.newid) {
var newkey = 'G'+prop.source+prop.newid,
newprop = $.extend({}, prop);
this.env.contactfolders[newkey] = this.env.contactfolders[key];
this.env.contactfolders[newkey].id = prop.newid;
this.env.group = prop.newid;
delete this.env.contactfolders[key];
delete this.env.contactgroups[key];
newprop.id = prop.newid;
newprop.type = 'group';
newnode.id = newkey;
newnode.html = $('<a>').attr('href', '#')
.attr('rel', prop.source+':'+prop.newid)
.click(function() { return ref.command('listgroup', newprop, this); })
.html(prop.name);
}
// update displayed group name
else {
$(this.treelist.get_item(key)).children().first().html(prop.name);
this.env.contactfolders[key].name = this.env.contactgroups[key].name = prop.name;
}
// update list node and re-sort it
this.treelist.update(key, newnode, true);
this.triggerEvent('group_update', { id:prop.id, source:prop.source, name:prop.name, li:this.treelist.get_item(key), newid:prop.newid });
};
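// enable/disable group management commands according to the selected address source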
this.update_group_commands = function()
{
var source = this.env.source != '' ? this.env.address_sources[this.env.source] : null,
supported = source && source.groups && !source.readonly;
this.enable_command('group-create', supported);
this.enable_command('group-rename', 'group-delete', supported && this.env.group);
};
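// set the placeholder label on a contact edit field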
this.init_edit_field = function(col, elem)
{
var label = this.env.coltypes[col].label;
if (!elem)
elem = $('.ff_' + col);
if (label)
elem.placeholder(label);
};
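// show or dynamically create the input field(s) for the given contact property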
this.insert_edit_field = function(col, section, menu)
{
// just make pre-defined input field visible
var elem = $('#ff_'+col);
if (elem.length) {
elem.show().focus();
$(menu).children('option[value="'+col+'"]').prop('disabled', true);
}
else {
var lastelem = $('.ff_'+col),
appendcontainer = $('#contactsection'+section+' .contactcontroller'+col);
if (!appendcontainer.length) {
var sect = $('#contactsection'+section),
lastgroup = $('.contactfieldgroup', sect).last();
appendcontainer = $('<fieldset>').addClass('contactfieldgroup contactcontroller'+col);
if (lastgroup.length)
appendcontainer.insertAfter(lastgroup);
else
sect.prepend(appendcontainer);
}
if (appendcontainer.length && appendcontainer.get(0).nodeName == 'FIELDSET') {
var input, colprop = this.env.coltypes[col],
input_id = 'ff_' + col + (colprop.count || 0),
row = $('<div>').addClass('row'),
cell = $('<div>').addClass('contactfieldcontent data'),
label = $('<div>').addClass('contactfieldlabel label');
if (colprop.subtypes_select)
label.html(colprop.subtypes_select);
else
label.html('<label for="' + input_id + '">' + colprop.label + '</label>');
var name_suffix = colprop.limit != 1 ? '[]' : '';
if (colprop.type == 'text' || colprop.type == 'date') {
input = $('<input>')
.addClass('ff_'+col)
.attr({type: 'text', name: '_'+col+name_suffix, size: colprop.size, id: input_id})
.appendTo(cell);
this.init_edit_field(col, input);
if (colprop.type == 'date' && $.datepicker)
input.datepicker();
}
else if (colprop.type == 'textarea') {
input = $('<textarea>')
.addClass('ff_'+col)
.attr({ name: '_'+col+name_suffix, cols:colprop.size, rows:colprop.rows, id: input_id })
.appendTo(cell);
this.init_edit_field(col, input);
}
else if (colprop.type == 'composite') {
var i, childcol, cp, first, templ, cols = [], suffices = [];
// read template for composite field order
if ((templ = this.env[col+'_template'])) {
for (i=0; i < templ.length; i++) {
cols.push(templ[i][1]);
suffices.push(templ[i][2]);
}
}
else { // list fields according to appearance in colprop
for (childcol in colprop.childs)
cols.push(childcol);
}
for (i=0; i < cols.length; i++) {
childcol = cols[i];
cp = colprop.childs[childcol];
input = $('<input>')
.addClass('ff_'+childcol)
.attr({ type: 'text', name: '_'+childcol+name_suffix, size: cp.size })
.appendTo(cell);
cell.append(suffices[i] || " ");
this.init_edit_field(childcol, input);
if (!first) first = input;
}
input = first; // set focus to the first of this composite fields
}
else if (colprop.type == 'select') {
input = $('<select>')
.addClass('ff_'+col)
.attr({ 'name': '_'+col+name_suffix, id: input_id })
.appendTo(cell);
var options = input.attr('options');
options[options.length] = new Option('---', '');
if (colprop.options)
$.each(colprop.options, function(i, val){ options[options.length] = new Option(val, i); });
}
if (input) {
var delbutton = $('<a href="#del"></a>')
.addClass('contactfieldbutton deletebutton')
.attr({title: this.get_label('delete'), rel: col})
.html(this.env.delbutton)
.click(function() { ref.delete_edit_field(this); return false; })
.appendTo(cell);
row.append(label).append(cell).appendTo(appendcontainer.show());
input.first().focus();
// disable option if limit reached
if (!colprop.count) colprop.count = 0;
if (++colprop.count == colprop.limit && colprop.limit)
$(menu).children('option[value="'+col+'"]').prop('disabled', true);
}
}
}
};
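// remove (or just clear) a contact edit field row and re-enable its add-field option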
this.delete_edit_field = function(elem)
{
var col = $(elem).attr('rel'),
colprop = this.env.coltypes[col],
fieldset = $(elem).parents('fieldset.contactfieldgroup'),
addmenu = fieldset.parent().find('select.addfieldmenu');
// just clear input but don't hide the last field
if (--colprop.count <= 0 && colprop.visible)
$(elem).parent().children('input').val('').blur();
else {
$(elem).parents('div.row').remove();
// hide entire fieldset if no more rows
if (!fieldset.children('div.row').length)
fieldset.hide();
}
// enable option in add-field selector or insert it if necessary
if (addmenu.length) {
var option = addmenu.children('option[value="'+col+'"]');
if (option.length)
option.prop('disabled', false);
else
option = $('<option>').attr('value', col).html(colprop.label).appendTo(addmenu);
addmenu.show();
}
};
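// asynchronously submit the contact photo upload form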
this.upload_contact_photo = function(form)
{
if (form && form.elements._photo.value) {
this.async_upload_form(form, 'upload-photo', function(e) {
ref.set_busy(false, null, ref.file_upload_id);
});
// display upload indicator
this.file_upload_id = this.set_busy(true, 'uploading');
}
};
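// update the contact photo preview after upload or removal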
this.replace_contact_photo = function(id)
{
var img_src = id == '-del-' ? this.env.photo_placeholder :
this.env.comm_path + '&_action=photo&_source=' + this.env.source + '&_cid=' + (this.env.cid || 0) + '&_photo=' + id;
this.set_photo_actions(id);
$(this.gui_objects.contactphoto).children('img').attr('src', img_src);
};
this.photo_upload_end = function()
{
this.set_busy(false, null, this.file_upload_id);
delete this.file_upload_id;
};
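// update photo-related buttons and commands for the given photo id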
this.set_photo_actions = function(id)
{
var n, buttons = this.buttons['upload-photo'];
for (n=0; buttons && n < buttons.length; n++)
$('a#'+buttons[n].id).html(this.get_label(id == '-del-' ? 'addphoto' : 'replacephoto'));
$('#ff_photo').val(id);
this.enable_command('upload-photo', this.env.coltypes.photo ? true : false);
this.enable_command('delete-photo', this.env.coltypes.photo && id != '-del-');
};
// load advanced search page
this.advanced_search = function()
{
var win, url = {_form: 1, _action: 'search'}, target = window;
if (win = this.get_frame_window(this.env.contentframe)) {
url._framed = 1;
target = win;
this.contact_list.clear_selection();
}
this.location_href(url, target, true);
return true;
};
// unselect directory/group
this.unselect_directory = function()
{
this.select_folder('');
this.enable_command('search-delete', false);
};
// callback for creating a new saved search record
this.insert_saved_search = function(name, id)
{
var key = 'S'+id,
link = $('<a>').attr('href', '#')
.attr('rel', id)
.click(function() { return ref.command('listsearch', id, this); })
.html(name),
prop = { name:name, id:id };
this.savedsearchlist.insert({ id:key, html:link, classes:['contactsearch'] }, null, 'contactsearch');
this.select_folder(key,'',true);
this.enable_command('search-delete', true);
this.env.search_id = id;
this.triggerEvent('abook_search_insert', prop);
};
// open a dialog for saving the current search
this.search_create = function()
{
var input = $('<input>').attr('type', 'text'),
content = $('<label>').text(this.get_label('namex')).append(input);
this.show_popup_dialog(content, this.get_label('searchsave'),
[{
text: this.get_label('save'),
'class': 'mainaction',
click: function() {
var name;
if (name = input.val()) {
ref.http_post('search-create', {_search: ref.env.search_request, _name: name},
ref.set_busy(true, 'loading'));
}
$(this).dialog('close');
}
}]
);
};
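// delete the currently selected saved search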
this.search_delete = function()
{
if (this.env.search_request) {
var lock = this.set_busy(true, 'savedsearchdeleting');
this.http_post('search-delete', {_sid: this.env.search_id}, lock);
}
};
// callback from server upon search-delete command
this.remove_search_item = function(id)
{
var li, key = 'S'+id;
if (this.savedsearchlist.remove(key)) {
this.triggerEvent('search_delete', { id:id, li:li });
}
this.env.search_id = null;
this.env.search_request = null;
this.list_contacts_clear();
this.reset_qsearch();
this.enable_command('search-delete', 'search-create', false);
};
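// list contacts of a saved search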
this.listsearch = function(id)
{
var lock = this.set_busy(true, 'searching');
if (this.contact_list) {
this.list_contacts_clear();
}
this.reset_qsearch();
if (this.savedsearchlist) {
this.treelist.select('');
this.savedsearchlist.select('S'+id);
}
else
this.select_folder('S'+id, '', true);
// reset vars
this.env.current_page = 1;
this.http_request('search', {_sid: id}, lock);
};
/*********************************************************/
/********* user settings methods *********/
/*********************************************************/
// preferences section select and load options frame
this.section_select = function(list)
{
var win, id = list.get_single_selection(), target = window,
url = {_action: 'edit-prefs', _section: id};
if (id) {
if (win = this.get_frame_window(this.env.contentframe)) {
url._framed = 1;
target = win;
}
this.location_href(url, target, true);
}
return true;
};
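// handle selection in the identities list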
this.identity_select = function(list)
{
var id;
if (id = list.get_single_selection()) {
this.enable_command('delete', list.rowcount > 1 && this.env.identities_level < 2);
this.load_identity(id, 'edit-identity');
}
};
// load identity record
this.load_identity = function(id, action)
{
if (action == 'edit-identity' && (!id || id == this.env.iid))
return false;
var win, target = window,
url = {_action: action, _iid: id};
if (win = this.get_frame_window(this.env.contentframe)) {
url._framed = 1;
target = win;
}
if (id || action == 'add-identity') {
this.location_href(url, target, true);
}
return true;
};
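// delete the selected identity (with confirmation)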
this.delete_identity = function(id)
{
// exit if no identity is specified or if selection is empty
var selection = this.identity_list.get_selection();
if (!(selection.length || this.env.iid))
return;
if (!id)
id = this.env.iid ? this.env.iid : selection[0];
// submit request with appended token
if (id && confirm(this.get_label('deleteidentityconfirm')))
this.http_post('settings/delete-identity', { _iid: id }, true);
};
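// add or update a row in the identities list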
this.update_identity_row = function(id, name, add)
{
var list = this.identity_list,
rid = this.html_identifier(id);
if (add) {
list.insert_row({ id:'rcmrow'+rid, cols:[ { className:'mail', innerHTML:name } ] });
list.select(rid);
}
else {
list.update_row(rid, [ name ]);
}
};
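// add or update a row in the canned responses list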
this.update_response_row = function(response, oldkey)
{
var list = this.responses_list;
if (list && oldkey) {
list.update_row(oldkey, [ response.name ], response.key, true);
}
else if (list) {
list.insert_row({ id:'rcmrow'+response.key, cols:[ { className:'name', innerHTML:response.name } ] });
list.select(response.key);
}
};
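// remove a canned response from the list and the environment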
this.remove_response = function(key)
{
var frame;
if (this.env.textresponses) {
delete this.env.textresponses[key];
}
if (this.responses_list) {
this.responses_list.remove_row(key);
if (this.env.contentframe && (frame = this.get_frame_window(this.env.contentframe))) {
frame.location.href = this.env.blankpage;
}
}
this.enable_command('delete', false);
};
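// remove an identity row from the list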
this.remove_identity = function(id)
{
var frame, list = this.identity_list,
rid = this.html_identifier(id);
if (list && id) {
list.remove_row(rid);
if (this.env.contentframe && (frame = this.get_frame_window(this.env.contentframe))) {
frame.location.href = this.env.blankpage;
}
}
this.enable_command('delete', false);
};
/*********************************************************/
/********* folder manager methods *********/
/*********************************************************/
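// initialize the folder manager's subscription tree list with search and drag&drop support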
this.init_subscription_list = function()
{
var delim = RegExp.escape(this.env.delimiter);
this.last_sub_rx = RegExp('['+delim+']?[^'+delim+']+$');
this.subscription_list = new rcube_treelist_widget(this.gui_objects.subscriptionlist, {
selectable: true,
tabexit: false,
parent_focus: true,
id_prefix: 'rcmli',
id_encode: this.html_identifier_encode,
id_decode: this.html_identifier_decode,
searchbox: '#foldersearch'
});
this.subscription_list
.addEventListener('select', function(node) { ref.subscription_select(node.id); })
.addEventListener('collapse', function(node) { ref.folder_collapsed(node) })
.addEventListener('expand', function(node) { ref.folder_collapsed(node) })
.addEventListener('search', function(p) { if (p.query) ref.subscription_select(); })
.draggable({cancel: 'li.mailbox.root'})
.droppable({
// @todo: find a better way; the accept callback is executed for every folder
// in the list when dragging starts (and stops), which is slow, but
// I didn't find a way to check the drop target on the 'over' event
accept: function(node) {
if (!$(node).is('.mailbox'))
return false;
var source_folder = ref.folder_id2name($(node).attr('id')),
dest_folder = ref.folder_id2name(this.id),
source = ref.env.subscriptionrows[source_folder],
dest = ref.env.subscriptionrows[dest_folder];
return source && !source[2]
&& dest_folder != source_folder.replace(ref.last_sub_rx, '')
&& !dest_folder.startsWith(source_folder + ref.env.delimiter);
},
drop: function(e, ui) {
var source = ref.folder_id2name(ui.draggable.attr('id')),
dest = ref.folder_id2name(this.id);
ref.subscription_move_folder(source, dest);
}
});
};
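// convert a folder list item DOM id back to the folder name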
this.folder_id2name = function(id)
{
return id ? ref.html_identifier_decode(id.replace(/^rcmli/, '')) : null;
};
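// handle folder selection in the subscription list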
this.subscription_select = function(id)
{
var folder;
if (id && id != '*' && (folder = this.env.subscriptionrows[id])) {
this.env.mailbox = id;
this.show_folder(id);
this.enable_command('delete-folder', !folder[2]);
}
else {
this.env.mailbox = null;
this.show_contentframe(false);
this.enable_command('delete-folder', 'purge', false);
}
};
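// move a folder under a new parent (via drag&drop) by renaming it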
this.subscription_move_folder = function(from, to)
{
if (from && to !== null && from != to && to != from.replace(this.last_sub_rx, '')) {
var path = from.split(this.env.delimiter),
basename = path.pop(),
newname = to === '' || to === '*' ? basename : to + this.env.delimiter + basename;
if (newname != from) {
this.http_post('rename-folder', {_folder_oldname: from, _folder_newname: newname},
this.set_busy(true, 'foldermoving'));
}
}
};
// tell server to create and subscribe a new mailbox
this.create_folder = function()
{
this.show_folder('', this.env.mailbox);
};
// delete a specific mailbox with all its messages
this.delete_folder = function(name)
{
if (!name)
name = this.env.mailbox;
if (name && confirm(this.get_label('deletefolderconfirm'))) {
this.http_post('delete-folder', {_mbox: name}, this.set_busy(true, 'folderdeleting'));
}
};
// Add folder row to the table and initialize it
this.add_folder_row = function (id, name, display_name, is_protected, subscribed, class_name, refrow, subfolders)
{
if (!this.gui_objects.subscriptionlist)
return false;
// reset searching
if (this.subscription_list.is_search()) {
this.subscription_select();
this.subscription_list.reset_search();
}
// disable drag-n-drop temporarily
this.subscription_list.draggable('destroy').droppable('destroy');
var row, n, tmp, tmp_name, rowid, collator, pos, p, parent = '',
folders = [], list = [], slist = [],
list_element = $(this.gui_objects.subscriptionlist);
row = refrow ? refrow : $($('li', list_element).get(1)).clone(true);
if (!row.length) {
// Refresh page if we don't have a table row to clone
this.goto_url('folders');
return false;
}
// set ID, reset css class
row.attr({id: 'rcmli' + this.html_identifier_encode(id), 'class': class_name});
if (!refrow || !refrow.length) {
// remove old data, subfolders and toggle
$('ul,div.treetoggle', row).remove();
row.removeData('filtered');
}
// set folder name
$('a:first', row).text(display_name);
// update subscription checkbox
$('input[name="_subscribed[]"]:first', row).val(id)
.prop({checked: subscribed ? true : false, disabled: is_protected ? true : false});
// add to folder/row-ID map
this.env.subscriptionrows[id] = [name, display_name, false];
// copy folders data to an array for sorting
$.each(this.env.subscriptionrows, function(k, v) { v[3] = k; folders.push(v); });
try {
// use collator if supported (FF29, IE11, Opera15, Chrome24)
collator = new Intl.Collator(this.env.locale.replace('_', '-'));
}
catch (e) {}
// sort folders
folders.sort(function(a, b) {
var i, f1, f2,
path1 = a[0].split(ref.env.delimiter),
path2 = b[0].split(ref.env.delimiter),
len = path1.length;
for (i=0; i<len; i++) {
f1 = path1[i];
f2 = path2[i];
if (f1 !== f2) {
if (f2 === undefined)
return 1;
if (collator)
return collator.compare(f1, f2);
else
return f1 < f2 ? -1 : 1;
}
else if (i == len-1) {
return -1;
}
}
});
for (n in folders) {
p = folders[n][3];
// protected folder
if (folders[n][2]) {
tmp_name = p + this.env.delimiter;
// prefix namespace cannot have subfolders (#1488349)
if (tmp_name == this.env.prefix_ns)
continue;
slist.push(p);
tmp = tmp_name;
}
// protected folder's child
else if (tmp && p.startsWith(tmp))
slist.push(p);
// other
else {
list.push(p);
tmp = null;
}
}
// check if subfolder of a protected folder
for (n=0; n<slist.length; n++) {
if (id.startsWith(slist[n] + this.env.delimiter))
rowid = slist[n];
}
// find folder position after sorting
for (n=0; !rowid && n<list.length; n++) {
if (n && list[n] == id)
rowid = list[n-1];
}
// add row to the table
if (rowid && (n = this.subscription_list.get_item(rowid, true))) {
// find parent folder
if (pos = id.lastIndexOf(this.env.delimiter)) {
parent = id.substring(0, pos);
parent = this.subscription_list.get_item(parent, true);
// add required tree elements to the parent if not already there
if (!$('div.treetoggle', parent).length) {
$('<div> </div>').addClass('treetoggle collapsed').appendTo(parent);
}
if (!$('ul', parent).length) {
$('<ul>').css('display', 'none').appendTo(parent);
}
}
if (parent && n == parent) {
$('ul:first', parent).append(row);
}
else {
while (p = $(n).parent().parent().get(0)) {
if (parent && p == parent)
break;
if (!$(p).is('li.mailbox'))
break;
n = p;
}
$(n).after(row);
}
}
else {
list_element.append(row);
}
// add subfolders
$.extend(this.env.subscriptionrows, subfolders || {});
// update list widget
this.subscription_list.reset(true);
this.subscription_select();
// expand parent
if (parent) {
this.subscription_list.expand(this.folder_id2name(parent.id));
}
row = row.show().get(0);
if (row.scrollIntoView)
row.scrollIntoView();
return row;
};
// replace an existing table row with a new folder line (with subfolders)
this.replace_folder_row = function(oldid, id, name, display_name, is_protected, class_name)
{
if (!this.gui_objects.subscriptionlist) {
if (this.is_framed()) {
// @FIXME: for some reason this 'parent' variable needs to be prefixed with 'window.'
return window.parent.rcmail.replace_folder_row(oldid, id, name, display_name, is_protected, class_name);
}
return false;
}
// reset searching
if (this.subscription_list.is_search()) {
this.subscription_select();
this.subscription_list.reset_search();
}
var subfolders = {},
row = this.subscription_list.get_item(oldid, true),
parent = $(row).parent(),
old_folder = this.env.subscriptionrows[oldid],
prefix_len_id = oldid.length,
prefix_len_name = old_folder[0].length,
subscribed = $('input[name="_subscribed[]"]:first', row).prop('checked');
// no renaming, only update class_name
if (oldid == id) {
$(row).attr('class', class_name || '');
return;
}
// update subfolders
$('li', row).each(function() {
var fname = ref.folder_id2name(this.id),
folder = ref.env.subscriptionrows[fname],
newid = id + fname.slice(prefix_len_id);
this.id = 'rcmli' + ref.html_identifier_encode(newid);
$('input[name="_subscribed[]"]:first', this).val(newid);
folder[0] = name + folder[0].slice(prefix_len_name);
subfolders[newid] = folder;
delete ref.env.subscriptionrows[fname];
});
// get row off the list
row = $(row).detach();
delete this.env.subscriptionrows[oldid];
// remove parent list/toggle elements if not needed
if (parent.get(0) != this.gui_objects.subscriptionlist && !$('li', parent).length) {
$('ul,div.treetoggle', parent.parent()).remove();
}
// move the existing table row
this.add_folder_row(id, name, display_name, is_protected, subscribed, class_name, row, subfolders);
};
// remove the table row of a specific mailbox from the table
this.remove_folder_row = function(folder)
{
// reset searching
if (this.subscription_list.is_search()) {
this.subscription_select();
this.subscription_list.reset_search();
}
var list = [], row = this.subscription_list.get_item(folder, true);
// get subfolders if any
$('li', row).each(function() { list.push(ref.folder_id2name(this.id)); });
// remove folder row (and subfolders)
this.subscription_list.remove(folder);
// update local list variable
list.push(folder);
$.each(list, function(i, v) { delete ref.env.subscriptionrows[v]; });
};
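// send a subscribe request for the given folder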
this.subscribe = function(folder)
{
if (folder) {
var lock = this.display_message(this.get_label('foldersubscribing'), 'loading');
this.http_post('subscribe', {_mbox: folder}, lock);
}
};
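// send an unsubscribe request for the given folder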
this.unsubscribe = function(folder)
{
if (folder) {
var lock = this.display_message(this.get_label('folderunsubscribing'), 'loading');
this.http_post('unsubscribe', {_mbox: folder}, lock);
}
};
// when the user selects a folder in the folder manager
this.show_folder = function(folder, path, force)
{
var win, target = window,
url = '&_action=edit-folder&_mbox='+urlencode(folder);
if (path)
url += '&_path='+urlencode(path);
if (win = this.get_frame_window(this.env.contentframe)) {
target = win;
url += '&_framed=1';
}
if (String(target.location.href).indexOf(url) >= 0 && !force)
this.show_contentframe(true);
else
this.location_href(this.env.comm_path+url, target, true);
};
// disables subscription checkbox (for protected folder)
this.disable_subscription = function(folder)
{
var row = this.subscription_list.get_item(folder, true);
if (row)
$('input[name="_subscribed[]"]:first', row).prop('disabled', true);
};
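// request the size of a folder from the server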
this.folder_size = function(folder)
{
var lock = this.set_busy(true, 'loading');
this.http_post('folder-size', {_mbox: folder}, lock);
};
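// callback to update the folder size display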
this.folder_size_update = function(size)
{
$('#folder-size').replaceWith(size);
};
// filter folders by namespace
this.folder_filter = function(prefix)
{
this.subscription_list.reset_search();
this.subscription_list.container.children('li').each(function() {
var i, folder = ref.folder_id2name(this.id);
// show all folders
if (prefix == '---') {
}
// got namespace prefix
else if (prefix) {
if (folder !== prefix) {
$(this).data('filtered', true).hide();
return;
}
}
// no namespace prefix, filter out all other namespaces
else {
// first get all namespace roots
for (i in ref.env.ns_roots) {
if (folder === ref.env.ns_roots[i]) {
$(this).data('filtered', true).hide();
return;
}
}
}
$(this).removeData('filtered').show();
});
};
/*********************************************************/
/********* GUI functionality *********/
/*********************************************************/
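// attach mouse event handlers to a single registered button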
var init_button = function(cmd, prop)
{
var elm = document.getElementById(prop.id);
if (!elm)
return;
var preload = false;
if (prop.type == 'image') {
elm = elm.parentNode;
preload = true;
}
elm._command = cmd;
elm._id = prop.id;
if (prop.sel) {
elm.onmousedown = function(e) { return ref.button_sel(this._command, this._id); };
elm.onmouseup = function(e) { return ref.button_out(this._command, this._id); };
if (preload)
new Image().src = prop.sel;
}
if (prop.over) {
elm.onmouseover = function(e) { return ref.button_over(this._command, this._id); };
elm.onmouseout = function(e) { return ref.button_out(this._command, this._id); };
if (preload)
new Image().src = prop.over;
}
};
// set event handlers on registered buttons
this.init_buttons = function()
{
for (var cmd in this.buttons) {
if (typeof cmd !== 'string')
continue;
for (var i=0; i<this.buttons[cmd].length; i++) {
init_button(cmd, this.buttons[cmd][i]);
}
}
};
// set button to a specific state
this.set_button = function(command, state)
{
var n, button, obj, $obj, a_buttons = this.buttons[command],
len = a_buttons ? a_buttons.length : 0;
for (n=0; n<len; n++) {
button = a_buttons[n];
obj = document.getElementById(button.id);
if (!obj || button.status === state)
continue;
// get default/passive setting of the button
if (button.type == 'image' && !button.status) {
button.pas = obj._original_src ? obj._original_src : obj.src;
// respect PNG fix on IE browsers
if (obj.runtimeStyle && obj.runtimeStyle.filter && obj.runtimeStyle.filter.match(/src=['"]([^'"]+)['"]/))
button.pas = RegExp.$1;
}
else if (!button.status)
button.pas = String(obj.className);
button.status = state;
// set image according to button state
if (button.type == 'image' && button[state]) {
obj.src = button[state];
}
// set class name according to button state
else if (button[state] !== undefined) {
obj.className = button[state];
}
// disable/enable input buttons
if (button.type == 'input') {
obj.disabled = state == 'pas';
}
else if (button.type == 'uibutton') {
button.status = state;
$(obj).button('option', 'disabled', state == 'pas');
}
else {
$obj = $(obj);
$obj
.attr('tabindex', state == 'pas' || state == 'sel' ? '-1' : ($obj.attr('data-tabindex') || '0'))
.attr('aria-disabled', state == 'pas' || state == 'sel' ? 'true' : 'false');
}
}
};
// display a specific alttext
this.set_alttext = function(command, label)
{
var n, button, obj, link, a_buttons = this.buttons[command],
len = a_buttons ? a_buttons.length : 0;
for (n=0; n<len; n++) {
button = a_buttons[n];
obj = document.getElementById(button.id);
if (button.type == 'image' && obj) {
obj.setAttribute('alt', this.get_label(label));
if ((link = obj.parentNode) && link.tagName.toLowerCase() == 'a')
link.setAttribute('title', this.get_label(label));
}
else if (obj)
obj.setAttribute('title', this.get_label(label));
}
};
// mouse over button
this.button_over = function(command, id)
{
this.button_event(command, id, 'over');
};
// mouse down on button
this.button_sel = function(command, id)
{
this.button_event(command, id, 'sel');
};
// mouse out of button
this.button_out = function(command, id)
{
this.button_event(command, id, 'act');
};
// apply a button state event (over/sel/act)
this.button_event = function(command, id, event)
{
var n, button, obj, a_buttons = this.buttons[command],
len = a_buttons ? a_buttons.length : 0;
for (n=0; n<len; n++) {
button = a_buttons[n];
if (button.id == id && button.status == 'act') {
if (button[event] && (obj = document.getElementById(button.id))) {
obj[button.type == 'image' ? 'src' : 'className'] = button[event];
}
if (event == 'sel') {
this.buttons_sel[id] = command;
}
}
}
};
// write to the document/window title
this.set_pagetitle = function(title)
{
if (title && document.title)
document.title = title;
};
// display a system message, list of types in common.css (below #message definition)
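// illustrative usage (not taken from the original sources):
//   var id = this.display_message('Settings saved', 'confirmation', 2000);
//   this.hide_message(id);  // dismiss it earlier if needed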
this.display_message = function(msg, type, timeout, key)
{
// pass command to parent window
if (this.is_framed())
return parent.rcmail.display_message(msg, type, timeout);
if (!this.gui_objects.message) {
// save message in order to display after page loaded
if (type != 'loading')
this.pending_message = [msg, type, timeout, key];
return 1;
}
if (!type)
type = 'notice';
if (!key)
key = this.html_identifier(msg);
var date = new Date(),
id = type + date.getTime();
if (!timeout) {
switch (type) {
case 'error':
case 'warning':
timeout = this.message_time * 2;
break;
case 'uploading':
timeout = 0;
break;
default:
timeout = this.message_time;
}
}
if (type == 'loading') {
key = 'loading';
timeout = this.env.request_timeout * 1000;
if (!msg)
msg = this.get_label('loading');
}
// The same message is already displayed
if (this.messages[key]) {
// replace label
if (this.messages[key].obj)
this.messages[key].obj.html(msg);
// store label in stack
if (type == 'loading') {
this.messages[key].labels.push({'id': id, 'msg': msg});
}
// add element and set timeout
this.messages[key].elements.push(id);
setTimeout(function() { ref.hide_message(id, type == 'loading'); }, timeout);
return id;
}
// create DOM object and display it
var obj = $('<div>').addClass(type).html(msg).data('key', key),
cont = $(this.gui_objects.message).append(obj).show();
this.messages[key] = {'obj': obj, 'elements': [id]};
if (type == 'loading') {
this.messages[key].labels = [{'id': id, 'msg': msg}];
}
else if (type != 'uploading') {
obj.click(function() { return ref.hide_message(obj); })
.attr('role', 'alert');
}
this.triggerEvent('message', { message:msg, type:type, timeout:timeout, object:obj });
if (timeout > 0)
setTimeout(function() { ref.hide_message(id, type != 'loading'); }, timeout);
return id;
};
// make a message disappear
this.hide_message = function(obj, fade)
{
// pass command to parent window
if (this.is_framed())
return parent.rcmail.hide_message(obj, fade);
if (!this.gui_objects.message)
return;
var k, n, i, o, m = this.messages;
// Hide message by object, don't use for 'loading'!
if (typeof obj === 'object') {
o = $(obj);
k = o.data('key');
this.hide_message_object(o, fade);
if (m[k])
delete m[k];
}
// Hide message by id
else {
for (k in m) {
for (n in m[k].elements) {
if (m[k] && m[k].elements[n] == obj) {
m[k].elements.splice(n, 1);
// hide DOM element if last instance is removed
if (!m[k].elements.length) {
this.hide_message_object(m[k].obj, fade);
delete m[k];
}
// set pending action label for 'loading' message
else if (k == 'loading') {
for (i in m[k].labels) {
if (m[k].labels[i].id == obj) {
delete m[k].labels[i];
}
else {
o = m[k].labels[i].msg;
m[k].obj.html(o);
}
}
}
}
}
}
}
};
// hide message object and remove from the DOM
this.hide_message_object = function(o, fade)
{
if (fade)
o.fadeOut(600, function() {$(this).remove(); });
else
o.hide().remove();
};
// remove all messages immediately
this.clear_messages = function()
{
// pass command to parent window
if (this.is_framed())
return parent.rcmail.clear_messages();
var k, n, m = this.messages;
for (k in m)
for (n in m[k].elements)
if (m[k].obj)
this.hide_message_object(m[k].obj);
this.messages = {};
};
// display uploading message with progress indicator
// data should contain: name, total, current, percent, text
this.display_progress = function(data)
{
if (!data || !data.name)
return;
var msg = this.messages['progress' + data.name];
if (!data.label)
data.label = this.get_label('uploadingmany');
if (!msg) {
if (!data.percent || data.percent < 100)
this.display_message(data.label, 'uploading', 0, 'progress' + data.name);
return;
}
if (!data.total || data.percent >= 100) {
this.hide_message(msg.obj);
return;
}
if (data.text)
data.label += ' ' + data.text;
msg.obj.text(data.label);
};
// open a jquery UI dialog with the given content
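// illustrative usage (not taken from the original sources):
//   this.show_popup_dialog('<p>Are you sure?</p>', 'Confirmation',
//     [{ text: 'OK', click: function() { $(this).dialog('close'); } }]);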
this.show_popup_dialog = function(content, title, buttons, options)
{
// forward call to parent window
if (this.is_framed()) {
return parent.rcmail.show_popup_dialog(content, title, buttons, options);
}
var popup = $('<div class="popup">');
if (typeof content == 'object')
popup.append(content);
else
popup.html(content);
options = $.extend({
title: title,
buttons: buttons,
modal: true,
resizable: true,
width: 500,
close: function(event, ui) { $(this).remove(); }
}, options || {});
popup.dialog(options);
// resize and center popup
var win = $(window), w = win.width(), h = win.height(),
width = popup.width(), height = popup.height();
popup.dialog('option', {
height: Math.min(h - 40, height + 75 + (buttons ? 50 : 0)),
width: Math.min(w - 20, width + 36)
});
// assign special classes to dialog buttons
$.each(options.button_classes || [], function(i, v) {
if (v) $($('.ui-dialog-buttonpane button.ui-button', popup.parent()).get(i)).addClass(v);
});
return popup;
};
// enable/disable buttons for page shifting
this.set_page_buttons = function()
{
this.enable_command('nextpage', 'lastpage', this.env.pagecount > this.env.current_page);
this.enable_command('previouspage', 'firstpage', this.env.current_page > 1);
};
// mark a mailbox as selected and set environment variable
this.select_folder = function(name, prefix, encode)
{
if (this.savedsearchlist) {
this.savedsearchlist.select('');
}
if (this.treelist) {
this.treelist.select(name);
}
else if (this.gui_objects.folderlist) {
$('li.selected', this.gui_objects.folderlist).removeClass('selected');
$(this.get_folder_li(name, prefix, encode)).addClass('selected');
// trigger event hook
this.triggerEvent('selectfolder', { folder:name, prefix:prefix });
}
};
// adds a class to the selected folder
this.mark_folder = function(name, class_name, prefix, encode)
{
$(this.get_folder_li(name, prefix, encode)).addClass(class_name);
this.triggerEvent('markfolder', {folder: name, mark: class_name, status: true});
};
// removes a class from the selected folder
this.unmark_folder = function(name, class_name, prefix, encode)
{
$(this.get_folder_li(name, prefix, encode)).removeClass(class_name);
this.triggerEvent('markfolder', {folder: name, mark: class_name, status: false});
};
// helper method to find a folder list item
this.get_folder_li = function(name, prefix, encode)
{
if (!prefix)
prefix = 'rcmli';
if (this.gui_objects.folderlist) {
name = this.html_identifier(name, encode);
return document.getElementById(prefix+name);
}
};
// for reordering column array (Konqueror workaround)
// and for setting some message list global variables
this.set_message_coltypes = function(listcols, repl, smart_col)
{
var list = this.message_list,
thead = list ? list.thead : null,
cell, col, c, n, len, tr;
this.env.listcols = listcols;
if (!this.env.coltypes)
this.env.coltypes = {};
// replace old column headers
if (thead) {
if (repl) {
thead.innerHTML = '';
tr = document.createElement('tr');
for (c=0, len=repl.length; c < len; c++) {
cell = document.createElement('th');
cell.innerHTML = repl[c].html || '';
if (repl[c].id) cell.id = repl[c].id;
if (repl[c].className) cell.className = repl[c].className;
tr.appendChild(cell);
}
thead.appendChild(tr);
}
for (n=0, len=this.env.listcols.length; n<len; n++) {
col = this.env.listcols[n];
if ((cell = thead.rows[0].cells[n]) && (col == 'from' || col == 'to' || col == 'fromto')) {
$(cell).attr('rel', col).find('span,a').text(this.get_label(col == 'fromto' ? smart_col : col));
}
}
}
this.env.subject_col = null;
this.env.flagged_col = null;
this.env.status_col = null;
if (this.env.coltypes.folder)
this.env.coltypes.folder.hidden = !(this.env.search_request || this.env.search_id) || this.env.search_scope == 'base';
if ((n = $.inArray('subject', this.env.listcols)) >= 0) {
this.env.subject_col = n;
if (list)
list.subject_col = n;
}
if ((n = $.inArray('flag', this.env.listcols)) >= 0)
this.env.flagged_col = n;
if ((n = $.inArray('status', this.env.listcols)) >= 0)
this.env.status_col = n;
if (list) {
list.hide_column('folder', (this.env.coltypes.folder && this.env.coltypes.folder.hidden) || $.inArray('folder', this.env.listcols) < 0);
list.init_header();
}
};
// replace content of row count display
this.set_rowcount = function(text, mbox)
{
// #1487752
if (mbox && mbox != this.env.mailbox)
return false;
$(this.gui_objects.countdisplay).html(text);
// update page navigation buttons
this.set_page_buttons();
};
// replace content of mailboxname display
this.set_mailboxname = function(content)
{
if (this.gui_objects.mailboxname && content)
this.gui_objects.mailboxname.innerHTML = content;
};
// replace content of quota display
this.set_quota = function(content)
{
if (this.gui_objects.quotadisplay && content && content.type == 'text')
$(this.gui_objects.quotadisplay).text((content.percent||0) + '%').attr('title', content.title);
this.triggerEvent('setquota', content);
this.env.quota_content = content;
};
// update trash folder state
this.set_trash_count = function(count)
{
this[(count ? 'un' : '') + 'mark_folder'](this.env.trash_mailbox, 'empty', '', true);
};
// update the mailboxlist
this.set_unread_count = function(mbox, count, set_title, mark)
{
if (!this.gui_objects.mailboxlist)
return false;
this.env.unread_counts[mbox] = count;
this.set_unread_count_display(mbox, set_title);
if (mark)
this.mark_folder(mbox, mark, '', true);
else if (!count)
this.unmark_folder(mbox, 'recent', '', true);
};
// update the mailbox count display
this.set_unread_count_display = function(mbox, set_title)
{
var reg, link, text_obj, item, mycount, childcount, div;
if (item = this.get_folder_li(mbox, '', true)) {
mycount = this.env.unread_counts[mbox] ? this.env.unread_counts[mbox] : 0;
link = $(item).children('a').eq(0);
text_obj = link.children('span.unreadcount');
if (!text_obj.length && mycount)
text_obj = $('<span>').addClass('unreadcount').appendTo(link);
reg = /\s+\([0-9]+\)$/i;
childcount = 0;
if ((div = item.getElementsByTagName('div')[0]) &&
div.className.match(/collapsed/)) {
// add children's counters
for (var k in this.env.unread_counts)
if (k.startsWith(mbox + this.env.delimiter))
childcount += this.env.unread_counts[k];
}
if (mycount && text_obj.length)
text_obj.html(this.env.unreadwrap.replace(/%[sd]/, mycount));
else if (text_obj.length)
text_obj.remove();
// set parent's display
reg = new RegExp(RegExp.escape(this.env.delimiter) + '[^' + RegExp.escape(this.env.delimiter) + ']+$');
if (mbox.match(reg))
this.set_unread_count_display(mbox.replace(reg, ''), false);
// set the right classes
if ((mycount+childcount)>0)
$(item).addClass('unread');
else
$(item).removeClass('unread');
}
// set unread count to window title
reg = /^\([0-9]+\)\s+/i;
if (set_title && document.title) {
var new_title = '',
doc_title = String(document.title);
if (mycount && doc_title.match(reg))
new_title = doc_title.replace(reg, '('+mycount+') ');
else if (mycount)
new_title = '('+mycount+') '+doc_title;
else
new_title = doc_title.replace(reg, '');
this.set_pagetitle(new_title);
}
};
// display fetched raw headers
this.set_headers = function(content)
{
if (this.gui_objects.all_headers_row && this.gui_objects.all_headers_box && content)
$(this.gui_objects.all_headers_box).html(content).show();
};
// display all-headers row and fetch raw message headers
this.show_headers = function(props, elem)
{
if (!this.gui_objects.all_headers_row || !this.gui_objects.all_headers_box || !this.env.uid)
return;
$(elem).removeClass('show-headers').addClass('hide-headers');
$(this.gui_objects.all_headers_row).show();
elem.onclick = function() { ref.command('hide-headers', '', elem); };
// fetch headers only once
if (!this.gui_objects.all_headers_box.innerHTML) {
this.http_post('headers', {_uid: this.env.uid, _mbox: this.env.mailbox},
this.display_message(this.get_label('loading'), 'loading')
);
}
};
// hide all-headers row
this.hide_headers = function(props, elem)
{
if (!this.gui_objects.all_headers_row || !this.gui_objects.all_headers_box)
return;
$(elem).removeClass('hide-headers').addClass('show-headers');
$(this.gui_objects.all_headers_row).hide();
elem.onclick = function() { ref.command('show-headers', '', elem); };
};
// create folder selector popup, position and display it
this.folder_selector = function(event, callback)
{
var container = this.folder_selector_element;
if (!container) {
var rows = [],
delim = this.env.delimiter,
ul = $('<ul class="toolbarmenu">'),
link = document.createElement('a');
container = $('<div id="folder-selector" class="popupmenu"></div>');
link.href = '#';
link.className = 'icon';
// loop over sorted folders list
$.each(this.env.mailboxes_list, function() {
var n = 0, s = 0,
folder = ref.env.mailboxes[this],
id = folder.id,
a = $(link.cloneNode(false)),
row = $('<li>');
if (folder.virtual)
a.addClass('virtual').attr('aria-disabled', 'true').attr('tabindex', '-1');
else
a.addClass('active').data('id', folder.id);
if (folder['class'])
a.addClass(folder['class']);
// calculate/set indentation level
while ((s = id.indexOf(delim, s)) >= 0) {
n++; s++;
}
a.css('padding-left', n ? (n * 16) + 'px' : 0);
// add folder name element
a.append($('<span>').text(folder.name));
row.append(a);
rows.push(row);
});
ul.append(rows).appendTo(container);
// temporarily show element to calculate its size
container.css({left: '-1000px', top: '-1000px'})
.appendTo($('body')).show();
// set max-height if the list is long
if (rows.length > 10)
container.css('max-height', $('li', container)[0].offsetHeight * 10 + 9);
// register delegate event handler for folder item clicks
container.on('click', 'a.active', function(e){
container.data('callback')($(this).data('id'));
return false;
});
this.folder_selector_element = container;
}
container.data('callback', callback);
// position menu on the screen
this.show_menu('folder-selector', true, event);
};
/***********************************************/
/********* popup menu functions *********/
/***********************************************/
// Show/hide a specific popup menu
this.show_menu = function(prop, show, event)
{
var name = typeof prop == 'object' ? prop.menu : prop,
obj = $('#'+name),
ref = event && event.target ? $(event.target) : $(obj.attr('rel') || '#'+name+'link'),
keyboard = rcube_event.is_keyboard(event),
align = obj.attr('data-align') || '',
stack = false;
// find "real" button element
if (ref.get(0).tagName != 'A' && ref.closest('a').length)
ref = ref.closest('a');
if (typeof prop == 'string')
prop = { menu:name };
// let plugins or skins provide the menu element
if (!obj.length) {
obj = this.triggerEvent('menu-get', { name:name, props:prop, originalEvent:event });
}
if (!obj || !obj.length) {
// just delegate the action to subscribers
return this.triggerEvent(show === false ? 'menu-close' : 'menu-open', { name:name, props:prop, originalEvent:event });
}
// move element to top for proper absolute positioning
obj.appendTo(document.body);
if (typeof show == 'undefined')
show = obj.is(':visible') ? false : true;
if (show && ref.length) {
var win = $(window),
pos = ref.offset(),
above = align.indexOf('bottom') >= 0;
stack = ref.attr('role') == 'menuitem' || ref.closest('[role=menuitem]').length > 0;
ref.offsetWidth = ref.outerWidth();
ref.offsetHeight = ref.outerHeight();
if (!above && pos.top + ref.offsetHeight + obj.height() > win.height()) {
above = true;
}
if (align.indexOf('right') >= 0) {
pos.left = pos.left + ref.outerWidth() - obj.width();
}
else if (stack) {
pos.left = pos.left + ref.offsetWidth - 5;
pos.top -= ref.offsetHeight;
}
if (pos.left + obj.width() > win.width()) {
pos.left = win.width() - obj.width() - 12;
}
pos.top = Math.max(0, pos.top + (above ? -obj.height() : ref.offsetHeight));
obj.css({ left:pos.left+'px', top:pos.top+'px' });
}
// add menu to stack
if (show) {
// truncate stack down to the one containing the ref link
for (var i = this.menu_stack.length - 1; stack && i >= 0; i--) {
if (!$(ref).parents('#'+this.menu_stack[i]).length)
this.hide_menu(this.menu_stack[i], event);
}
if (stack && this.menu_stack.length) {
obj.data('parent', $.last(this.menu_stack));
obj.css('z-index', ($('#'+$.last(this.menu_stack)).css('z-index') || 0) + 1);
}
else if (!stack && this.menu_stack.length) {
this.hide_menu(this.menu_stack[0], event);
}
obj.show().attr('aria-hidden', 'false').data('opener', ref.attr('aria-expanded', 'true').get(0));
this.triggerEvent('menu-open', { name:name, obj:obj, props:prop, originalEvent:event });
this.menu_stack.push(name);
this.menu_keyboard_active = show && keyboard;
if (this.menu_keyboard_active) {
this.focused_menu = name;
obj.find('a,input:not(:disabled)').not('[aria-disabled=true]').first().focus();
}
}
else { // close menu
this.hide_menu(name, event);
}
return show;
};
  // hide the given popup menu (and its children)
this.hide_menu = function(name, event)
{
if (!this.menu_stack.length) {
// delegate to subscribers
this.triggerEvent('menu-close', { name:name, props:{ menu:name }, originalEvent:event });
return;
}
var obj, keyboard = rcube_event.is_keyboard(event);
for (var j=this.menu_stack.length-1; j >= 0; j--) {
obj = $('#' + this.menu_stack[j]).hide().attr('aria-hidden', 'true').data('parent', false);
this.triggerEvent('menu-close', { name:this.menu_stack[j], obj:obj, props:{ menu:this.menu_stack[j] }, originalEvent:event });
if (this.menu_stack[j] == name) {
j = -1; // stop loop
if (obj.data('opener')) {
$(obj.data('opener')).attr('aria-expanded', 'false');
if (keyboard)
obj.data('opener').focus();
}
}
this.menu_stack.pop();
}
// focus previous menu in stack
if (this.menu_stack.length && keyboard) {
this.menu_keyboard_active = true;
this.focused_menu = $.last(this.menu_stack);
if (!obj || !obj.data('opener'))
$('#'+this.focused_menu).find('a,input:not(:disabled)').not('[aria-disabled=true]').first().focus();
}
else {
this.focused_menu = null;
this.menu_keyboard_active = false;
}
  };
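  // Usage sketch (illustrative only, not part of the original source; the menu
  // id below is hypothetical): a skin button handler typically toggles a popup
  // and later closes it, letting hide_menu() restore focus to the opener:
  //
  //   ref.show_menu('examplemenu', undefined, event);  // toggle visibility
  //   ref.hide_menu('examplemenu', event);             // close and restore focus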
// position a menu element on the screen in relation to other object
this.element_position = function(element, obj)
{
var obj = $(obj), win = $(window),
width = obj.outerWidth(),
height = obj.outerHeight(),
menu_pos = obj.data('menu-pos'),
win_height = win.height(),
elem_height = $(element).height(),
elem_width = $(element).width(),
pos = obj.offset(),
top = pos.top,
left = pos.left + width;
if (menu_pos == 'bottom') {
top += height;
left -= width;
}
else
left -= 5;
if (top + elem_height > win_height) {
top -= elem_height - height;
if (top < 0)
top = Math.max(0, (win_height - elem_height) / 2);
}
if (left + elem_width > win.width())
left -= elem_width + width;
element.css({left: left + 'px', top: top + 'px'});
};
// initialize HTML editor
this.editor_init = function(config, id)
{
this.editor = new rcube_text_editor(config, id);
};
/********************************************************/
/********* html to text conversion functions *********/
/********************************************************/
this.html2plain = function(html, func)
{
return this.format_converter(html, 'html', func);
};
this.plain2html = function(plain, func)
{
return this.format_converter(plain, 'plain', func);
};
this.format_converter = function(text, format, func)
{
// warn the user (if converted content is not empty)
if (!text
|| (format == 'html' && !(text.replace(/<[^>]+>| |\xC2\xA0|\s/g, '')).length)
|| (format != 'html' && !(text.replace(/\xC2\xA0|\s/g, '')).length)
) {
// without setTimeout() here, textarea is filled with initial (onload) content
if (func)
setTimeout(function() { func(''); }, 50);
return true;
}
var confirmed = this.env.editor_warned || confirm(this.get_label('editorwarning'));
this.env.editor_warned = true;
if (!confirmed)
return false;
var url = '?_task=utils&_action=' + (format == 'html' ? 'html2text' : 'text2html'),
lock = this.set_busy(true, 'converting');
this.log('HTTP POST: ' + url);
$.ajax({ type: 'POST', url: url, data: text, contentType: 'application/octet-stream',
error: function(o, status, err) { ref.http_error(o, status, err, lock); },
success: function(data) {
ref.set_busy(false, null, lock);
if (func) func(data);
}
});
return true;
};
/********************************************************/
/********* remote request methods *********/
/********************************************************/
// compose a valid url with the given parameters
this.url = function(action, query)
{
var querystring = typeof query === 'string' ? '&' + query : '';
if (typeof action !== 'string')
query = action;
else if (!query || typeof query !== 'object')
query = {};
if (action)
query._action = action;
else if (this.env.action)
query._action = this.env.action;
var base = this.env.comm_path, k, param = {};
// overwrite task name
if (action && action.match(/([a-z0-9_-]+)\/([a-z0-9-_.]+)/)) {
query._action = RegExp.$2;
base = base.replace(/\_task=[a-z0-9_-]+/, '_task='+RegExp.$1);
}
// remove undefined values
for (k in query) {
if (query[k] !== undefined && query[k] !== null)
param[k] = query[k];
}
return base + (base.indexOf('?') > -1 ? '&' : '?') + $.param(param) + querystring;
};
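  // Illustrative sketch (not part of the original source): how url() composes
  // request URLs, assuming this.env.comm_path is something like '?_task=mail'.
  // The action and parameter names below are hypothetical examples.
  //
  //   ref.url('show', {_uid: 123, _mbox: 'INBOX'});
  //   // -> '?_task=mail&_uid=123&_mbox=INBOX&_action=show'
  //   ref.url('settings/edit-folder', {_mbox: 'Archive'});
  //   // -> the 'task/action' form switches _task to 'settings' and _action to 'edit-folder'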
this.redirect = function(url, lock)
{
if (lock || lock === null)
this.set_busy(true);
if (this.is_framed()) {
parent.rcmail.redirect(url, lock);
}
else {
if (this.env.extwin) {
if (typeof url == 'string')
url += (url.indexOf('?') < 0 ? '?' : '&') + '_extwin=1';
else
url._extwin = 1;
}
this.location_href(url, window);
}
};
this.goto_url = function(action, query, lock)
{
this.redirect(this.url(action, query), lock);
};
this.location_href = function(url, target, frame)
{
if (frame)
this.lock_frame();
if (typeof url == 'object')
url = this.env.comm_path + '&' + $.param(url);
// simulate real link click to force IE to send referer header
if (bw.ie && target == window)
$('<a>').attr('href', url).appendTo(document.body).get(0).click();
else
target.location.href = url;
// reset keep-alive interval
this.start_keepalive();
};
// update browser location to remember current view
this.update_state = function(query)
{
if (window.history.replaceState)
window.history.replaceState({}, document.title, rcmail.url('', query));
};
// send a http request to the server
this.http_request = function(action, data, lock)
{
if (typeof data !== 'object')
data = rcube_parse_query(data);
data._remote = 1;
data._unlock = lock ? lock : 0;
// trigger plugin hook
var result = this.triggerEvent('request' + action, data);
// abort if one of the handlers returned false
if (result === false) {
if (data._unlock)
this.set_busy(false, null, data._unlock);
return false;
}
else if (result !== undefined) {
data = result;
if (data._action) {
action = data._action;
delete data._action;
}
}
var url = this.url(action, data);
// send request
this.log('HTTP GET: ' + url);
// reset keep-alive interval
this.start_keepalive();
return $.ajax({
type: 'GET', url: url, dataType: 'json',
success: function(data) { ref.http_response(data); },
error: function(o, status, err) { ref.http_error(o, status, err, lock, action); }
});
};
// send a http POST request to the server
this.http_post = function(action, data, lock)
{
if (typeof data !== 'object')
data = rcube_parse_query(data);
data._remote = 1;
data._unlock = lock ? lock : 0;
// trigger plugin hook
var result = this.triggerEvent('request'+action, data);
// abort if one of the handlers returned false
if (result === false) {
if (data._unlock)
this.set_busy(false, null, data._unlock);
return false;
}
else if (result !== undefined) {
data = result;
if (data._action) {
action = data._action;
delete data._action;
}
}
var url = this.url(action);
// send request
this.log('HTTP POST: ' + url);
// reset keep-alive interval
this.start_keepalive();
return $.ajax({
type: 'POST', url: url, data: data, dataType: 'json',
success: function(data){ ref.http_response(data); },
error: function(o, status, err) { ref.http_error(o, status, err, lock, action); }
});
};
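  // Usage sketch (illustrative only; the action and parameters are hypothetical):
  // both http_request() and http_post() return the jqXHR object, so a caller can
  // keep it together with the busy lock and cancel via abort_request() below.
  //
  //   var lock = ref.set_busy(true, 'loading');
  //   var request = ref.http_post('mark', {_uid: uids, _flag: 'read'}, lock);
  //   // later, if the user navigates away:
  //   ref.abort_request({request: request, lock: lock});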
// aborts ajax request
this.abort_request = function(r)
{
if (r.request)
r.request.abort();
if (r.lock)
this.set_busy(false, null, r.lock);
};
// handle HTTP response
this.http_response = function(response)
{
if (!response)
return;
if (response.unlock)
this.set_busy(false);
this.triggerEvent('responsebefore', {response: response});
this.triggerEvent('responsebefore'+response.action, {response: response});
// set env vars
if (response.env)
this.set_env(response.env);
// we have labels to add
if (typeof response.texts === 'object') {
for (var name in response.texts)
if (typeof response.texts[name] === 'string')
this.add_label(name, response.texts[name]);
}
// if we get javascript code from server -> execute it
if (response.exec) {
this.log(response.exec);
eval(response.exec);
}
// execute callback functions of plugins
if (response.callbacks && response.callbacks.length) {
for (var i=0; i < response.callbacks.length; i++)
this.triggerEvent(response.callbacks[i][0], response.callbacks[i][1]);
}
// process the response data according to the sent action
switch (response.action) {
case 'delete':
if (this.task == 'addressbook') {
var sid, uid = this.contact_list.get_selection(), writable = false;
if (uid && this.contact_list.rows[uid]) {
// search results, get source ID from record ID
if (this.env.source == '') {
sid = String(uid).replace(/^[^-]+-/, '');
writable = sid && this.env.address_sources[sid] && !this.env.address_sources[sid].readonly;
}
else {
writable = !this.env.address_sources[this.env.source].readonly;
}
}
this.enable_command('compose', (uid && this.contact_list.rows[uid]));
this.enable_command('delete', 'edit', writable);
this.enable_command('export', (this.contact_list && this.contact_list.rowcount > 0));
this.enable_command('export-selected', 'print', false);
}
case 'move':
if (this.env.action == 'show') {
// re-enable commands on move/delete error
this.enable_command(this.env.message_commands, true);
if (!this.env.list_post)
this.enable_command('reply-list', false);
}
else if (this.task == 'addressbook') {
this.triggerEvent('listupdate', { folder:this.env.source, rowcount:this.contact_list.rowcount });
}
case 'purge':
case 'expunge':
if (this.task == 'mail') {
if (!this.env.exists) {
// clear preview pane content
if (this.env.contentframe)
this.show_contentframe(false);
// disable commands useless when mailbox is empty
this.enable_command(this.env.message_commands, 'purge', 'expunge',
'select-all', 'select-none', 'expand-all', 'expand-unread', 'collapse-all', false);
}
if (this.message_list)
this.triggerEvent('listupdate', { folder:this.env.mailbox, rowcount:this.message_list.rowcount });
}
break;
case 'refresh':
case 'check-recent':
// update message flags
$.each(this.env.recent_flags || {}, function(uid, flags) {
ref.set_message(uid, 'deleted', flags.deleted);
ref.set_message(uid, 'replied', flags.answered);
ref.set_message(uid, 'unread', !flags.seen);
ref.set_message(uid, 'forwarded', flags.forwarded);
ref.set_message(uid, 'flagged', flags.flagged);
});
delete this.env.recent_flags;
case 'getunread':
case 'search':
this.env.qsearch = null;
case 'list':
if (this.task == 'mail') {
var is_multifolder = this.is_multifolder_listing();
this.enable_command('show', 'select-all', 'select-none', this.env.messagecount > 0);
this.enable_command('expunge', this.env.exists && !is_multifolder);
this.enable_command('purge', this.purge_mailbox_test() && !is_multifolder);
this.enable_command('import-messages', !is_multifolder);
this.enable_command('expand-all', 'expand-unread', 'collapse-all', this.env.threading && this.env.messagecount && !is_multifolder);
if ((response.action == 'list' || response.action == 'search') && this.message_list) {
var list = this.message_list, uid = this.env.list_uid;
// highlight message row when we're back from message page
if (uid) {
if (!list.rows[uid])
uid += '-' + this.env.mailbox;
if (list.rows[uid]) {
list.select(uid);
}
delete this.env.list_uid;
}
this.enable_command('set-listmode', this.env.threads && !is_multifolder);
if (list.rowcount > 0)
list.focus();
this.msglist_select(list);
this.triggerEvent('listupdate', { folder:this.env.mailbox, rowcount:list.rowcount });
}
}
else if (this.task == 'addressbook') {
this.enable_command('export', (this.contact_list && this.contact_list.rowcount > 0));
if (response.action == 'list' || response.action == 'search') {
this.enable_command('search-create', this.env.source == '');
this.enable_command('search-delete', this.env.search_id);
this.update_group_commands();
if (this.contact_list.rowcount > 0)
this.contact_list.focus();
this.triggerEvent('listupdate', { folder:this.env.source, rowcount:this.contact_list.rowcount });
}
}
break;
case 'list-contacts':
case 'search-contacts':
if (this.contact_list && this.contact_list.rowcount > 0)
this.contact_list.focus();
break;
}
if (response.unlock)
this.hide_message(response.unlock);
this.triggerEvent('responseafter', {response: response});
this.triggerEvent('responseafter'+response.action, {response: response});
// reset keep-alive interval
this.start_keepalive();
};
// handle HTTP request errors
this.http_error = function(request, status, err, lock, action)
{
var errmsg = request.statusText;
this.set_busy(false, null, lock);
request.abort();
// don't display error message on page unload (#1488547)
if (this.unload)
return;
if (request.status && errmsg)
this.display_message(this.get_label('servererror') + ' (' + errmsg + ')', 'error');
else if (status == 'timeout')
this.display_message(this.get_label('requesttimedout'), 'error');
else if (request.status == 0 && status != 'abort')
this.display_message(this.get_label('connerror'), 'error');
// redirect to url specified in location header if not empty
var location_url = request.getResponseHeader("Location");
if (location_url && this.env.action != 'compose') // don't redirect on compose screen, contents might get lost (#1488926)
this.redirect(location_url);
// 403 Forbidden response (CSRF prevention) - reload the page.
// In case there's a new valid session it will be used, otherwise
// login form will be presented (#1488960).
if (request.status == 403) {
(this.is_framed() ? parent : window).location.reload();
return;
}
// re-send keep-alive requests after 30 seconds
if (action == 'keep-alive')
setTimeout(function(){ ref.keep_alive(); ref.start_keepalive(); }, 30000);
};
// handler for session errors detected on the server
this.session_error = function(redirect_url)
{
this.env.server_error = 401;
// save message in local storage and do not redirect
if (this.env.action == 'compose') {
this.save_compose_form_local();
this.compose_skip_unsavedcheck = true;
}
else if (redirect_url) {
setTimeout(function(){ ref.redirect(redirect_url, true); }, 2000);
}
};
// callback when an iframe finished loading
this.iframe_loaded = function(unlock)
{
this.set_busy(false, null, unlock);
if (this.submit_timer)
clearTimeout(this.submit_timer);
};
/**
   Send multi-threaded parallel HTTP requests to the server for a list of items.
   The string '%s' in either a GET query or POST parameters will be replaced with the respective item value.
This is the argument object expected: {
items: ['foo','bar','gna'], // list of items to send requests for
       action: 'task/some-action', // Roundcube action to call
query: { q:'%s' }, // GET query parameters
postdata: { source:'%s' }, // POST data (sends a POST request if present)
threads: 3, // max. number of concurrent requests
onresponse: function(data){ }, // Callback function called for every response received from server
whendone: function(alldata){ } // Callback function called when all requests have been sent
}
*/
this.multi_thread_http_request = function(prop)
{
var i, item, reqid = new Date().getTime(),
threads = prop.threads || 1;
prop.reqid = reqid;
prop.running = 0;
prop.requests = [];
prop.result = [];
prop._items = $.extend([], prop.items); // copy items
if (!prop.lock)
prop.lock = this.display_message(this.get_label('loading'), 'loading');
// add the request arguments to the jobs pool
this.http_request_jobs[reqid] = prop;
// start n threads
for (i=0; i < threads; i++) {
item = prop._items.shift();
if (item === undefined)
break;
prop.running++;
prop.requests.push(this.multi_thread_send_request(prop, item));
}
return reqid;
};
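  // Example sketch (added for illustration; the action and query names are
  // hypothetical): request unread counts for several folders, two at a time.
  //
  //   var reqid = ref.multi_thread_http_request({
  //     items: ['INBOX', 'Sent', 'Archive'],   // one request per item
  //     action: 'getunread',
  //     query: {_mbox: '%s'},                  // '%s' replaced with each item value
  //     threads: 2,
  //     onresponse: function(data) { /* handle each response */ },
  //     whendone: function(all) { /* all requests finished */ }
  //   });
  //   // a running job can be cancelled with ref.multi_thread_request_abort(reqid);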
// helper method to send an HTTP request with the given iterator value
this.multi_thread_send_request = function(prop, item)
{
var k, postdata, query;
// replace %s in post data
if (prop.postdata) {
postdata = {};
for (k in prop.postdata) {
postdata[k] = String(prop.postdata[k]).replace('%s', item);
}
postdata._reqid = prop.reqid;
}
// replace %s in query
else if (typeof prop.query == 'string') {
query = prop.query.replace('%s', item);
query += '&_reqid=' + prop.reqid;
}
else if (typeof prop.query == 'object' && prop.query) {
query = {};
for (k in prop.query) {
query[k] = String(prop.query[k]).replace('%s', item);
}
query._reqid = prop.reqid;
}
// send HTTP GET or POST request
return postdata ? this.http_post(prop.action, postdata) : this.http_request(prop.action, query);
};
// callback function for multi-threaded http responses
this.multi_thread_http_response = function(data, reqid)
{
var prop = this.http_request_jobs[reqid];
if (!prop || prop.running <= 0 || prop.cancelled)
return;
prop.running--;
// trigger response callback
if (prop.onresponse && typeof prop.onresponse == 'function') {
prop.onresponse(data);
}
prop.result = $.extend(prop.result, data);
// send next request if prop.items is not yet empty
var item = prop._items.shift();
if (item !== undefined) {
prop.running++;
prop.requests.push(this.multi_thread_send_request(prop, item));
}
// trigger whendone callback and mark this request as done
else if (prop.running == 0) {
if (prop.whendone && typeof prop.whendone == 'function') {
prop.whendone(prop.result);
}
this.set_busy(false, '', prop.lock);
// remove from this.http_request_jobs pool
delete this.http_request_jobs[reqid];
}
};
// abort a running multi-thread request with the given identifier
this.multi_thread_request_abort = function(reqid)
{
var prop = this.http_request_jobs[reqid];
if (prop) {
for (var i=0; prop.running > 0 && i < prop.requests.length; i++) {
if (prop.requests[i].abort)
prop.requests[i].abort();
}
prop.running = 0;
prop.cancelled = true;
this.set_busy(false, '', prop.lock);
}
};
// post the given form to a hidden iframe
this.async_upload_form = function(form, action, onload)
{
// create hidden iframe
var ts = new Date().getTime(),
frame_name = 'rcmupload' + ts,
frame = this.async_upload_form_frame(frame_name);
// upload progress support
if (this.env.upload_progress_name) {
var fname = this.env.upload_progress_name,
field = $('input[name='+fname+']', form);
if (!field.length) {
field = $('<input>').attr({type: 'hidden', name: fname});
field.prependTo(form);
}
field.val(ts);
}
// handle upload errors by parsing iframe content in onload
frame.bind('load', {ts:ts}, onload);
$(form).attr({
target: frame_name,
action: this.url(action, {_id: this.env.compose_id || '', _uploadid: ts, _from: this.env.action}),
method: 'POST'})
.attr(form.encoding ? 'encoding' : 'enctype', 'multipart/form-data')
.submit();
return frame_name;
};
// create iframe element for files upload
this.async_upload_form_frame = function(name)
{
return $('<iframe>').attr({name: name, style: 'border: none; width: 0; height: 0; visibility: hidden'})
.appendTo(document.body);
};
// html5 file-drop API
this.document_drag_hover = function(e, over)
{
e.preventDefault();
$(this.gui_objects.filedrop)[(over?'addClass':'removeClass')]('active');
};
this.file_drag_hover = function(e, over)
{
e.preventDefault();
e.stopPropagation();
$(this.gui_objects.filedrop)[(over?'addClass':'removeClass')]('hover');
};
  // handler when files are dropped onto a designated area.
// compose a multipart form data and submit it to the server
this.file_dropped = function(e)
{
// abort event and reset UI
this.file_drag_hover(e, false);
// prepare multipart form data composition
var files = e.target.files || e.dataTransfer.files,
formdata = window.FormData ? new FormData() : null,
fieldname = (this.env.filedrop.fieldname || '_file') + (this.env.filedrop.single ? '' : '[]'),
boundary = '------multipartformboundary' + (new Date).getTime(),
dashdash = '--', crlf = '\r\n',
multipart = dashdash + boundary + crlf;
if (!files || !files.length)
return;
// inline function to submit the files to the server
var submit_data = function() {
var multiple = files.length > 1,
ts = new Date().getTime(),
content = '<span>' + (multiple ? ref.get_label('uploadingmany') : files[0].name) + '</span>';
// add to attachments list
if (!ref.add2attachment_list(ts, { name:'', html:content, classname:'uploading', complete:false }))
ref.file_upload_id = ref.set_busy(true, 'uploading');
// complete multipart content and post request
multipart += dashdash + boundary + dashdash + crlf;
$.ajax({
type: 'POST',
dataType: 'json',
url: ref.url(ref.env.filedrop.action || 'upload', {_id: ref.env.compose_id||ref.env.cid||'', _uploadid: ts, _remote: 1, _from: ref.env.action}),
contentType: formdata ? false : 'multipart/form-data; boundary=' + boundary,
processData: false,
timeout: 0, // disable default timeout set in ajaxSetup()
data: formdata || multipart,
headers: {'X-Roundcube-Request': ref.env.request_token},
xhr: function() { var xhr = jQuery.ajaxSettings.xhr(); if (!formdata && xhr.sendAsBinary) xhr.send = xhr.sendAsBinary; return xhr; },
success: function(data){ ref.http_response(data); },
error: function(o, status, err) { ref.http_error(o, status, err, null, 'attachment'); }
});
};
// get contents of all dropped files
var last = this.env.filedrop.single ? 0 : files.length - 1;
for (var j=0, i=0, f; j <= last && (f = files[i]); i++) {
if (!f.name) f.name = f.fileName;
if (!f.size) f.size = f.fileSize;
if (!f.type) f.type = 'application/octet-stream';
// file name contains non-ASCII characters, do UTF8-binary string conversion.
if (!formdata && /[^\x20-\x7E]/.test(f.name))
f.name_bin = unescape(encodeURIComponent(f.name));
// filter by file type if requested
if (this.env.filedrop.filter && !f.type.match(new RegExp(this.env.filedrop.filter))) {
// TODO: show message to user
continue;
}
// do it the easy way with FormData (FF 4+, Chrome 5+, Safari 5+)
if (formdata) {
formdata.append(fieldname, f);
if (j == last)
return submit_data();
}
      // use FileReader, supported since Firefox 3.6
else if (window.FileReader) {
var reader = new FileReader();
// closure to pass file properties to async callback function
reader.onload = (function(file, j) {
return function(e) {
multipart += 'Content-Disposition: form-data; name="' + fieldname + '"';
multipart += '; filename="' + (f.name_bin || file.name) + '"' + crlf;
multipart += 'Content-Length: ' + file.size + crlf;
multipart += 'Content-Type: ' + file.type + crlf + crlf;
multipart += reader.result + crlf;
multipart += dashdash + boundary + crlf;
if (j == last) // we're done, submit the data
return submit_data();
}
})(f,j);
reader.readAsBinaryString(f);
}
// Firefox 3
else if (f.getAsBinary) {
multipart += 'Content-Disposition: form-data; name="' + fieldname + '"';
multipart += '; filename="' + (f.name_bin || f.name) + '"' + crlf;
multipart += 'Content-Length: ' + f.size + crlf;
multipart += 'Content-Type: ' + f.type + crlf + crlf;
multipart += f.getAsBinary() + crlf;
multipart += dashdash + boundary +crlf;
if (j == last)
return submit_data();
}
j++;
}
};
// starts interval for keep-alive signal
this.start_keepalive = function()
{
if (!this.env.session_lifetime || this.env.framed || this.env.extwin || this.task == 'login' || this.env.action == 'print')
return;
if (this._keepalive)
clearInterval(this._keepalive);
this._keepalive = setInterval(function(){ ref.keep_alive(); }, this.env.session_lifetime * 0.5 * 1000);
};
// starts interval for refresh signal
this.start_refresh = function()
{
if (!this.env.refresh_interval || this.env.framed || this.env.extwin || this.task == 'login' || this.env.action == 'print')
return;
if (this._refresh)
clearInterval(this._refresh);
this._refresh = setInterval(function(){ ref.refresh(); }, this.env.refresh_interval * 1000);
};
// sends keep-alive signal
this.keep_alive = function()
{
if (!this.busy)
this.http_request('keep-alive');
};
// sends refresh signal
this.refresh = function()
{
if (this.busy) {
// try again after 10 seconds
setTimeout(function(){ ref.refresh(); ref.start_refresh(); }, 10000);
return;
}
var params = {}, lock = this.set_busy(true, 'refreshing');
if (this.task == 'mail' && this.gui_objects.mailboxlist)
params = this.check_recent_params();
params._last = Math.floor(this.env.lastrefresh.getTime() / 1000);
this.env.lastrefresh = new Date();
// plugins should bind to 'requestrefresh' event to add own params
this.http_post('refresh', params, lock);
};
// returns check-recent request parameters
this.check_recent_params = function()
{
var params = {_mbox: this.env.mailbox};
if (this.gui_objects.mailboxlist)
params._folderlist = 1;
if (this.gui_objects.quotadisplay)
params._quota = 1;
if (this.env.search_request)
params._search = this.env.search_request;
if (this.gui_objects.messagelist) {
params._list = 1;
// message uids for flag updates check
params._uids = $.map(this.message_list.rows, function(row, uid) { return uid; }).join(',');
}
return params;
};
/********************************************************/
/********* helper methods *********/
/********************************************************/
/**
* Quote html entities
*/
this.quote_html = function(str)
{
return String(str).replace(/</g, '<').replace(/>/g, '>').replace(/"/g, '"');
};
// get window.opener.rcmail if available
this.opener = function(deep, filter)
{
var i, win = window.opener;
// catch Error: Permission denied to access property rcmail
try {
if (win && !win.closed) {
// try parent of the opener window, e.g. preview frame
if (deep && (!win.rcmail || win.rcmail.env.framed) && win.parent && win.parent.rcmail)
win = win.parent;
if (win.rcmail && filter)
for (i in filter)
if (win.rcmail.env[i] != filter[i])
return;
return win.rcmail;
}
}
catch (e) {}
};
// check if we're in show mode or if we have a unique selection
// and return the message uid
this.get_single_uid = function()
{
var uid = this.env.uid || (this.message_list ? this.message_list.get_single_selection() : null);
var result = ref.triggerEvent('get_single_uid', { uid: uid });
return result || uid;
};
// same as above but for contacts
this.get_single_cid = function()
{
var cid = this.env.cid || (this.contact_list ? this.contact_list.get_single_selection() : null);
var result = ref.triggerEvent('get_single_cid', { cid: cid });
return result || cid;
};
  // get the IMAP mailbox of the message with the given UID
this.get_message_mailbox = function(uid)
{
var msg = this.env.messages ? this.env.messages[uid] : {};
return msg.mbox || this.env.mailbox;
};
// gets cursor position
this.get_caret_pos = function(obj)
{
if (obj.selectionEnd !== undefined)
return obj.selectionEnd;
return obj.value.length;
};
// moves cursor to specified position
this.set_caret_pos = function(obj, pos)
{
try {
if (obj.setSelectionRange)
obj.setSelectionRange(pos, pos);
}
catch(e) {} // catch Firefox exception if obj is hidden
};
// get selected text from an input field
this.get_input_selection = function(obj)
{
var start = 0, end = 0, normalizedValue = '';
if (typeof obj.selectionStart == "number" && typeof obj.selectionEnd == "number") {
normalizedValue = obj.value;
start = obj.selectionStart;
end = obj.selectionEnd;
}
return {start: start, end: end, text: normalizedValue.substr(start, end-start)};
};
// disable/enable all fields of a form
this.lock_form = function(form, lock)
{
if (!form || !form.elements)
return;
var n, len, elm;
if (lock)
this.disabled_form_elements = [];
for (n=0, len=form.elements.length; n<len; n++) {
elm = form.elements[n];
if (elm.type == 'hidden')
continue;
// remember which elem was disabled before lock
if (lock && elm.disabled)
this.disabled_form_elements.push(elm);
else if (lock || $.inArray(elm, this.disabled_form_elements) < 0)
elm.disabled = lock;
}
};
this.mailto_handler_uri = function()
{
return location.href.split('?')[0] + '?_task=mail&_action=compose&_to=%s';
};
this.register_protocol_handler = function(name)
{
try {
window.navigator.registerProtocolHandler('mailto', this.mailto_handler_uri(), name);
}
catch(e) {
this.display_message(String(e), 'error');
}
};
this.check_protocol_handler = function(name, elem)
{
var nav = window.navigator;
if (!nav || (typeof nav.registerProtocolHandler != 'function')) {
$(elem).addClass('disabled').click(function(){ return false; });
}
else if (typeof nav.isProtocolHandlerRegistered == 'function') {
var status = nav.isProtocolHandlerRegistered('mailto', this.mailto_handler_uri());
if (status)
$(elem).parent().find('.mailtoprotohandler-status').html(status);
}
else {
$(elem).click(function() { ref.register_protocol_handler(name); return false; });
}
};
  // Checks browser capabilities, e.g. PDF support, TIF support
this.browser_capabilities_check = function()
{
if (!this.env.browser_capabilities)
this.env.browser_capabilities = {};
if (this.env.browser_capabilities.pdf === undefined)
this.env.browser_capabilities.pdf = this.pdf_support_check();
if (this.env.browser_capabilities.flash === undefined)
this.env.browser_capabilities.flash = this.flash_support_check();
if (this.env.browser_capabilities.tif === undefined)
this.tif_support_check();
};
// Returns browser capabilities string
this.browser_capabilities = function()
{
if (!this.env.browser_capabilities)
return '';
var n, ret = [];
for (n in this.env.browser_capabilities)
ret.push(n + '=' + this.env.browser_capabilities[n]);
return ret.join();
};
this.tif_support_check = function()
{
var img = new Image();
img.onload = function() { ref.env.browser_capabilities.tif = 1; };
img.onerror = function() { ref.env.browser_capabilities.tif = 0; };
img.src = this.assets_path('program/resources/blank.tif');
};
this.pdf_support_check = function()
{
var plugin = navigator.mimeTypes ? navigator.mimeTypes["application/pdf"] : {},
plugins = navigator.plugins,
len = plugins.length,
regex = /Adobe Reader|PDF|Acrobat/i;
if (plugin && plugin.enabledPlugin)
return 1;
if ('ActiveXObject' in window) {
try {
if (plugin = new ActiveXObject("AcroPDF.PDF"))
return 1;
}
catch (e) {}
try {
if (plugin = new ActiveXObject("PDF.PdfCtrl"))
return 1;
}
catch (e) {}
}
    for (var i=0; i<len; i++) {
plugin = plugins[i];
      if (typeof plugin === 'string') {
if (regex.test(plugin))
return 1;
}
else if (plugin.name && regex.test(plugin.name))
return 1;
}
return 0;
};
this.flash_support_check = function()
{
var plugin = navigator.mimeTypes ? navigator.mimeTypes["application/x-shockwave-flash"] : {};
if (plugin && plugin.enabledPlugin)
return 1;
if ('ActiveXObject' in window) {
try {
if (plugin = new ActiveXObject("ShockwaveFlash.ShockwaveFlash"))
return 1;
}
catch (e) {}
}
return 0;
};
this.assets_path = function(path)
{
if (this.env.assets_path && !path.startsWith(this.env.assets_path)) {
path = this.env.assets_path + path;
}
return path;
};
// Cookie setter
this.set_cookie = function(name, value, expires)
{
setCookie(name, value, expires, this.env.cookie_path, this.env.cookie_domain, this.env.cookie_secure);
};
this.get_local_storage_prefix = function()
{
if (!this.local_storage_prefix)
this.local_storage_prefix = 'roundcube.' + (this.env.user_id || 'anonymous') + '.';
return this.local_storage_prefix;
};
// wrapper for localStorage.getItem(key)
this.local_storage_get_item = function(key, deflt, encrypted)
{
var item, result;
// TODO: add encryption
try {
item = localStorage.getItem(this.get_local_storage_prefix() + key);
result = JSON.parse(item);
}
catch (e) { }
return result || deflt || null;
};
// wrapper for localStorage.setItem(key, data)
this.local_storage_set_item = function(key, data, encrypted)
{
// try/catch to handle no localStorage support, but also error
// in Safari-in-private-browsing-mode where localStorage exists
// but can't be used (#1489996)
try {
// TODO: add encryption
localStorage.setItem(this.get_local_storage_prefix() + key, JSON.stringify(data));
return true;
}
catch (e) {
return false;
}
};
// wrapper for localStorage.removeItem(key)
this.local_storage_remove_item = function(key)
{
try {
localStorage.removeItem(this.get_local_storage_prefix() + key);
return true;
}
catch (e) {
return false;
}
};
this.print_dialog = function()
{
if (bw.safari)
setTimeout('window.print()', 10);
else
window.print();
};
} // end object rcube_webmail
// some static methods
rcube_webmail.long_subject_title = function(elem, indent)
{
if (!elem.title) {
var $elem = $(elem);
if ($elem.width() + (indent || 0) * 15 > $elem.parent().width())
elem.title = rcube_webmail.subject_text(elem);
}
};
rcube_webmail.long_subject_title_ex = function(elem)
{
if (!elem.title) {
var $elem = $(elem),
txt = $.trim($elem.text()),
tmp = $('<span>').text(txt)
.css({'position': 'absolute', 'float': 'left', 'visibility': 'hidden',
'font-size': $elem.css('font-size'), 'font-weight': $elem.css('font-weight')})
.appendTo($('body')),
w = tmp.width();
tmp.remove();
if (w + $('span.branch', $elem).width() * 15 > $elem.width())
elem.title = rcube_webmail.subject_text(elem);
}
};
rcube_webmail.subject_text = function(elem)
{
var t = $(elem).clone();
t.find('.skip-on-drag').remove();
return t.text();
};
rcube_webmail.prototype.get_cookie = getCookie;
// copy event engine prototype
rcube_webmail.prototype.addEventListener = rcube_event_engine.prototype.addEventListener;
rcube_webmail.prototype.removeEventListener = rcube_event_engine.prototype.removeEventListener;
rcube_webmail.prototype.triggerEvent = rcube_event_engine.prototype.triggerEvent;
| apache-2.0 |
ostap0207/remotify.me | remotify.common/src/main/java/messages/device/FileListMessage.java | 410 | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package messages.device;
import entities.File;
/**
*
* @author Ostap
*/
public class FileListMessage extends CommandMessage{
//in
public String dirPath;
//out
public File[] files;
@Override
public int getMessageId() {
return FILELIST_COMMAND_MESSAGE;
}
}
| apache-2.0 |
chonton/apm-client | client/src/test/java/org/honton/chas/datadog/apm/cdi/TracerTest.java | 560 | package org.honton.chas.datadog.apm.cdi;
import org.honton.chas.datadog.apm.SpanBuilder;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
public class TracerTest {
private TracerTestImpl tracer;
@Before
public void setupTracer() {
tracer = new TracerTestImpl();
}
@Test
public void testParentChild() {
SpanBuilder rootBuilder = tracer.createSpan();
Assert.assertNull(rootBuilder.parent());
SpanBuilder childBuilder = tracer.createSpan();
Assert.assertSame(rootBuilder, childBuilder.parent());
}
}
| apache-2.0 |
keepcosmos/beanmother | beanmother-core/src/main/java/io/beanmother/core/mapper/MapperMediator.java | 488 | package io.beanmother.core.mapper;
/**
* Mediator for {@link FixtureMapper}.
*
* Generally, FixtureMapper has a {@link FixtureConverter} and FixtureConverter has a FixtureMapper.
 * MapperMediator solves the circular reference problem between FixtureMapper and FixtureConverter.
*/
public interface MapperMediator {
/**
* Get FixtureMapper
*/
FixtureMapper getFixtureMapper();
/**
* Get FixtureConverter
*/
FixtureConverter getFixtureConverter();
}
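// Illustrative sketch (not part of the original file): a minimal implementation.
// The constructor signatures of the concrete mapper and converter are assumptions
// made for the example; the real classes may be wired differently.
//
//   public class SimpleMapperMediator implements MapperMediator {
//       private final FixtureMapper mapper = new SomeFixtureMapper(this);
//       private final FixtureConverter converter = new SomeFixtureConverter(this);
//
//       @Override public FixtureMapper getFixtureMapper() { return mapper; }
//       @Override public FixtureConverter getFixtureConverter() { return converter; }
//   }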
| apache-2.0 |
ankurkotwal/making-apps-beautiful | XYZReader_CP1/src/main/java/com/example/xyzreader/cp1/ArticleListFragment.java | 9181 | /*
* Copyright 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.xyzreader.cp1;
import android.app.Activity;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.Bundle;
import android.support.v4.app.ListFragment;
import android.support.v4.app.NotificationCompat;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.TextView;
import com.example.xyzreader.cp1.dummy.DummyContent;
/**
* A list fragment representing a list of Articles. This fragment also supports tablet devices by
* allowing list items to be given an 'activated' state upon selection. This helps indicate which
* item is currently being viewed in a {@link ArticleDetailFragment}. <p> Activities containing this
* fragment MUST implement the {@link Callbacks} interface.
*/
public class ArticleListFragment extends ListFragment {
/**
* The serialization (saved instance state) Bundle key representing the activated item position.
* Only used on tablets.
*/
private static final String STATE_ACTIVATED_POSITION = "activated_position";
/**
* The fragment's current callback object, which is notified of list item clicks.
*/
private Callbacks mCallbacks = sDummyCallbacks;
/**
* The current activated item position. Only used on tablets.
*/
private int mActivatedPosition = ListView.INVALID_POSITION;
/**
* A callback interface that all activities containing this fragment must implement. This
* mechanism allows activities to be notified of item selections.
*/
public interface Callbacks {
/**
* Callback for when an item has been selected.
*/
public void onItemSelected(String id);
}
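    // Usage sketch (illustrative, not from the original sample): the hosting
    // activity implements Callbacks and reacts to list selections, e.g.:
    //
    //   public class ArticleListActivity extends FragmentActivity
    //           implements ArticleListFragment.Callbacks {
    //       @Override
    //       public void onItemSelected(String id) {
    //           // open the detail view (fragment or activity) for the given id
    //       }
    //   }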
/**
* A dummy implementation of the {@link Callbacks} interface that does nothing. Used only when
* this fragment is not attached to an activity.
*/
private static Callbacks sDummyCallbacks = new Callbacks() {
@Override
public void onItemSelected(String id) {
}
};
/**
* Mandatory empty constructor for the fragment manager to instantiate the fragment (e.g. upon
* screen orientation changes).
*/
public ArticleListFragment() {
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setListAdapter(new MyAdapter());
setHasOptionsMenu(true);
}
@Override
public void onViewCreated(View view, Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
// Restore the previously serialized activated item position.
if (savedInstanceState != null
&& savedInstanceState.containsKey(STATE_ACTIVATED_POSITION)) {
setActivatedPosition(savedInstanceState
.getInt(STATE_ACTIVATED_POSITION));
}
}
@Override
public void onAttach(Activity activity) {
super.onAttach(activity);
// Activities containing this fragment must implement its callbacks.
if (!(activity instanceof Callbacks)) {
throw new IllegalStateException(
"Activity must implement fragment's callbacks.");
}
mCallbacks = (Callbacks) activity;
}
@Override
public void onDetach() {
super.onDetach();
// Reset the active callbacks interface to the dummy implementation.
mCallbacks = sDummyCallbacks;
}
@Override
public void onListItemClick(ListView listView, View view, int position,
long id) {
super.onListItemClick(listView, view, position, id);
// Notify the active callbacks interface (the activity, if the
// fragment is attached to one) that an item has been selected.
mCallbacks.onItemSelected(DummyContent.ITEMS.get(position).id);
}
@Override
public void onSaveInstanceState(Bundle outState) {
super.onSaveInstanceState(outState);
if (mActivatedPosition != ListView.INVALID_POSITION) {
// Serialize and persist the activated item position.
outState.putInt(STATE_ACTIVATED_POSITION, mActivatedPosition);
}
}
/**
* Turns on activate-on-click mode. When this mode is on, list items will be given the
* 'activated' state when touched.
*/
public void setActivateOnItemClick(boolean activateOnItemClick) {
// When setting CHOICE_MODE_SINGLE, ListView will automatically
// give items the 'activated' state when touched.
getListView().setChoiceMode(
activateOnItemClick ? ListView.CHOICE_MODE_SINGLE
: ListView.CHOICE_MODE_NONE);
}
private void setActivatedPosition(int position) {
if (position == ListView.INVALID_POSITION) {
getListView().setItemChecked(mActivatedPosition, false);
} else {
getListView().setItemChecked(position, true);
}
mActivatedPosition = position;
}
@Override
public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
inflater.inflate(R.menu.main, menu);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
if (item.getItemId() == R.id.refresh) {
DummyContent.DummyItem dummy = DummyContent.ITEMS.get(0);
BitmapFactory.Options options = new BitmapFactory.Options();
options.inSampleSize = 16;
Bitmap photo = BitmapFactory.decodeResource(getResources(), dummy.photoResId);
Notification n = new NotificationCompat.Builder(getActivity())
.setContentTitle("Article published")
.setSmallIcon(R.drawable.stat_icon)
.setDefaults(Notification.DEFAULT_SOUND)
.setLargeIcon(photo)
.setStyle(new NotificationCompat.BigPictureStyle()
.bigPicture(photo)
.setSummaryText(dummy.title))
.setContentText(dummy.title)
.setContentIntent(PendingIntent.getActivity(getActivity(), 0,
new Intent(getActivity(), getActivity().getClass()),
PendingIntent.FLAG_UPDATE_CURRENT))
.setAutoCancel(true)
.addAction(R.drawable.ic_share, "Share",
PendingIntent.getActivity(getActivity(), 0,
new Intent(getActivity(), getActivity().getClass()),
PendingIntent.FLAG_UPDATE_CURRENT))
.build();
NotificationManager nm = (NotificationManager) getActivity().getSystemService(Context.NOTIFICATION_SERVICE);
nm.notify(1, n);
return true;
}
return super.onOptionsItemSelected(item);
}
private class MyAdapter extends BaseAdapter {
@Override
public int getCount() {
return DummyContent.ITEMS.size();
}
@Override
public Object getItem(int position) {
return DummyContent.ITEMS.get(position);
}
@Override
public long getItemId(int position) {
return DummyContent.ITEMS.get(position).id.hashCode();
}
@Override
public View getView(int position, View convertView, ViewGroup container) {
if (convertView == null) {
convertView = LayoutInflater.from(getActivity())
.inflate(R.layout.list_item_article, container, false);
}
final DummyContent.DummyItem item = (DummyContent.DummyItem) getItem(position);
((TextView) convertView.findViewById(R.id.article_title)).setText(item.title);
((TextView) convertView.findViewById(R.id.article_subtitle)).setText(
item.time + " by " + item.author);
Bitmap b = BitmapFactory.decodeResource(getResources(), item.photoResId);
((ImageView) convertView.findViewById(R.id.thumbnail)).setImageBitmap(b);
return convertView;
}
}
}
| apache-2.0 |
dpflower/dptools | DP/Core/DP/Common/ByteHelper.cs | 2552 | using System;
using System.Collections.Generic;
using System.Text;
namespace DP.Common
{
public class ByteHelper
{
/// <summary>
/// 合并字节数组
/// Merges the specified source array.
/// </summary>
/// <param name="sourceArray">The source array.</param>
/// <param name="destinationArray">The destination array.</param>
public static void Merge(ref byte[] sourceArray, byte[] destinationArray)
{
if (destinationArray != null)
{
if (sourceArray == null)
{
sourceArray = new byte[destinationArray.Length];
sourceArray = destinationArray;
}
else
{
lock (sourceArray.SyncRoot)
{
Array.Resize(ref sourceArray, sourceArray.Length + destinationArray.Length);
Array.Copy(destinationArray, 0, sourceArray, sourceArray.Length - destinationArray.Length, destinationArray.Length);
}
}
}
}
/// <summary>
/// 移除字节数组前 指定长度字节数据。
/// Removes the specified received array.
/// </summary>
/// <param name="receivedArray">The received array.</param>
/// <param name="length">The length.</param>
public static void Remove(ref byte[] receivedArray, int length)
{
Remove(ref receivedArray, 0, length);
}
/// <summary>
/// 移除字节数组 指定 位置 指定长的字节数据。
/// Removes the specified received array.
/// </summary>
/// <param name="receivedArray">The received array.</param>
/// <param name="index">The index.</param>
/// <param name="length">The length.</param>
public static void Remove(ref byte[] receivedArray, int index, int length)
{
lock (receivedArray.SyncRoot)
{
if (receivedArray.Length >= length + index)
{
Array.Copy(receivedArray, length + index, receivedArray, index, receivedArray.Length - length - index);
Array.Resize(ref receivedArray, receivedArray.Length - length);
}
else if (receivedArray.Length >= index)
{
Array.Resize(ref receivedArray, index);
}
}
}
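        // Usage sketch (illustrative only, not part of the original class);
        // 'received' and 'frameLength' are hypothetical caller-side variables:
        //
        //   byte[] buffer = null;
        //   ByteHelper.Merge(ref buffer, received);      // append newly received bytes
        //   ByteHelper.Remove(ref buffer, frameLength);  // drop a processed frame from the front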
}
}
| apache-2.0 |
nus-ncl/services-in-one | common/src/main/java/sg/ncl/common/exception/base/ForbiddenException.java | 348 | package sg.ncl.common.exception.base;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.ResponseStatus;
/**
* @author Te Ye
*/
@ResponseStatus(HttpStatus.FORBIDDEN)
public class ForbiddenException extends RuntimeException {
public ForbiddenException() {
super("Permission denied");
}
}
| apache-2.0 |
bavuongco10/react-native-school-board | js/components/detailsMiniPoster/styles.js | 956 | import React from 'react-native';
const { StyleSheet } = React;
export default StyleSheet.create ({
cardContainer: {
flex: 1,
position: 'absolute',
top: 200,
right: 16,
left: 16,
flexDirection: 'row'
},
cardImage: {
height: 184,
width: 135,
borderRadius: 3
},
cardDetails: {
paddingLeft: 10,
flex: 1,
paddingTop: 50
},
cardTitle: {
color: 'black',
fontSize: 19,
fontWeight: '500',
paddingTop: 10
},
cardTagline: {
color: 'black',
fontSize: 15
},
cardGenre: {
flexDirection: 'row'
},
cardGenreItem: {
textAlign: 'left',
fontSize: 11,
marginRight: 5,
color: 'black'
},
cardNumbers: {
flexDirection: 'row',
marginTop: 5
},
cardStar: {
flexDirection: 'row'
},
cardStarRatings: {
marginLeft: 5,
fontSize: 12,
color: 'white'
},
cardRunningHours: {
marginLeft: 5,
fontSize: 12
},
}); | apache-2.0 |
googleapis/google-auth-library-nodejs | samples/adc.js | 1232 | // Copyright 2018, Google, LLC.
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
'use strict';
/**
* Import the GoogleAuth library, and create a new GoogleAuth client.
*/
const {GoogleAuth} = require('google-auth-library');
/**
* Acquire a client, and make a request to an API that's enabled by default.
*/
async function main() {
const auth = new GoogleAuth({
scopes: 'https://www.googleapis.com/auth/cloud-platform',
});
const client = await auth.getClient();
const projectId = await auth.getProjectId();
const url = `https://dns.googleapis.com/dns/v1/projects/${projectId}`;
const res = await client.request({url});
console.log('DNS Info:');
console.log(res.data);
}
main().catch(console.error);
| apache-2.0 |
AzuraCast/AzuraCast | src/Entity/Api/StationRemote.php | 1976 | <?php
declare(strict_types=1);
namespace App\Entity\Api;
use App\Entity\Api\Traits\HasLinks;
use App\Traits\LoadFromParentObject;
use OpenApi\Attributes as OA;
#[OA\Schema(
schema: 'Api_StationRemote',
type: 'object'
)]
class StationRemote
{
use LoadFromParentObject;
use HasLinks;
#[OA\Property]
public ?int $id = null;
#[OA\Property(example: '128kbps MP3')]
public ?string $display_name = null;
#[OA\Property(example: true)]
public bool $is_visible_on_public_pages = true;
#[OA\Property(example: 'icecast')]
public string $type;
#[OA\Property(example: 'true')]
public bool $is_editable = true;
#[OA\Property(example: false)]
public bool $enable_autodj = false;
#[OA\Property(example: 'mp3')]
public ?string $autodj_format = null;
#[OA\Property(example: 128)]
public ?int $autodj_bitrate = null;
#[OA\Property(example: 'https://custom-listen-url.example.com/stream.mp3')]
public ?string $custom_listen_url = null;
#[OA\Property(example: 'https://custom-url.example.com')]
public string $url = '';
#[OA\Property(example: '/stream.mp3')]
public ?string $mount = null;
#[OA\Property(example: 'password')]
public ?string $admin_password = null;
#[OA\Property(example: 8000)]
public ?int $source_port = null;
#[OA\Property(example: '/')]
public ?string $source_mount = null;
#[OA\Property(example: 'source')]
public ?string $source_username = null;
#[OA\Property(example: 'password')]
public ?string $source_password = null;
#[OA\Property(example: false)]
public bool $is_public = false;
#[OA\Property(
description: 'The most recent number of unique listeners.',
example: 10
)]
public int $listeners_unique = 0;
#[OA\Property(
description: 'The most recent number of total (non-unique) listeners.',
example: 12
)]
public int $listeners_total = 0;
}
| apache-2.0 |
aws/aws-sdk-java | aws-java-sdk-pinpointemail/src/main/java/com/amazonaws/services/pinpointemail/model/MailFromAttributes.java | 21772 | /*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.pinpointemail.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* A list of attributes that are associated with a MAIL FROM domain.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/pinpoint-email-2018-07-26/MailFromAttributes" target="_top">AWS
* API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class MailFromAttributes implements Serializable, Cloneable, StructuredPojo {
/**
* <p>
* The name of a domain that an email identity uses as a custom MAIL FROM domain.
* </p>
*/
private String mailFromDomain;
/**
* <p>
* The status of the MAIL FROM domain. This status can have the following values:
* </p>
* <ul>
* <li>
* <p>
* <code>PENDING</code> – Amazon Pinpoint hasn't started searching for the MX record yet.
* </p>
* </li>
* <li>
* <p>
* <code>SUCCESS</code> – Amazon Pinpoint detected the required MX record for the MAIL FROM domain.
* </p>
* </li>
* <li>
* <p>
* <code>FAILED</code> – Amazon Pinpoint can't find the required MX record, or the record no longer exists.
* </p>
* </li>
* <li>
* <p>
* <code>TEMPORARY_FAILURE</code> – A temporary issue occurred, which prevented Amazon Pinpoint from determining the
* status of the MAIL FROM domain.
* </p>
* </li>
* </ul>
*/
private String mailFromDomainStatus;
/**
* <p>
* The action that Amazon Pinpoint to takes if it can't read the required MX record for a custom MAIL FROM domain.
* When you set this value to <code>UseDefaultValue</code>, Amazon Pinpoint uses <i>amazonses.com</i> as the MAIL
* FROM domain. When you set this value to <code>RejectMessage</code>, Amazon Pinpoint returns a
* <code>MailFromDomainNotVerified</code> error, and doesn't attempt to deliver the email.
* </p>
* <p>
* These behaviors are taken when the custom MAIL FROM domain configuration is in the <code>Pending</code>,
* <code>Failed</code>, and <code>TemporaryFailure</code> states.
* </p>
*/
private String behaviorOnMxFailure;
/**
* <p>
* The name of a domain that an email identity uses as a custom MAIL FROM domain.
* </p>
*
* @param mailFromDomain
* The name of a domain that an email identity uses as a custom MAIL FROM domain.
*/
public void setMailFromDomain(String mailFromDomain) {
this.mailFromDomain = mailFromDomain;
}
/**
* <p>
* The name of a domain that an email identity uses as a custom MAIL FROM domain.
* </p>
*
* @return The name of a domain that an email identity uses as a custom MAIL FROM domain.
*/
public String getMailFromDomain() {
return this.mailFromDomain;
}
/**
* <p>
* The name of a domain that an email identity uses as a custom MAIL FROM domain.
* </p>
*
* @param mailFromDomain
* The name of a domain that an email identity uses as a custom MAIL FROM domain.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public MailFromAttributes withMailFromDomain(String mailFromDomain) {
setMailFromDomain(mailFromDomain);
return this;
}
/**
* <p>
* The status of the MAIL FROM domain. This status can have the following values:
* </p>
* <ul>
* <li>
* <p>
* <code>PENDING</code> – Amazon Pinpoint hasn't started searching for the MX record yet.
* </p>
* </li>
* <li>
* <p>
* <code>SUCCESS</code> – Amazon Pinpoint detected the required MX record for the MAIL FROM domain.
* </p>
* </li>
* <li>
* <p>
* <code>FAILED</code> – Amazon Pinpoint can't find the required MX record, or the record no longer exists.
* </p>
* </li>
* <li>
* <p>
* <code>TEMPORARY_FAILURE</code> – A temporary issue occurred, which prevented Amazon Pinpoint from determining the
* status of the MAIL FROM domain.
* </p>
* </li>
* </ul>
*
* @param mailFromDomainStatus
* The status of the MAIL FROM domain. This status can have the following values:</p>
* <ul>
* <li>
* <p>
* <code>PENDING</code> – Amazon Pinpoint hasn't started searching for the MX record yet.
* </p>
* </li>
* <li>
* <p>
* <code>SUCCESS</code> – Amazon Pinpoint detected the required MX record for the MAIL FROM domain.
* </p>
* </li>
* <li>
* <p>
* <code>FAILED</code> – Amazon Pinpoint can't find the required MX record, or the record no longer exists.
* </p>
* </li>
* <li>
* <p>
* <code>TEMPORARY_FAILURE</code> – A temporary issue occurred, which prevented Amazon Pinpoint from
* determining the status of the MAIL FROM domain.
* </p>
* </li>
* </ul>
* @see MailFromDomainStatus
*/
public void setMailFromDomainStatus(String mailFromDomainStatus) {
this.mailFromDomainStatus = mailFromDomainStatus;
}
/**
* <p>
* The status of the MAIL FROM domain. This status can have the following values:
* </p>
* <ul>
* <li>
* <p>
* <code>PENDING</code> – Amazon Pinpoint hasn't started searching for the MX record yet.
* </p>
* </li>
* <li>
* <p>
* <code>SUCCESS</code> – Amazon Pinpoint detected the required MX record for the MAIL FROM domain.
* </p>
* </li>
* <li>
* <p>
* <code>FAILED</code> – Amazon Pinpoint can't find the required MX record, or the record no longer exists.
* </p>
* </li>
* <li>
* <p>
* <code>TEMPORARY_FAILURE</code> – A temporary issue occurred, which prevented Amazon Pinpoint from determining the
* status of the MAIL FROM domain.
* </p>
* </li>
* </ul>
*
* @return The status of the MAIL FROM domain. This status can have the following values:</p>
* <ul>
* <li>
* <p>
* <code>PENDING</code> – Amazon Pinpoint hasn't started searching for the MX record yet.
* </p>
* </li>
* <li>
* <p>
* <code>SUCCESS</code> – Amazon Pinpoint detected the required MX record for the MAIL FROM domain.
* </p>
* </li>
* <li>
* <p>
* <code>FAILED</code> – Amazon Pinpoint can't find the required MX record, or the record no longer exists.
* </p>
* </li>
* <li>
* <p>
* <code>TEMPORARY_FAILURE</code> – A temporary issue occurred, which prevented Amazon Pinpoint from
* determining the status of the MAIL FROM domain.
* </p>
* </li>
* </ul>
* @see MailFromDomainStatus
*/
public String getMailFromDomainStatus() {
return this.mailFromDomainStatus;
}
/**
* <p>
* The status of the MAIL FROM domain. This status can have the following values:
* </p>
* <ul>
* <li>
* <p>
* <code>PENDING</code> – Amazon Pinpoint hasn't started searching for the MX record yet.
* </p>
* </li>
* <li>
* <p>
* <code>SUCCESS</code> – Amazon Pinpoint detected the required MX record for the MAIL FROM domain.
* </p>
* </li>
* <li>
* <p>
* <code>FAILED</code> – Amazon Pinpoint can't find the required MX record, or the record no longer exists.
* </p>
* </li>
* <li>
* <p>
* <code>TEMPORARY_FAILURE</code> – A temporary issue occurred, which prevented Amazon Pinpoint from determining the
* status of the MAIL FROM domain.
* </p>
* </li>
* </ul>
*
* @param mailFromDomainStatus
* The status of the MAIL FROM domain. This status can have the following values:</p>
* <ul>
* <li>
* <p>
* <code>PENDING</code> – Amazon Pinpoint hasn't started searching for the MX record yet.
* </p>
* </li>
* <li>
* <p>
* <code>SUCCESS</code> – Amazon Pinpoint detected the required MX record for the MAIL FROM domain.
* </p>
* </li>
* <li>
* <p>
* <code>FAILED</code> – Amazon Pinpoint can't find the required MX record, or the record no longer exists.
* </p>
* </li>
* <li>
* <p>
* <code>TEMPORARY_FAILURE</code> – A temporary issue occurred, which prevented Amazon Pinpoint from
* determining the status of the MAIL FROM domain.
* </p>
* </li>
* </ul>
* @return Returns a reference to this object so that method calls can be chained together.
* @see MailFromDomainStatus
*/
public MailFromAttributes withMailFromDomainStatus(String mailFromDomainStatus) {
setMailFromDomainStatus(mailFromDomainStatus);
return this;
}
/**
* <p>
* The status of the MAIL FROM domain. This status can have the following values:
* </p>
* <ul>
* <li>
* <p>
* <code>PENDING</code> – Amazon Pinpoint hasn't started searching for the MX record yet.
* </p>
* </li>
* <li>
* <p>
* <code>SUCCESS</code> – Amazon Pinpoint detected the required MX record for the MAIL FROM domain.
* </p>
* </li>
* <li>
* <p>
* <code>FAILED</code> – Amazon Pinpoint can't find the required MX record, or the record no longer exists.
* </p>
* </li>
* <li>
* <p>
* <code>TEMPORARY_FAILURE</code> – A temporary issue occurred, which prevented Amazon Pinpoint from determining the
* status of the MAIL FROM domain.
* </p>
* </li>
* </ul>
*
* @param mailFromDomainStatus
* The status of the MAIL FROM domain. This status can have the following values:</p>
* <ul>
* <li>
* <p>
* <code>PENDING</code> – Amazon Pinpoint hasn't started searching for the MX record yet.
* </p>
* </li>
* <li>
* <p>
* <code>SUCCESS</code> – Amazon Pinpoint detected the required MX record for the MAIL FROM domain.
* </p>
* </li>
* <li>
* <p>
* <code>FAILED</code> – Amazon Pinpoint can't find the required MX record, or the record no longer exists.
* </p>
* </li>
* <li>
* <p>
* <code>TEMPORARY_FAILURE</code> – A temporary issue occurred, which prevented Amazon Pinpoint from
* determining the status of the MAIL FROM domain.
* </p>
* </li>
* </ul>
* @return Returns a reference to this object so that method calls can be chained together.
* @see MailFromDomainStatus
*/
public MailFromAttributes withMailFromDomainStatus(MailFromDomainStatus mailFromDomainStatus) {
this.mailFromDomainStatus = mailFromDomainStatus.toString();
return this;
}
/**
* <p>
* The action that Amazon Pinpoint takes if it can't read the required MX record for a custom MAIL FROM domain.
* When you set this value to <code>UseDefaultValue</code>, Amazon Pinpoint uses <i>amazonses.com</i> as the MAIL
* FROM domain. When you set this value to <code>RejectMessage</code>, Amazon Pinpoint returns a
* <code>MailFromDomainNotVerified</code> error, and doesn't attempt to deliver the email.
* </p>
* <p>
* These behaviors are taken when the custom MAIL FROM domain configuration is in the <code>Pending</code>,
* <code>Failed</code>, and <code>TemporaryFailure</code> states.
* </p>
*
* @param behaviorOnMxFailure
* The action that Amazon Pinpoint takes if it can't read the required MX record for a custom MAIL FROM
* domain. When you set this value to <code>UseDefaultValue</code>, Amazon Pinpoint uses <i>amazonses.com</i>
* as the MAIL FROM domain. When you set this value to <code>RejectMessage</code>, Amazon Pinpoint returns a
* <code>MailFromDomainNotVerified</code> error, and doesn't attempt to deliver the email.</p>
* <p>
* These behaviors are taken when the custom MAIL FROM domain configuration is in the <code>Pending</code>,
* <code>Failed</code>, and <code>TemporaryFailure</code> states.
* @see BehaviorOnMxFailure
*/
public void setBehaviorOnMxFailure(String behaviorOnMxFailure) {
this.behaviorOnMxFailure = behaviorOnMxFailure;
}
/**
* <p>
* The action that Amazon Pinpoint takes if it can't read the required MX record for a custom MAIL FROM domain.
* When you set this value to <code>UseDefaultValue</code>, Amazon Pinpoint uses <i>amazonses.com</i> as the MAIL
* FROM domain. When you set this value to <code>RejectMessage</code>, Amazon Pinpoint returns a
* <code>MailFromDomainNotVerified</code> error, and doesn't attempt to deliver the email.
* </p>
* <p>
* These behaviors are taken when the custom MAIL FROM domain configuration is in the <code>Pending</code>,
* <code>Failed</code>, and <code>TemporaryFailure</code> states.
* </p>
*
* @return The action that Amazon Pinpoint takes if it can't read the required MX record for a custom MAIL FROM
* domain. When you set this value to <code>UseDefaultValue</code>, Amazon Pinpoint uses
* <i>amazonses.com</i> as the MAIL FROM domain. When you set this value to <code>RejectMessage</code>,
* Amazon Pinpoint returns a <code>MailFromDomainNotVerified</code> error, and doesn't attempt to deliver
* the email.</p>
* <p>
* These behaviors are taken when the custom MAIL FROM domain configuration is in the <code>Pending</code>,
* <code>Failed</code>, and <code>TemporaryFailure</code> states.
* @see BehaviorOnMxFailure
*/
public String getBehaviorOnMxFailure() {
return this.behaviorOnMxFailure;
}
/**
* <p>
* The action that Amazon Pinpoint takes if it can't read the required MX record for a custom MAIL FROM domain.
* When you set this value to <code>UseDefaultValue</code>, Amazon Pinpoint uses <i>amazonses.com</i> as the MAIL
* FROM domain. When you set this value to <code>RejectMessage</code>, Amazon Pinpoint returns a
* <code>MailFromDomainNotVerified</code> error, and doesn't attempt to deliver the email.
* </p>
* <p>
* These behaviors are taken when the custom MAIL FROM domain configuration is in the <code>Pending</code>,
* <code>Failed</code>, and <code>TemporaryFailure</code> states.
* </p>
*
* @param behaviorOnMxFailure
* The action that Amazon Pinpoint takes if it can't read the required MX record for a custom MAIL FROM
* domain. When you set this value to <code>UseDefaultValue</code>, Amazon Pinpoint uses <i>amazonses.com</i>
* as the MAIL FROM domain. When you set this value to <code>RejectMessage</code>, Amazon Pinpoint returns a
* <code>MailFromDomainNotVerified</code> error, and doesn't attempt to deliver the email.</p>
* <p>
* These behaviors are taken when the custom MAIL FROM domain configuration is in the <code>Pending</code>,
* <code>Failed</code>, and <code>TemporaryFailure</code> states.
* @return Returns a reference to this object so that method calls can be chained together.
* @see BehaviorOnMxFailure
*/
public MailFromAttributes withBehaviorOnMxFailure(String behaviorOnMxFailure) {
setBehaviorOnMxFailure(behaviorOnMxFailure);
return this;
}
/**
* <p>
* The action that Amazon Pinpoint takes if it can't read the required MX record for a custom MAIL FROM domain.
* When you set this value to <code>UseDefaultValue</code>, Amazon Pinpoint uses <i>amazonses.com</i> as the MAIL
* FROM domain. When you set this value to <code>RejectMessage</code>, Amazon Pinpoint returns a
* <code>MailFromDomainNotVerified</code> error, and doesn't attempt to deliver the email.
* </p>
* <p>
* These behaviors are taken when the custom MAIL FROM domain configuration is in the <code>Pending</code>,
* <code>Failed</code>, and <code>TemporaryFailure</code> states.
* </p>
*
* @param behaviorOnMxFailure
* The action that Amazon Pinpoint takes if it can't read the required MX record for a custom MAIL FROM
* domain. When you set this value to <code>UseDefaultValue</code>, Amazon Pinpoint uses <i>amazonses.com</i>
* as the MAIL FROM domain. When you set this value to <code>RejectMessage</code>, Amazon Pinpoint returns a
* <code>MailFromDomainNotVerified</code> error, and doesn't attempt to deliver the email.</p>
* <p>
* These behaviors are taken when the custom MAIL FROM domain configuration is in the <code>Pending</code>,
* <code>Failed</code>, and <code>TemporaryFailure</code> states.
* @return Returns a reference to this object so that method calls can be chained together.
* @see BehaviorOnMxFailure
*/
public MailFromAttributes withBehaviorOnMxFailure(BehaviorOnMxFailure behaviorOnMxFailure) {
this.behaviorOnMxFailure = behaviorOnMxFailure.toString();
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getMailFromDomain() != null)
sb.append("MailFromDomain: ").append(getMailFromDomain()).append(",");
if (getMailFromDomainStatus() != null)
sb.append("MailFromDomainStatus: ").append(getMailFromDomainStatus()).append(",");
if (getBehaviorOnMxFailure() != null)
sb.append("BehaviorOnMxFailure: ").append(getBehaviorOnMxFailure());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof MailFromAttributes == false)
return false;
MailFromAttributes other = (MailFromAttributes) obj;
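// each "^" check treats a field that is null on exactly one side as a mismatch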
if (other.getMailFromDomain() == null ^ this.getMailFromDomain() == null)
return false;
if (other.getMailFromDomain() != null && other.getMailFromDomain().equals(this.getMailFromDomain()) == false)
return false;
if (other.getMailFromDomainStatus() == null ^ this.getMailFromDomainStatus() == null)
return false;
if (other.getMailFromDomainStatus() != null && other.getMailFromDomainStatus().equals(this.getMailFromDomainStatus()) == false)
return false;
if (other.getBehaviorOnMxFailure() == null ^ this.getBehaviorOnMxFailure() == null)
return false;
if (other.getBehaviorOnMxFailure() != null && other.getBehaviorOnMxFailure().equals(this.getBehaviorOnMxFailure()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getMailFromDomain() == null) ? 0 : getMailFromDomain().hashCode());
hashCode = prime * hashCode + ((getMailFromDomainStatus() == null) ? 0 : getMailFromDomainStatus().hashCode());
hashCode = prime * hashCode + ((getBehaviorOnMxFailure() == null) ? 0 : getBehaviorOnMxFailure().hashCode());
return hashCode;
}
@Override
public MailFromAttributes clone() {
try {
return (MailFromAttributes) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
@com.amazonaws.annotation.SdkInternalApi
@Override
public void marshall(ProtocolMarshaller protocolMarshaller) {
com.amazonaws.services.pinpointemail.model.transform.MailFromAttributesMarshaller.getInstance().marshall(this, protocolMarshaller);
}
}
| apache-2.0 |
erdi/grails-core | grails-hibernate/src/main/groovy/org/codehaus/groovy/grails/orm/hibernate/support/HibernateDialectDetectorFactoryBean.java | 3873 | /*
* Copyright 2004-2005 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.codehaus.groovy.grails.orm.hibernate.support;
import groovy.util.ConfigObject;
import org.codehaus.groovy.grails.commons.GrailsApplication;
import org.codehaus.groovy.grails.orm.hibernate.exceptions.CouldNotDetermineHibernateDialectException;
import org.codehaus.groovy.grails.plugins.support.aware.GrailsApplicationAware;
import org.hibernate.HibernateException;
import org.hibernate.dialect.Dialect;
import org.hibernate.dialect.resolver.DialectFactory;
import org.springframework.beans.factory.FactoryBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.jdbc.datasource.DataSourceUtils;
import org.springframework.jdbc.support.JdbcUtils;
import org.springframework.jdbc.support.MetaDataAccessException;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;
import javax.sql.DataSource;
import java.sql.Connection;
import java.util.Properties;
/**
* @author Steven Devijver
*/
public class HibernateDialectDetectorFactoryBean implements FactoryBean<String>, InitializingBean, GrailsApplicationAware {
private DataSource dataSource;
private Properties vendorNameDialectMappings;
private String hibernateDialectClassName;
private Dialect hibernateDialect;
private GrailsApplication grailsApplication;
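/*
 * Wiring sketch (assumed bean names, not taken from this repository): as a FactoryBean this
 * class is declared in the Spring context so that dependent beans receive the detected
 * dialect class name as a plain String (the result of getObject()), e.g.
 *   <bean id="dialectDetector" class="...HibernateDialectDetectorFactoryBean">
 *     <property name="dataSource" ref="dataSource"/>
 *     <property name="vendorNameDialectMappings" ref="vendorNameDialectMappings"/>
 *   </bean>
 */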
public void setDataSource(DataSource dataSource) {
this.dataSource = dataSource;
}
public void setVendorNameDialectMappings(Properties mappings) {
vendorNameDialectMappings = mappings;
}
public String getObject() {
return hibernateDialectClassName;
}
public Class<String> getObjectType() {
return String.class;
}
public boolean isSingleton() {
return true;
}
public void afterPropertiesSet() throws MetaDataAccessException {
Assert.notNull(dataSource, "Data source is not set!");
Assert.notNull(vendorNameDialectMappings, "Vendor name/dialect mappings are not set!");
Connection connection = null;
String dbName = (String)JdbcUtils.extractDatabaseMetaData(dataSource, "getDatabaseProductName");
try {
connection = DataSourceUtils.getConnection(dataSource);
try {
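// try Hibernate's DialectFactory against the live connection first (passing any
// properties from the Grails config); on HibernateException fall back to the
// vendor-name/dialect mapping below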
ConfigObject config = grailsApplication != null ? grailsApplication.getConfig() : null;
Properties properties = config != null ? config.toProperties() : new Properties();
hibernateDialect = DialectFactory.buildDialect(properties, connection);
hibernateDialectClassName = hibernateDialect.getClass().getName();
} catch (HibernateException e) {
hibernateDialectClassName = vendorNameDialectMappings.getProperty(dbName);
}
if (!StringUtils.hasText(hibernateDialectClassName)) {
throw new CouldNotDetermineHibernateDialectException(
"Could not determine Hibernate dialect for database name [" + dbName + "]!");
}
} finally {
DataSourceUtils.releaseConnection(connection,dataSource);
}
}
public void setGrailsApplication(GrailsApplication grailsApplication) {
this.grailsApplication = grailsApplication;
}
}
| apache-2.0 |
WestCoastInformatics/OTF-Mapping-Service | webapp/src/main/webapp/js/widgets/projectDetails/projectDetails.js | 59010 | 'use strict';
angular.module('mapProjectApp.widgets.projectDetails', [ 'adf.provider' ]).config(
function(dashboardProvider) {
dashboardProvider.widget('projectDetails', {
title : 'Project Details',
description : 'Displays details for a specific map project.',
templateUrl : 'js/widgets/projectDetails/projectDetails.html',
controller : 'projectDetailsCtrl',
resolve : {
data : function(projectDetailsService, config) {
if (!config.terminology) {
return 'SNOMEDCT';
}
return config.terminology;
}
},
edit : {}
});
}).service('projectDetailsService', function($q, $http) {
return {
get : function(terminology) {
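// returns a promise: resolved with the terminology metadata (tagged with the
// requested terminology name), rejected on an empty payload or HTTP error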
var deferred = $q.defer();
$http({
url : root_metadata + 'metadata/terminology/id/' + terminology,
dataType : 'json',
method : 'GET',
headers : {
'Content-Type' : 'application/json'
}
}).success(function(data) {
if (data) {
data.terminology = terminology;
deferred.resolve(data);
} else {
deferred.reject();
}
}).error(function(data, status, headers, config) {
$rootScope.handleHttpError(data, status, headers, config);
deferred.reject();
});
return deferred.promise;
}
};
})
// filter out users/entities who are already on the selected list
// since this is used to return list of potential entities to add for
// selection
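// usage sketch (illustrative; the real markup lives in projectDetails.html):
//   ng-repeat="user in mapUsers | elementFilter:focusProject.mapLead"
// would list only the users not already assigned as leads (matched by name)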
.filter('elementFilter', function() {
return function(elementList, elementsToCheck) {
var out = new Array();
if (elementList == undefined || elementsToCheck == undefined)
return out;
for (var i = 0; i < elementList.length; i++) {
var found = false;
for (var j = 0; j < elementsToCheck.length; j++) {
if (elementList[i].name === elementsToCheck[j].name) {
found = true;
break;
}
}
if (found == false)
out.push(elementList[i]);
}
return out;
};
})
.controller(
'projectDetailsCtrl',
[
'$scope',
'$http',
'$sce',
'$rootScope',
'$location',
'localStorageService',
'$q',
function($scope, $http, $sce, $rootScope, $location, localStorageService, $q) {
$scope.page = 'project';
$scope.currentRole = localStorageService.get('currentRole');
$scope.currentUser = localStorageService.get('currentUser');
$scope.focusProject = localStorageService.get('focusProject');
$scope.mapProjects = localStorageService.get('mapProjects');
$scope.mapUsers = localStorageService.get('mapUsers');
$scope.focusProjectBeforeChanges = {};
$scope.focusProjectBeforeChanges = angular.copy($scope.focusProject);
$scope.editModeEnabled = false;
$scope.reportDefinitions = new Array();
$scope.qaCheckDefinitions = new Array();
$scope.allowableMapTypes = [ {
displayName : 'Extended Map',
name : 'ExtendedMap'
}, {
displayName : 'Complex Map',
name : 'ComplexMap'
}, {
displayName : 'Simple Map',
name : 'SimpleMap'
} ];
$scope.allowableMapRelationStyles = [ {
displayName : 'Map Category Style',
name : 'MAP_CATEGORY_STYLE'
}, {
displayName : 'Relationship Style',
name : 'RELATIONSHIP_STYLE'
} ];
$scope.allowableWorkflowTypes = [ {
displayName : 'Conflict workflow',
name : 'CONFLICT_PROJECT'
}, {
displayName : 'Review workflow',
name : 'REVIEW_PROJECT'
}, {
displayName : 'Simple workflow',
name : 'SIMPLE_PATH'
}, {
displayName : 'Legacy workflow',
name : 'LEGACY_PATH'
} ];
// watch for focus project change
$scope.$on('localStorageModule.notification.setFocusProject', function(event, parameters) {
$scope.focusProject = parameters.focusProject;
});
$scope.userToken = localStorageService.get('userToken');
$scope.$watch([ 'focusProject', 'userToken' ], function() {
if ($scope.focusProject != null && $scope.userToken != null) {
// n/a
}
$http.defaults.headers.common.Authorization = $scope.userToken;
$scope.go();
});
$scope.go = function() {
console.debug('Formatting project details');
$http({
url : root_mapping + 'advice/advices',
dataType : 'json',
method : 'GET',
headers : {
'Content-Type' : 'application/json'
}
}).success(function(data) {
console.debug(' advices = ' + data);
$scope.mapAdvices = data.mapAdvice;
localStorageService.add('mapAdvices', data.mapAdvice);
$rootScope.$broadcast('localStorageModule.notification.setMapAdvices', {
key : 'mapAdvices',
mapAdvices : data.mapAdvices
});
$scope.allowableMapAdvices = localStorageService.get('mapAdvices');
}).error(function(data, status, headers, config) {
$rootScope.handleHttpError(data, status, headers, config);
});
$http({
url : root_mapping + 'relation/relations',
dataType : 'json',
method : 'GET',
headers : {
'Content-Type' : 'application/json'
}
}).success(function(data) {
$scope.mapRelations = data.mapRelation;
localStorageService.add('mapRelations', data.mapRelation);
$rootScope.$broadcast('localStorageModule.notification.setMapRelations', {
key : 'mapRelations',
mapRelations : data.mapRelations
});
$scope.allowableMapRelations = localStorageService.get('mapRelations');
}).error(function(data, status, headers, config) {
$rootScope.handleHttpError(data, status, headers, config);
});
$http({
url : root_mapping + 'principle/principles',
dataType : 'json',
method : 'GET',
headers : {
'Content-Type' : 'application/json'
}
}).success(function(data) {
$scope.mapPrinciples = data.mapPrinciple;
localStorageService.add('mapPrinciples', data.mapPrinciple);
$rootScope.$broadcast('localStorageModule.notification.setMapPrinciples', {
key : 'mapPrinciples',
mapPrinciples : data.mapPrinciples
});
$scope.allowableMapPrinciples = localStorageService.get('mapPrinciples');
}).error(function(data, status, headers, config) {
$rootScope.handleHttpError(data, status, headers, config);
});
$http({
url : root_mapping + 'ageRange/ageRanges',
dataType : 'json',
method : 'GET',
headers : {
'Content-Type' : 'application/json'
}
}).success(function(data) {
$scope.mapAgeRanges = data.mapAgeRange;
localStorageService.add('mapAgeRanges', data.mapAgeRange);
$rootScope.$broadcast('localStorageModule.notification.setMapAgeRanges', {
key : 'mapAgeRanges',
mapAgeRanges : data.mapAgeRanges
});
$scope.allowableMapAgeRanges = localStorageService.get('mapAgeRanges');
}).error(function(data, status, headers, config) {
$rootScope.handleHttpError(data, status, headers, config);
});
$http({
url : root_reporting + 'definition/definitions',
dataType : 'json',
method : 'GET',
headers : {
'Content-Type' : 'application/json'
}
}).success(function(data) {
// $scope.reportDefinitions
// = data.reportDefinition;
for (var i = 0; i < data.reportDefinition.length; i++) {
$scope.reportDefinitions.push(data.reportDefinition[i]);
}
localStorageService.add('reportDefinitions', $scope.reportDefinitions);
$rootScope.$broadcast('localStorageModule.notification.setMapRelations', {
key : 'reportDefinitions',
reportDefinitions : $scope.reportDefinitions
});
$scope.allowableReportDefinitions = localStorageService.get('reportDefinitions');
}).error(function(data, status, headers, config) {
$rootScope.handleHttpError(data, status, headers, config);
});
$http({
url : root_reporting + 'qaCheckDefinition/qaCheckDefinitions',
dataType : 'json',
method : 'GET',
headers : {
'Content-Type' : 'application/json'
}
}).success(function(data) {
for (var i = 0; i < data.reportDefinition.length; i++) {
$scope.qaCheckDefinitions.push(data.reportDefinition[i]);
}
localStorageService.add('qaCheckDefinitions', data.reportDefinition);
$rootScope.$broadcast('localStorageModule.notification.setQACheckDefinitions', {
key : 'qaCheckDefinitions',
qaCheckDefinitions : $scope.qaCheckDefinitions
});
$scope.allowableQACheckDefinitions = localStorageService.get('qaCheckDefinitions');
}).error(function(data, status, headers, config) {
$rootScope.handleHttpError(data, status, headers, config);
});
// find selected elements from the allowable
// lists
$scope.selectedMapType = $scope.getSelectedMapType();
$scope.selectedMapRelationStyle = $scope.getSelectedMapRelationStyle();
$scope.selectedWorkflowType = $scope.getSelectedWorkflowType();
/*
* // determine if this project has a principles document if
* ($scope.focusProject.destinationTerminology == 'ICD10') {
* $scope.focusProject.mapPrincipleDocumentPath = 'doc/';
* $scope.focusProject.mapPrincipleDocument =
* 'ICD10_MappingPersonnelHandbook.docx';
* $scope.focusProject.mapPrincipleDocumentName = 'Mapping Personnel
* Handbook'; } else { $scope.focusProject.mapPrincipleDocument = null; }
*/
// set the scope maps
$scope.scopeMap = {};
$scope.scopeExcludedMap = {};
// set pagination variables
$scope.pageSize = 5;
$scope.maxSize = 5;
$scope.getPagedAdvices(1);
$scope.getPagedRelations(1);
$scope.getPagedPrinciples(1);
$scope.getPagedScopeConcepts(1);
$scope.getPagedScopeExcludedConcepts(1);
$scope.getPagedReportDefinitions(1);
// need to initialize selected qa check definitions since they
// are persisted in the
// report definition array
$scope.focusProject.qaCheckDefinition = new Array();
for (var i = 0; i < $scope.focusProject.reportDefinition.length; i++) {
if ($scope.focusProject.reportDefinition[i].qacheck == true)
$scope.focusProject.qaCheckDefinition.push($scope.focusProject.reportDefinition[i]);
}
$scope.getPagedQACheckDefinitions(1);
$scope.orderProp = 'id';
};
$scope.goMapRecords = function() {
// redirect page
var path = '/project/records';
$location.path(path);
};
// function to return trusted html code (for tooltip
// content)
$scope.to_trusted = function(html_code) {
return $sce.trustAsHtml(html_code);
};
// /////////////////////////////////////////////////////////////
// Functions to display and filter advices and
// principles
// NOTE: This is a workaround due to pagination
// issues
// /////////////////////////////////////////////////////////////
// get paged functions
// - sorts (by id) filtered elements
// - counts number of filtered elements
// - returns artificial page via slice
$scope.getPagedAdvices = function(page, filter) {
$scope.adviceFilter = filter;
$scope.pagedAdvice = $scope.sortByKey($scope.focusProject.mapAdvice, 'name').filter(
containsAdviceFilter);
$scope.pagedAdviceCount = $scope.pagedAdvice.length;
$scope.pagedAdvice = $scope.pagedAdvice.slice((page - 1) * $scope.pageSize, page
* $scope.pageSize);
};
$scope.getPagedRelations = function(page, filter) {
$scope.relationFilter = filter;
$scope.pagedRelation = $scope.sortByKey($scope.focusProject.mapRelation, 'name').filter(
containsRelationFilter);
$scope.pagedRelationCount = $scope.pagedRelation.length;
$scope.pagedRelation = $scope.pagedRelation.slice((page - 1) * $scope.pageSize, page
* $scope.pageSize);
};
$scope.getPagedPrinciples = function(page, filter) {
$scope.principleFilter = filter;
$scope.pagedPrinciple = $scope.sortByKey($scope.focusProject.mapPrinciple, 'principleId')
.filter(containsPrincipleFilter);
$scope.pagedPrincipleCount = $scope.pagedPrinciple.length;
$scope.pagedPrinciple = $scope.pagedPrinciple.slice((page - 1) * $scope.pageSize, page
* $scope.pageSize);
};
$scope.getPagedReportDefinitions = function(page, filter) {
$scope.reportDefinitionFilter = filter;
$scope.pagedReportDefinition = $scope.sortByKey($scope.focusProject.reportDefinition,
'name').filter(containsReportDefinitionFilter);
// remove qa check report definitions from the list; they have
// their own section
// iterate backwards so that splicing does not skip adjacent entries
for (var j = $scope.pagedReportDefinition.length - 1; j >= 0; j--) {
if ($scope.pagedReportDefinition[j].qacheck == true) {
$scope.pagedReportDefinition.splice(j, 1);
}
}
$scope.pagedReportDefinitionCount = $scope.pagedReportDefinition.length;
$scope.pagedReportDefinition = $scope.pagedReportDefinition.slice((page - 1)
* $scope.pageSize, page * $scope.pageSize);
};
$scope.getPagedQACheckDefinitions = function(page, filter) {
$scope.qaCheckDefinitionFilter = filter;
$scope.pagedQACheckDefinition = $scope.sortByKey($scope.focusProject.qaCheckDefinition,
'name').filter(containsQACheckDefinitionFilter);
$scope.pagedQACheckDefinitionCount = $scope.pagedQACheckDefinition.length;
$scope.pagedQACheckDefinition = $scope.pagedQACheckDefinition.slice((page - 1)
* $scope.pageSize, page * $scope.pageSize);
};
$scope.getPagedScopeConcepts = function(page) {
console.debug('Called paged scope concept for page ' + page);
// construct a paging/filtering/sorting object
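// (a page value of -1 produces startIndex/maxResults of -1, i.e. an unpaged request)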
var pfsParameterObj = {
'startIndex' : page == -1 ? -1 : (page - 1) * $scope.pageSize,
'maxResults' : page == -1 ? -1 : $scope.pageSize,
'sortField' : '',
'queryRestriction' : ''
};
$rootScope.glassPane++;
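// glassPane counts in-flight requests; it is decremented again in both the
// success and error handlers below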
$http({
url : root_mapping + 'project/id/' + $scope.focusProject.id + '/scopeConcepts',
dataType : 'json',
data : pfsParameterObj,
method : 'POST',
headers : {
'Content-Type' : 'application/json'
}
}).success(function(data) {
console.debug(' scope concepts = ' + data);
$rootScope.glassPane--;
$scope.pagedScopeConcept = data.searchResult;
$scope.pagedScopeConceptCount = data.totalCount;
}).error(function(data, status, headers, config) {
$rootScope.glassPane--;
$rootScope.handleHttpError(data, status, headers, config);
});
};
$scope.getPagedScopeExcludedConcepts = function(page, filter) {
console.debug('Called paged scope concept for page ' + page);
// construct a paging/filtering/sorting object
var pfsParameterObj = {
'startIndex' : page == -1 ? -1 : (page - 1) * $scope.pageSize,
'maxResults' : page == -1 ? -1 : $scope.pageSize,
'sortField' : '',
'queryRestriction' : ''
};
$rootScope.glassPane++;
$http({
url : root_mapping + 'project/id/' + $scope.focusProject.id + '/scopeExcludedConcepts',
dataType : 'json',
data : pfsParameterObj,
method : 'POST',
headers : {
'Content-Type' : 'application/json'
}
}).success(function(data) {
console.debug(' scope excluded = ' + data);
$rootScope.glassPane--;
$scope.pagedScopeExcludedConcept = data.searchResult;
$scope.pagedScopeExcludedConceptCount = data.totalCount;
}).error(function(data, status, headers, config) {
$rootScope.glassPane--;
$rootScope.handleHttpError(data, status, headers, config);
});
};
// functions to reset the filter and retrieve
// unfiltered results
$scope.resetAdviceFilter = function() {
$scope.adviceFilter = '';
$scope.getPagedAdvices(1);
};
$scope.resetRelationFilter = function() {
$scope.relationFilter = '';
$scope.getPagedRelations(1);
};
$scope.resetPrincipleFilter = function() {
$scope.principleFilter = '';
$scope.getPagedPrinciples(1);
};
$scope.resetScopeConceptFilter = function() {
$scope.scopeConceptFilter = '';
$scope.getPagedScopeConcepts(1);
};
$scope.resetReportDefinitionFilter = function() {
$scope.reportDefinitionFilter = '';
$scope.getPagedReportDefinitions(1);
};
$scope.resetQACheckDefinitionFilter = function() {
$scope.qaCheckDefinitionFilter = '';
$scope.getPagedQACheckDefinitions(1);
};
$scope.resetScopeExcludedConceptFilter = function() {
$scope.scopeExcludedConceptFilter = '';
$scope.getPagedScopeExcludedConcepts(1);
};
// element-specific functions for filtering
// do not want to search id or objectId
function containsAdviceFilter(element) {
// check if advice filter is empty
if ($scope.adviceFilter === '' || $scope.adviceFilter == null)
return true;
// otherwise check if upper-case advice filter
// matches upper-case element name or detail
if (element.detail.toString().toUpperCase().indexOf(
$scope.adviceFilter.toString().toUpperCase()) != -1)
return true;
if (element.name.toString().toUpperCase().indexOf(
$scope.adviceFilter.toString().toUpperCase()) != -1)
return true;
// otherwise return false
return false;
}
function containsRelationFilter(element) {
// check if relation filter is empty
if ($scope.relationFilter === '' || $scope.relationFilter == null)
return true;
// otherwise check if upper-case relation filter
// matches upper-case element name or detail
if (element.terminologyId.toString().toUpperCase().indexOf(
$scope.relationFilter.toString().toUpperCase()) != -1)
return true;
if (element.name.toString().toUpperCase().indexOf(
$scope.relationFilter.toString().toUpperCase()) != -1)
return true;
// otherwise return false
return false;
}
function containsPrincipleFilter(element) {
// check if principle filter is empty
if ($scope.principleFilter === '' || $scope.principleFilter == null)
return true;
// otherwise check if upper-case principle
// filter matches upper-case element name or
// detail
if (element.principleId.toString().toUpperCase().indexOf(
$scope.principleFilter.toString().toUpperCase()) != -1)
return true;
// if (
// element.detail.toString().toUpperCase().indexOf(
// $scope.principleFilter.toString().toUpperCase())
// != -1) return true;
if (element.name.toString().toUpperCase().indexOf(
$scope.principleFilter.toString().toUpperCase()) != -1)
return true;
if (element.sectionRef != null
&& element.sectionRef.toString().toUpperCase().indexOf(
$scope.principleFilter.toString().toUpperCase()) != -1)
return true;
// otherwise return false
return false;
}
function containsScopeConceptFilter(element) {
// check if scopeConcept filter is empty
if ($scope.scopeConceptFilter === '' || $scope.scopeConceptFilter == null)
return true;
// otherwise check if upper-case scopeConcept
// filter matches upper-case element name or
// detail
if (element.scopeConceptId.toString().toUpperCase().indexOf(
$scope.scopeConceptFilter.toString().toUpperCase()) != -1)
return true;
if (element.name.toString().toUpperCase().indexOf(
$scope.scopeConceptFilter.toString().toUpperCase()) != -1)
return true;
// otherwise return false
return false;
}
function containsScopeExcludedConceptFilter(element) {
// check if scopeConcept filter is empty
if ($scope.scopeExcludesConceptFilter === '' || $scope.scopeExcludesConceptFilter == null)
return true;
// otherwise check if upper-case scopeConcept
// filter matches upper-case element name or
// detail
if (element.scopeExcludesConceptId.toString().toUpperCase().indexOf(
$scope.scopeExcludesConceptFilter.toString().toUpperCase()) != -1)
return true;
if (element.name.toString().toUpperCase().indexOf(
$scope.scopeExcludesConceptFilter.toString().toUpperCase()) != -1)
return true;
// otherwise return false
return false;
}
function containsReportDefinitionFilter(element) {
// check if reportDefinition filter is empty
if ($scope.reportDefinitionFilter === '' || $scope.reportDefinitionFilter == null)
return true;
// otherwise check if upper-case
// reportDefinition filter
// matches upper-case element name or detail
if (element.name.toString().toUpperCase().indexOf(
$scope.reportDefinitionFilter.toString().toUpperCase()) != -1)
return true;
// otherwise return false
return false;
}
function containsQACheckDefinitionFilter(element) {
// check if qaCheckDefinition filter is empty
if ($scope.qaCheckDefinitionFilter === '' || $scope.qaCheckDefinitionFilter == null)
return true;
// otherwise check if upper-case
// qaCheckDefinition filter
// matches upper-case element name or detail
if (element.name.toString().toUpperCase().indexOf(
$scope.qaCheckDefinitionFilter.toString().toUpperCase()) != -1)
return true;
// otherwise return false
return false;
}
// helper function to sort a JSON array by field
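// e.g. sortByKey($scope.focusProject.mapAdvice, 'name'); values that parse as
// integers are compared as-is, all other values case-insensitively as strings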
$scope.sortByKey = function sortById(array, key) {
return array.sort(function(a, b) {
var x, y;
// if a number
if (!isNaN(parseInt(a[key]))) {
x = a[key];
y = b[key];
} else {
x = new String(a[key]).toUpperCase();
y = new String(b[key]).toUpperCase();
}
if (x < y)
return -1;
if (x > y)
return 1;
return 0;
});
};
// function to change project from the header
$scope.changeFocusProject = function(mapProject) {
$scope.focusProject = mapProject;
// update and broadcast the new focus project
localStorageService.add('focusProject', $scope.focusProject);
$rootScope.$broadcast('localStorageModule.notification.setFocusProject', {
key : 'focusProject',
focusProject : $scope.focusProject
});
// update the user preferences
$scope.preferences.lastMapProjectId = $scope.focusProject.id;
localStorageService.add('preferences', $scope.preferences);
$rootScope.$broadcast('localStorageModule.notification.setUserPreferences', {
key : 'userPreferences',
userPreferences : $scope.preferences
});
};
$scope.goToHelp = function() {
var path;
if ($scope.page != 'mainDashboard') {
path = 'help/' + $scope.page + 'Help.html';
} else {
path = 'help/' + $scope.currentRole + 'DashboardHelp.html';
}
// redirect page
$location.path(path);
};
$scope.isEmailViewable = function(email) {
if (email.indexOf('ihtsdo.org') > -1) {
return true;
} else {
return false;
}
};
$scope.toggleEditMode = function() {
if ($scope.editModeEnabled == true) {
$scope.editModeEnabled = false;
$scope.updateMapProject();
} else {
$scope.editModeEnabled = true;
}
};
$scope.getSelectedMapRelationStyle = function() {
for (var j = 0; j < $scope.allowableMapRelationStyles.length; j++) {
if ($scope.focusProject.mapRelationStyle === $scope.allowableMapRelationStyles[j].name)
return $scope.allowableMapRelationStyles[j];
}
return null;
};
$scope.selectMapRelationStyle = function() {
// update and broadcast the updated focus
// project
$scope.focusProject.mapRelationStyle = $scope.selectedMapRelationStyle.name;
localStorageService.set('focusProject', $scope.focusProject);
$rootScope.$broadcast('localStorageModule.notification.setFocusProject', {
key : 'focusProject',
focusProject : $scope.focusProject
});
$scope.updateMapProject();
};
$scope.getSelectedMapType = function() {
for (var j = 0; j < $scope.allowableMapTypes.length; j++) {
if ($scope.focusProject.mapRefsetPattern === $scope.allowableMapTypes[j].name)
return $scope.allowableMapTypes[j];
}
return null;
};
$scope.selectMapType = function() {
$scope.focusProject.mapType = $scope.selectedMapType.name;
// update and broadcast the updated focus
// project
localStorageService.set('focusProject', $scope.focusProject);
$rootScope.$broadcast('localStorageModule.notification.setFocusProject', {
key : 'focusProject',
focusProject : $scope.focusProject
});
$scope.updateMapProject();
};
$scope.getSelectedWorkflowType = function() {
for (var j = 0; j < $scope.allowableWorkflowTypes.length; j++) {
if ($scope.focusProject.workflowType === $scope.allowableWorkflowTypes[j].name)
return $scope.allowableWorkflowTypes[j];
}
return null;
};
$scope.selectWorkflowType = function() {
$scope.focusProject.workflowType = $scope.selectedWorkflowType.name;
// update and broadcast the updated focus
// project
localStorageService.set('focusProject', $scope.focusProject);
$rootScope.$broadcast('localStorageModule.notification.setFocusProject', {
key : 'focusProject',
focusProject : $scope.focusProject
});
$scope.updateMapProject();
};
$scope.deleteLead = function(lead) {
for (var j = 0; j < $scope.focusProject.mapLead.length; j++) {
if (lead.userName === $scope.focusProject.mapLead[j].userName) {
$scope.focusProject.mapLead.splice(j, 1);
}
}
// update and broadcast the updated focus
// project
localStorageService.set('focusProject', $scope.focusProject);
$rootScope.$broadcast('localStorageModule.notification.setFocusProject', {
key : 'focusProject',
focusProject : $scope.focusProject
});
};
$scope.addLead = function(user) {
for (var i = 0; i < $scope.focusProject.mapSpecialist.length; i++) {
if ($scope.focusProject.mapSpecialist[i].name == user.name) {
confirm('User ' + user.name
+ ' is already a Map Specialist.\nUser cannot have more than one role.');
return;
}
}
$scope.focusProject.mapLead.push(user);
// update and broadcast the updated focus
// project
localStorageService.set('focusProject', $scope.focusProject);
$rootScope.$broadcast('localStorageModule.notification.setFocusProject', {
key : 'focusProject',
focusProject : $scope.focusProject
});
};
$scope.addMapUserToMapProjectWithRole = function(user, role) {
// check role
if (role != 'Specialist' && role != 'Lead') {
return;
}
// check user non-null
if (user == null || user == undefined) {
return;
}
// check user valid
if (user.userName == null || user.userName == undefined) {
alert('You must specify a login name.');
return;
}
if (user.name == null || user.name == undefined) {
alert('You must specify the user\'s name');
return;
}
if (user.email == null || user.email == undefined) {
alert('You must specify the user\'s email. Enter "none" or similar text if unknown');
return;
}
// by default the application role is Viewer
user.applicationRole = 'VIEWER';
// add the user
$http({
url : root_mapping + 'user/add',
dataType : 'json',
data : user,
method : 'PUT',
headers : {
'Content-Type' : 'application/json'
}
}).success(function(data) {
// copy the newly updated object with id
var user = data;
$scope.mapUsers.push(user);
localStorageService.add('mapUsers', $scope.mapUsers);
// add this user to the focus project
if (role == 'Specialist')
$scope.focusProject.mapSpecialist.push(user);
else if (role == 'Lead')
$scope.focusProject.mapLead.push(user);
// update the project
$scope.updateMapProject();
}).error(function(data, status, headers, config) {
$rootScope.handleHttpError(data, status, headers, config);
});
};
$scope.deleteSpecialist = function(specialist) {
for (var j = 0; j < $scope.focusProject.mapSpecialist.length; j++) {
if (specialist.userName === $scope.focusProject.mapSpecialist[j].userName) {
$scope.focusProject.mapSpecialist.splice(j, 1);
}
}
// update and broadcast the updated focus
// project
localStorageService.set('focusProject', $scope.focusProject);
$rootScope.$broadcast('localStorageModule.notification.setFocusProject', {
key : 'focusProject',
focusProject : $scope.focusProject
});
};
$scope.addSpecialist = function(user) {
for (var i = 0; i < $scope.focusProject.mapLead.length; i++) {
if ($scope.focusProject.mapLead[i].name == user.name) {
confirm('User ' + user.name
+ ' is already a Map Lead.\nUser cannot have more than one role.');
return;
}
}
$scope.focusProject.mapSpecialist.push(user);
// update and broadcast the updated focus
// project
localStorageService.set('focusProject', $scope.focusProject);
$rootScope.$broadcast('localStorageModule.notification.setFocusProject', {
key : 'focusProject',
focusProject : $scope.focusProject
});
};
$scope.deleteAdvice = function(advice) {
for (var j = 0; j < $scope.focusProject.mapAdvice.length; j++) {
if (advice.name === $scope.focusProject.mapAdvice[j].name) {
$scope.focusProject.mapAdvice.splice(j, 1);
}
}
// update and broadcast the updated focus
// project
localStorageService.set('focusProject', $scope.focusProject);
$rootScope.$broadcast('localStorageModule.notification.setFocusProject', {
key : 'focusProject',
focusProject : $scope.focusProject
});
$scope.pageAdvice = 1;
$scope.resetAdviceFilter();
$scope.updateMapProject();
};
$scope.addAdvice = function(advice) {
$scope.focusProject.mapAdvice.push(advice);
// update and broadcast the updated focus
// project
localStorageService.set('focusProject', $scope.focusProject);
$rootScope.$broadcast('localStorageModule.notification.setFocusProject', {
key : 'focusProject',
focusProject : $scope.focusProject
});
$scope.resetAdviceFilter();
$scope.updateMapProject();
};
$scope.updateAdvice = function(advice) {
console.debug('in updateAdvice');
$http({
url : root_mapping + 'advice/update',
dataType : 'json',
data : advice,
method : 'POST',
headers : {
'Content-Type' : 'application/json'
}
}).success(function(data) {
console.debug(' success');
}).error(function(data, status, headers, config) {
$scope.recordError = 'Error updating map advice.';
$rootScope.handleHttpError(data, status, headers, config);
}).then(function(data) {
$http({
url : root_mapping + 'advice/advices',
dataType : 'json',
method : 'GET',
headers : {
'Content-Type' : 'application/json'
}
}).success(function(data) {
$scope.mapAdvices = data.mapAdvice;
for (var j = 0; j < $scope.focusProject.mapAdvice.length; j++) {
if (advice.id === $scope.focusProject.mapAdvice[j].id) {
$scope.focusProject.mapAdvice[j] = advice;
}
}
localStorageService.add('mapAdvices', data.mapAdvice);
$rootScope.$broadcast('localStorageModule.notification.setMapAdvices', {
key : 'mapAdvices',
mapAdvices : data.mapAdvices
});
$scope.allowableMapAdvices = localStorageService.get('mapAdvices');
// update and broadcast the updated focus project
localStorageService.add('focusProject', $scope.focusProject);
$rootScope.$broadcast('localStorageModule.notification.setFocusProject', {
key : 'focusProject',
focusProject : $scope.focusProject
});
$scope.updateMapProject();
}).error(function(data, status, headers, config) {
$rootScope.handleHttpError(data, status, headers, config);
});
});
};
$scope.submitNewMapAdvice = function(mapAdviceName, mapAdviceDetail, allowableForNullTarget,
isComputed) {
console.debug('in submitNewMapAdvice');
var obj = {
'name' : mapAdviceName,
'detail' : mapAdviceDetail,
'isAllowableForNullTarget' : allowableForNullTarget,
'isComputed' : isComputed
};
$rootScope.glassPane++;
$http({
url : root_mapping + 'advice/add',
dataType : 'json',
data : obj,
method : 'PUT',
headers : {
'Content-Type' : 'application/json'
}
}).success(function(data) {
$rootScope.glassPane--;
console.debug(' success');
// add the new advice to the available list
$scope.mapAdvices.push(data);
$scope.allowableMapAdvices.push(data);
// add the new advice to the current project
$scope.focusProject.mapAdvice.push(data);
// update the map project
$scope.updateMapProject().then(function(response) {
$scope.resetAdviceFilter();
});
}).error(function(data, status, headers, config) {
$rootScope.glassPane--;
$rootScope.handleHttpError(data, status, headers, config);
});
};
$scope.deleteRelation = function(relation) {
for (var j = 0; j < $scope.focusProject.mapRelation.length; j++) {
if (relation.name === $scope.focusProject.mapRelation[j].name) {
$scope.focusProject.mapRelation.splice(j, 1);
}
}
// update and broadcast the updated focus
// project
localStorageService.set('focusProject', $scope.focusProject);
$rootScope.$broadcast('localStorageModule.notification.setFocusProject', {
key : 'focusProject',
focusProject : $scope.focusProject
});
$scope.resetRelationFilter();
$scope.updateMapProject();
};
$scope.addRelation = function(relation) {
$scope.focusProject.mapRelation.push(relation);
// update and broadcast the updated focus
// project
localStorageService.set('focusProject', $scope.focusProject);
$rootScope.$broadcast('localStorageModule.notification.setFocusProject', {
key : 'focusProject',
focusProject : $scope.focusProject
});
$scope.resetRelationFilter();
$scope.updateMapProject();
};
$scope.submitNewMapRelation = function(relation) {
console.debug('in submitNewMapRelation for application');
$rootScope.glassPane++;
$http({
url : root_mapping + 'relation/add',
dataType : 'json',
data : relation,
method : 'PUT',
headers : {
'Content-Type' : 'application/json'
}
})
.success(function(data) {
$rootScope.glassPane--;
console.debug(' success');
// add new relations to the sets
$scope.mapRelations.push(data);
$scope.allowableMapRelations.push(data);
// add the new advice to the current project
$scope.focusProject.mapRelation.push(data);
// update the map project
$scope.updateMapProject().then(function() {
$scope.resetRelationFilter();
});
}).error(function(data, status, headers, config) {
$rootScope.glassPane--;
$rootScope.handleHttpError(data, status, headers, config);
});
};
$scope.deleteReportDefinition = function(reportDefinition) {
for (var j = 0; j < $scope.focusProject.reportDefinition.length; j++) {
if (reportDefinition.name === $scope.focusProject.reportDefinition[j].name) {
$scope.focusProject.reportDefinition.splice(j, 1);
}
}
// update and broadcast the updated focus
// project
localStorageService.set('focusProject', $scope.focusProject);
$rootScope.$broadcast('localStorageModule.notification.setFocusProject', {
key : 'focusProject',
focusProject : $scope.focusProject
});
$scope.resetReportDefinitionFilter();
$scope.updateMapProject();
};
$scope.deleteQACheckDefinition = function(qaCheckDefinition) {
// check the local qaCheck set
for (var j = 0; j < $scope.focusProject.qaCheckDefinition.length; j++) {
if (qaCheckDefinition.id === $scope.focusProject.qaCheckDefinition[j].id) {
$scope.focusProject.qaCheckDefinition.splice(j, 1);
}
}
// also need to remove from the project, qa definitions are in
// reportDefinitions collection
for (var j = 0; j < $scope.focusProject.reportDefinition.length; j++) {
if (qaCheckDefinition.id === $scope.focusProject.reportDefinition[j].id) {
$scope.focusProject.reportDefinition.splice(j, 1);
}
}
// update and broadcast the updated focus
// project
localStorageService.set('focusProject', $scope.focusProject);
$rootScope.$broadcast('localStorageModule.notification.setFocusProject', {
key : 'focusProject',
focusProject : $scope.focusProject
});
$scope.resetQACheckDefinitionFilter();
$scope.updateMapProject();
};
$scope.addReportDefinition = function(reportDefinition) {
$scope.focusProject.reportDefinition.push(reportDefinition);
console.debug($scope.focusProject.reportDefinition);
// update and broadcast the updated focus
// project
localStorageService.set('focusProject', $scope.focusProject);
$rootScope.$broadcast('localStorageModule.notification.setFocusProject', {
key : 'focusProject',
focusProject : $scope.focusProject
});
$scope.resetReportDefinitionFilter();
$scope.updateMapProject();
};
$scope.addQACheckDefinition = function(qaCheckDefinition) {
$scope.focusProject.qaCheckDefinition.push(qaCheckDefinition);
$scope.focusProject.reportDefinition.push(qaCheckDefinition);
// update and broadcast the updated focus
// project
localStorageService.set('focusProject', $scope.focusProject);
$rootScope.$broadcast('localStorageModule.notification.setFocusProject', {
key : 'focusProject',
focusProject : $scope.focusProject
});
$scope.resetQACheckDefinitionFilter();
$scope.updateMapProject();
};
$scope.deletePrinciple = function(principle) {
for (var j = 0; j < $scope.focusProject.mapPrinciple.length; j++) {
if (principle.name === $scope.focusProject.mapPrinciple[j].name) {
$scope.focusProject.mapPrinciple.splice(j, 1);
}
}
// update and broadcast the updated focus
// project
localStorageService.set('focusProject', $scope.focusProject);
$rootScope.$broadcast('localStorageModule.notification.setFocusProject', {
key : 'focusProject',
focusProject : $scope.focusProject
});
$scope.resetPrincipleFilter();
$scope.updateMapProject();
};
$scope.addPrinciple = function(principle) {
$scope.focusProject.mapPrinciple.push(principle);
// update and broadcast the updated focus
// project
localStorageService.set('focusProject', $scope.focusProject);
$rootScope.$broadcast('localStorageModule.notification.setFocusProject', {
key : 'focusProject',
focusProject : $scope.focusProject
});
$scope.resetPrincipleFilter();
$scope.updateMapProject();
};
$scope.updatePrinciple = function(principle) {
console.debug('in updatePrinciple', principle);
$http({
url : root_mapping + 'principle/update',
dataType : 'json',
data : principle,
method : 'POST',
headers : {
'Content-Type' : 'application/json'
}
}).success(function(data) {
console.debug(' success');
}).error(function(data, status, headers, config) {
$scope.recordError = 'Error updating map principle.';
$rootScope.handleHttpError(data, status, headers, config);
}).then(function(data) {
$http({
url : root_mapping + 'principle/principles',
dataType : 'json',
method : 'GET',
headers : {
'Content-Type' : 'application/json'
}
}).success(function(data) {
$scope.mapPrinciples = data.mapPrinciple;
for (var j = 0; j < $scope.focusProject.mapPrinciple.length; j++) {
if (principle.id === $scope.focusProject.mapPrinciple[j].id) {
$scope.focusProject.mapPrinciple[j] = principle;
}
}
localStorageService.add('mapPrinciples', data.mapPrinciple);
$rootScope.$broadcast('localStorageModule.notification.setMapPrinciples', {
key : 'mapPrinciples',
mapPrinciples : data.mapPrinciples
});
$scope.allowableMapPrinciples = localStorageService.get('mapPrinciples');
// update and broadcast the updated focus project
localStorageService.add('focusProject', $scope.focusProject);
$rootScope.$broadcast('localStorageModule.notification.setFocusProject', {
key : 'focusProject',
focusProject : $scope.focusProject
});
$scope.updateMapProject();
}).error(function(data, status, headers, config) {
$rootScope.handleHttpError(data, status, headers, config);
});
});
};
$scope.submitNewMapPrinciple = function(principle) {
console.debug('in submitNewMapPrinciple', principle);
$rootScope.glassPane++;
$http({
url : root_mapping + 'principle/add',
dataType : 'json',
data : principle,
method : 'PUT',
headers : {
'Content-Type' : 'application/json'
}
}).success(function(data) {
$rootScope.glassPane--;
console.debug(' success');
// add principle to the local sets
$scope.mapPrinciples.push(data);
$scope.allowableMapPrinciples.push(data);
// add the new advice to the current project
$scope.focusProject.mapPrinciple.push(data);
// update the map project
$scope.updateMapProject().then(function() {
$scope.resetPrincipleFilter();
});
}).error(function(data, status, headers, config) {
$rootScope.glassPane--;
$rootScope.handleHttpError(data, status, headers, config);
});
};
$scope.deleteAgeRange = function(ageRange) {
for (var j = 0; j < $scope.focusProject.mapAgeRange.length; j++) {
if (ageRange.name === $scope.focusProject.mapAgeRange[j].name) {
$scope.focusProject.mapAgeRange.splice(j, 1);
}
}
// update and broadcast the updated focus
// project
localStorageService.set('focusProject', $scope.focusProject);
$rootScope.$broadcast('localStorageModule.notification.setFocusProject', {
key : 'focusProject',
focusProject : $scope.focusProject
});
$scope.updateMapProject();
};
$scope.addAgeRange = function(ageRange) {
$scope.focusProject.mapAgeRange.push(ageRange);
// update and broadcast the updated focus
// project
localStorageService.set('focusProject', $scope.focusProject);
$rootScope.$broadcast('localStorageModule.notification.setFocusProject', {
key : 'focusProject',
focusProject : $scope.focusProject
});
$scope.updateMapProject();
};
$scope.submitNewMapAgeRange = function(ageRange) {
console.debug('in submitNewMapAgeRange', ageRange);
$rootScope.glassPane++;
$http({
url : root_mapping + 'ageRange/add',
dataType : 'json',
data : ageRange,
method : 'PUT',
headers : {
'Content-Type' : 'application/json'
}
}).success(function(data) {
$rootScope.glassPane--;
console.debug(' success');
// add principle to the local sets
$scope.mapAgeRanges.push(data);
$scope.allowableMapAgeRanges.push(data);
// add the new advice to the current project
$scope.focusProject.mapAgeRange.push(data);
// update the map project
$scope.updateMapProject().then(function() {
$scope.resetAgeRangeFilter();
});
}).error(function(data, status, headers, config) {
$rootScope.glassPane--;
$rootScope.handleHttpError(data, status, headers, config);
});
};
$scope.submitNewErrorMessage = function(message) {
var localErrorMessages = $scope.focusProject.errorMessages;
localErrorMessages.push(message);
$scope.focusProject.errorMessages = localErrorMessages;
$scope.updateMapProject();
};
$scope.deleteErrorMessage = function(message) {
for (var j = 0; j < $scope.focusProject.errorMessages.length; j++) {
if (message === $scope.focusProject.errorMessages[j]) {
$scope.focusProject.errorMessages.splice(j, 1);
}
}
// update and broadcast the updated focus
// project
localStorageService.set('focusProject', $scope.focusProject);
$rootScope.$broadcast('localStorageModule.notification.setFocusProject', {
key : 'focusProject',
focusProject : $scope.focusProject
});
$scope.updateMapProject();
};
// ////////////////////////////////////////////
// Scope Include Concept Addition/Removal
// ////////////////////////////////////////////
// remove a single concept (using the [x] button)
$scope.removeScopeIncludedConcept = function(scopeConcept, currentPage) {
console.debug('in removeScopeIncludedConcept', scopeConcept, currentPage);
$rootScope.glassPane++;
$http({
url : root_mapping + 'project/id/' + $scope.focusProject.id + '/scopeConcept/remove',
data : scopeConcept.terminologyId,
method : 'POST',
headers : {
'Content-Type' : 'text/plain'
}
}).success(function(data) {
console.debug(' success');
$rootScope.glassPane--;
// re-page the scope concepts
$scope.getPagedScopeConcepts(currentPage);
}).error(function(data, status, headers, config) {
$rootScope.glassPane--;
$rootScope.handleHttpError(data, status, headers, config);
});
};
      // remove a single/batch of included concepts
$scope.removeScopeIncludedConcepts = function(scopeConceptsUnsplit) {
console.debug('in removeScopeIncludedConcepts', scopeConceptsUnsplit);
$rootScope.glassPane++;
var scopeConcepts = scopeConceptsUnsplit.split(/,\s*|\s+/);
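        // splits on commas and/or whitespace, e.g. '12345, 67890 11111' -> ['12345', '67890', '11111']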
$http({
url : root_mapping + 'project/id/' + $scope.focusProject.id + '/scopeConcepts/remove',
dataType : 'json',
data : scopeConcepts,
method : 'POST',
headers : {
'Content-Type' : 'application/json'
}
}).success(function(data) {
console.debug(' success');
$rootScope.glassPane--;
$scope.getPagedScopeConcepts(1);
}).error(function(data, status, headers, config) {
$rootScope.glassPane--;
$rootScope.handleHttpError(data, status, headers, config);
});
};
// submit a single/batch of concepts for addition
$scope.submitNewScopeIncludedConcepts = function(scopeConceptsUnsplit) {
console.debug('in submitNewScopeIncludedConcept', scopeConceptsUnsplit);
$rootScope.glassPane++;
var scopeConcepts = scopeConceptsUnsplit.split(/,\s*|\s+/);
$http({
url : root_mapping + 'project/id/' + $scope.focusProject.id + '/scopeConcepts/add',
dataType : 'json',
data : scopeConcepts,
method : 'POST',
headers : {
'Content-Type' : 'application/json'
}
}).success(function(data) {
$rootScope.glassPane--;
console.debug(' success');
$scope.resetScopeConceptFilter();
      }).error(function(data, status, headers, config) {
        $rootScope.glassPane--;
        $rootScope.handleHttpError(data, status, headers, config);
      });
};
// ////////////////////////////////////////////
// Scope Exclude Concept Addition/Removal
// ////////////////////////////////////////////
// remove a single concept (using the [x] button)
$scope.removeScopeExcludedConcept = function(scopeExcludedConcept, currentPage) {
console.debug('in removeScopeExcludedConcept', scopeExcludedConcept, currentPage);
$rootScope.glassPane++;
$http(
{
url : root_mapping + 'project/id/' + $scope.focusProject.id
+ '/scopeExcludedConcept/remove',
dataType : 'json',
data : scopeExcludedConcept.terminologyId,
method : 'POST',
headers : {
'Content-Type' : 'application/json'
}
}).success(function(data) {
console.debug(' success');
$rootScope.glassPane--;
$scope.getPagedScopeExcludedConcepts(currentPage);
      }).error(function(data, status, headers, config) {
        $rootScope.glassPane--;
        $rootScope.handleHttpError(data, status, headers, config);
      });
};
// remove a single/batch of excluded concepts
$scope.removeScopeExcludedConcepts = function(scopeExcludedConceptsUnsplit) {
console.debug('in removeScopeExcludedConcepts', scopeExcludedConceptsUnsplit);
$rootScope.glassPane++;
var scopeExcludedConcepts = scopeExcludedConceptsUnsplit.split(/,\s*|\s+/);
$http(
{
url : root_mapping + 'project/id/' + $scope.focusProject.id
+ '/scopeExcludedConcepts/remove',
dataType : 'json',
data : scopeExcludedConcepts,
method : 'POST',
headers : {
'Content-Type' : 'application/json'
}
}).success(function() {
$rootScope.glassPane--;
console.debug(' success');
$scope.getPagedScopeExcludedConcepts(1);
}).error(function(data, status, headers, config) {
$rootScope.glassPane--;
$rootScope.handleHttpError(data, status, headers, config);
});
};
// submit a single/batch of concepts for addition
$scope.submitNewScopeExcludedConcepts = function(scopeExcludedConceptsUnsplit) {
console.debug('in submitNewScopeExcludedConcept', scopeExcludedConceptsUnsplit);
$rootScope.glassPane++;
var scopeExcludedConcepts = scopeExcludedConceptsUnsplit.split(/,\s*|\s+/);
$http(
{
url : root_mapping + 'project/id/' + $scope.focusProject.id
+ '/scopeExcludedConcepts/add',
dataType : 'json',
data : scopeExcludedConcepts,
method : 'POST',
headers : {
'Content-Type' : 'application/json'
}
}).success(function(data) {
console.debug(' success');
$rootScope.glassPane--;
$scope.getPagedScopeExcludedConcepts(1);
}).error(function(data, status, headers, config) {
$rootScope.glassPane--;
$rootScope.handleHttpError(data, status, headers, config);
});
};
$scope.updateCachedMapProjects = function() {
// first, update focus project
localStorageService.add('focusProject', $scope.focusProject);
var mapProjects = $scope.mapProjects;
// replace the focus project in the list
for (var i = 0; i < mapProjects.length; i++) {
if (mapProjects[i].id == $scope.focusProject.id) {
mapProjects[i] = $scope.focusProject;
}
}
// set the map projects in the cache
localStorageService.add('mapProjects', mapProjects);
};
// /////////////////////////////////////
// Model reset, clears all filters
// /////////////////////////////////////
$scope.resetModel = function() {
angular.copy($scope.focusProjectBeforeChanges, $scope.focusProject);
$scope.resetAdviceFilter();
$scope.resetRelationFilter();
$scope.resetPrincipleFilter();
$scope.resetScopeConceptFilter();
$scope.resetScopeExcludedConceptFilter();
$scope.resetReportDefinitionFilter();
};
/**
* Function to update a map project via REST call and update the cached
* projects
*/
$scope.updateMapProject = function() {
console.debug('Update map project');
// first, add the modified project to the cache
localStorageService.add('focusProject', $scope.focusProject);
var deferred = $q.defer();
$rootScope.glassPane++;
$http({
url : root_mapping + 'project/update',
dataType : 'json',
data : $scope.focusProject,
method : 'POST',
headers : {
'Content-Type' : 'application/json'
}
}).success(function(data) {
console.debug(' success');
$rootScope.glassPane--;
// update the cached project list
for (var i = 0; i < $scope.mapProjects.length; i++) {
        if ($scope.mapProjects[i].id == data.id) {
$scope.mapProjects[i] = data;
}
}
localStorageService.add('mapProjects', $scope.mapProjects);
deferred.resolve();
}).error(function(data, status, headers, config) {
$rootScope.glassPane--;
$rootScope.handleHttpError(data, status, headers, config);
deferred.reject();
});
return deferred.promise;
};
$scope.onFileSelect = function($files) {
// $files: an array of files selected, each file
// has name, size, and type.
for (var i = 0; i < $files.length; i++) {
var $file = $files[i];
$rootScope.glassPane++;
// $upload.upload({
// url : root_mapping + 'upload/' + $scope.focusProject.id,
// file : $file,
// progress : function(e) {
// // n/a
// }
// }).error(function(data, status, headers, config) {
// // file is not uploaded successfully
// $scope.recordError = 'Error updating map project.';
// $rootScope.handleHttpError(data, status, headers, config);
// $rootScope.glassPane--;
// }).success(function(data) {
// // file is uploaded successfully
// confirm('The mapping principle handbook file upload is complete.');
// $rootScope.glassPane--;
// $scope.focusProject.mapPrincipleSourceDocument = data.substring(1, data.length - 1);
// });
}
};
} ]);
| apache-2.0 |
lenicliu/spring-boot | spring-boot-samples/spring-boot-sample-hypermedia-jpa/src/test/java/sample/hypermedia/jpa/SampleHypermediaJpaApplicationTests.java | 1130 | /*
* Copyright 2012-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package sample.hypermedia.jpa;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.boot.test.context.SpringApplicationConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.context.web.WebAppConfiguration;
@RunWith(SpringRunner.class)
@SpringApplicationConfiguration(SampleHypermediaJpaApplication.class)
@WebAppConfiguration
public class SampleHypermediaJpaApplicationTests {
@Test
public void contextLoads() {
}
}
| apache-2.0 |
qobel/esoguproject | spring-framework/spring-web/src/test/java/org/springframework/http/HttpHeadersTests.java | 9305 | /*
* Copyright 2002-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.http;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.Charset;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collections;
import java.util.EnumSet;
import java.util.GregorianCalendar;
import java.util.List;
import java.util.Locale;
import java.util.TimeZone;
import org.hamcrest.Matchers;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import org.junit.Before;
import org.junit.Test;
/**
* Unit tests for {@link org.springframework.http.HttpHeaders}.
* @author Arjen Poutsma
*/
public class HttpHeadersTests {
private HttpHeaders headers;
@Before
public void setUp() {
headers = new HttpHeaders();
}
@Test
public void accept() {
MediaType mediaType1 = new MediaType("text", "html");
MediaType mediaType2 = new MediaType("text", "plain");
List<MediaType> mediaTypes = new ArrayList<MediaType>(2);
mediaTypes.add(mediaType1);
mediaTypes.add(mediaType2);
headers.setAccept(mediaTypes);
assertEquals("Invalid Accept header", mediaTypes, headers.getAccept());
assertEquals("Invalid Accept header", "text/html, text/plain", headers.getFirst("Accept"));
}
@Test // SPR-9655
public void acceptiPlanet() {
headers.add("Accept", "text/html");
headers.add("Accept", "text/plain");
List<MediaType> expected = Arrays.asList(new MediaType("text", "html"), new MediaType("text", "plain"));
assertEquals("Invalid Accept header", expected, headers.getAccept());
}
@Test
public void acceptCharsets() {
Charset charset1 = Charset.forName("UTF-8");
Charset charset2 = Charset.forName("ISO-8859-1");
List<Charset> charsets = new ArrayList<Charset>(2);
charsets.add(charset1);
charsets.add(charset2);
headers.setAcceptCharset(charsets);
assertEquals("Invalid Accept header", charsets, headers.getAcceptCharset());
assertEquals("Invalid Accept header", "utf-8, iso-8859-1", headers.getFirst("Accept-Charset"));
}
@Test
public void acceptCharsetWildcard() {
headers.set("Accept-Charset", "ISO-8859-1,utf-8;q=0.7,*;q=0.7");
assertEquals("Invalid Accept header", Arrays.asList(Charset.forName("ISO-8859-1"), Charset.forName("UTF-8")),
headers.getAcceptCharset());
}
@Test
public void allow() {
EnumSet<HttpMethod> methods = EnumSet.of(HttpMethod.GET, HttpMethod.POST);
headers.setAllow(methods);
assertEquals("Invalid Allow header", methods, headers.getAllow());
assertEquals("Invalid Allow header", "GET,POST", headers.getFirst("Allow"));
}
@Test
public void contentLength() {
long length = 42L;
headers.setContentLength(length);
assertEquals("Invalid Content-Length header", length, headers.getContentLength());
assertEquals("Invalid Content-Length header", "42", headers.getFirst("Content-Length"));
}
@Test
public void contentType() {
MediaType contentType = new MediaType("text", "html", Charset.forName("UTF-8"));
headers.setContentType(contentType);
assertEquals("Invalid Content-Type header", contentType, headers.getContentType());
assertEquals("Invalid Content-Type header", "text/html;charset=UTF-8", headers.getFirst("Content-Type"));
}
@Test
public void location() throws URISyntaxException {
URI location = new URI("http://www.example.com/hotels");
headers.setLocation(location);
assertEquals("Invalid Location header", location, headers.getLocation());
assertEquals("Invalid Location header", "http://www.example.com/hotels", headers.getFirst("Location"));
}
@Test
public void eTag() {
String eTag = "\"v2.6\"";
headers.setETag(eTag);
assertEquals("Invalid ETag header", eTag, headers.getETag());
assertEquals("Invalid ETag header", "\"v2.6\"", headers.getFirst("ETag"));
}
@Test(expected = IllegalArgumentException.class)
public void illegalETag() {
String eTag = "v2.6";
headers.setETag(eTag);
assertEquals("Invalid ETag header", eTag, headers.getETag());
assertEquals("Invalid ETag header", "\"v2.6\"", headers.getFirst("ETag"));
}
@Test
public void ifNoneMatch() {
String ifNoneMatch = "\"v2.6\"";
headers.setIfNoneMatch(ifNoneMatch);
assertEquals("Invalid If-None-Match header", ifNoneMatch, headers.getIfNoneMatch().get(0));
assertEquals("Invalid If-None-Match header", "\"v2.6\"", headers.getFirst("If-None-Match"));
}
@Test
public void ifNoneMatchList() {
String ifNoneMatch1 = "\"v2.6\"";
String ifNoneMatch2 = "\"v2.7\"";
List<String> ifNoneMatchList = new ArrayList<String>(2);
ifNoneMatchList.add(ifNoneMatch1);
ifNoneMatchList.add(ifNoneMatch2);
headers.setIfNoneMatch(ifNoneMatchList);
assertEquals("Invalid If-None-Match header", ifNoneMatchList, headers.getIfNoneMatch());
assertEquals("Invalid If-None-Match header", "\"v2.6\", \"v2.7\"", headers.getFirst("If-None-Match"));
}
@Test
public void date() {
Calendar calendar = new GregorianCalendar(2008, 11, 18, 11, 20);
calendar.setTimeZone(TimeZone.getTimeZone("CET"));
long date = calendar.getTimeInMillis();
headers.setDate(date);
assertEquals("Invalid Date header", date, headers.getDate());
assertEquals("Invalid Date header", "Thu, 18 Dec 2008 10:20:00 GMT", headers.getFirst("date"));
// RFC 850
headers.set("Date", "Thursday, 18-Dec-08 11:20:00 CET");
assertEquals("Invalid Date header", date, headers.getDate());
}
@Test(expected = IllegalArgumentException.class)
public void dateInvalid() {
headers.set("Date", "Foo Bar Baz");
headers.getDate();
}
@Test
public void dateOtherLocale() {
Locale defaultLocale = Locale.getDefault();
try {
Locale.setDefault(new Locale("nl", "nl"));
Calendar calendar = new GregorianCalendar(2008, 11, 18, 11, 20);
calendar.setTimeZone(TimeZone.getTimeZone("CET"));
long date = calendar.getTimeInMillis();
headers.setDate(date);
assertEquals("Invalid Date header", "Thu, 18 Dec 2008 10:20:00 GMT", headers.getFirst("date"));
assertEquals("Invalid Date header", date, headers.getDate());
}
finally {
Locale.setDefault(defaultLocale);
}
}
@Test
public void lastModified() {
Calendar calendar = new GregorianCalendar(2008, 11, 18, 11, 20);
calendar.setTimeZone(TimeZone.getTimeZone("CET"));
long date = calendar.getTimeInMillis();
headers.setLastModified(date);
assertEquals("Invalid Last-Modified header", date, headers.getLastModified());
assertEquals("Invalid Last-Modified header", "Thu, 18 Dec 2008 10:20:00 GMT",
headers.getFirst("last-modified"));
}
@Test
public void expires() {
Calendar calendar = new GregorianCalendar(2008, 11, 18, 11, 20);
calendar.setTimeZone(TimeZone.getTimeZone("CET"));
long date = calendar.getTimeInMillis();
headers.setExpires(date);
assertEquals("Invalid Expires header", date, headers.getExpires());
assertEquals("Invalid Expires header", "Thu, 18 Dec 2008 10:20:00 GMT", headers.getFirst("expires"));
}
// SPR-10648 (example is from INT-3063)
@Test
public void expiresInvalidDate() {
headers.set("Expires", "-1");
assertEquals(-1, headers.getExpires());
}
@Test
public void ifModifiedSince() {
Calendar calendar = new GregorianCalendar(2008, 11, 18, 11, 20);
calendar.setTimeZone(TimeZone.getTimeZone("CET"));
long date = calendar.getTimeInMillis();
headers.setIfModifiedSince(date);
assertEquals("Invalid If-Modified-Since header", date, headers.getIfModifiedSince());
assertEquals("Invalid If-Modified-Since header", "Thu, 18 Dec 2008 10:20:00 GMT",
headers.getFirst("if-modified-since"));
}
@Test
public void pragma() {
String pragma = "no-cache";
headers.setPragma(pragma);
assertEquals("Invalid Pragma header", pragma, headers.getPragma());
assertEquals("Invalid Pragma header", "no-cache", headers.getFirst("pragma"));
}
@Test
public void cacheControl() {
String cacheControl = "no-cache";
headers.setCacheControl(cacheControl);
assertEquals("Invalid Cache-Control header", cacheControl, headers.getCacheControl());
assertEquals("Invalid Cache-Control header", "no-cache", headers.getFirst("cache-control"));
}
@Test
public void contentDisposition() {
headers.setContentDispositionFormData("name", null);
assertEquals("Invalid Content-Disposition header", "form-data; name=\"name\"",
headers.getFirst("Content-Disposition"));
headers.setContentDispositionFormData("name", "filename");
assertEquals("Invalid Content-Disposition header", "form-data; name=\"name\"; filename=\"filename\"",
headers.getFirst("Content-Disposition"));
}
@Test // SPR-11917
public void getAllowEmptySet() {
headers.setAllow(Collections.<HttpMethod> emptySet());
assertThat(headers.getAllow(), Matchers.emptyCollectionOf(HttpMethod.class));
}
}
| apache-2.0 |
zyj1609wz/AndroidFilePreferences | filePreferences/src/main/java/com/zyj/filepreferences/lib/util/LogUtil.java | 2811 | package com.zyj.filepreferences.lib.util;
import android.util.Log;
/**
* Created by ${zyj} on 2016/9/2.
*/
public class LogUtil {
/**
     * true: enable all logging
     * false: disable all logging
     */
    private static boolean OPEN_LOG = false;
/**
     * Log TAG name
*/
private static String tag = "filePerferences";
private String mClassName;
private static LogUtil log;
private static final String USER_NAME = "@tool@";
private LogUtil(String name) {
mClassName = name;
}
/**
     * Builds a description of the calling location (class, thread, file, line and method).
     *
     * @return the formatted caller name, or null if it cannot be determined
*/
private String getFunctionName() {
StackTraceElement[] sts = Thread.currentThread().getStackTrace();
if (sts == null) {
return null;
}
for (StackTraceElement st : sts) {
if (st.isNativeMethod()) {
continue;
}
if (st.getClassName().equals(Thread.class.getName())) {
continue;
}
if (st.getClassName().equals(this.getClass().getName())) {
continue;
}
return mClassName + "[ " + Thread.currentThread().getName() + ": "
+ st.getFileName() + ":" + st.getLineNumber() + " "
+ st.getMethodName() + " ]";
}
return null;
}
public static void i(Object str) {
print(Log.INFO, str);
}
public static void d(Object str) {
print(Log.DEBUG, str);
}
public static void v(Object str) {
print(Log.VERBOSE, str);
}
public static void w(Object str) {
print(Log.WARN, str);
}
public static void e(Object str) {
print(Log.ERROR, str);
}
/**
     * Prints the given message at the requested log level; used to tell output from
     * different callers apart.
     *
     * @param index the android.util.Log priority (VERBOSE, DEBUG, INFO, WARN or ERROR)
     * @param str   the message to print
*/
private static void print(int index, Object str) {
if (!OPEN_LOG) {
return;
}
if (log == null) {
log = new LogUtil(USER_NAME);
}
String name = log.getFunctionName();
if (name != null) {
str = name + " - " + str;
}
switch (index) {
case Log.VERBOSE:
Log.v(tag, str.toString());
break;
case Log.DEBUG:
Log.d(tag, str.toString());
break;
case Log.INFO:
Log.i(tag, str.toString());
break;
case Log.WARN:
Log.w(tag, str.toString());
break;
case Log.ERROR:
Log.e(tag, str.toString());
break;
default:
break;
}
}
public static void setLog(Boolean log){
OPEN_LOG = log ;
}
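    // Illustrative usage (not part of the original file; message and values are examples):
    //   LogUtil.setLog(true);                       // logging is disabled by default
    //   LogUtil.d("loaded " + count + " entries");
    // which is printed under tag "filePerferences" roughly as
    //   "@tool@[ main: SomeFile.java:42 someMethod ] - loaded 3 entries"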
}
| apache-2.0 |
nuwand/carbon-apimgt | components/apimgt/org.wso2.carbon.apimgt.rest.api.admin.v1/src/main/java/org/wso2/carbon/apimgt/rest/api/admin/v1/impl/ApplicationsApiServiceImpl.java | 6062 | /*
* Copyright (c) 2020, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.wso2.carbon.apimgt.rest.api.admin.v1.impl;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.cxf.jaxrs.ext.MessageContext;
import org.wso2.carbon.apimgt.api.APIAdmin;
import org.wso2.carbon.apimgt.api.APIConsumer;
import org.wso2.carbon.apimgt.api.APIManagementException;
import org.wso2.carbon.apimgt.api.model.Application;
import org.wso2.carbon.apimgt.impl.APIAdminImpl;
import org.wso2.carbon.apimgt.impl.APIConstants;
import org.wso2.carbon.apimgt.impl.APIManagerFactory;
import org.wso2.carbon.apimgt.impl.utils.APIUtil;
import org.wso2.carbon.apimgt.rest.api.admin.v1.ApplicationsApiService;
import org.wso2.carbon.apimgt.rest.api.admin.v1.dto.ApplicationListDTO;
import org.wso2.carbon.apimgt.rest.api.admin.v1.utils.mappings.ApplicationMappingUtil;
import org.wso2.carbon.apimgt.rest.api.util.RestApiConstants;
import org.wso2.carbon.apimgt.rest.api.util.utils.RestApiUtil;
import org.wso2.carbon.utils.multitenancy.MultitenantUtils;
import javax.ws.rs.core.Response;
public class ApplicationsApiServiceImpl implements ApplicationsApiService {
private static final Log log = LogFactory.getLog(ApplicationsApiServiceImpl.class);
@Override
public Response applicationsApplicationIdChangeOwnerPost(String owner, String applicationId,
MessageContext messageContext) {
APIConsumer apiConsumer = null;
try {
apiConsumer = APIManagerFactory.getInstance().getAPIConsumer(owner);
Application application = apiConsumer.getApplicationByUUID(applicationId);
boolean applicationUpdated = apiConsumer.updateApplicationOwner(owner, application);
if (applicationUpdated) {
return Response.ok().build();
} else {
RestApiUtil.handleInternalServerError("Error while updating application owner " + applicationId, log);
}
} catch (APIManagementException e) {
RestApiUtil.handleInternalServerError("Error while updating application owner " + applicationId, e, log);
}
return null;
}
@Override
public Response applicationsGet(String user, Integer limit, Integer offset, String accept, String ifNoneMatch,
String appTenantDomain, MessageContext messageContext) {
        // To store the initial value of the user (especially when it is null or empty)
String givenUser = user;
// if no username provided user associated with access token will be used
if (user == null || StringUtils.isEmpty(user)) {
user = RestApiUtil.getLoggedInUsername();
}
limit = limit != null ? limit : RestApiConstants.PAGINATION_LIMIT_DEFAULT;
offset = offset != null ? offset : RestApiConstants.PAGINATION_OFFSET_DEFAULT;
ApplicationListDTO applicationListDTO;
try {
Application[] allMatchedApps;
boolean migrationMode = Boolean.getBoolean(RestApiConstants.MIGRATION_MODE);
if (!migrationMode) { // normal non-migration flow
if (!MultitenantUtils.getTenantDomain(user).equals(RestApiUtil.getLoggedInUserTenantDomain())) {
String errorMsg = "User " + user + " is not available for the current tenant domain";
log.error(errorMsg);
return Response.status(Response.Status.FORBIDDEN).entity(errorMsg).build();
}
APIConsumer apiConsumer = APIManagerFactory.getInstance().getAPIConsumer(user);
// If no user is passed, get the applications for the tenant (not only for the user)
if (givenUser == null || StringUtils.isEmpty(givenUser)) {
APIAdmin apiAdmin = new APIAdminImpl();
int tenantId = APIUtil.getTenantId(user);
allMatchedApps = apiAdmin.getApplicationsByTenantIdWithPagination(tenantId, 0, limit, "", "",
APIConstants.APPLICATION_NAME, RestApiConstants.DEFAULT_SORT_ORDER).toArray(new Application[0]);
} else {
allMatchedApps = apiConsumer.getApplicationsByOwner(user);
}
} else { // flow at migration process
if (StringUtils.isEmpty(appTenantDomain)) {
appTenantDomain = MultitenantUtils.getTenantDomain(user);
}
RestApiUtil.handleMigrationSpecificPermissionViolations(appTenantDomain,
RestApiUtil.getLoggedInUsername());
APIAdmin apiAdmin = new APIAdminImpl();
allMatchedApps = apiAdmin.getAllApplicationsOfTenantForMigration(appTenantDomain);
}
            // allMatchedApps are already sorted by application name
applicationListDTO = ApplicationMappingUtil.fromApplicationsToDTO(allMatchedApps, limit, offset);
ApplicationMappingUtil.setPaginationParams(applicationListDTO, limit, offset, allMatchedApps.length);
return Response.ok().entity(applicationListDTO).build();
} catch (APIManagementException e) {
RestApiUtil.handleInternalServerError("Error while retrieving applications of the user " + user, e, log);
}
return null;
}
}
| apache-2.0 |
mdavid/SuperSocket | mainline/Common/Logging/Log4NetLog.cs | 14921 | using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace SuperSocket.Common.Logging
{
/// <summary>
/// Log4NetLog
/// </summary>
public class Log4NetLog : ILog
{
private log4net.ILog m_Log;
/// <summary>
/// Initializes a new instance of the <see cref="Log4NetLog"/> class.
/// </summary>
/// <param name="log">The log.</param>
public Log4NetLog(log4net.ILog log)
{
if (log == null)
throw new ArgumentNullException("log");
m_Log = log;
}
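        // Illustrative usage (not from the original source; type and variable names are examples):
        //   var inner = log4net.LogManager.GetLogger(typeof(AppSession));
        //   ILog log = new Log4NetLog(inner);
        //   log.InfoFormat("Session {0} started", sessionId);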
/// <summary>
/// Gets a value indicating whether this instance is debug enabled.
/// </summary>
/// <value>
/// <c>true</c> if this instance is debug enabled; otherwise, <c>false</c>.
/// </value>
public bool IsDebugEnabled
{
get { return m_Log.IsDebugEnabled; }
}
/// <summary>
/// Gets a value indicating whether this instance is error enabled.
/// </summary>
/// <value>
/// <c>true</c> if this instance is error enabled; otherwise, <c>false</c>.
/// </value>
public bool IsErrorEnabled
{
get { return m_Log.IsErrorEnabled; }
}
/// <summary>
/// Gets a value indicating whether this instance is fatal enabled.
/// </summary>
/// <value>
/// <c>true</c> if this instance is fatal enabled; otherwise, <c>false</c>.
/// </value>
public bool IsFatalEnabled
{
get { return m_Log.IsFatalEnabled; }
}
/// <summary>
/// Gets a value indicating whether this instance is info enabled.
/// </summary>
/// <value>
/// <c>true</c> if this instance is info enabled; otherwise, <c>false</c>.
/// </value>
public bool IsInfoEnabled
{
get { return m_Log.IsInfoEnabled; }
}
/// <summary>
/// Gets a value indicating whether this instance is warn enabled.
/// </summary>
/// <value>
/// <c>true</c> if this instance is warn enabled; otherwise, <c>false</c>.
/// </value>
public bool IsWarnEnabled
{
get { return m_Log.IsWarnEnabled; }
}
/// <summary>
/// Logs the debug message.
/// </summary>
/// <param name="message">The message.</param>
public void Debug(object message)
{
m_Log.Debug(message);
}
/// <summary>
/// Logs the debug message.
/// </summary>
/// <param name="message">The message.</param>
/// <param name="exception">The exception.</param>
public void Debug(object message, Exception exception)
{
m_Log.Debug(message, exception);
}
/// <summary>
/// Logs the debug message.
/// </summary>
/// <param name="format">The format.</param>
/// <param name="arg0">The arg0.</param>
public void DebugFormat(string format, object arg0)
{
m_Log.DebugFormat(format, arg0);
}
/// <summary>
/// Logs the debug message.
/// </summary>
/// <param name="format">The format.</param>
/// <param name="args">The args.</param>
public void DebugFormat(string format, params object[] args)
{
m_Log.DebugFormat(format, args);
}
/// <summary>
/// Logs the debug message.
/// </summary>
/// <param name="provider">The provider.</param>
/// <param name="format">The format.</param>
/// <param name="args">The args.</param>
public void DebugFormat(IFormatProvider provider, string format, params object[] args)
{
m_Log.DebugFormat(provider, format, args);
}
/// <summary>
/// Logs the debug message.
/// </summary>
/// <param name="format">The format.</param>
/// <param name="arg0">The arg0.</param>
/// <param name="arg1">The arg1.</param>
public void DebugFormat(string format, object arg0, object arg1)
{
m_Log.DebugFormat(format, arg0, arg1);
}
/// <summary>
/// Logs the debug message.
/// </summary>
/// <param name="format">The format.</param>
/// <param name="arg0">The arg0.</param>
/// <param name="arg1">The arg1.</param>
/// <param name="arg2">The arg2.</param>
public void DebugFormat(string format, object arg0, object arg1, object arg2)
{
m_Log.DebugFormat(format, arg0, arg1, arg2);
}
/// <summary>
/// Logs the error message.
/// </summary>
/// <param name="message">The message.</param>
public void Error(object message)
{
m_Log.Error(message);
}
/// <summary>
/// Logs the error message.
/// </summary>
/// <param name="message">The message.</param>
/// <param name="exception">The exception.</param>
public void Error(object message, Exception exception)
{
m_Log.Error(message, exception);
}
/// <summary>
/// Logs the error message.
/// </summary>
/// <param name="format">The format.</param>
/// <param name="arg0">The arg0.</param>
public void ErrorFormat(string format, object arg0)
{
m_Log.ErrorFormat(format, arg0);
}
/// <summary>
/// Logs the error message.
/// </summary>
/// <param name="format">The format.</param>
/// <param name="args">The args.</param>
public void ErrorFormat(string format, params object[] args)
{
m_Log.ErrorFormat(format, args);
}
/// <summary>
/// Logs the error message.
/// </summary>
/// <param name="provider">The provider.</param>
/// <param name="format">The format.</param>
/// <param name="args">The args.</param>
public void ErrorFormat(IFormatProvider provider, string format, params object[] args)
{
m_Log.ErrorFormat(provider, format, args);
}
/// <summary>
/// Logs the error message.
/// </summary>
/// <param name="format">The format.</param>
/// <param name="arg0">The arg0.</param>
/// <param name="arg1">The arg1.</param>
public void ErrorFormat(string format, object arg0, object arg1)
{
m_Log.ErrorFormat(format, arg0, arg1);
}
/// <summary>
/// Logs the error message.
/// </summary>
/// <param name="format">The format.</param>
/// <param name="arg0">The arg0.</param>
/// <param name="arg1">The arg1.</param>
/// <param name="arg2">The arg2.</param>
public void ErrorFormat(string format, object arg0, object arg1, object arg2)
{
m_Log.ErrorFormat(format, arg0, arg1, arg2);
}
/// <summary>
/// Logs the fatal error message.
/// </summary>
/// <param name="message">The message.</param>
public void Fatal(object message)
{
m_Log.Fatal(message);
}
/// <summary>
/// Logs the fatal error message.
/// </summary>
/// <param name="message">The message.</param>
/// <param name="exception">The exception.</param>
public void Fatal(object message, Exception exception)
{
m_Log.Fatal(message, exception);
}
/// <summary>
/// Logs the fatal error message.
/// </summary>
/// <param name="format">The format.</param>
/// <param name="arg0">The arg0.</param>
public void FatalFormat(string format, object arg0)
{
m_Log.FatalFormat(format, arg0);
}
/// <summary>
/// Logs the fatal error message.
/// </summary>
/// <param name="format">The format.</param>
/// <param name="args">The args.</param>
public void FatalFormat(string format, params object[] args)
{
m_Log.FatalFormat(format, args);
}
/// <summary>
/// Logs the fatal error message.
/// </summary>
/// <param name="provider">The provider.</param>
/// <param name="format">The format.</param>
/// <param name="args">The args.</param>
public void FatalFormat(IFormatProvider provider, string format, params object[] args)
{
m_Log.FatalFormat(provider, format, args);
}
/// <summary>
/// Logs the fatal error message.
/// </summary>
/// <param name="format">The format.</param>
/// <param name="arg0">The arg0.</param>
/// <param name="arg1">The arg1.</param>
public void FatalFormat(string format, object arg0, object arg1)
{
m_Log.FatalFormat(format, arg0, arg1);
}
/// <summary>
/// Logs the fatal error message.
/// </summary>
/// <param name="format">The format.</param>
/// <param name="arg0">The arg0.</param>
/// <param name="arg1">The arg1.</param>
/// <param name="arg2">The arg2.</param>
public void FatalFormat(string format, object arg0, object arg1, object arg2)
{
m_Log.FatalFormat(format, arg0, arg1, arg2);
}
/// <summary>
/// Logs the info message.
/// </summary>
/// <param name="message">The message.</param>
public void Info(object message)
{
m_Log.Info(message);
}
/// <summary>
/// Logs the info message.
/// </summary>
/// <param name="message">The message.</param>
/// <param name="exception">The exception.</param>
public void Info(object message, Exception exception)
{
m_Log.Info(message, exception);
}
/// <summary>
/// Logs the info message.
/// </summary>
/// <param name="format">The format.</param>
/// <param name="arg0">The arg0.</param>
public void InfoFormat(string format, object arg0)
{
m_Log.InfoFormat(format, arg0);
}
/// <summary>
/// Logs the info message.
/// </summary>
/// <param name="format">The format.</param>
/// <param name="args">The args.</param>
public void InfoFormat(string format, params object[] args)
{
m_Log.InfoFormat(format, args);
}
/// <summary>
/// Logs the info message.
/// </summary>
/// <param name="provider">The provider.</param>
/// <param name="format">The format.</param>
/// <param name="args">The args.</param>
public void InfoFormat(IFormatProvider provider, string format, params object[] args)
{
m_Log.InfoFormat(provider, format, args);
}
/// <summary>
/// Logs the info message.
/// </summary>
/// <param name="format">The format.</param>
/// <param name="arg0">The arg0.</param>
/// <param name="arg1">The arg1.</param>
public void InfoFormat(string format, object arg0, object arg1)
{
m_Log.InfoFormat(format, arg0, arg1);
}
/// <summary>
/// Logs the info message.
/// </summary>
/// <param name="format">The format.</param>
/// <param name="arg0">The arg0.</param>
/// <param name="arg1">The arg1.</param>
/// <param name="arg2">The arg2.</param>
public void InfoFormat(string format, object arg0, object arg1, object arg2)
{
m_Log.InfoFormat(format, arg0, arg1, arg2);
}
/// <summary>
/// Logs the warning message.
/// </summary>
/// <param name="message">The message.</param>
public void Warn(object message)
{
m_Log.Warn(message);
}
/// <summary>
/// Logs the warning message.
/// </summary>
/// <param name="message">The message.</param>
/// <param name="exception">The exception.</param>
public void Warn(object message, Exception exception)
{
m_Log.Warn(message, exception);
}
/// <summary>
/// Logs the warning message.
/// </summary>
/// <param name="format">The format.</param>
/// <param name="arg0">The arg0.</param>
public void WarnFormat(string format, object arg0)
{
m_Log.WarnFormat(format, arg0);
}
/// <summary>
/// Logs the warning message.
/// </summary>
/// <param name="format">The format.</param>
/// <param name="args">The args.</param>
public void WarnFormat(string format, params object[] args)
{
m_Log.WarnFormat(format, args);
}
/// <summary>
/// Logs the warning message.
/// </summary>
/// <param name="provider">The provider.</param>
/// <param name="format">The format.</param>
/// <param name="args">The args.</param>
public void WarnFormat(IFormatProvider provider, string format, params object[] args)
{
m_Log.WarnFormat(provider, format, args);
}
/// <summary>
/// Logs the warning message.
/// </summary>
/// <param name="format">The format.</param>
/// <param name="arg0">The arg0.</param>
/// <param name="arg1">The arg1.</param>
public void WarnFormat(string format, object arg0, object arg1)
{
m_Log.WarnFormat(format, arg0, arg1);
}
/// <summary>
/// Logs the warning message.
/// </summary>
/// <param name="format">The format.</param>
/// <param name="arg0">The arg0.</param>
/// <param name="arg1">The arg1.</param>
/// <param name="arg2">The arg2.</param>
public void WarnFormat(string format, object arg0, object arg1, object arg2)
{
m_Log.WarnFormat(format, arg0, arg1, arg2);
}
}
}
| apache-2.0 |
tscjavawg/S00Common | src/main/resources/jp/co/tokaisoftware/form/S100A020Form.java | 714 | /*
* Copyright 2004-2008 the Seasar Foundation and the Others.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package jp.co.tokaisoftware.form;
public class S100A020Form {
}
| apache-2.0 |
Meisolsson/GitHubSdk | library/src/main/java/com/meisolsson/githubsdk/model/request/pull_request/EditPullRequest.java | 1710 | /*
* Copyright 2015 Henrik Olsson
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.meisolsson.githubsdk.model.request.pull_request;
import android.os.Parcelable;
import android.support.annotation.Nullable;
import com.google.auto.value.AutoValue;
import com.squareup.moshi.JsonAdapter;
import com.squareup.moshi.Moshi;
@AutoValue
public abstract class EditPullRequest implements Parcelable {
@Nullable
public abstract String title();
@Nullable
public abstract String body();
@Nullable
public abstract String state();
@Nullable
public abstract String base();
public static JsonAdapter<EditPullRequest> jsonAdapter(Moshi moshi) {
return new AutoValue_EditPullRequest.MoshiJsonAdapter(moshi);
}
public static Builder builder() {
return new AutoValue_EditPullRequest.Builder();
}
@AutoValue.Builder
public abstract static class Builder {
public abstract Builder title(String title);
public abstract Builder body(String body);
public abstract Builder state(String state);
public abstract Builder base(String base);
public abstract EditPullRequest build();
}
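    // Illustrative builder usage (not part of the original file; the field values are examples):
    //   EditPullRequest edit = EditPullRequest.builder()
    //           .title("Updated title")
    //           .state("closed")
    //           .build();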
}
| apache-2.0 |
cadeeper/my-spring | src/main/java/indi/nut/myspring/web/servlet/handler/AbstractHandlerMapping.java | 1657 | package indi.nut.myspring.web.servlet.handler;
import indi.nut.myspring.ioc.context.support.ApplicationObjectSupport;
import indi.nut.myspring.util.PathMatcher;
import indi.nut.myspring.util.TruePathMatcher;
import indi.nut.myspring.web.servlet.HandlerExecutionChain;
import indi.nut.myspring.web.servlet.HandlerMapping;
import javax.servlet.http.HttpServletRequest;
/**
* Created by nut on 2016/12/29.
*/
public abstract class AbstractHandlerMapping extends ApplicationObjectSupport implements HandlerMapping {
private Object defaultHandler;
private PathMatcher pathMatcher = new TruePathMatcher();
@Override
public HandlerExecutionChain getHandler(HttpServletRequest request) throws Exception {
Object handler = getHandlerInternal(request);
if(handler == null){
handler = getDefaultHandler();
}
if(handler == null){
return null;
}
HandlerExecutionChain executionChain = getHandlerExecutionChain(handler, request);
return executionChain;
}
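    // getHandlerInternal(request) is implemented by subclasses; a null result falls back to
    // defaultHandler, and the resolved handler is wrapped in a HandlerExecutionChain
    // (interceptors are not attached yet, see the TODO below).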
protected HandlerExecutionChain getHandlerExecutionChain(Object handler, HttpServletRequest request) {
HandlerExecutionChain executionChain = new HandlerExecutionChain(handler);
//TODO set interceptors
return executionChain;
}
public Object getDefaultHandler() {
return this.defaultHandler;
}
public void setPathMatcher(PathMatcher pathMatcher) {
this.pathMatcher = pathMatcher;
}
public PathMatcher getPathMatcher() {
return pathMatcher;
}
protected abstract Object getHandlerInternal(HttpServletRequest request);
}
| apache-2.0 |
sibvisions/jvx.spring.security | jvx-spring-security-client/src/main/java/com/sibvisions/auth/spring/SpringAuthenticator.java | 1807 | /*
* Copyright 2015 SIB Visions GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
*
* History
*
* 04.02.2015 - [TK] - creation
*/
package com.sibvisions.auth.spring;
import java.util.Hashtable;
import javax.rad.application.ILauncher;
import javax.rad.remote.AbstractConnection;
import com.sibvisions.apps.auth.IAuthenticator;
/**
* The <code>SpringAuthenticator</code> will be used for spring security pre authentication.
*
* @author Thomas Krautinger
*/
public class SpringAuthenticator implements IAuthenticator
{
//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// Interface implementation
//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
/**
* {@inheritDoc}
*/
public Hashtable<String, Object> getCredentials(ILauncher pLauncher)
{
Hashtable<String, Object> htCredentials = new Hashtable<String, Object>();
String sValue = pLauncher.getParameter(ILauncher.PARAM_APPLICATIONNAME);
if (sValue != null)
{
htCredentials.put(APPLICATION, sValue);
}
return htCredentials;
}
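	// Only the application name is reported here (under IAuthenticator.APPLICATION); the user
	// identity is expected to come from Spring Security's pre-authentication, which is why
	// setAuthenticated below is a no-op.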
/**
* {@inheritDoc}
*/
public void setAuthenticated(ILauncher pLauncher, AbstractConnection pConnection)
{
// Do nothing
}
} // SpringAuthenticator
| apache-2.0 |
j-coll/opencga | opencga-storage/opencga-storage-mongodb/src/main/java/org/opencb/opencga/storage/mongodb/variant/adaptors/VariantMongoDBAdaptor.java | 68984 | /*
* Copyright 2015-2017 OpenCB
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.opencb.opencga.storage.mongodb.variant.adaptors;
import com.mongodb.BasicDBList;
import com.mongodb.MongoClient;
import com.mongodb.client.model.Filters;
import com.mongodb.client.model.Projections;
import htsjdk.variant.vcf.VCFConstants;
import org.apache.commons.lang3.time.StopWatch;
import org.bson.Document;
import org.bson.conversions.Bson;
import org.bson.json.JsonMode;
import org.bson.json.JsonWriterSettings;
import org.opencb.biodata.models.core.Region;
import org.opencb.biodata.models.variant.StudyEntry;
import org.opencb.biodata.models.variant.Variant;
import org.opencb.biodata.models.variant.avro.AdditionalAttribute;
import org.opencb.biodata.models.variant.avro.VariantAnnotation;
import org.opencb.biodata.models.variant.stats.VariantStats;
import org.opencb.commons.datastore.core.*;
import org.opencb.commons.datastore.mongodb.*;
import org.opencb.opencga.core.response.VariantQueryResult;
import org.opencb.opencga.storage.core.config.StorageConfiguration;
import org.opencb.opencga.storage.core.config.StorageEngineConfiguration;
import org.opencb.opencga.storage.core.metadata.VariantStorageMetadataManager;
import org.opencb.opencga.storage.core.metadata.models.ProjectMetadata;
import org.opencb.opencga.storage.core.metadata.models.StudyMetadata;
import org.opencb.opencga.storage.core.variant.VariantStorageOptions;
import org.opencb.opencga.storage.core.variant.adaptors.*;
import org.opencb.opencga.storage.core.variant.adaptors.iterators.VariantDBIterator;
import org.opencb.opencga.storage.core.variant.annotation.VariantAnnotationManager;
import org.opencb.opencga.storage.core.variant.query.ParsedVariantQuery;
import org.opencb.opencga.storage.core.variant.query.projection.VariantQueryProjection;
import org.opencb.opencga.storage.core.variant.query.projection.VariantQueryProjectionParser;
import org.opencb.opencga.storage.core.variant.stats.VariantStatsWrapper;
import org.opencb.opencga.storage.mongodb.auth.MongoCredentials;
import org.opencb.opencga.storage.mongodb.variant.MongoDBVariantStorageEngine;
import org.opencb.opencga.storage.mongodb.variant.converters.*;
import org.opencb.opencga.storage.mongodb.variant.converters.stage.StageDocumentToVariantConverter;
import org.opencb.opencga.storage.mongodb.variant.converters.trash.DocumentToTrashVariantConverter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.UnknownHostException;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import static com.mongodb.client.model.Filters.*;
import static com.mongodb.client.model.Updates.*;
import static org.opencb.commons.datastore.mongodb.MongoDBCollection.*;
import static org.opencb.opencga.storage.core.variant.VariantStorageOptions.LOADED_GENOTYPES;
import static org.opencb.opencga.storage.core.variant.adaptors.VariantField.AdditionalAttributes.GROUP_NAME;
import static org.opencb.opencga.storage.core.variant.adaptors.VariantField.AdditionalAttributes.VARIANT_ID;
import static org.opencb.opencga.storage.core.variant.adaptors.VariantQueryParam.*;
import static org.opencb.opencga.storage.core.variant.query.VariantQueryUtils.*;
import static org.opencb.opencga.storage.mongodb.variant.MongoDBVariantStorageOptions.*;
import static org.opencb.opencga.storage.mongodb.variant.converters.DocumentToStudyVariantEntryConverter.*;
import static org.opencb.opencga.storage.mongodb.variant.search.MongoDBVariantSearchIndexUtils.getSetIndexNotSynchronized;
/**
* @author Ignacio Medina <[email protected]>
* @author Jacobo Coll <[email protected]>
* @author Cristina Yenyxe Gonzalez Garcia <[email protected]>
*/
public class VariantMongoDBAdaptor implements VariantDBAdaptor {
private boolean closeConnection;
private final MongoDataStoreManager mongoManager;
private final MongoDataStore db;
private final String collectionName;
private final MongoDBCollection variantsCollection;
private final StorageConfiguration storageConfiguration;
private final MongoCredentials credentials;
private final VariantMongoDBQueryParser queryParser;
private VariantStorageMetadataManager metadataManager;
private final ObjectMap configuration;
// private CacheManager cacheManager;
private static Logger logger = LoggerFactory.getLogger(VariantMongoDBAdaptor.class);
public static final int CHUNK_SIZE_SMALL = 1000;
public static final int CHUNK_SIZE_BIG = 10000;
// Number of opened dbAdaptors
public static final AtomicInteger NUMBER_INSTANCES = new AtomicInteger(0);
public VariantMongoDBAdaptor(MongoCredentials credentials, String variantsCollectionName,
VariantStorageMetadataManager variantStorageMetadataManager, StorageConfiguration storageConfiguration)
throws UnknownHostException {
this(new MongoDataStoreManager(credentials.getDataStoreServerAddresses()), credentials, variantsCollectionName,
variantStorageMetadataManager, storageConfiguration);
this.closeConnection = true;
}
public VariantMongoDBAdaptor(MongoDataStoreManager mongoManager, MongoCredentials credentials, String variantsCollectionName,
VariantStorageMetadataManager variantStorageMetadataManager,
StorageConfiguration storageConfiguration) throws UnknownHostException {
// MongoDB configuration
this.closeConnection = false;
this.credentials = credentials;
this.mongoManager = mongoManager;
db = mongoManager.get(credentials.getMongoDbName(), credentials.getMongoDBConfiguration());
collectionName = variantsCollectionName;
variantsCollection = db.getCollection(collectionName);
this.metadataManager = variantStorageMetadataManager;
this.storageConfiguration = storageConfiguration;
StorageEngineConfiguration storageEngineConfiguration =
storageConfiguration.getVariantEngine(MongoDBVariantStorageEngine.STORAGE_ENGINE_ID);
this.configuration = storageEngineConfiguration == null || storageEngineConfiguration.getOptions() == null
? new ObjectMap()
: storageEngineConfiguration.getOptions();
queryParser = new VariantMongoDBQueryParser(variantStorageMetadataManager);
NUMBER_INSTANCES.incrementAndGet();
}
public MongoDBCollection getVariantsCollection() {
return variantsCollection;
}
public MongoDBCollection getStageCollection(int studyId) {
String stageCollectionName = configuration.getString(COLLECTION_STAGE.key(), COLLECTION_STAGE.defaultValue());
// Ensure retro-compatibility.
// If a "stage" collection exists, continue using one single stage collection for all the studies.
// Otherwise, build the stage collection name as: 'stage_study_<study-id>'
if (db.getCollectionNames().contains(stageCollectionName)) {
return db.getCollection(stageCollectionName);
} else {
return db.getCollection(stageCollectionName + "_study_" + studyId);
}
}
public MongoDBCollection getStudiesCollection() {
return db.getCollection(configuration.getString(COLLECTION_STUDIES.key(), COLLECTION_STUDIES.defaultValue()));
}
public MongoDBCollection getAnnotationCollection(String name) {
return db.getCollection(getAnnotationCollectionName(name));
}
public String getAnnotationCollectionName(String name) {
ProjectMetadata.VariantAnnotationMetadata saved = getMetadataManager().getProjectMetadata()
.getAnnotation().getSaved(name);
return configuration.getString(COLLECTION_ANNOTATION.key(), COLLECTION_ANNOTATION.defaultValue()) + "_" + saved.getId();
}
public void dropAnnotationCollection(String name) {
String annotationCollectionName = getAnnotationCollectionName(name);
db.dropCollection(annotationCollectionName);
}
private MongoDBCollection getTrashCollection() {
return db.getCollection(configuration.getString(COLLECTION_TRASH.key(), COLLECTION_TRASH.defaultValue()));
}
protected MongoDataStore getDB() {
return db;
}
protected MongoCredentials getCredentials() {
return credentials;
}
/**
* Remove all the variants from the database resulting of executing the query.
*
* @param query Query to be executed in the database
* @param options Query modifiers, accepted values are: include, exclude, limit, skip, sort and count
* @return A DataResult with the number of deleted variants
*/
public DataResult remove(Query query, QueryOptions options) {
Bson mongoQuery = queryParser.parseQuery(query);
logger.debug("Delete to be executed: '{}'", mongoQuery.toString());
return variantsCollection.remove(mongoQuery, options);
}
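    // Illustrative call (not part of the original file; the region value is an example):
    //   dbAdaptor.remove(new Query(VariantQueryParam.REGION.key(), "1:10000-20000"), new QueryOptions());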
/**
* Remove the given file from the database with all the samples it has.
*
* @param study The study where the file belong
* @param files The file name to be deleted, it must belong to the study
* @param timestamp Timestamp of the operation
* @param options Query modifiers, accepted values are: include, exclude, limit, skip, sort and count
* @return A DataResult with the file deleted
*/
public DataResult removeFiles(String study, List<String> files, long timestamp, QueryOptions options) {
StudyMetadata studyMetadata = metadataManager.getStudyMetadata(study);
Integer studyId = studyMetadata.getId();
List<Integer> fileIds = metadataManager.getFileIds(studyId, files);
LinkedHashSet<Integer> otherIndexedFiles = metadataManager.getIndexedFiles(studyMetadata.getId());
otherIndexedFiles.removeAll(fileIds);
// First, remove the study entry that only contains the files to remove
if (otherIndexedFiles.isEmpty()) {
// If we are deleting all the files in the study, delete the whole study
return removeStudy(study, timestamp, new QueryOptions("purge", true));
}
// Remove all the study entries that does not contain any of the other indexed files.
// This include studies only with the files to remove and with negated fileIds (overlapped files)
Bson studiesToRemoveQuery = elemMatch(DocumentToVariantConverter.STUDIES_FIELD,
and(
eq(STUDYID_FIELD, studyId),
// in(FILES_FIELD + '.' + FILEID_FIELD, fileIds),
nin(FILES_FIELD + '.' + FILEID_FIELD, otherIndexedFiles)
)
);
removeFilesFromStageCollection(studiesToRemoveQuery, studyId, fileIds);
return removeFilesFromVariantsCollection(studiesToRemoveQuery, studyMetadata, fileIds, timestamp);
}
private void removeFilesFromStageCollection(Bson studiesToRemoveQuery, Integer studyId, List<Integer> fileIds) {
int batchSize = 500;
logger.info("Remove files from stage collection - step 1/3"); // Remove study if only contains removed files
MongoDBCollection stageCollection = getStageCollection(studyId);
int updatedStageDocuments = 0;
try (MongoDBIterator<Document> cursor = getVariantsCollection()
.nativeQuery()
.find(studiesToRemoveQuery, Projections.include("_id"), new QueryOptions(MongoDBCollection.BATCH_SIZE, batchSize))) {
List<String> ids = new ArrayList<>(batchSize);
int i = 0;
while (cursor.hasNext()) {
ids.add(cursor.next().getString("_id"));
Bson updateStage = combine(
pull(StageDocumentToVariantConverter.STUDY_FILE_FIELD, studyId.toString()),
unset(studyId.toString()));
if (ids.size() == batchSize || !cursor.hasNext()) {
updatedStageDocuments += stageCollection.update(in("_id", ids), updateStage, new QueryOptions(MULTI, true))
.getNumUpdated();
i++;
logger.debug(i + " : clear stage ids = " + ids);
ids.clear();
}
}
}
List<Bson> studyUpdate = new ArrayList<>(fileIds.size());
logger.info("Remove files from stage collection - step 2/3"); // Other studies
for (Integer fileId : fileIds) {
studyUpdate.add(unset(String.valueOf(studyId) + '.' + fileId));
}
updatedStageDocuments += stageCollection.update(eq(StageDocumentToVariantConverter.STUDY_FILE_FIELD, studyId.toString()),
combine(studyUpdate), new QueryOptions(MULTI, true)).getNumUpdated();
logger.info("Remove files from stage collection - step 3/3"); // purge
long removedStageDocuments = removeEmptyVariantsFromStage(studyId);
logger.info("Updated " + updatedStageDocuments + " documents from stage");
logger.info("Removed " + removedStageDocuments + " documents from stage");
}
private DataResult removeFilesFromVariantsCollection(Bson studiesToRemoveQuery, StudyMetadata sm,
List<Integer> fileIds, long timestamp) {
Set<Integer> sampleIds = new HashSet<>();
for (Integer fileId : fileIds) {
sampleIds.addAll(metadataManager.getFileMetadata(sm.getId(), fileId).getSamples());
}
// Update and remove variants from variants collection
int studyId = sm.getId();
logger.info("Remove files from variants collection - step 1/3"); // Remove study if only contains removed files
long updatedVariantsDocuments = removeStudyFromVariants(studyId, studiesToRemoveQuery, timestamp).getNumUpdated();
// Remove also negated fileIds
List<Integer> negatedFileIds = fileIds.stream().map(i -> -i).collect(Collectors.toList());
fileIds.addAll(negatedFileIds);
Bson query;
// If default genotype is not the unknown genotype, we must iterate over all the documents in the study
if (!sm.getAttributes().getString(DEFAULT_GENOTYPE.key()).equals(GenotypeClass.UNKNOWN_GENOTYPE)) {
query = eq(DocumentToVariantConverter.STUDIES_FIELD + '.' + STUDYID_FIELD, studyId);
} else {
query = elemMatch(DocumentToVariantConverter.STUDIES_FIELD,
and(
eq(STUDYID_FIELD, studyId),
in(FILES_FIELD + '.' + FILEID_FIELD, fileIds)
)
);
}
List<Bson> updates = new ArrayList<>();
updates.add(
pull(DocumentToVariantConverter.STUDIES_FIELD + ".$." + FILES_FIELD,
in(FILEID_FIELD, fileIds)));
for (String gt : sm.getAttributes().getAsStringList(LOADED_GENOTYPES.key())) {
updates.add(
pullByFilter(
in(DocumentToVariantConverter.STUDIES_FIELD + ".$." + GENOTYPES_FIELD + '.' + gt, sampleIds)));
}
Bson update = combine(updates);
logger.debug("removeFile: query = " + query.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()));
logger.debug("removeFile: update = " + update.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()));
logger.info("Remove files from variants collection - step 2/3"); // Other studies
DataResult result2 = getVariantsCollection().update(query, update, new QueryOptions(MULTI, true));
logger.debug("removeFile: matched = " + result2.getNumMatches());
logger.debug("removeFile: modified = " + result2.getNumUpdated());
logger.info("Remove files from variants collection - step 3/3"); // purge
long removedVariantsDocuments = removeEmptyVariants();
logger.info("Updated " + (updatedVariantsDocuments + result2.getNumUpdated()) + " documents from variants");
logger.info("Removed " + removedVariantsDocuments + " documents from variants");
return result2;
}
/**
* Remove the given study from the database.
*
* @param studyName The study name to delete
* @param timestamp Timestamp of the operation
* @param options Query modifiers, accepted values are: purge
* @return A DataResult with the study deleted
*/
public DataResult removeStudy(String studyName, long timestamp, QueryOptions options) {
if (options == null) {
options = new QueryOptions();
}
Integer studyId = metadataManager.getStudyId(studyName);
Bson query = queryParser.parseQuery(new Query(STUDY.key(), studyId));
boolean purge = options.getBoolean("purge", true);
logger.info("Remove study from variants collection - step 1/" + (purge ? '2' : '1'));
DataResult result = removeStudyFromVariants(studyId, query, timestamp);
if (purge) {
logger.info("Remove study from variants collection - step 2/2");
removeEmptyVariants();
}
Bson eq = eq(StageDocumentToVariantConverter.STUDY_FILE_FIELD, studyId.toString());
Bson combine = combine(pull(StageDocumentToVariantConverter.STUDY_FILE_FIELD, studyId.toString()), unset(studyId.toString()));
logger.debug("removeStudy: stage query = " + eq.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()));
logger.debug("removeStudy: stage update = " + combine.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()));
logger.info("Remove study from stage collection - step 1/" + (purge ? '2' : '1'));
getStageCollection(studyId).update(eq, combine, new QueryOptions(MULTI, true));
if (purge) {
logger.info("Remove study from stage collection - step 2/2");
removeEmptyVariantsFromStage(studyId);
}
return result;
}
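    // Illustrative use of removeStudy above (hypothetical adaptor variable and study name):
    //   dbAdaptor.removeStudy("study_1", System.currentTimeMillis(), new QueryOptions("purge", true));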
private DataResult removeStudyFromVariants(int studyId, Bson query, long timestamp) {
// { $pull : { files : { sid : <studyId> } } }
Bson update = combine(
pull(DocumentToVariantConverter.STUDIES_FIELD, eq(STUDYID_FIELD, studyId)),
pull(DocumentToVariantConverter.STATS_FIELD, eq(DocumentToVariantStatsConverter.STUDY_ID, studyId)),
getSetIndexNotSynchronized(timestamp)
);
logger.debug("removeStudy: query = {}", query.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()));
logger.debug("removeStudy: update = {}", update.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()));
DataResult result = variantsCollection.update(query, update, new QueryOptions(MULTI, true));
logger.debug("removeStudy: matched = {}", result.getNumMatches());
logger.debug("removeStudy: modified = {}", result.getNumUpdated());
return result;
}
/**
     * Remove empty variants from the variants collection, and move them to the trash collection.
*
* @return number of removed variants.
*/
private long removeEmptyVariants() {
long ts = System.currentTimeMillis();
Bson purgeQuery = exists(DocumentToVariantConverter.STUDIES_FIELD + '.' + STUDYID_FIELD, false);
MongoPersistentCursor iterator = new MongoPersistentCursor(variantsCollection, purgeQuery, queryParser.createProjection(
new Query(),
new QueryOptions(QueryOptions.INCLUDE, DocumentToVariantConverter.REQUIRED_FIELDS_SET)), new QueryOptions());
MongoDBCollection trashCollection = getTrashCollection();
trashCollection.createIndex(new Document(DocumentToTrashVariantConverter.TIMESTAMP_FIELD, 1), new ObjectMap());
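        // Index on the trash collection's timestamp field; it appears to support the time-based queries
        // issued by trashedVariants() and cleanTrash() below.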
long deletedDocuments = 0;
int deleteBatchSize = 1000;
List<String> documentsToDelete = new ArrayList<>(deleteBatchSize);
List<Document> documentsToInsert = new ArrayList<>(deleteBatchSize);
while (iterator.hasNext()) {
Document next = iterator.next();
documentsToDelete.add(next.getString("_id"));
next.append(DocumentToTrashVariantConverter.TIMESTAMP_FIELD, ts);
documentsToInsert.add(next);
if (documentsToDelete.size() == deleteBatchSize || !iterator.hasNext()) {
if (documentsToDelete.isEmpty()) {
                    // Really unlikely, but may happen if the total number of variants to remove was a multiple of "deleteBatchSize"
break;
}
// First, update the deletedVariants Collection
List<Bson> queries = documentsToDelete.stream().map(id -> Filters.eq("_id", id)).collect(Collectors.toList());
trashCollection.update(queries, documentsToInsert, new QueryOptions(UPSERT, true).append(REPLACE, true));
// Then, remove the documents from the variants collection
long deletedCount = variantsCollection.remove(and(purgeQuery, in("_id", documentsToDelete)), new QueryOptions(MULTI, true))
.getNumDeleted();
// Check if there were some errors
if (deletedCount != documentsToDelete.size()) {
throw new IllegalStateException("Some variants were not deleted!");
}
deletedDocuments += deletedCount;
documentsToDelete.clear();
documentsToInsert.clear();
}
}
return deletedDocuments;
}
public VariantDBIterator trashedVariants(long timeStamp) {
MongoDBCollection collection = getTrashCollection();
return VariantMongoDBIterator.persistentIterator(
collection,
lte(DocumentToTrashVariantConverter.TIMESTAMP_FIELD, timeStamp),
new Document(),
new QueryOptions(),
new DocumentToTrashVariantConverter());
}
public long cleanTrash(long timeStamp) {
MongoDBCollection collection = getTrashCollection();
// Try to get one variant beyond the ts. If exists, remove by query. Otherwise, remove the whole collection.
QueryOptions queryOptions = new QueryOptions(QueryOptions.LIMIT, 1);
int results = collection.find(gt(DocumentToTrashVariantConverter.TIMESTAMP_FIELD, timeStamp), queryOptions).getNumResults();
if (results > 0) {
return collection.remove(lte(DocumentToTrashVariantConverter.TIMESTAMP_FIELD, timeStamp), null).getNumDeleted();
} else {
long numElements = collection.count().getNumMatches();
db.dropCollection(configuration.getString(COLLECTION_TRASH.key(), COLLECTION_TRASH.defaultValue()));
return numElements;
}
}
private long removeEmptyVariantsFromStage(int studyId) {
Bson purgeQuery = eq(StageDocumentToVariantConverter.STUDY_FILE_FIELD, Collections.emptyList());
return getStageCollection(studyId).remove(purgeQuery, new QueryOptions(MULTI, true)).getNumDeleted();
}
@Override
public VariantQueryResult<Variant> get(ParsedVariantQuery variantQuery, QueryOptions options) {
if (options == null) {
options = new QueryOptions();
} else {
options = new QueryOptions(options);
}
if (options.getBoolean(QueryOptions.COUNT) && options.getInt(QueryOptions.LIMIT, -1) == 0) {
DataResult<Long> count = count(variantQuery);
DataResult<Variant> result = new DataResult<>(count.getTime(), count.getEvents(), 0, Collections.emptyList(), count.first());
return addSamplesMetadataIfRequested(result, variantQuery.getQuery(), options, getMetadataManager());
} else if (!options.getBoolean(QueryOptions.COUNT) && options.getInt(QueryOptions.LIMIT, -1) == 0) {
DataResult<Variant> result = new DataResult<>(0, Collections.emptyList(), 0, Collections.emptyList(), -1);
return addSamplesMetadataIfRequested(result, variantQuery.getQuery(), options, getMetadataManager());
}
VariantQueryProjection variantQueryProjection = variantQuery.getProjection();
Document mongoQuery = queryParser.parseQuery(variantQuery.getQuery());
Document projection = queryParser.createProjection(variantQuery.getQuery(), options, variantQueryProjection);
if (options.getBoolean("explain", false)) {
Document explain = variantsCollection.nativeQuery().explain(mongoQuery, projection, options);
logger.debug("MongoDB Explain = {}", explain.toJson(new JsonWriterSettings(JsonMode.SHELL, true)));
}
DocumentToVariantConverter converter = getDocumentToVariantConverter(variantQuery.getQuery(), variantQueryProjection);
return addSamplesMetadataIfRequested(variantsCollection.find(mongoQuery, projection, converter, options),
variantQuery.getQuery(), options, getMetadataManager());
}
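    // getPhased() below looks up the given variant, reads the sample's phase set (PS) value, widens the region
    // by windowsSize on each side, re-queries sorted, and keeps only the variants whose first study reports the
    // same PS for that sample.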
@Override
public VariantQueryResult<Variant> getPhased(String varStr, String studyName, String sampleName, QueryOptions options,
int windowsSize) {
StopWatch watch = StopWatch.createStarted();
Variant variant = new Variant(varStr);
Region region = new Region(variant.getChromosome(), variant.getStart(), variant.getEnd());
Query query = new Query(REGION.key(), region)
.append(REFERENCE.key(), variant.getReference())
.append(ALTERNATE.key(), variant.getAlternate())
.append(STUDY.key(), studyName)
.append(INCLUDE_STUDY.key(), studyName)
.append(INCLUDE_SAMPLE.key(), sampleName);
VariantQueryResult<Variant> queryResult = get(query, new QueryOptions());
variant = queryResult.first();
if (variant != null && !variant.getStudies().isEmpty()) {
StudyEntry studyEntry = variant.getStudies().get(0);
Integer psIdx = studyEntry.getSampleDataKeyPosition(VCFConstants.PHASE_SET_KEY);
if (psIdx != null) {
String ps = studyEntry.getSamples().get(0).getData().get(psIdx);
if (!ps.equals(DocumentToSamplesConverter.UNKNOWN_FIELD)) {
sampleName = studyEntry.getOrderedSamplesName().get(0);
region.setStart(region.getStart() > windowsSize ? region.getStart() - windowsSize : 0);
region.setEnd(region.getEnd() + windowsSize);
query.remove(REFERENCE.key());
query.remove(ALTERNATE.key());
query.remove(INCLUDE_STUDY.key());
query.remove(INCLUDE_SAMPLE.key());
queryResult = get(query, new QueryOptions(QueryOptions.SORT, true));
Iterator<Variant> iterator = queryResult.getResults().iterator();
while (iterator.hasNext()) {
Variant next = iterator.next();
if (!next.getStudies().isEmpty()) {
if (!ps.equals(next.getStudies().get(0).getSampleData(sampleName, VCFConstants.PHASE_SET_KEY))) {
iterator.remove();
}
}
}
queryResult.setNumResults(queryResult.getResults().size());
queryResult.setNumMatches(queryResult.getResults().size());
watch.stop();
queryResult.setTime(((int) watch.getTime()));
return addSamplesMetadataIfRequested(queryResult, query, options, metadataManager);
}
}
}
watch.stop();
return new VariantQueryResult<>(((int) watch.getTime()), 0, 0, null, Collections.emptyList(), null,
MongoDBVariantStorageEngine.STORAGE_ENGINE_ID);
}
@Override
public DataResult<VariantAnnotation> getAnnotation(String name, Query query, QueryOptions options) {
query = query == null ? new Query() : query;
validateAnnotationQuery(query);
options = validateAnnotationQueryOptions(options);
Document mongoQuery = queryParser.parseQuery(query);
Document projection = queryParser.createProjection(query, options);
MongoDBCollection annotationCollection;
if (name.equals(VariantAnnotationManager.CURRENT)) {
annotationCollection = getVariantsCollection();
} else {
annotationCollection = getAnnotationCollection(name);
}
VariantQueryProjection selectVariantElements = VariantQueryProjectionParser.parseVariantQueryFields(
query, new QueryOptions(QueryOptions.INCLUDE, VariantField.ANNOTATION), metadataManager);
DocumentToVariantConverter converter = getDocumentToVariantConverter(new Query(), selectVariantElements);
DataResult<Variant> result = annotationCollection.find(mongoQuery, projection, converter, options);
List<VariantAnnotation> annotations = result.getResults()
.stream()
.map(Variant::getAnnotation)
.filter(Objects::nonNull)
.collect(Collectors.toList());
return new DataResult<>(result.getTime(), result.getEvents(), annotations.size(), annotations, result.getNumMatches());
}
@Override
public DataResult<Long> count(ParsedVariantQuery variantQuery) {
Document mongoQuery = queryParser.parseQuery(variantQuery.getQuery());
DataResult<Long> count = variantsCollection.count(mongoQuery);
count.setResults(Collections.singletonList(count.getNumMatches()));
return count;
}
@Override
public DataResult distinct(Query query, String field) {
String documentPath;
switch (field) {
case "gene":
case "ensemblGene":
documentPath = DocumentToVariantConverter.ANNOTATION_FIELD
+ '.' + DocumentToVariantAnnotationConverter.CONSEQUENCE_TYPE_FIELD
+ '.' + DocumentToVariantAnnotationConverter.CT_ENSEMBL_GENE_ID_FIELD;
break;
case "ensemblTranscript":
documentPath = DocumentToVariantConverter.ANNOTATION_FIELD
+ '.' + DocumentToVariantAnnotationConverter.CONSEQUENCE_TYPE_FIELD
+ '.' + DocumentToVariantAnnotationConverter.CT_ENSEMBL_TRANSCRIPT_ID_FIELD;
break;
case "ct":
case "consequence_type":
documentPath = DocumentToVariantConverter.ANNOTATION_FIELD
+ '.' + DocumentToVariantAnnotationConverter.CONSEQUENCE_TYPE_FIELD
+ '.' + DocumentToVariantAnnotationConverter.CT_SO_ACCESSION_FIELD;
break;
default:
documentPath = DocumentToVariantConverter.ANNOTATION_FIELD
+ '.' + DocumentToVariantAnnotationConverter.CONSEQUENCE_TYPE_FIELD
+ '.' + DocumentToVariantAnnotationConverter.CT_GENE_NAME_FIELD;
break;
}
Document mongoQuery = queryParser.parseQuery(query);
return variantsCollection.distinct(documentPath, mongoQuery);
}
@Override
public VariantDBIterator iterator(ParsedVariantQuery variantQuery, QueryOptions options) {
if (options == null) {
options = new QueryOptions();
}
return iteratorFinal(variantQuery, options);
}
private VariantDBIterator iteratorFinal(final ParsedVariantQuery variantQuery, final QueryOptions options) {
VariantQueryProjection variantQueryProjection = variantQuery.getProjection();
Document mongoQuery = queryParser.parseQuery(variantQuery);
Document projection = queryParser.createProjection(variantQuery.getQuery(), options, variantQueryProjection);
DocumentToVariantConverter converter = getDocumentToVariantConverter(variantQuery.getQuery(), variantQueryProjection);
options.putIfAbsent(MongoDBCollection.BATCH_SIZE, 100);
// Short unsorted queries with timeout or limit don't need the persistent cursor.
if (options.containsKey(QueryOptions.TIMEOUT)
|| options.containsKey(QueryOptions.LIMIT)
|| !options.getBoolean(QueryOptions.SORT, false)) {
StopWatch stopWatch = StopWatch.createStarted();
VariantMongoDBIterator dbIterator = new VariantMongoDBIterator(
() -> variantsCollection.nativeQuery().find(mongoQuery, projection, options), converter);
dbIterator.setTimeFetching(dbIterator.getTimeFetching() + stopWatch.getNanoTime());
return dbIterator;
} else {
logger.debug("Using mongodb persistent iterator");
return VariantMongoDBIterator.persistentIterator(variantsCollection, mongoQuery, projection, options, converter);
}
}
public MongoDBIterator<Document> nativeIterator(Query query, QueryOptions options, boolean persistent) {
if (query == null) {
query = new Query();
}
if (options == null) {
options = new QueryOptions();
}
Document mongoQuery = queryParser.parseQuery(query);
Document projection = queryParser.createProjection(query, options);
options.putIfAbsent(MongoDBCollection.BATCH_SIZE, 100);
if (persistent) {
logger.debug("Using mongodb persistent iterator");
return new MongoDBIterator<>(new MongoPersistentCursor(variantsCollection, mongoQuery, projection, options), -1);
} else {
return variantsCollection.nativeQuery().find(mongoQuery, projection, options);
}
}
@Override
public DataResult getFrequency(ParsedVariantQuery query, Region region, int regionIntervalSize) {
// db.variants.aggregate( { $match: { $and: [ {chr: "1"}, {start: {$gt: 251391, $lt: 2701391}} ] }},
// { $group: { _id: { $subtract: [ { $divide: ["$start", 20000] }, { $divide: [{$mod: ["$start", 20000]},
// 20000] } ] },
// totalCount: {$sum: 1}}})
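        // Worked example of the $group key above (with the 20000 interval of the sample query): for start = 251391
        // the key is 251391/20000 - (251391 % 20000)/20000 = 12.56955 - 0.56955 = 12, i.e. floor(start / intervalSize).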
QueryOptions options = new QueryOptions();
        // If the interval is not provided, it is set to the value that yields 200 intervals
if (regionIntervalSize <= 0) {
// regionIntervalSize = options.getInt("interval", (region.getEnd() - region.getStart()) / 200);
regionIntervalSize = (region.getEnd() - region.getStart()) / 200;
}
Document start = new Document("$gt", region.getStart());
start.append("$lt", region.getEnd());
BasicDBList andArr = new BasicDBList();
andArr.add(new Document(DocumentToVariantConverter.CHROMOSOME_FIELD, region.getChromosome()));
andArr.add(new Document(DocumentToVariantConverter.START_FIELD, start));
// Parsing the rest of options
Document mongoQuery = queryParser.parseQuery(query);
if (!mongoQuery.isEmpty()) {
andArr.add(mongoQuery);
}
Document match = new Document("$match", new Document("$and", andArr));
// qb.and("_at.chunkIds").in(chunkIds);
// qb.and(DBObjectToVariantConverter.END_FIELD).greaterThanEquals(region.getStart());
// qb.and(DBObjectToVariantConverter.START_FIELD).lessThanEquals(region.getEnd());
//
// List<String> chunkIds = getChunkIds(region);
// DBObject regionObject = new Document("_at.chunkIds", new Document("$in", chunkIds))
// .append(DBObjectToVariantConverter.END_FIELD, new Document("$gte", region.getStart()))
// .append(DBObjectToVariantConverter.START_FIELD, new Document("$lte", region.getEnd()));
BasicDBList divide1 = new BasicDBList();
divide1.add("$start");
divide1.add(regionIntervalSize);
BasicDBList divide2 = new BasicDBList();
divide2.add(new Document("$mod", divide1));
divide2.add(regionIntervalSize);
BasicDBList subtractList = new BasicDBList();
subtractList.add(new Document("$divide", divide1));
subtractList.add(new Document("$divide", divide2));
Document subtract = new Document("$subtract", subtractList);
Document totalCount = new Document("$sum", 1);
Document g = new Document("_id", subtract);
g.append("features_count", totalCount);
Document group = new Document("$group", g);
Document sort = new Document("$sort", new Document("_id", 1));
// logger.info("getAllIntervalFrequencies - (>·_·)>");
// System.out.println(options.toString());
// System.out.println(match.toString());
// System.out.println(group.toString());
// System.out.println(sort.toString());
long dbTimeStart = System.currentTimeMillis();
DataResult output = variantsCollection.aggregate(/*"$histogram", */Arrays.asList(match, group, sort), options);
long dbTimeEnd = System.currentTimeMillis();
Map<Long, Document> ids = new HashMap<>();
// Create DBObject for intervals with features inside them
for (Document intervalObj : (List<Document>) output.getResults()) {
Long auxId = Math.round((Double) intervalObj.get("_id")); //is double
Document intervalVisited = ids.get(auxId);
if (intervalVisited == null) {
intervalObj.put("_id", auxId);
intervalObj.put("start", queryParser.getChunkStart(auxId.intValue(), regionIntervalSize));
intervalObj.put("end", queryParser.getChunkEnd(auxId.intValue(), regionIntervalSize));
intervalObj.put("chromosome", region.getChromosome());
intervalObj.put("features_count", Math.log((int) intervalObj.get("features_count")));
ids.put(auxId, intervalObj);
} else {
Double sum = (Double) intervalVisited.get("features_count") + Math.log((int) intervalObj.get("features_count"));
intervalVisited.put("features_count", sum.intValue());
}
}
// Create DBObject for intervals without features inside them
BasicDBList resultList = new BasicDBList();
int firstChunkId = queryParser.getChunkId(region.getStart(), regionIntervalSize);
int lastChunkId = queryParser.getChunkId(region.getEnd(), regionIntervalSize);
Document intervalObj;
for (int chunkId = firstChunkId; chunkId <= lastChunkId; chunkId++) {
intervalObj = ids.get((long) chunkId);
if (intervalObj == null) {
intervalObj = new Document();
intervalObj.put("_id", chunkId);
intervalObj.put("start", queryParser.getChunkStart(chunkId, regionIntervalSize));
intervalObj.put("end", queryParser.getChunkEnd(chunkId, regionIntervalSize));
intervalObj.put("chromosome", region.getChromosome());
intervalObj.put("features_count", 0);
}
resultList.add(intervalObj);
}
return new DataResult(((Long) (dbTimeEnd - dbTimeStart)).intValue(), Collections.emptyList(), resultList.size(), resultList,
resultList.size());
}
@Override
public DataResult rank(Query query, String field, int numResults, boolean asc) {
QueryOptions options = new QueryOptions();
options.put("limit", numResults);
options.put("count", true);
options.put("order", (asc) ? 1 : -1); // MongoDB: 1 = ascending, -1 = descending
return groupBy(query, field, options);
}
@Override
public DataResult groupBy(Query query, String field, QueryOptions options) {
if (options == null) {
options = new QueryOptions();
} else {
options = new QueryOptions(options); // Copy given QueryOptions.
}
String documentPath;
String unwindPath;
int numUnwinds = 2;
switch (field) {
case "gene":
case "ensemblGene":
documentPath = DocumentToVariantConverter.ANNOTATION_FIELD
+ '.' + DocumentToVariantAnnotationConverter.CONSEQUENCE_TYPE_FIELD
+ '.' + DocumentToVariantAnnotationConverter.CT_ENSEMBL_GENE_ID_FIELD;
unwindPath = DocumentToVariantConverter.ANNOTATION_FIELD
+ '.' + DocumentToVariantAnnotationConverter.CONSEQUENCE_TYPE_FIELD;
break;
case "ct":
case "consequence_type":
documentPath = DocumentToVariantConverter.ANNOTATION_FIELD
+ '.' + DocumentToVariantAnnotationConverter.CONSEQUENCE_TYPE_FIELD
+ '.' + DocumentToVariantAnnotationConverter.CT_SO_ACCESSION_FIELD;
unwindPath = DocumentToVariantConverter.ANNOTATION_FIELD
+ '.' + DocumentToVariantAnnotationConverter.CONSEQUENCE_TYPE_FIELD;
numUnwinds = 3;
break;
default:
documentPath = DocumentToVariantConverter.ANNOTATION_FIELD
+ '.' + DocumentToVariantAnnotationConverter.CONSEQUENCE_TYPE_FIELD
+ '.' + DocumentToVariantAnnotationConverter.CT_GENE_NAME_FIELD;
unwindPath = DocumentToVariantConverter.ANNOTATION_FIELD
+ '.' + DocumentToVariantAnnotationConverter.CONSEQUENCE_TYPE_FIELD;
break;
}
Document mongoQuery = queryParser.parseQuery(query);
boolean count = options.getBoolean("count", false);
int order = options.getInt("order", -1);
Document project;
Document projectAndCount;
if (count) {
project = new Document("$project", new Document("field", "$" + documentPath));
projectAndCount = new Document("$project", new Document()
.append("id", "$_id")
.append("_id", 0)
.append("count", new Document("$size", "$values")));
} else {
project = new Document("$project", new Document()
.append("field", "$" + documentPath)
//.append("_id._id", "$_id")
.append("_id.start", "$" + DocumentToVariantConverter.START_FIELD)
.append("_id.end", "$" + DocumentToVariantConverter.END_FIELD)
.append("_id.chromosome", "$" + DocumentToVariantConverter.CHROMOSOME_FIELD)
.append("_id.alternate", "$" + DocumentToVariantConverter.ALTERNATE_FIELD)
.append("_id.reference", "$" + DocumentToVariantConverter.REFERENCE_FIELD)
.append("_id.ids", "$" + DocumentToVariantConverter.IDS_FIELD));
projectAndCount = new Document("$project", new Document()
.append("id", "$_id")
.append("_id", 0)
.append("values", "$values")
.append("count", new Document("$size", "$values")));
}
Document match = new Document("$match", mongoQuery);
Document unwindField = new Document("$unwind", "$field");
Document notNull = new Document("$match", new Document("field", new Document("$ne", null)));
Document groupAndAddToSet = new Document("$group", new Document("_id", "$field")
.append("values", new Document("$addToSet", "$_id"))); // sum, count, avg, ...?
Document sort = new Document("$sort", new Document("count", order)); // 1 = ascending, -1 = descending
int skip = options.getInt(QueryOptions.SKIP, -1);
Document skipStep = skip > 0 ? new Document("$skip", skip) : null;
int limit = options.getInt(QueryOptions.LIMIT, -1) > 0 ? options.getInt(QueryOptions.LIMIT) : 10;
options.remove(QueryOptions.LIMIT); // Remove limit or Datastore will add a new limit step
Document limitStep = new Document("$limit", limit);
List<Bson> operations = new LinkedList<>();
operations.add(match);
operations.add(project);
for (int i = 0; i < numUnwinds; i++) {
operations.add(unwindField);
}
operations.add(notNull);
operations.add(groupAndAddToSet);
operations.add(projectAndCount);
operations.add(sort);
if (skipStep != null) {
operations.add(skipStep);
}
operations.add(limitStep);
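        // Resulting pipeline order: $match -> $project -> $unwind (xN) -> $match (non-null) -> $group
        // -> $project (+count) -> $sort [-> $skip] -> $limit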
logger.debug("db." + collectionName + ".aggregate( " + operations + " )");
DataResult<Document> queryResult = variantsCollection.aggregate(operations, options);
// List<Map<String, Object>> results = new ArrayList<>(queryResult.getResults().size());
// results.addAll(queryResult.getResults().stream().map(dbObject -> new ObjectMap("id", dbObject.get("_id")).append("count",
// dbObject.get("count"))).collect(Collectors.toList()));
return queryResult;
}
@Override
public DataResult groupBy(Query query, List<String> fields, QueryOptions options) {
String warningMsg = "Unimplemented VariantMongoDBAdaptor::groupBy list of fields. Using field[0] : '" + fields.get(0) + "'";
logger.warn(warningMsg);
DataResult queryResult = groupBy(query, fields.get(0), options);
queryResult.setEvents(Collections.singletonList(new Event(Event.Type.WARNING, warningMsg)));
return queryResult;
}
@Override
public DataResult updateStats(List<VariantStatsWrapper> variantStatsWrappers, String studyName, long timestamp, QueryOptions options) {
StudyMetadata sm = metadataManager.getStudyMetadata(studyName);
return updateStats(variantStatsWrappers, sm, timestamp, options);
}
@Override
public DataResult updateStats(List<VariantStatsWrapper> variantStatsWrappers, StudyMetadata studyMetadata,
long timestamp, QueryOptions options) {
// MongoCollection<Document> coll = db.getDb().getCollection(collectionName);
// BulkWriteOperation pullBuilder = coll.initializeUnorderedBulkOperation();
// BulkWriteOperation pushBuilder = coll.initializeUnorderedBulkOperation();
List<Bson> pullQueriesBulkList = new LinkedList<>();
List<Bson> pullUpdatesBulkList = new LinkedList<>();
List<Bson> pushQueriesBulkList = new LinkedList<>();
List<Bson> pushUpdatesBulkList = new LinkedList<>();
long start = System.nanoTime();
DocumentToVariantStatsConverter statsConverter = new DocumentToVariantStatsConverter(metadataManager);
// VariantSource variantSource = queryOptions.get(VariantStorageEngine.VARIANT_SOURCE, VariantSource.class);
DocumentToVariantConverter variantConverter = getDocumentToVariantConverter(new Query(), options);
boolean overwrite = options.getBoolean(VariantStorageOptions.STATS_OVERWRITE.key(), false);
        // TODO unset 'st' if it is already present?
for (VariantStatsWrapper wrapper : variantStatsWrappers) {
List<VariantStats> cohortStats = wrapper.getCohortStats();
if (cohortStats.isEmpty()) {
continue;
}
List<Document> cohorts = statsConverter.convertCohortsToStorageType(cohortStats, studyMetadata.getId());
// remove when we remove fileId
// List cohorts = statsConverter.convertCohortsToStorageType(cohortStats, variantSource.getStudyId()); // TODO use when we
// remove fileId
            // add cohorts, overwriting old values if that cid, fid and sid already exist: remove and then add
// db.variants.update(
// {_id:<id>},
// {$pull:{st:{cid:{$in:["Cohort 1","cohort 2"]}, fid:{$in:["file 1", "file 2"]}, sid:{$in:["study 1", "study 2"]}}}}
// )
// db.variants.update(
// {_id:<id>},
// {$push:{st:{$each: [{cid:"Cohort 1", fid:"file 1", ... , defaultValue:3},{cid:"Cohort 2", ... , defaultValue:3}] }}}
// )
if (!cohorts.isEmpty()) {
String id = variantConverter.buildStorageId(new Variant(wrapper.getChromosome(), wrapper.getStart(), wrapper.getEnd(),
wrapper.getReference(), wrapper.getAlternate()).setSv(wrapper.getSv()));
Document find = new Document("_id", id);
if (overwrite) {
List<Document> idsList = new ArrayList<>(cohorts.size());
for (Document cohort : cohorts) {
Document ids = new Document()
.append(DocumentToVariantStatsConverter.COHORT_ID, cohort.get(DocumentToVariantStatsConverter.COHORT_ID))
.append(DocumentToVariantStatsConverter.STUDY_ID, cohort.get(DocumentToVariantStatsConverter.STUDY_ID));
idsList.add(ids);
}
Document pull = new Document("$pull",
new Document(DocumentToVariantConverter.STATS_FIELD,
new Document("$or", idsList)));
pullQueriesBulkList.add(find);
pullUpdatesBulkList.add(pull);
}
Bson push = combine(pushEach(DocumentToVariantConverter.STATS_FIELD, cohorts), getSetIndexNotSynchronized(timestamp));
pushQueriesBulkList.add(find);
pushUpdatesBulkList.add(push);
}
}
        // TODO handle the case where the variant didn't have that studyId in the files array
        // TODO check that the substitution is done correctly if the stats are already present
if (overwrite) {
variantsCollection.update(pullQueriesBulkList, pullUpdatesBulkList, new QueryOptions());
}
DataResult writeResult = variantsCollection.update(pushQueriesBulkList, pushUpdatesBulkList, new QueryOptions());
if (writeResult.getNumMatches() != pushQueriesBulkList.size()) {
logger.warn("Could not update stats from some variants: {} != {}, {} non loaded stats", writeResult.getNumMatches(),
pushQueriesBulkList.size(), (pushQueriesBulkList.size() - writeResult.getNumMatches()));
}
return writeResult;
}
public DataResult removeStats(String studyName, String cohortName, QueryOptions options) {
StudyMetadata sm = metadataManager.getStudyMetadata(studyName);
int cohortId = metadataManager.getCohortId(sm.getId(), cohortName);
// { st : { $elemMatch : { sid : <studyId>, cid : <cohortId> } } }
Document query = new Document(DocumentToVariantConverter.STATS_FIELD,
new Document("$elemMatch",
new Document(DocumentToVariantStatsConverter.STUDY_ID, sm.getId())
.append(DocumentToVariantStatsConverter.COHORT_ID, cohortId)));
// { $pull : { st : { sid : <studyId>, cid : <cohortId> } } }
Document update = new Document(
"$pull",
new Document(DocumentToVariantConverter.STATS_FIELD,
new Document(DocumentToVariantStatsConverter.STUDY_ID, sm.getId())
.append(DocumentToVariantStatsConverter.COHORT_ID, cohortId)
)
);
logger.debug("deleteStats: query = {}", query);
logger.debug("deleteStats: update = {}", update);
return variantsCollection.update(query, update, new QueryOptions(MULTI, true));
}
@Override
public DataResult updateAnnotations(List<VariantAnnotation> variantAnnotations, long timestamp, QueryOptions queryOptions) {
List<Bson> queries = new LinkedList<>();
List<Bson> updates = new LinkedList<>();
StopWatch watch = StopWatch.createStarted();
DocumentToVariantConverter variantConverter = getDocumentToVariantConverter(new Query(), queryOptions);
for (VariantAnnotation variantAnnotation : variantAnnotations) {
String id;
if (variantAnnotation.getAdditionalAttributes() != null
&& variantAnnotation.getAdditionalAttributes().containsKey(GROUP_NAME.key())) {
String variantString = variantAnnotation.getAdditionalAttributes()
.get(GROUP_NAME.key())
.getAttribute()
.get(VARIANT_ID.key());
id = variantConverter.buildStorageId(new Variant(variantString));
} else {
id = variantConverter.buildStorageId(variantAnnotation.getChromosome(), variantAnnotation.getStart(),
variantAnnotation.getReference(), variantAnnotation.getAlternate());
}
Document find = new Document("_id", id);
int currentAnnotationId = getMetadataManager().getProjectMetadata().getAnnotation().getCurrent().getId();
DocumentToVariantAnnotationConverter converter = new DocumentToVariantAnnotationConverter(currentAnnotationId);
Document convertedVariantAnnotation = converter.convertToStorageType(variantAnnotation);
Bson update = combine(
set(DocumentToVariantConverter.ANNOTATION_FIELD + ".0", convertedVariantAnnotation),
getSetIndexNotSynchronized(timestamp));
queries.add(find);
updates.add(update);
}
return variantsCollection.update(queries, updates, null);
}
@Override
public DataResult updateCustomAnnotations(Query query, String name, AdditionalAttribute attribute, long timeStamp,
QueryOptions options) {
Document queryDocument = queryParser.parseQuery(query);
Document updateDocument = DocumentToVariantAnnotationConverter.convertToStorageType(attribute);
return variantsCollection.update(queryDocument,
combine(set(DocumentToVariantConverter.CUSTOM_ANNOTATION_FIELD + '.' + name, updateDocument),
getSetIndexNotSynchronized(timeStamp)),
new QueryOptions(MULTI, true));
}
public DataResult removeAnnotation(String annotationId, Query query, QueryOptions queryOptions) {
Document mongoQuery = queryParser.parseQuery(query);
logger.debug("deleteAnnotation: query = {}", mongoQuery);
Document update = new Document("$set", new Document(DocumentToVariantConverter.ANNOTATION_FIELD + ".0", null));
logger.debug("deleteAnnotation: update = {}", update);
return variantsCollection.update(mongoQuery, update, new QueryOptions(MULTI, true));
}
@Override
public void close() throws IOException {
if (closeConnection) {
mongoManager.close();
}
metadataManager.close();
NUMBER_INSTANCES.decrementAndGet();
}
private DocumentToVariantConverter getDocumentToVariantConverter(Query query, QueryOptions options) {
return getDocumentToVariantConverter(query, VariantQueryProjectionParser.parseVariantQueryFields(query, options, metadataManager));
}
private DocumentToVariantConverter getDocumentToVariantConverter(Query query, VariantQueryProjection selectVariantElements) {
List<Integer> returnedStudies = selectVariantElements.getStudyIds();
DocumentToSamplesConverter samplesConverter;
samplesConverter = new DocumentToSamplesConverter(metadataManager, selectVariantElements);
samplesConverter.setSampleDataKeys(getIncludeSampleData(query));
samplesConverter.setIncludeSampleId(query.getBoolean(INCLUDE_SAMPLE_ID.key()));
if (query.containsKey(UNKNOWN_GENOTYPE.key())) {
samplesConverter.setUnknownGenotype(query.getString(UNKNOWN_GENOTYPE.key()));
}
DocumentToStudyVariantEntryConverter studyEntryConverter;
studyEntryConverter = new DocumentToStudyVariantEntryConverter(false, selectVariantElements.getFiles(), samplesConverter);
studyEntryConverter.setMetadataManager(metadataManager);
ProjectMetadata projectMetadata = getMetadataManager().getProjectMetadata();
Map<Integer, String> annotationIds;
if (projectMetadata != null) {
annotationIds = projectMetadata.getAnnotation().getSaved()
.stream()
.collect(Collectors.toMap(
ProjectMetadata.VariantAnnotationMetadata::getId,
ProjectMetadata.VariantAnnotationMetadata::getName));
ProjectMetadata.VariantAnnotationMetadata current = projectMetadata.getAnnotation().getCurrent();
if (current != null) {
annotationIds.put(current.getId(), current.getName());
}
} else {
annotationIds = Collections.emptyMap();
}
return new DocumentToVariantConverter(studyEntryConverter,
new DocumentToVariantStatsConverter(metadataManager), returnedStudies, annotationIds);
}
public void createIndexes(QueryOptions options) {
createIndexes(options, variantsCollection);
}
/**
* Create missing indexes on the given VariantsCollection.
* Variant indices
* - ChunkID
* - Chromosome + start + end
* - IDs
* <p>
* Study indices
* - StudyId
* - FileId
* <p>
* Stats indices
* - StatsMaf
* - StatsMgf
* <p>
* Annotation indices
* - XRef.id
* - ConsequenceType.so
* - _gn_so : SPARSE
* - PopulationFrequency Study + Population + AlternateFrequency : SPARSE
* - Clinical.Clinvar.clinicalSignificance : SPARSE
* ConservedRegionScore
* - phastCons.score
* - phylop.score
* - gerp.score
* FunctionalScore
* - cadd_scaled
* - cadd_raw
* - Drugs.name : SPARSE
* ProteinSubstitution
* - polyphen.score : SPARSE
* - polyphen.description : SPARSE
* - sift.score : SPARSE
* - sift.description : SPARSE
* - ProteinVariantAnnotation.keywords : SPARSE
* - TranscriptAnnotationFlags : SPARSE
* SearchIndex
* - _index.ts
*
     * @param options            Options (currently unused).
* @param variantsCollection MongoDBCollection
*/
public static void createIndexes(QueryOptions options, MongoDBCollection variantsCollection) {
logger.info("Start creating indexes");
ObjectMap onBackground = new ObjectMap(MongoDBCollection.BACKGROUND, true);
ObjectMap onBackgroundSparse = new ObjectMap(MongoDBCollection.BACKGROUND, true).append(MongoDBCollection.SPARSE, true);
// Variant indices
////////////////
variantsCollection.createIndex(new Document(DocumentToVariantConverter.AT_FIELD + '.'
+ DocumentToVariantConverter.CHUNK_IDS_FIELD, 1), onBackground);
variantsCollection.createIndex(new Document(DocumentToVariantConverter.CHROMOSOME_FIELD, 1)
.append(DocumentToVariantConverter.START_FIELD, 1)
.append(DocumentToVariantConverter.END_FIELD, 1), onBackground);
variantsCollection.createIndex(new Document(DocumentToVariantConverter.IDS_FIELD, 1), onBackground);
// Study indices
////////////////
variantsCollection.createIndex(
new Document(DocumentToVariantConverter.STUDIES_FIELD + '.' + STUDYID_FIELD, 1), onBackground);
variantsCollection.createIndex(
new Document(DocumentToVariantConverter.STUDIES_FIELD + '.' + FILES_FIELD + '.' + FILEID_FIELD, 1), onBackground);
// Stats indices
////////////////
variantsCollection.createIndex(new Document(DocumentToVariantConverter.STATS_FIELD + '.' + DocumentToVariantStatsConverter
.MAF_FIELD, 1), onBackground);
variantsCollection.createIndex(new Document(DocumentToVariantConverter.STATS_FIELD + '.' + DocumentToVariantStatsConverter
.MGF_FIELD, 1), onBackground);
// Annotation indices
////////////////
// XRefs.id
variantsCollection.createIndex(new Document()
.append(DocumentToVariantConverter.ANNOTATION_FIELD
+ '.' + DocumentToVariantAnnotationConverter.XREFS_FIELD
+ '.' + DocumentToVariantAnnotationConverter.XREF_ID_FIELD, 1),
onBackground);
// ConsequenceType.so
variantsCollection.createIndex(new Document()
.append(DocumentToVariantConverter.ANNOTATION_FIELD
+ '.' + DocumentToVariantAnnotationConverter.CONSEQUENCE_TYPE_FIELD
+ '.' + DocumentToVariantAnnotationConverter.CT_SO_ACCESSION_FIELD, 1),
onBackground);
// _gn_so : SPARSE
variantsCollection.createIndex(new Document()
.append(DocumentToVariantConverter.ANNOTATION_FIELD
+ '.' + DocumentToVariantAnnotationConverter.GENE_SO_FIELD, 1),
onBackgroundSparse);
// Population frequency : SPARSE
variantsCollection.createIndex(new Document()
.append(DocumentToVariantConverter.ANNOTATION_FIELD
+ '.' + DocumentToVariantAnnotationConverter.POPULATION_FREQUENCIES_FIELD
+ '.' + DocumentToVariantAnnotationConverter.POPULATION_FREQUENCY_STUDY_FIELD, 1)
.append(DocumentToVariantConverter.ANNOTATION_FIELD
+ '.' + DocumentToVariantAnnotationConverter.POPULATION_FREQUENCIES_FIELD
+ '.' + DocumentToVariantAnnotationConverter.POPULATION_FREQUENCY_POP_FIELD, 1)
.append(DocumentToVariantConverter.ANNOTATION_FIELD
+ '.' + DocumentToVariantAnnotationConverter.POPULATION_FREQUENCIES_FIELD
+ '.' + DocumentToVariantAnnotationConverter.POPULATION_FREQUENCY_ALTERNATE_FREQUENCY_FIELD, 1),
new ObjectMap(onBackgroundSparse).append(NAME, "pop_freq"));
// Clinical clinvar : SPARSE
variantsCollection.createIndex(new Document()
.append(DocumentToVariantConverter.ANNOTATION_FIELD
+ '.' + DocumentToVariantAnnotationConverter.CLINICAL_DATA_FIELD
+ '.' + DocumentToVariantAnnotationConverter.CLINICAL_CLINVAR_FIELD
+ ".clinicalSignificance", 1),
new ObjectMap(onBackgroundSparse).append(NAME, "clinvar"));
// Conserved region score (phastCons, phylop, gerp)
variantsCollection.createIndex(new Document(DocumentToVariantConverter.ANNOTATION_FIELD
+ '.' + DocumentToVariantAnnotationConverter.CONSERVED_REGION_GERP_FIELD
+ '.' + DocumentToVariantAnnotationConverter.SCORE_SCORE_FIELD, 1),
onBackground);
variantsCollection.createIndex(new Document(DocumentToVariantConverter.ANNOTATION_FIELD
+ '.' + DocumentToVariantAnnotationConverter.CONSERVED_REGION_PHYLOP_FIELD
+ '.' + DocumentToVariantAnnotationConverter.SCORE_SCORE_FIELD, 1),
onBackground);
variantsCollection.createIndex(new Document(DocumentToVariantConverter.ANNOTATION_FIELD
+ '.' + DocumentToVariantAnnotationConverter.CONSERVED_REGION_PHASTCONS_FIELD
+ '.' + DocumentToVariantAnnotationConverter.SCORE_SCORE_FIELD, 1),
onBackground);
// Functional score (cadd_scaled, cadd_raw)
variantsCollection.createIndex(new Document(DocumentToVariantConverter.ANNOTATION_FIELD
+ '.' + DocumentToVariantAnnotationConverter.FUNCTIONAL_CADD_SCALED_FIELD
+ '.' + DocumentToVariantAnnotationConverter.SCORE_SCORE_FIELD, 1),
onBackground);
variantsCollection.createIndex(new Document(DocumentToVariantConverter.ANNOTATION_FIELD
+ '.' + DocumentToVariantAnnotationConverter.FUNCTIONAL_CADD_RAW_FIELD
+ '.' + DocumentToVariantAnnotationConverter.SCORE_SCORE_FIELD, 1),
onBackground);
// Drugs : SPARSE
variantsCollection.createIndex(new Document()
.append(DocumentToVariantConverter.ANNOTATION_FIELD
+ '.' + DocumentToVariantAnnotationConverter.DRUG_FIELD
+ '.' + DocumentToVariantAnnotationConverter.DRUG_NAME_FIELD, 1),
onBackgroundSparse);
// Protein substitution score (polyphen , sift) : SPARSE
variantsCollection.createIndex(new Document(DocumentToVariantConverter.ANNOTATION_FIELD
+ '.' + DocumentToVariantAnnotationConverter.CONSEQUENCE_TYPE_FIELD
+ '.' + DocumentToVariantAnnotationConverter.CT_PROTEIN_POLYPHEN_FIELD
+ '.' + DocumentToVariantAnnotationConverter.SCORE_SCORE_FIELD, 1),
onBackgroundSparse);
variantsCollection.createIndex(new Document(DocumentToVariantConverter.ANNOTATION_FIELD
+ '.' + DocumentToVariantAnnotationConverter.CONSEQUENCE_TYPE_FIELD
+ '.' + DocumentToVariantAnnotationConverter.CT_PROTEIN_SIFT_FIELD
+ '.' + DocumentToVariantAnnotationConverter.SCORE_SCORE_FIELD, 1),
onBackgroundSparse);
// Protein substitution score description (polyphen , sift) : SPARSE
variantsCollection.createIndex(new Document(DocumentToVariantConverter.ANNOTATION_FIELD
+ '.' + DocumentToVariantAnnotationConverter.CONSEQUENCE_TYPE_FIELD
+ '.' + DocumentToVariantAnnotationConverter.CT_PROTEIN_POLYPHEN_FIELD
+ '.' + DocumentToVariantAnnotationConverter.SCORE_DESCRIPTION_FIELD, 1),
onBackgroundSparse);
variantsCollection.createIndex(new Document(DocumentToVariantConverter.ANNOTATION_FIELD
+ '.' + DocumentToVariantAnnotationConverter.CONSEQUENCE_TYPE_FIELD
+ '.' + DocumentToVariantAnnotationConverter.CT_PROTEIN_SIFT_FIELD
+ '.' + DocumentToVariantAnnotationConverter.SCORE_DESCRIPTION_FIELD, 1),
onBackgroundSparse);
// Protein Keywords : SPARSE
variantsCollection.createIndex(new Document()
.append(DocumentToVariantConverter.ANNOTATION_FIELD
+ '.' + DocumentToVariantAnnotationConverter.CONSEQUENCE_TYPE_FIELD
+ '.' + DocumentToVariantAnnotationConverter.CT_PROTEIN_KEYWORDS, 1),
onBackgroundSparse);
// TranscriptAnnotationFlags : SPARSE
variantsCollection.createIndex(new Document()
.append(DocumentToVariantConverter.ANNOTATION_FIELD
+ '.' + DocumentToVariantAnnotationConverter.CONSEQUENCE_TYPE_FIELD
+ '.' + DocumentToVariantAnnotationConverter.CT_TRANSCRIPT_ANNOT_FLAGS, 1),
onBackgroundSparse);
// _index.ts
variantsCollection.createIndex(new Document()
.append(DocumentToVariantConverter.INDEX_FIELD + '.' + DocumentToVariantConverter.INDEX_TIMESTAMP_FIELD, 1),
onBackground);
logger.debug("sent order to create indices");
}
@Override
public VariantStorageMetadataManager getMetadataManager() {
return metadataManager;
}
@Override
public void setVariantStorageMetadataManager(VariantStorageMetadataManager variantStorageMetadataManager) {
this.metadataManager = variantStorageMetadataManager;
}
}
| apache-2.0 |
aliayhan/todomanager | src/main/java/com/todomanager/package-info.java | 133 | /**
* This is the root package of the demo application "ToDo Manager".
*
* @author Ayhan Dardagan
*
*/
package com.todomanager; | apache-2.0 |
applyke/zebro | module/Application/src/Application/Entity/Issue.php | 2179 | <?php
namespace Application\Entity;
use Doctrine\ORM\Mapping as ORM;
/**
* @ORM\Entity(repositoryClass="Application\Repository\IssueRepository"))
* @ORM\HasLifecycleCallbacks
* @ORM\Table(name="`issue`")
*/
class Issue extends EntityAbstract
{
/**
* @ORM\Id
* @ORM\GeneratedValue(strategy="AUTO")
* @ORM\Column(type="integer")
*/
protected $id;
/**
* @ORM\ManyToOne(targetEntity="Project")
* @ORM\JoinColumn(name="project_id", referencedColumnName="id")
*/
protected $project;
    /** Task's name */
/** @ORM\Column(type="string", length=128) */
protected $summary;
/** @ORM\Column(type="string", length=1024) */
protected $description;
/**
* @ORM\ManyToOne(targetEntity="IssueType")
* @ORM\JoinColumn(name="type_id", referencedColumnName="id")
*/
protected $type;
/** sequence number of elements in column */
/** @ORM\Column(type="string", length=128) */
protected $sequence_number=0;
/**
* @ORM\ManyToOne(targetEntity="IssuePriority")
* @ORM\JoinColumn(name="priority_id", referencedColumnName="id")
*/
protected $priority;
/**
     * Who is working on the task now
*
* @ORM\ManyToOne(targetEntity="User")
* @ORM\JoinColumn(name="user_to_assignee", referencedColumnName="id")
*/
protected $assignee;
/**
* @ORM\ManyToOne(targetEntity="Status")
* @ORM\JoinColumn(name="status_id", referencedColumnName="id")
*/
protected $status;
/**
* @ORM\ManyToOne(targetEntity="Sprint")
* @ORM\JoinColumn(name="sprint_id", referencedColumnName="id", nullable=true)
*/
protected $sprint;
// /** */
// /** @ORM\Column(type="string", length=256) */
// protected $labels;
/** @ORM\Column(type="datetime") */
protected $created;
/** @ORM\Column(type="datetime", nullable=true) */
protected $updated;
/**
* @ORM\PrePersist
*/
public function prePersist()
{
$this->created = new \DateTime();
}
/**
* @ORM\PreUpdate
*/
public function preUpdate()
{
$this->updated = new \DateTime();
}
}
| apache-2.0 |
open-power/eCMD | dllNetwork/server/BrkptInstruction.C | 17693 | //IBM_PROLOG_BEGIN_TAG
/*
* Copyright 2019 IBM International Business Machines Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//IBM_PROLOG_END_TAG
//--------------------------------------------------------------------
// Includes
//--------------------------------------------------------------------
#include <BrkptInstruction.H>
#include <ecmdSharedUtils.H>
#include <arpa/inet.h>
#include <sstream>
#include <iomanip>
#include <string.h>
#include <stdio.h>
#include <errno.h>
#ifdef OTHER_USE
#include <OutputLite.H>
extern OutputLite out;
#else
#include <CronusData.H>
#endif
static void packReturnData(const ecmdDataBuffer & i_virtualAddress,
const std::list<cipBrkptTableEntry> & i_brkptTableEntries,
ecmdDataBuffer & o_data);
static void packReturnData(const std::list<cipSoftwareEvent_t> & i_events,
ecmdDataBuffer & o_data);
/*****************************************************************************/
/* BrkptInstruction Implementation *******************************************/
/*****************************************************************************/
BrkptInstruction::BrkptInstruction(void) : Instruction(),
deviceString(""),
timeout(0),
steps(0)
{
version = 0x1;
type = BRKPT;
}
BrkptInstruction::~BrkptInstruction(void)
{
}
uint32_t BrkptInstruction::setup(InstructionCommand i_command, std::string &i_deviceString, const ecmdChipTarget & i_target, const ecmdDataBuffer & i_address, const cipXlateVariables i_xlateVars, uint32_t i_flags)
{
deviceString = i_deviceString;
target = i_target;
address = i_address;
xlateVars = i_xlateVars;
command = i_command;
flags = i_flags | INSTRUCTION_FLAG_DEVSTR;
return 0;
}
uint32_t BrkptInstruction::setup(InstructionCommand i_command, uint32_t i_timeout, uint32_t i_flags)
{
timeout = i_timeout;
command = i_command;
flags = i_flags;
return 0;
}
uint32_t BrkptInstruction::setup(InstructionCommand i_command, const ecmdChipTarget & i_target, uint32_t i_steps, uint32_t i_flags)
{
target = i_target;
command = i_command;
steps = i_steps;
flags = i_flags;
return 0;
}
uint32_t BrkptInstruction::execute(ecmdDataBuffer & o_data, InstructionStatus & o_status, Handle ** io_handle)
{
int rc = 0;
/* set the version of this instruction in the status */
o_status.instructionVersion = version;
/* check for any previous errors to report back */
if (error)
{
rc = o_status.rc = error;
return rc;
}
//o_status.errorMessage.append(dumpInstruction());
switch(command)
{
case BRKPT_SET:
case BRKPT_CLEAR:
case BRKPT_GET:
{
std::list<cipBrkptTableEntry> l_brkptTableEntries;
ecmdDataBuffer l_virtualAddress;
rc = brkpt_general(*io_handle, o_status, l_brkptTableEntries, l_virtualAddress);
if (rc == 0)
{
packReturnData(l_virtualAddress, l_brkptTableEntries, o_data);
rc = o_status.rc = SERVER_COMMAND_COMPLETE;
}
else
o_status.rc = rc;
}
break;
case BRKPT_WAIT:
{
std::list<cipSoftwareEvent_t> l_events;
rc = brkpt_wait(*io_handle, o_status, l_events);
if (rc == 0)
{
packReturnData(l_events, o_data);
rc = o_status.rc = SERVER_COMMAND_COMPLETE;
}
else
o_status.rc = rc;
}
break;
case BRKPT_INSTR_START:
case BRKPT_INSTR_STOP:
case BRKPT_INSTR_STEP:
rc = brkpt_instr_general(*io_handle, o_status);
if (rc == 0)
rc = o_status.rc = SERVER_COMMAND_COMPLETE;
else
o_status.rc = rc;
break;
default:
rc = o_status.rc = SERVER_COMMAND_NOT_SUPPORTED;
break;
}
return (uint32_t) rc;
}
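/*
 * Flattened wire layout implemented by flatten()/unflatten() below (32-bit words, network byte order):
 *   [0] version, [1] command, [2] flags, then command-specific data:
 *     BRKPT_WAIT                      : [3] timeout
 *     BRKPT_SET / CLEAR / GET         : [3..6] flat sizes of deviceString, target, address and xlateVars,
 *                                       followed by each of those flattened in that order
 *     BRKPT_INSTR_START / STOP / STEP : [3] steps, [4] target flat size, followed by the flattened target
 */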
uint32_t BrkptInstruction::flatten(uint8_t * o_data, uint32_t i_len) const
{
uint32_t rc = 0;
uint32_t * o_ptr = (uint32_t *) o_data;
if (i_len < flattenSize())
{
out.error("BrkptInstruction::flatten", "i_len %d bytes is too small to flatten\n", i_len);
rc = 1;
}
else
{
// clear memory
memset(o_data, 0, flattenSize());
o_ptr[0] = htonl(version);
o_ptr[1] = htonl(command);
o_ptr[2] = htonl(flags);
if (command == BRKPT_WAIT)
{
o_ptr[3] = htonl(timeout);
}
else if ((command == BRKPT_SET) ||
(command == BRKPT_CLEAR) ||
(command == BRKPT_GET))
{
uint32_t deviceStringSize = deviceString.size() + 1;
if (deviceStringSize % sizeof(uint32_t))
deviceStringSize += (sizeof(uint32_t) - (deviceStringSize % sizeof(uint32_t)));
o_ptr[3] = htonl(deviceStringSize);
uint32_t targetSize = target.flattenSize();
o_ptr[4] = htonl(targetSize);
uint32_t addressSize = address.flattenSize();
o_ptr[5] = htonl(addressSize);
uint32_t xlateVarsSize = xlateVars.flattenSize();
o_ptr[6] = htonl(xlateVarsSize);
uint32_t offset = 7;
if (deviceString.size() > 0)
strcpy(((char *)(o_ptr + offset)), deviceString.c_str());
offset += deviceStringSize / sizeof(uint32_t);
target.flatten((uint8_t *) (o_ptr + offset), targetSize);
offset += targetSize / sizeof(uint32_t);
address.flatten((uint8_t *) (o_ptr + offset), addressSize);
offset += addressSize / sizeof(uint32_t);
xlateVars.flatten((uint8_t *) (o_ptr + offset), xlateVarsSize);
}
else if ((command == BRKPT_INSTR_START) ||
(command == BRKPT_INSTR_STOP) ||
(command == BRKPT_INSTR_STEP))
{
o_ptr[3] = htonl(steps);
uint32_t targetSize = target.flattenSize();
o_ptr[4] = htonl(targetSize);
uint32_t offset = 5;
target.flatten((uint8_t *) (o_ptr + offset), targetSize);
}
}
return rc;
}
uint32_t BrkptInstruction::unflatten(const uint8_t * i_data, uint32_t i_len)
{
uint32_t rc = 0;
uint32_t * i_ptr = (uint32_t *) i_data;
version = ntohl(i_ptr[0]);
if(version == 0x1)
{
command = (InstructionCommand) ntohl(i_ptr[1]);
flags = ntohl(i_ptr[2]);
if (command == BRKPT_WAIT)
{
timeout = ntohl(i_ptr[3]);
}
else if ((command == BRKPT_SET) ||
(command == BRKPT_CLEAR) ||
(command == BRKPT_GET))
{
uint32_t deviceStringSize = ntohl(i_ptr[3]);
uint32_t targetSize = ntohl(i_ptr[4]);
uint32_t addressSize = ntohl(i_ptr[5]);
uint32_t xlateVarsSize = ntohl(i_ptr[6]);
uint32_t offset = 7;
if (deviceStringSize > 0)
deviceString = ((char *)(i_ptr + offset));
offset += deviceStringSize / sizeof(uint32_t);
rc = target.unflatten((uint8_t *) (i_ptr + offset), targetSize);
if (rc) { error = rc; }
offset += targetSize / sizeof(uint32_t);
rc = address.unflatten((uint8_t *) (i_ptr + offset), addressSize);
if (rc) { error = rc; }
offset += addressSize / sizeof(uint32_t);
rc = xlateVars.unflatten((uint8_t *) (i_ptr + offset), xlateVarsSize);
if (rc) { error = rc; }
}
else if ((command == BRKPT_INSTR_START) ||
(command == BRKPT_INSTR_STOP) ||
(command == BRKPT_INSTR_STEP))
{
steps = ntohl(i_ptr[3]);
uint32_t targetSize = ntohl(i_ptr[4]);
uint32_t offset = 5;
rc = target.unflatten((uint8_t *) (i_ptr + offset), targetSize);
if (rc) { error = rc; }
}
}
else
{
error = rc = SERVER_UNKNOWN_INSTRUCTION_VERSION;
}
return rc;
}
uint32_t BrkptInstruction::flattenSize(void) const
{
uint32_t size = 3 * sizeof(uint32_t); // version, command, flags
if (command == BRKPT_WAIT)
{
size += sizeof(uint32_t); // timeout
}
else if ((command == BRKPT_SET) ||
(command == BRKPT_CLEAR) ||
(command == BRKPT_GET))
{
size += 4 * sizeof(uint32_t); // sizes of flattened objects
uint32_t deviceStringSize = deviceString.size() + 1;
if (deviceStringSize % sizeof(uint32_t))
deviceStringSize += (sizeof(uint32_t) - (deviceStringSize % sizeof(uint32_t)));
size += deviceStringSize; // deviceString
size += target.flattenSize(); // target
size += address.flattenSize(); // address
size += xlateVars.flattenSize(); // xlateVars
}
else if ((command == BRKPT_INSTR_START) ||
(command == BRKPT_INSTR_STOP) ||
(command == BRKPT_INSTR_STEP))
{
size += sizeof(uint32_t); // steps
size += sizeof(uint32_t); // target size
size += target.flattenSize(); // target
}
return size;
}
std::string BrkptInstruction::dumpInstruction(void) const
{
std::ostringstream oss;
oss << "BrkptInstruction" << std::endl;
oss << "version : " << version << std::endl;
oss << "command : " << InstructionCommandToString(command) << std::endl;
oss << "type : " << InstructionTypeToString(type) << std::endl;
oss << "flags : " << InstructionFlagToString(flags) << std::endl;
if (command == BRKPT_WAIT)
oss << "timeout : " << timeout << std::endl;
else
oss << "target : " << ecmdWriteTarget(target, ECMD_DISPLAY_TARGET_HYBRID) << std::endl;
if ((command == BRKPT_SET) ||
(command == BRKPT_CLEAR) ||
(command == BRKPT_GET))
{
oss << "deviceString : " << deviceString << std::endl;
oss << "address : " << ((address.getBitLength() > 0) ? address.genHexLeftStr() : "" ) << std::endl;
oss << "xlateVars.tagsActive : " << (xlateVars.tagsActive ? "true" : "false") << std::endl;
oss << "xlateVars.mode32bit : " << (xlateVars.mode32bit ? "true" : "false") << std::endl;
oss << "xlateVars.writeECC : " << (xlateVars.writeECC ? "true" : "false") << std::endl;
oss << "xlateVars.manualXlateFlag : " << (xlateVars.manualXlateFlag ? "true" : "false") << std::endl;
oss << "xlateVars.addrType : " << xlateVars.addrType << std::endl;
oss << "xlateVars.partitionId : " << xlateVars.partitionId << std::endl;
}
if ((command == BRKPT_INSTR_START) ||
(command == BRKPT_INSTR_STOP) ||
(command == BRKPT_INSTR_STEP))
oss << "steps : " << steps << std::endl;
return oss.str();
}
uint64_t BrkptInstruction::getHash(void) const {
uint32_t devstrhash = 0x0;
uint32_t rc = 0;
if (deviceString.size() != 0)
rc = devicestring_genhash(deviceString, devstrhash);
uint64_t hash64 = 0x0ull;
hash64 |= ((0x0000000Full & type) << 60);
if (rc == 0) {
hash64 |= ((uint64_t) devstrhash);
}
return hash64;
}
uint32_t BrkptInstruction::closeHandle(Handle ** i_handle)
{
uint32_t rc = 0;
*i_handle = NULL;
return rc;
}
std::string BrkptInstruction::getInstructionVars(const InstructionStatus & i_status) const
{
std::ostringstream oss;
oss << std::hex << std::setfill('0');
oss << "rc: " << std::setw(8) << i_status.rc;
if (i_status.data.getWordLength() > 0) {
oss << " status: " << std::setw(8) << i_status.data.getWord(0);
}
oss << " devstr: " << deviceString;
return oss.str();
}
void BrkptInstruction::unpackReturnData(const ecmdDataBuffer & i_data, std::list<cipBrkptTableEntry> & o_brkptTableEntries, ecmdDataBuffer & o_virtualAddress)
{
uint32_t word_offset = 0;
uint32_t l_virtualAddress_flat_size = i_data.getWord(word_offset);
word_offset += 1;
uint8_t * l_data = new uint8_t[l_virtualAddress_flat_size];
i_data.extract(l_data, word_offset * 32, l_virtualAddress_flat_size * 8);
o_virtualAddress.unflatten(l_data, l_virtualAddress_flat_size);
delete [] l_data;
word_offset += l_virtualAddress_flat_size / sizeof(uint32_t);
uint32_t l_list_size = i_data.getWord(word_offset);
word_offset += 1;
cipBrkptTableEntry l_empty_entry;
for (uint32_t l_list_entry = 0; l_list_entry < l_list_size; l_list_entry++)
{
uint32_t l_entry_flat_size = i_data.getWord(word_offset);
word_offset += 1;
uint8_t * l_data = new uint8_t[l_entry_flat_size];
i_data.extract(l_data, word_offset * 32, l_entry_flat_size * 8);
o_brkptTableEntries.push_back(l_empty_entry);
o_brkptTableEntries.back().unflatten(l_data, l_entry_flat_size);
delete [] l_data;
word_offset += l_entry_flat_size / sizeof(uint32_t);
}
}
void BrkptInstruction::unpackReturnData(const ecmdDataBuffer & i_data, std::list<cipSoftwareEvent_t> & o_events)
{
uint32_t word_offset = 0;
uint32_t l_list_size = i_data.getWord(word_offset);
word_offset += 1;
cipSoftwareEvent_t l_empty_entry;
for (uint32_t l_list_entry = 0; l_list_entry < l_list_size; l_list_entry++)
{
uint32_t l_entry_flat_size = i_data.getWord(word_offset);
word_offset += 1;
uint8_t * l_data = new uint8_t[l_entry_flat_size];
i_data.extract(l_data, word_offset * 32, l_entry_flat_size * 8);
o_events.push_back(l_empty_entry);
o_events.back().unflatten(l_data, l_entry_flat_size);
delete [] l_data;
word_offset += l_entry_flat_size / sizeof(uint32_t);
}
}
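/*
 * Packed return-data layout produced by the packReturnData() helpers below (32-bit word granularity):
 *   breakpoint table : [vaddr flat size][vaddr data][entry count] then, per entry, [entry flat size][entry data]
 *   software events  : [event count] then, per event, [event flat size][event data]
 * The unpackReturnData() members above read the same layout back.
 */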
static void packReturnData(const ecmdDataBuffer & i_virtualAddress, const std::list<cipBrkptTableEntry> & i_brkptTableEntries, ecmdDataBuffer & o_data)
{
// byte size of flattened l_virtualAddress (word)
uint32_t l_virtualAddress_flat_size = i_virtualAddress.flattenSize();
// data for l_virtualAddress
// number of elements in list (word)
uint32_t l_list_size = i_brkptTableEntries.size();
uint32_t l_list_flat_size = 0;
for (std::list<cipBrkptTableEntry>::const_iterator entry = i_brkptTableEntries.begin();
entry != i_brkptTableEntries.end();
entry++)
{
// byte size of element n (word)
l_list_flat_size += entry->flattenSize();
// data for element n
}
uint32_t total_size = (2 + l_list_size) * sizeof(uint32_t);
total_size += l_virtualAddress_flat_size;
total_size += l_list_flat_size;
o_data.setByteLength(total_size);
uint32_t word_offset = 0;
o_data.setWord(word_offset, l_virtualAddress_flat_size);
word_offset += 1;
uint8_t * l_data = new uint8_t[l_virtualAddress_flat_size];
i_virtualAddress.flatten(l_data, l_virtualAddress_flat_size);
o_data.insert(l_data, word_offset * 32, l_virtualAddress_flat_size * 8);
delete [] l_data;
word_offset += l_virtualAddress_flat_size / sizeof(uint32_t);
o_data.setWord(word_offset, l_list_size);
word_offset += 1;
for (std::list<cipBrkptTableEntry>::const_iterator entry = i_brkptTableEntries.begin();
entry != i_brkptTableEntries.end();
entry++)
{
uint32_t l_entry_flat_size = entry->flattenSize();
o_data.setWord(word_offset, l_entry_flat_size);
word_offset += 1;
uint8_t * l_data = new uint8_t[l_entry_flat_size];
entry->flatten(l_data, l_entry_flat_size);
o_data.insert(l_data, word_offset * 32, l_entry_flat_size * 8);
delete [] l_data;
word_offset += l_entry_flat_size / sizeof(uint32_t);
}
}
static void packReturnData(const std::list<cipSoftwareEvent_t> & i_events, ecmdDataBuffer & o_data)
{
// number of elements in list (word)
uint32_t l_list_size = i_events.size();
uint32_t l_list_flat_size = 0;
for (std::list<cipSoftwareEvent_t>::const_iterator entry = i_events.begin();
entry != i_events.end();
entry++)
{
// byte size of element n (word)
l_list_flat_size += entry->flattenSize();
// data for element n
}
uint32_t total_size = (1 + l_list_size) * sizeof(uint32_t);
total_size += l_list_flat_size;
o_data.setByteLength(total_size);
uint32_t word_offset = 0;
o_data.setWord(word_offset, l_list_size);
word_offset += 1;
for (std::list<cipSoftwareEvent_t>::const_iterator entry = i_events.begin();
entry != i_events.end();
entry++)
{
uint32_t l_entry_flat_size = entry->flattenSize();
o_data.setWord(word_offset, l_entry_flat_size);
word_offset += 1;
uint8_t * l_data = new uint8_t[l_entry_flat_size];
entry->flatten(l_data, l_entry_flat_size);
o_data.insert(l_data, word_offset * 32, l_entry_flat_size * 8);
delete [] l_data;
word_offset += l_entry_flat_size / sizeof(uint32_t);
}
}
| apache-2.0 |
mpeuster/son-emu | src/emuvim/api/openstack/helper.py | 1433 | # Copyright (c) 2015 SONATA-NFV and Paderborn University
# ALL RIGHTS RESERVED.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Neither the name of the SONATA-NFV, Paderborn University
# nor the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# This work has been performed in the framework of the SONATA project,
# funded by the European Commission under Grant number 671517 through
# the Horizon 2020 and 5G-PPP programmes. The authors would like to
# acknowledge the contributions of their colleagues of the SONATA
# partner consortium (www.sonata-nfv.eu).
from urlparse import urlparse
import logging
LOG = logging.getLogger("api.openstack.helper")
def get_host(r):
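    """Return the hostname part of the request's base URL, or "0.0.0.0" if it cannot be parsed."""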
try:
return urlparse(r.base_url).hostname
except BaseException:
LOG.error("Could not get host part of request URL.")
return "0.0.0.0"
| apache-2.0 |
openstack/networking-plumgrid | networking_plumgrid/neutron/plugins/db/policy/policy_rule_db.py | 17117 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from networking_plumgrid.neutron.plugins.common import \
policy_exceptions as p_excep
from networking_plumgrid.neutron.plugins.db.policy.endpoint_group_db \
import EndpointGroup, SecurityPolicyTagBinding
from networking_plumgrid.neutron.plugins.db.policy.policy_service_db \
import PolicyService
from networking_plumgrid.neutron.plugins.db.policy.policy_tag_db \
import PolicyTag
from neutron.api.v2 import attributes
from neutron.db import common_db_mixin
from neutron.db.models.securitygroup import SecurityGroup
from neutron.db import models_v2
from neutron_lib.db import model_base
from oslo_log import log as logging
import sqlalchemy as sa
from sqlalchemy import orm
from sqlalchemy.orm import exc
LOG = logging.getLogger(__name__)
class PolicyRule(model_base.BASEV2, models_v2.HasId,
models_v2.HasTenant):
"""DB definition for PLUMgrid policy rule object"""
__tablename__ = "pg_policy_rules"
name = sa.Column(sa.String(attributes.NAME_MAX_LEN))
src_grp_epg = sa.Column(sa.String(36),
sa.ForeignKey("pg_endpoint_groups.id",
ondelete="CASCADE"),
nullable=True)
dst_grp_epg = sa.Column(sa.String(36),
sa.ForeignKey("pg_endpoint_groups.id",
ondelete="CASCADE"),
nullable=True)
src_grp_sg = sa.Column(sa.String(36),
sa.ForeignKey("securitygroups.id",
ondelete="CASCADE"),
nullable=True)
dst_grp_sg = sa.Column(sa.String(36),
sa.ForeignKey("securitygroups.id",
ondelete="CASCADE"),
nullable=True)
protocol = sa.Column(sa.Enum('any', 'icmp', 'tcp', 'udp',
name='pg_policy_rules_protocol'))
src_port_range = sa.Column(sa.String(attributes.NAME_MAX_LEN))
dst_port_range = sa.Column(sa.String(attributes.NAME_MAX_LEN))
action = sa.Column(sa.Enum('copy', 'allow',
name='pg_policies_rules_action'))
action_target_service = sa.Column(sa.String(36),
sa.ForeignKey("pg_policy_services.id",
ondelete="CASCADE"),
nullable=True)
action_target_tenant_id = sa.Column(sa.String(36), nullable=True)
tag = sa.Column(sa.String(36),
sa.ForeignKey("pg_policy_tags.id",
ondelete="CASCADE"),
nullable=True)
source_epg = orm.relationship(EndpointGroup,
backref=orm.backref('src_epg',
cascade='all,delete'),
primaryjoin="EndpointGroup.id==PolicyRule.src_grp_epg")
destination_epg = orm.relationship(EndpointGroup,
backref=orm.backref('dst_epg',
cascade='all,delete'),
primaryjoin="EndpointGroup.id==PolicyRule.dst_grp_epg")
source_sg = orm.relationship(SecurityGroup,
backref=orm.backref('src_sg',
cascade='all,delete'),
primaryjoin="SecurityGroup.id==PolicyRule.src_grp_sg")
destination_sg = orm.relationship(SecurityGroup,
backref=orm.backref('dst_sg',
cascade='all,delete'),
primaryjoin="SecurityGroup.id==PolicyRule.dst_grp_sg")
send_to_service = orm.relationship(PolicyService,
backref=orm.backref('service_binding',
cascade='all,delete'),
primaryjoin="PolicyService.id==PolicyRule.action_target_service")
policy_tag = orm.relationship(PolicyTag,
backref=orm.backref('tag_binding',
cascade='all,delete'),
primaryjoin="PolicyTag.id==PolicyRule.tag")
class PolicyRuleMixin(common_db_mixin.CommonDbMixin):
def create_policy_rule(self, context, policy_rule):
"""
creates a policy rule
Args:
            policy_rule:
JSON object with policy rule attributes
                name : display name of policy rule
tenant_id : tenant uuid
id : policy rule uuid
src_grp : source endpoint group for policy rule
dst_grp: destination endpoint group for policy rule
protocol: protocol for policy rule
                action: action to be performed by policy rule
src_port_range: source port range of policy rule
dst_port_range: destination port range of policy rule
action_target: uuid of target policy service
tag: uuid of policy tag for policy rule
"""
pr = policy_rule["policy_rule"]
self._validate_src_grp_policy_rule_config(context, pr)
self._validate_dst_grp_policy_rule_config(context, pr)
self._configure_policy_rule_endpoint_groups(pr)
self._validate_action_target(context, pr)
self._validate_security_group_config(context, pr)
if "tag" in pr:
self._configure_policy_tag(context, pr)
with context.session.begin(subtransactions=True):
pr_db = PolicyRule(tenant_id=pr["tenant_id"],
name=pr["name"],
src_grp_epg=pr['src_grp_epg'],
dst_grp_epg=pr['dst_grp_epg'],
src_grp_sg=pr['src_grp_sg'],
dst_grp_sg= pr['dst_grp_sg'],
protocol=pr['protocol'],
src_port_range=pr['src_port_range'],
dst_port_range=pr['dst_port_range'],
action=pr['action'],
action_target_service=pr["action_target_service"],
action_target_tenant_id=pr["action_target_tenant_id"],
tag=pr['tag'])
context.session.add(pr_db)
return self._make_pr_dict(pr_db)
def get_policy_rule(self, context, pr_id, fields=None):
"""
Gets an existing policy rule
"""
try:
query = self._model_query(context, PolicyRule)
pr_db = query.filter_by(id=pr_id).one()
except exc.NoResultFound:
raise p_excep.NoPolicyRuleFound(id=pr_id)
return self._make_pr_dict(pr_db, fields)
def get_policy_rules(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None, page_reverse=None):
"""
Gets the list of all the existing policy rules
"""
return self._get_collection(context, PolicyRule,
self._make_pr_dict, filters=filters,
sorts=sorts, limit=limit,
marker_obj=marker, fields=fields,
page_reverse=page_reverse)
def delete_policy_rule(self, context, pr_id):
"""
Deletes an existing policy rule
"""
try:
query = context.session.query(PolicyRule)
pr_db = query.filter_by(id=pr_id).first()
except exc.NoResultFound:
raise p_excep.NoPolicyRuleFound(id=pr_id)
context.session.delete(pr_db)
def _validate_src_grp_policy_rule_config(self, context, pr):
"""
Validate source group config for policy rule
"""
if pr["src_grp"] is not None:
is_security_group = False
if self._check_endpoint_group_for_policy_rule(context,
pr["src_grp"]):
pr["src_security_group"] = is_security_group
return
if self._check_security_group_for_policy_rule(context,
pr["src_grp"]):
is_security_group = True
pr["src_security_group"] = is_security_group
return
raise p_excep.InvalidPolicyRuleConfig(epg='source-group')
else:
pr["src_security_group"] = None
def _validate_dst_grp_policy_rule_config(self, context, pr):
"""
Validate destination group config for policy rule
"""
if pr["dst_grp"] is not None:
is_security_group = False
if self._check_endpoint_group_for_policy_rule(context,
pr["dst_grp"]):
pr["dst_security_group"] = is_security_group
return
if self._check_security_group_for_policy_rule(context,
pr["dst_grp"]):
is_security_group = True
pr["dst_security_group"] = is_security_group
return
raise p_excep.InvalidPolicyRuleConfig(epg='destination-group')
else:
pr["dst_security_group"] = None
def _check_endpoint_group_for_policy_rule(self, context, epg_id):
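        """
        Return True if an endpoint group with the given uuid exists,
        False otherwise.
        """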
try:
query = context.session.query(EndpointGroup)
epg_db = query.filter_by(id=epg_id).one()
if epg_db["id"] == epg_id:
return True
except exc.NoResultFound:
return False
def _check_security_group_for_policy_rule(self, context, sg_id):
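        """
        Return True if a security group with the given uuid exists,
        False otherwise.
        """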
try:
query = context.session.query(SecurityGroup)
sg_db = query.filter_by(id=sg_id).one()
if sg_db["id"] == sg_id:
return True
except exc.NoResultFound:
return False
def _configure_policy_rule_endpoint_groups(self, pr):
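        """
        Map the generic src_grp/dst_grp values onto the endpoint-group or
        security-group specific columns, based on the flags set during
        validation.
        """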
if pr['src_security_group'] is None:
pr['src_grp_epg'] = None
pr['src_grp_sg'] = None
elif pr['src_security_group']:
pr['src_grp_epg'] = None
pr['src_grp_sg'] = pr['src_grp']
else:
pr['src_grp_epg'] = pr['src_grp']
pr['src_grp_sg'] = None
if pr['dst_security_group'] is None:
pr['dst_grp_epg'] = None
pr['dst_grp_sg'] = None
elif pr['dst_security_group']:
pr['dst_grp_epg'] = None
pr['dst_grp_sg'] = pr['dst_grp']
else:
pr['dst_grp_epg'] = pr['dst_grp']
pr['dst_grp_sg'] = None
def _make_pr_dict(self, pr, fields=None):
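        """
        Build the dictionary representation of a policy rule record,
        collapsing the endpoint-group/security-group columns back into
        src_grp/dst_grp and re-assembling the action_target string.
        """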
if pr.src_grp_epg is None and pr.src_grp_sg is None:
src_grp = None
elif pr.src_grp_epg is None:
src_grp = pr.src_grp_sg
else:
src_grp = pr.src_grp_epg
if pr.dst_grp_epg is None and pr.dst_grp_sg is None:
dst_grp = None
elif pr.dst_grp_epg is None:
dst_grp = pr.dst_grp_sg
else:
dst_grp = pr.dst_grp_epg
action_target = ((str(pr.action_target_tenant_id) + ":" if
pr.action_target_tenant_id else "") +
(pr.action_target_service if
pr.action_target_service else ""))
if not action_target:
action_target = None
pr_dict = {"id": pr.id,
"name": pr.name,
"src_grp": src_grp,
"dst_grp": dst_grp,
"protocol": pr.protocol,
"src_port_range": pr.src_port_range,
"dst_port_range": pr.dst_port_range,
"action": pr.action,
"action_target": action_target,
"tag": pr.tag,
"tenant_id": pr.tenant_id}
return self._fields(pr_dict, fields)
def _validate_action_target(self, context, pr):
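        """
        Validate the action_target field, which is either a policy service
        uuid or "<tenant_id>:<policy service uuid>", and split it into
        action_target_service and action_target_tenant_id.
        """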
tenant_id = None
action_target_id = None
if pr["action_target"] is None:
pr["action_target_service"] = None
pr["action_target_tenant_id"] = None
else:
action_target = pr["action_target"].split(":")
if len(action_target) == 1:
action_target_id = action_target[0]
elif len(action_target) == 2:
tenant_id = action_target[0]
action_target_id = action_target[1]
else:
raise p_excep.InvalidFormatActionTarget()
if tenant_id is None:
try:
query = context.session.query(PolicyService)
ps_db = query.filter_by(id=action_target_id).one()
if ps_db["id"] == action_target_id:
pr["action_target_service"] = pr["action_target"]
pr["action_target_tenant_id"] = None
except exc.NoResultFound:
raise p_excep.NoActionTargetFound(at=action_target_id)
else:
try:
query = context.session.query(PolicyService)
ps_db = query.filter_by(id=action_target_id,
tenant_id=tenant_id).one()
if ps_db["id"] == action_target_id:
pr["action_target_service"] = action_target_id
pr["action_target_tenant_id"] = tenant_id
except exc.NoResultFound:
raise p_excep.NoActionTargetFound(at=action_target_id)
def _configure_policy_tag(self, context, pr):
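        """
        Resolve the tag field to a policy tag uuid, accepting either a policy
        tag uuid or an endpoint group uuid that has a policy tag attached.
        """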
ep_grp = pr["tag"]
if ep_grp is None:
return None
else:
try:
query = context.session.query(PolicyTag)
pt_db = query.filter_by(id=ep_grp).one()
pr["tag"] = pt_db["id"]
except exc.NoResultFound:
try:
query = context.session.query(EndpointGroup)
epg_db = query.filter_by(id=ep_grp).one()
if ("policy_tag_id" not in epg_db or
epg_db["policy_tag_id"] is None):
raise p_excep.NoPolicyTagFoundEndpointGroup(epg=ep_grp)
pr["tag"] = epg_db["policy_tag_id"]
except exc.NoResultFound:
raise p_excep.NoPolicyTagFoundEndpointGroup(epg=ep_grp)
def _get_security_policy_tag_binding(self, context, sg_id):
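        """
        Return the policy tag binding for the given security group, or an
        empty dict if no binding exists.
        """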
query = self._model_query(context,
SecurityPolicyTagBinding)
sg_map = {}
try:
sg_map = query.filter_by(security_group_id=sg_id).one()
return sg_map
except exc.NoResultFound:
return sg_map
def _validate_security_group_config(self, context, pr):
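        """
        For 'allow' rules between two security groups, require at least one
        side to be bound to a policy tag; otherwise the rule is rejected.
        """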
action = pr['action']
src_grp_tag = False
dst_grp_tag = False
is_src_security_group = False
is_dst_security_group = False
if action == 'allow':
if 'src_grp' in pr:
src_grp = pr['src_grp']
try:
sg_map = self._get_security_policy_tag_binding(context,
src_grp)
if sg_map:
src_grp_tag = True
except Exception:
pass
if 'dst_grp' in pr:
dst_grp = pr['dst_grp']
try:
sg_map = self._get_security_policy_tag_binding(context,
dst_grp)
if sg_map:
dst_grp_tag = True
except Exception:
pass
if self._check_security_group_for_policy_rule(context,
pr["src_grp"]):
is_src_security_group = True
if self._check_security_group_for_policy_rule(context,
pr["dst_grp"]):
is_dst_security_group = True
if ((is_src_security_group and is_dst_security_group) and
(not src_grp_tag and not dst_grp_tag)):
operation = "Policy Rule creation"
raise p_excep.OperationNotAllowed(operation=operation,
id=src_grp)
| apache-2.0 |
didclab/onedatashare | src/main/java/org/onedatashare/server/module/vfs/VfsSession.java | 5320 | /**
##**************************************************************
##
## Copyright (C) 2018-2020, OneDataShare Team,
## Department of Computer Science and Engineering,
## University at Buffalo, Buffalo, NY, 14260.
##
## Licensed under the Apache License, Version 2.0 (the "License"); you
## may not use this file except in compliance with the License. You may
## obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
##**************************************************************
*/
package org.onedatashare.server.module.vfs;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.vfs2.*;
import org.apache.commons.vfs2.auth.StaticUserAuthenticator;
import org.apache.commons.vfs2.impl.DefaultFileSystemConfigBuilder;
import org.apache.commons.vfs2.provider.ftp.FtpFileSystemConfigBuilder;
import org.apache.commons.vfs2.provider.sftp.SftpFileSystemConfigBuilder;
import org.onedatashare.server.model.core.Credential;
import org.onedatashare.server.model.core.Session;
import org.onedatashare.server.model.credential.UserInfoCredential;
import org.onedatashare.server.model.error.AuthenticationRequired;
import org.onedatashare.server.model.useraction.IdMap;
import reactor.core.publisher.Mono;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
public class VfsSession extends Session<VfsSession, VfsResource> {
FileSystemManager fileSystemManager;
FileSystemOptions fileSystemOptions;
public VfsSession(URI uri, Credential credential) {
super(uri, credential);
}
@Override
public Mono<VfsResource> select(String path) {
FileObject fo = null;
try {
fo = fileSystemManager.resolveFile(path, fileSystemOptions);
} catch (FileSystemException e) {
e.printStackTrace();
}
return initialize().then(Mono.just(new VfsResource(this, path, fo)));
}
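    /**
     * Rebuilds the given URI with the supplied port number when portNum is a
     * numeric string of one to five digits; otherwise returns the URI unchanged.
     */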
public static URI getURIWithPortNumber(URI buildItem, String portNum){
if(StringUtils.isNumeric(portNum) && portNum.length() <= 5 && portNum.length() > 0){
try {
int portNumber = Integer.parseInt(portNum);
URI historyItem = new URI(buildItem.getScheme(),
buildItem.getUserInfo(), buildItem.getHost(), portNumber,
buildItem.getPath(), buildItem.getQuery(), buildItem.getFragment());
return historyItem;
}catch(URISyntaxException e){
e.printStackTrace();
return buildItem;
}
}
return buildItem;
}
@Override
public Mono<VfsResource> select(String path, String portNum) {
FileObject fo = null;
path = path.replace(" ", "%20");
String pathWithPort = getURIWithPortNumber(URI.create(path), portNum).toString();
try {
fo = fileSystemManager.resolveFile(pathWithPort, fileSystemOptions);
} catch (FileSystemException e) {
e.printStackTrace();
}
return initialize().then(Mono.just(new VfsResource(this, pathWithPort, fo)));
}
@Override
public Mono<VfsResource> select(String path, String id, ArrayList<IdMap> idMap) {
FileObject fo = null;
try {
fo = fileSystemManager.resolveFile(path, fileSystemOptions);
} catch (FileSystemException e) {
e.printStackTrace();
}
return initialize().then(Mono.just(new VfsResource(this, path, fo)));
}
@Override
public Mono<VfsSession> initialize() {
return Mono.create(s -> {
fileSystemOptions = new FileSystemOptions();
FtpFileSystemConfigBuilder.getInstance().setPassiveMode(fileSystemOptions, true);
SftpFileSystemConfigBuilder sfscb = SftpFileSystemConfigBuilder.getInstance();
sfscb.setPreferredAuthentications(fileSystemOptions,"password,keyboard-interactive");
if(getCredential() instanceof UserInfoCredential && ((UserInfoCredential) getCredential()).getUsername() != null) {
UserInfoCredential cred = (UserInfoCredential) getCredential();
StaticUserAuthenticator auth = new StaticUserAuthenticator(getUri().getHost(), cred.getUsername(), cred.getPassword());
try {
DefaultFileSystemConfigBuilder.getInstance().setUserAuthenticator(fileSystemOptions, auth);
fileSystemManager = VFS.getManager();
s.success(this);
} catch (FileSystemException e) {
e.printStackTrace();
s.error(new AuthenticationRequired("Invalid credential"));
}
}
else {
try {
fileSystemManager = VFS.getManager();
s.success(this);
} catch (FileSystemException e) {
s.error(new AuthenticationRequired("userinfo"));
}catch (Exception e){
e.printStackTrace();
}
}
});
}
}
| apache-2.0 |
moosbusch/xbLIDO | src/net/opengis/gml/ObservationType.java | 9850 | /*
* Copyright 2013 Gunnar Kappei.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.opengis.gml;
/**
* An XML ObservationType(@http://www.opengis.net/gml).
*
* This is a complex type.
*/
public interface ObservationType extends net.opengis.gml.AbstractFeatureType
{
public static final org.apache.xmlbeans.SchemaType type = (org.apache.xmlbeans.SchemaType)
org.apache.xmlbeans.XmlBeans.typeSystemForClassLoader(ObservationType.class.getClassLoader(), "schemaorg_apache_xmlbeans.system.s6E28D279B6C224D74769DB8B98AF1665").resolveHandle("observationtypec7c7type");
/**
* Gets the "validTime" element
*/
net.opengis.gml.TimePrimitivePropertyType getValidTime();
/**
* Sets the "validTime" element
*/
void setValidTime(net.opengis.gml.TimePrimitivePropertyType validTime);
/**
* Appends and returns a new empty "validTime" element
*/
net.opengis.gml.TimePrimitivePropertyType addNewValidTime();
/**
* Gets the "using" element
*/
net.opengis.gml.FeaturePropertyType getUsing();
/**
* True if has "using" element
*/
boolean isSetUsing();
/**
* Sets the "using" element
*/
void setUsing(net.opengis.gml.FeaturePropertyType using);
/**
* Appends and returns a new empty "using" element
*/
net.opengis.gml.FeaturePropertyType addNewUsing();
/**
* Unsets the "using" element
*/
void unsetUsing();
/**
* Gets the "target" element
*/
net.opengis.gml.TargetPropertyType getTarget();
/**
* True if has "target" element
*/
boolean isSetTarget();
/**
* Sets the "target" element
*/
void setTarget(net.opengis.gml.TargetPropertyType target);
/**
* Appends and returns a new empty "target" element
*/
net.opengis.gml.TargetPropertyType addNewTarget();
/**
* Unsets the "target" element
*/
void unsetTarget();
/**
* Gets the "resultOf" element
*/
net.opengis.gml.AssociationType getResultOf();
/**
* Sets the "resultOf" element
*/
void setResultOf(net.opengis.gml.AssociationType resultOf);
/**
* Appends and returns a new empty "resultOf" element
*/
net.opengis.gml.AssociationType addNewResultOf();
/**
* A factory class with static methods for creating instances
* of this type.
*/
public static final class Factory
{
public static net.opengis.gml.ObservationType newInstance() {
return (net.opengis.gml.ObservationType) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newInstance( type, null ); }
public static net.opengis.gml.ObservationType newInstance(org.apache.xmlbeans.XmlOptions options) {
return (net.opengis.gml.ObservationType) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newInstance( type, options ); }
/** @param xmlAsString the string value to parse */
public static net.opengis.gml.ObservationType parse(java.lang.String xmlAsString) throws org.apache.xmlbeans.XmlException {
return (net.opengis.gml.ObservationType) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( xmlAsString, type, null ); }
public static net.opengis.gml.ObservationType parse(java.lang.String xmlAsString, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException {
return (net.opengis.gml.ObservationType) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( xmlAsString, type, options ); }
/** @param file the file from which to load an xml document */
public static net.opengis.gml.ObservationType parse(java.io.File file) throws org.apache.xmlbeans.XmlException, java.io.IOException {
return (net.opengis.gml.ObservationType) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( file, type, null ); }
public static net.opengis.gml.ObservationType parse(java.io.File file, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, java.io.IOException {
return (net.opengis.gml.ObservationType) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( file, type, options ); }
public static net.opengis.gml.ObservationType parse(java.net.URL u) throws org.apache.xmlbeans.XmlException, java.io.IOException {
return (net.opengis.gml.ObservationType) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( u, type, null ); }
public static net.opengis.gml.ObservationType parse(java.net.URL u, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, java.io.IOException {
return (net.opengis.gml.ObservationType) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( u, type, options ); }
public static net.opengis.gml.ObservationType parse(java.io.InputStream is) throws org.apache.xmlbeans.XmlException, java.io.IOException {
return (net.opengis.gml.ObservationType) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( is, type, null ); }
public static net.opengis.gml.ObservationType parse(java.io.InputStream is, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, java.io.IOException {
return (net.opengis.gml.ObservationType) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( is, type, options ); }
public static net.opengis.gml.ObservationType parse(java.io.Reader r) throws org.apache.xmlbeans.XmlException, java.io.IOException {
return (net.opengis.gml.ObservationType) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( r, type, null ); }
public static net.opengis.gml.ObservationType parse(java.io.Reader r, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, java.io.IOException {
return (net.opengis.gml.ObservationType) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( r, type, options ); }
public static net.opengis.gml.ObservationType parse(javax.xml.stream.XMLStreamReader sr) throws org.apache.xmlbeans.XmlException {
return (net.opengis.gml.ObservationType) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( sr, type, null ); }
public static net.opengis.gml.ObservationType parse(javax.xml.stream.XMLStreamReader sr, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException {
return (net.opengis.gml.ObservationType) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( sr, type, options ); }
public static net.opengis.gml.ObservationType parse(org.w3c.dom.Node node) throws org.apache.xmlbeans.XmlException {
return (net.opengis.gml.ObservationType) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( node, type, null ); }
public static net.opengis.gml.ObservationType parse(org.w3c.dom.Node node, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException {
return (net.opengis.gml.ObservationType) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( node, type, options ); }
/** @deprecated {@link org.apache.xmlbeans.xml.stream.XMLInputStream} */
@Deprecated
public static net.opengis.gml.ObservationType parse(org.apache.xmlbeans.xml.stream.XMLInputStream xis) throws org.apache.xmlbeans.XmlException, org.apache.xmlbeans.xml.stream.XMLStreamException {
return (net.opengis.gml.ObservationType) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( xis, type, null ); }
/** @deprecated {@link org.apache.xmlbeans.xml.stream.XMLInputStream} */
@Deprecated
public static net.opengis.gml.ObservationType parse(org.apache.xmlbeans.xml.stream.XMLInputStream xis, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, org.apache.xmlbeans.xml.stream.XMLStreamException {
return (net.opengis.gml.ObservationType) org.apache.xmlbeans.XmlBeans.getContextTypeLoader().parse( xis, type, options ); }
/** @deprecated {@link org.apache.xmlbeans.xml.stream.XMLInputStream} */
@Deprecated
public static org.apache.xmlbeans.xml.stream.XMLInputStream newValidatingXMLInputStream(org.apache.xmlbeans.xml.stream.XMLInputStream xis) throws org.apache.xmlbeans.XmlException, org.apache.xmlbeans.xml.stream.XMLStreamException {
return org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newValidatingXMLInputStream( xis, type, null ); }
/** @deprecated {@link org.apache.xmlbeans.xml.stream.XMLInputStream} */
@Deprecated
public static org.apache.xmlbeans.xml.stream.XMLInputStream newValidatingXMLInputStream(org.apache.xmlbeans.xml.stream.XMLInputStream xis, org.apache.xmlbeans.XmlOptions options) throws org.apache.xmlbeans.XmlException, org.apache.xmlbeans.xml.stream.XMLStreamException {
return org.apache.xmlbeans.XmlBeans.getContextTypeLoader().newValidatingXMLInputStream( xis, type, options ); }
private Factory() { } // No instance of this class allowed
}
}
| apache-2.0 |
jpallas/beakerx | js/notebook/test/src/tableDisplay/dataGrid/highlighter/ValuesHighlighter.spec.ts | 2344 | /*
* Copyright 2017 TWO SIGMA OPEN SOURCE, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { expect } from 'chai';
import ValueHighlighter from "@beakerx/tableDisplay/dataGrid/highlighter/ValueHighlighter";
import DataGridColumn from "@beakerx/tableDisplay/dataGrid/column/DataGridColumn";
import highlighterStateMock from "../mock/highlighterStateMock";
import { BeakerXDataGrid } from "@beakerx/tableDisplay/dataGrid/BeakerXDataGrid";
import modelStateMock from "../mock/modelStateMock";
import columnOptionsMock from "../mock/columnOptionsMock";
import cellConfigMock from "../mock/cellConfigMock";
import { HIGHLIGHTER_TYPE } from "@beakerx/tableDisplay/dataGrid/interface/IHighlighterState";
import createStore from "@beakerx/tableDisplay/dataGrid/store/BeakerXDataStore";
describe('ValueHighlighter', () => {
const dataStore = createStore(modelStateMock);
const dataGrid = new BeakerXDataGrid({}, dataStore);
const column = new DataGridColumn(
columnOptionsMock,
dataGrid,
dataGrid.columnManager
);
let valueHighlighter = new ValueHighlighter(
column,
{ ...highlighterStateMock, type: HIGHLIGHTER_TYPE.value }
);
it('should be an instance of highlighter', () => {
expect(valueHighlighter).to.be.an.instanceof(ValueHighlighter);
});
it('should have the getBackgroundColor method', () => {
expect(valueHighlighter).to.have.property('getBackgroundColor');
});
  it('should have the colors state property', () => {
expect(valueHighlighter.state).to.have.property('colors');
});
it('should return proper backgroud color', () => {
expect(valueHighlighter.getBackgroundColor(cellConfigMock))
.to.equal('#ff0000');
expect(valueHighlighter.getBackgroundColor({ ...cellConfigMock, row: 1 }))
.to.equal('#00ff00');
});
});
| apache-2.0 |
suited/suited.js | src/main/assets/js/plugins/modes/builtins/lecture/index.js | 5938 | /**
* @Author: Roberts Karl <Karl_Roberts>
* @Date: 2016-Aug-02
* @Project: suited
* @Last modified by: robertk
* @Last modified time: 2016-Aug-15
* @License: Copyright 2016 Karl Roberts <[email protected]> and Dirk van Rensburg <[email protected]>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import constants from '../../../../konstantes.js'
import utils from '../../../../utils.js'
import modeutils from '../../utils';
import Mode from '../../mode';
import zoom from '../../../../zoom'
let name = "lecture";
let body = window.document.body;
let autozoom = false;
function beforeSlide(slideId, state, evData) {
  if(zoom.zoomLevel() !== 1) {
    console.log("lecture before slide change, zoom out")
    zoom.out();
  }
}
function afterSlide(slideId, state, evData) {
if(!!autozoom && zoom.zoomLevel() == 1) {
//add a timout to allow for scroll and one to allow for unzoom first
var scrollDelay = (parseInt(constants.SCROLL_DELAY_DURATION) + parseInt(constants.SCROLL_DURATION));
var unzoomDelay = parseInt(constants.ZOOM_DURATION);
var elId = state.currentSlideName()
var target = document.getElementById(elId);
window.setTimeout(
function(){ console.log("about to zoom after delay"); zoom.to({ element: target, pan: false }); },
(scrollDelay + unzoomDelay)
);
}
}
function beforeModeChange() {
console.log("lectureMode beforeModeChange")
var slides = utils.selects("section[data-slide]");
for (var i = 0; i < slides.length; ++i) {
utils.classed(slides[i], "not-displayed", true);
}
zoom.setup();
}
function cleanUp() {
console.log("lectureMode teardown");
zoom.out();
zoom.teardown();
}
function toggleZoom(state, event){
if(zoom.zoomLevel() !== 1) {
console.log("zoom level = " + zoom.zoomLevel());
event.preventDefault();
zoom.out();
} else {
event.preventDefault();
//TODO need to look up state currentSlide() to pass as target
console.log("togglezoom zoomin level = " + zoom.zoomLevel())
var elId = state.currentSlideName()
var target = document.getElementById(elId);
console.log("togglezoom elId = " + elId + " target="+ target);
zoom.to({ element: target, pan: false });
}
}
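// If currently zoomed in, zoom out first and delay the callback by the zoom
// animation duration; otherwise invoke the callback immediately.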
function delayForZoom(callback, arg1){
if(zoom.zoomLevel() !== 1) {
zoom.out();
window.setTimeout(callback, constants.ZOOM_DURATION, arg1)
} else {
callback(arg1);
}
};
var transitions = [];
var scrollNZoom = { "name": "scrollNZoom" }
scrollNZoom.top = function(elId) { delayForZoom( constants.defaultTransitions.scroll.top, elId ) }
scrollNZoom.left = function(elId) { delayForZoom( constants.defaultTransitions.scroll.left, elId ) }
scrollNZoom.right = function(elId) { delayForZoom( constants.defaultTransitions.scroll.right, elId ) }
scrollNZoom.up = function(elId) { delayForZoom( constants.defaultTransitions.scroll.left, elId ) }
scrollNZoom.down = function(elId) { delayForZoom( constants.defaultTransitions.scroll.right, elId ) }
transitions.push( scrollNZoom );
transitions.push(constants.defaultTransitions.jump);
let mode = new Mode(name, beforeSlide, afterSlide, beforeModeChange, null, cleanUp, modeutils.getShouldShowSlideFunction(name), transitions);
mode.handlesEvent = function (eventName) {
return eventName === "ESC";
}
//Handle custon Events
mode.addCallback("ESC", function() {
console.log("lecture mode seen ESC event");
if(zoom.zoomLevel() !== 1) {
console.log("zoom level = " + zoom.zoomLevel());
zoom.out();
}
});
mode.addCallback("ENTER", function(state, event) {
console.log("lecture mode seen ENTER event. toggle zoom");
toggleZoom(state, event);
});
mode.addCallback("CLICK", function(state, event) {
console.log("+++++++++++++ lecture mode seen CLICK event: "+ JSON.stringify(event));
event.preventDefault();
zoom.to({ element: event.target, pan: false });
});
// shift z pressed - toggle autozoom
mode.addCallback("KEY_PRESSED_90", function(state, event) {
if (event.shiftKey) {
console.log("lecture mode seen Shift-z click event");
event.preventDefault();
autozoom = !autozoom;
}
});
//cycle transition
mode.addCallback("KEY_PRESSED_89", function(state, event) {
console.log("lecture mode seen 'y' click event");
event.preventDefault();
var currentT = mode.selectedTransition;
console.log(" currentT "+ currentT);
var nextI = 0;
if(!!currentT) {
transitions.forEach(function (d,i,a){
if(d.name === currentT) {
// console.log("lecture mode selectedTransition: found " + currentT + " at i="+ i);
nextI = ++i;
// console.log("nextI: " + nextI);
}
})
}
if(nextI >= transitions.length) {
// console.log(" nexti "+ nextI +" >= "+ transitions.length);
mode.selectedTransition = transitions[0].name
// console.log("lecture mode selectedTransition: " + mode.selectedTransition);
} else {
// console.log(" nexti "+ nextI +" ! >= "+ transitions.length);
mode.selectedTransition = transitions[nextI].name
// console.log("lecture mode selectedTransition: " + mode.selectedTransition);
}
});
mode.addCallback("ModeCSSFree", function(state, eventdata) {
/** Uncomment below for an example of mode specific style **/
// var modeCss = document.getElementById(eventdata.styleId)
// modeCss.innerHTML="h1, h2, h3, h4, h5 { color: red; }"
});
export default mode;
| apache-2.0 |
Xmaoyh/TC1.0 | app/src/main/java/com/example/tcwl_manage/models/enties/TabEntity.java | 696 | package com.example.tcwl_manage.models.enties;
import com.flyco.tablayout.listener.CustomTabEntity;
public class TabEntity implements CustomTabEntity {
public String title;
public int selectedIcon;
public int unSelectedIcon;
public TabEntity(String title, int selectedIcon, int unSelectedIcon) {
this.title = title;
this.selectedIcon = selectedIcon;
this.unSelectedIcon = unSelectedIcon;
}
@Override
public String getTabTitle() {
return title;
}
@Override
public int getTabSelectedIcon() {
return selectedIcon;
}
@Override
public int getTabUnselectedIcon() {
return unSelectedIcon;
}
}
| apache-2.0 |
lslluks/projetoteste | src/br/com/hdev/exceptions/DBCommitException.java | 870 | package br.com.hdev.exceptions;
/**
 * Class that extends Exception, responsible for handling database exceptions.
* @author HDEV
*
*/
public class DBCommitException extends Exception {
public DBCommitException() {
super();
// TODO Auto-generated constructor stub
}
public DBCommitException(String message, Throwable cause,
boolean enableSuppression, boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
// TODO Auto-generated constructor stub
}
public DBCommitException(String message, Throwable cause) {
super(message, cause);
// TODO Auto-generated constructor stub
}
public DBCommitException(String message) {
super(message);
// TODO Auto-generated constructor stub
}
public DBCommitException(Throwable cause) {
super(cause);
// TODO Auto-generated constructor stub
}
}
| apache-2.0 |
HuangLS/neo4j | community/server/src/test/java/org/neo4j/server/webadmin/rest/ConfigureEnabledManagementConsolesDocIT.java | 2449 | /*
* Copyright (c) 2002-2018 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.server.webadmin.rest;
import org.junit.After;
import org.junit.Test;
import org.neo4j.server.NeoServer;
import org.neo4j.server.configuration.Configurator;
import org.neo4j.server.rest.JaxRsResponse;
import org.neo4j.server.rest.RestRequest;
import org.neo4j.test.server.ExclusiveServerTestBase;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
import static org.neo4j.server.helpers.CommunityServerBuilder.server;
public class ConfigureEnabledManagementConsolesDocIT extends ExclusiveServerTestBase
{
private NeoServer server;
@After
public void stopTheServer()
{
server.stop();
}
@Test
public void shouldBeAbleToExplicitlySetConsolesToEnabled() throws Exception
{
server = server().withProperty( Configurator.MANAGEMENT_CONSOLE_ENGINES, "" )
.usingDatabaseDir( folder.directory( name.getMethodName() ).getAbsolutePath() )
.build();
server.start();
assertThat( exec( "ls", "shell" ).getStatus(), is( 400 ) );
}
@Test
public void shellConsoleShouldBeEnabledByDefault() throws Exception
{
server = server().usingDatabaseDir( folder.directory( name.getMethodName() ).getAbsolutePath() ).build();
server.start();
assertThat( exec( "ls", "shell" ).getStatus(), is( 200 ) );
}
private JaxRsResponse exec( String command, String engine )
{
return RestRequest.req().post( server.baseUri() + "db/manage/server/console", "{" +
"\"engine\":\"" + engine + "\"," +
"\"command\":\"" + command + "\\n\"}" );
}
}
| apache-2.0 |
jkulabuha/akulabuhov | chapter_001/src/main/java/ru/job4j/loop/package-info.java | 137 | /**
* Package for Counter.
*
* @author akulabuhov (mailto:[email protected])
* @version 1
* @since 30.07.2017
*/
package ru.job4j.loop; | apache-2.0 |
isper3at/incubator-rya | common/rya.api/src/test/java/mvm/rya/api/resolver/impl/DoubleRyaTypeResolverTest.java | 1560 | package mvm.rya.api.resolver.impl;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import junit.framework.TestCase;
import mvm.rya.api.domain.RyaType;
import org.openrdf.model.vocabulary.XMLSchema;
import java.util.Random;
/**
* Date: 7/20/12
* Time: 9:43 AM
*/
public class DoubleRyaTypeResolverTest extends TestCase {
public void testDoubleSerialization() throws Exception {
Double d = randomDouble();
RyaType ryaType = new RyaType(XMLSchema.DOUBLE, d.toString());
byte[] serialize = new DoubleRyaTypeResolver().serialize(ryaType);
assertEquals(d, Double.parseDouble(new DoubleRyaTypeResolver().deserialize(serialize).getData()));
}
private double randomDouble() {
return new Random(System.currentTimeMillis()).nextDouble();
}
}
| apache-2.0 |
UltraCart/rest_api_v2_sdk_csharp | src/com.ultracart.admin.v2/Model/PaymentsThemeTransactionType.cs | 7166 | /*
* UltraCart Rest API V2
*
* UltraCart REST API Version 2
*
* OpenAPI spec version: 2.0.0
* Contact: [email protected]
* Generated by: https://github.com/swagger-api/swagger-codegen.git
*/
using System;
using System.Linq;
using System.IO;
using System.Text;
using System.Text.RegularExpressions;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
using System.ComponentModel.DataAnnotations;
using SwaggerDateConverter = com.ultracart.admin.v2.Client.SwaggerDateConverter;
namespace com.ultracart.admin.v2.Model
{
/// <summary>
/// PaymentsThemeTransactionType
/// </summary>
[DataContract]
public partial class PaymentsThemeTransactionType : IEquatable<PaymentsThemeTransactionType>, IValidatableObject
{
/// <summary>
/// The credit card transaction type for this theme
/// </summary>
/// <value>The credit card transaction type for this theme</value>
[JsonConverter(typeof(StringEnumConverter))]
public enum CreditCardTransactionTypeEnum
{
/// <summary>
/// Enum Andcapture for value: auth and capture
/// </summary>
[EnumMember(Value = "auth and capture")]
Andcapture = 1,
/// <summary>
/// Enum Thencapture for value: auth then capture
/// </summary>
[EnumMember(Value = "auth then capture")]
Thencapture = 2,
/// <summary>
/// Enum Only for value: auth only
/// </summary>
[EnumMember(Value = "auth only")]
Only = 3
}
/// <summary>
/// The credit card transaction type for this theme
/// </summary>
/// <value>The credit card transaction type for this theme</value>
[DataMember(Name="credit_card_transaction_type", EmitDefaultValue=false)]
public CreditCardTransactionTypeEnum? CreditCardTransactionType { get; set; }
/// <summary>
/// Initializes a new instance of the <see cref="PaymentsThemeTransactionType" /> class.
/// </summary>
/// <param name="code">External human readable identifier for a theme.</param>
/// <param name="creditCardTransactionType">The credit card transaction type for this theme.</param>
/// <param name="screenBrandingThemeOid">Internal identifier for a theme.</param>
public PaymentsThemeTransactionType(string code = default(string), CreditCardTransactionTypeEnum? creditCardTransactionType = default(CreditCardTransactionTypeEnum?), int? screenBrandingThemeOid = default(int?))
{
this.Code = code;
this.CreditCardTransactionType = creditCardTransactionType;
this.ScreenBrandingThemeOid = screenBrandingThemeOid;
}
/// <summary>
/// External human readable identifier for a theme
/// </summary>
/// <value>External human readable identifier for a theme</value>
[DataMember(Name="code", EmitDefaultValue=false)]
public string Code { get; set; }
/// <summary>
/// Internal identifier for a theme
/// </summary>
/// <value>Internal identifier for a theme</value>
[DataMember(Name="screen_branding_theme_oid", EmitDefaultValue=false)]
public int? ScreenBrandingThemeOid { get; set; }
/// <summary>
/// Returns the string presentation of the object
/// </summary>
/// <returns>String presentation of the object</returns>
public override string ToString()
{
var sb = new StringBuilder();
sb.Append("class PaymentsThemeTransactionType {\n");
sb.Append(" Code: ").Append(Code).Append("\n");
sb.Append(" CreditCardTransactionType: ").Append(CreditCardTransactionType).Append("\n");
sb.Append(" ScreenBrandingThemeOid: ").Append(ScreenBrandingThemeOid).Append("\n");
sb.Append("}\n");
return sb.ToString();
}
/// <summary>
/// Returns the JSON string presentation of the object
/// </summary>
/// <returns>JSON string presentation of the object</returns>
public virtual string ToJson()
{
return JsonConvert.SerializeObject(this, Formatting.Indented);
}
/// <summary>
/// Returns true if objects are equal
/// </summary>
/// <param name="input">Object to be compared</param>
/// <returns>Boolean</returns>
public override bool Equals(object input)
{
return this.Equals(input as PaymentsThemeTransactionType);
}
/// <summary>
/// Returns true if PaymentsThemeTransactionType instances are equal
/// </summary>
/// <param name="input">Instance of PaymentsThemeTransactionType to be compared</param>
/// <returns>Boolean</returns>
public bool Equals(PaymentsThemeTransactionType input)
{
if (input == null)
return false;
return
(
this.Code == input.Code ||
(this.Code != null &&
this.Code.Equals(input.Code))
) &&
(
this.CreditCardTransactionType == input.CreditCardTransactionType ||
(this.CreditCardTransactionType != null &&
this.CreditCardTransactionType.Equals(input.CreditCardTransactionType))
) &&
(
this.ScreenBrandingThemeOid == input.ScreenBrandingThemeOid ||
(this.ScreenBrandingThemeOid != null &&
this.ScreenBrandingThemeOid.Equals(input.ScreenBrandingThemeOid))
);
}
/// <summary>
/// Gets the hash code
/// </summary>
/// <returns>Hash code</returns>
public override int GetHashCode()
{
unchecked // Overflow is fine, just wrap
{
int hashCode = 41;
if (this.Code != null)
hashCode = hashCode * 59 + this.Code.GetHashCode();
if (this.CreditCardTransactionType != null)
hashCode = hashCode * 59 + this.CreditCardTransactionType.GetHashCode();
if (this.ScreenBrandingThemeOid != null)
hashCode = hashCode * 59 + this.ScreenBrandingThemeOid.GetHashCode();
return hashCode;
}
}
/// <summary>
/// To validate all properties of the instance
/// </summary>
/// <param name="validationContext">Validation context</param>
/// <returns>Validation Result</returns>
IEnumerable<System.ComponentModel.DataAnnotations.ValidationResult> IValidatableObject.Validate(ValidationContext validationContext)
{
yield break;
}
}
}
| apache-2.0 |
zouzhberk/ambaridemo | demo-server/src/main/resources/common-services/HDFS/2.1.0.2.0/package/scripts/hdfs_datanode.py | 2877 | """
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
from resource_management import *
from resource_management.libraries.functions.dfs_datanode_helper import handle_dfs_data_dir
from utils import service
from ambari_commons.os_family_impl import OsFamilyImpl, OsFamilyFuncImpl
from ambari_commons import OSConst
def create_dirs(data_dir, params):
"""
:param data_dir: The directory to create
:param params: parameters
"""
Directory(data_dir,
recursive=True,
cd_access="a",
mode=0755,
owner=params.hdfs_user,
group=params.user_group,
ignore_failures=True
)
@OsFamilyFuncImpl(os_family=OsFamilyImpl.DEFAULT)
def datanode(action=None):
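  """
  Manage the DataNode: create data directories on "configure", start or stop
  the service, or check the process status.
  """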
if action == "configure":
import params
Directory(params.dfs_domain_socket_dir,
recursive=True,
mode=0751,
owner=params.hdfs_user,
group=params.user_group)
if not os.path.isdir(os.path.dirname(params.data_dir_mount_file)):
Directory(os.path.dirname(params.data_dir_mount_file),
recursive=True,
mode=0755,
owner=params.hdfs_user,
group=params.user_group)
data_dir_to_mount_file_content = handle_dfs_data_dir(create_dirs, params)
File(params.data_dir_mount_file,
owner=params.hdfs_user,
group=params.user_group,
mode=0644,
content=data_dir_to_mount_file_content
)
elif action == "start" or action == "stop":
import params
service(
action=action, name="datanode",
user=params.hdfs_user,
create_pid_dir=True,
create_log_dir=True
)
elif action == "status":
import status_params
check_process_status(status_params.datanode_pid_file)
@OsFamilyFuncImpl(os_family=OSConst.WINSRV_FAMILY)
def datanode(action=None):
if action == "configure":
pass
elif(action == "start" or action == "stop"):
import params
Service(params.datanode_win_service_name, action=action)
elif action == "status":
import status_params
check_windows_service_status(status_params.datanode_win_service_name) | apache-2.0 |
slamdata/slamengine | impl/src/test/scala/quasar/impl/datasources/middleware/ConditionReportingMiddlewareSpec.scala | 3157 | /*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.impl.datasources.middleware
import slamdata.Predef.{Boolean, List, None, Option, Unit}
import quasar.{Condition, ConditionMatchers, ScalarStages}
import quasar.api.datasource.DatasourceType
import quasar.api.resource._
import quasar.connector.Datasource
import quasar.impl.datasources.ManagedDatasource
import quasar.qscript.InterpretedRead
import java.lang.{Exception, IllegalArgumentException}
import scala.concurrent.ExecutionContext.Implicits.global
import cats.effect.IO
import cats.effect.concurrent.Ref
import eu.timepit.refined.auto._
import shims._
object ConditionReportingMiddlewareSpec extends quasar.EffectfulQSpec[IO] with ConditionMatchers {
type T[_[_]] = Unit
val thatsRoot = new IllegalArgumentException("THAT'S ROOT!")
object TestDs extends Datasource[IO, List, InterpretedRead[ResourcePath], Unit] {
val kind: DatasourceType = DatasourceType("tester", 7L)
def evaluate(query: InterpretedRead[ResourcePath]): IO[Unit] =
if (ResourcePath.root.nonEmpty(query.path))
IO.raiseError(thatsRoot)
else
IO.pure(())
def pathIsResource(path: ResourcePath): IO[Boolean] =
IO.pure(false)
def prefixedChildPaths(path: ResourcePath)
: IO[Option[List[(ResourceName, ResourcePathType)]]] =
IO.pure(None)
}
val managedTester = ManagedDatasource.lightweight[T](TestDs)
"initial condition is normal" >>* {
for {
r <- Ref[IO].of(List[Condition[Exception]]())
ds <- ConditionReportingMiddleware[IO, Unit]((_, c) => r.update(c :: _))((), managedTester)
cs <- r.get
} yield {
cs must_=== List(Condition.normal())
}
}
"operations that succeed emit normal" >>* {
for {
r <- Ref[IO].of(List[Condition[Exception]]())
ds <- ConditionReportingMiddleware[IO, Unit]((_, c) => r.update(c :: _))((), managedTester)
_ <- ds.pathIsResource(ResourcePath.root())
cs <- r.get
} yield {
cs must_=== List(Condition.normal(), Condition.normal())
}
}
"operations that throw emit abnormal" >>* {
for {
r <- Ref[IO].of(List[Condition[Exception]]())
ds <- ConditionReportingMiddleware[IO, Unit]((_, c) => r.update(c :: _))((), managedTester)
res = ds match {
case ManagedDatasource.ManagedLightweight(lw) => lw.evaluate(InterpretedRead(ResourcePath.root(), ScalarStages.Id))
case _ => IO.pure(())
}
_ <- res.attempt
cs <- r.get
} yield {
cs must_=== List(Condition.abnormal(thatsRoot), Condition.normal())
}
}
}
| apache-2.0 |
raviprakashgiri/Internship_Portal | src/com/iit/commons/Commons.java | 219 | /* Author: Priyanka & Ravi */
package com.iit.commons;
import java.sql.Date;
public class Commons {
public static Date stringToSqlDate(String date){
Date sql = java.sql.Date.valueOf(date);
return sql;
}
}
| apache-2.0 |
yintaoxue/read-open-source-code | kettle4.3/src/org/pentaho/di/trans/steps/sql/ExecSQLMeta.java | 16547 | /*******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.sql;
import java.util.List;
import java.util.Map;
import org.pentaho.di.core.CheckResult;
import org.pentaho.di.core.CheckResultInterface;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.Counter;
import org.pentaho.di.core.Result;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.database.Database;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleStepException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.shared.SharedObjectInterface;
import org.pentaho.di.trans.DatabaseImpact;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
import org.w3c.dom.Node;
/*******************************************************************************
 * Contains meta-data to execute arbitrary SQL, optionally once for each input row.
*
* Created on 10-sep-2005
*/
public class ExecSQLMeta extends BaseStepMeta implements StepMetaInterface
{
private static Class<?> PKG = ExecSQLMeta.class; // for i18n purposes, needed by Translator2!! $NON-NLS-1$
private DatabaseMeta databaseMeta;
private String sql;
private boolean executedEachInputRow;
private String[] arguments;
private String updateField;
private String insertField;
private String deleteField;
private String readField;
private boolean singleStatement;
private boolean replaceVariables;
public ExecSQLMeta()
{
super();
}
/**
* @return Returns the database.
*/
public DatabaseMeta getDatabaseMeta()
{
return databaseMeta;
}
/**
* @param database
* The database to set.
*/
public void setDatabaseMeta(DatabaseMeta database)
{
this.databaseMeta = database;
}
/**
* @return Returns the sql.
*/
public String getSql()
{
return sql;
}
/**
* @param sql
* The sql to set.
*/
public void setSql(String sql)
{
this.sql = sql;
}
/**
* @return Returns the arguments.
*/
public String[] getArguments()
{
return arguments;
}
/**
* @param arguments
* The arguments to set.
*/
public void setArguments(String[] arguments)
{
this.arguments = arguments;
}
/**
* @return Returns the executedEachInputRow.
*/
public boolean isExecutedEachInputRow()
{
return executedEachInputRow;
}
/**
* @param executedEachInputRow
* The executedEachInputRow to set.
*/
public void setExecutedEachInputRow(boolean executedEachInputRow)
{
this.executedEachInputRow = executedEachInputRow;
}
/**
* @return Returns the deleteField.
*/
public String getDeleteField()
{
return deleteField;
}
/**
* @param deleteField
* The deleteField to set.
*/
public void setDeleteField(String deleteField)
{
this.deleteField = deleteField;
}
/**
* @return Returns the insertField.
*/
public String getInsertField()
{
return insertField;
}
/**
* @param insertField
* The insertField to set.
*/
public void setInsertField(String insertField)
{
this.insertField = insertField;
}
/**
* @return Returns the readField.
*/
public String getReadField()
{
return readField;
}
/**
* @param readField
* The readField to set.
*/
public void setReadField(String readField)
{
this.readField = readField;
}
/**
* @return Returns the updateField.
*/
public String getUpdateField()
{
return updateField;
}
/**
* @param updateField
* The updateField to set.
*/
public void setUpdateField(String updateField)
{
this.updateField = updateField;
}
public void loadXML(Node stepnode, List<DatabaseMeta> databases, Map<String, Counter> counters) throws KettleXMLException
{
readData(stepnode, databases);
}
public Object clone()
{
ExecSQLMeta retval = (ExecSQLMeta) super.clone();
return retval;
}
public void allocate(int nrargs)
{
arguments = new String[nrargs];
}
private void readData(Node stepnode, List<? extends SharedObjectInterface> databases) throws KettleXMLException
{
try
{
String con = XMLHandler.getTagValue(stepnode, "connection"); //$NON-NLS-1$
databaseMeta = DatabaseMeta.findDatabase(databases, con);
String eachRow = XMLHandler.getTagValue(stepnode, "execute_each_row"); //$NON-NLS-1$
executedEachInputRow = "Y".equalsIgnoreCase(eachRow); //$NON-NLS-1$
singleStatement = "Y".equalsIgnoreCase(XMLHandler.getTagValue(stepnode, "single_statement")); //$NON-NLS-1$
replaceVariables = "Y".equals(XMLHandler.getTagValue(stepnode, "replace_variables"));
sql = XMLHandler.getTagValue(stepnode, "sql"); //$NON-NLS-1$
insertField = XMLHandler.getTagValue(stepnode, "insert_field"); //$NON-NLS-1$
updateField = XMLHandler.getTagValue(stepnode, "update_field"); //$NON-NLS-1$
deleteField = XMLHandler.getTagValue(stepnode, "delete_field"); //$NON-NLS-1$
readField = XMLHandler.getTagValue(stepnode, "read_field"); //$NON-NLS-1$
Node argsnode = XMLHandler.getSubNode(stepnode, "arguments"); //$NON-NLS-1$
int nrArguments = XMLHandler.countNodes(argsnode, "argument"); //$NON-NLS-1$
allocate(nrArguments);
for (int i = 0; i < nrArguments; i++)
{
Node argnode = XMLHandler.getSubNodeByNr(argsnode, "argument", i); //$NON-NLS-1$
arguments[i] = XMLHandler.getTagValue(argnode, "name"); //$NON-NLS-1$
}
} catch (Exception e)
{
throw new KettleXMLException(BaseMessages.getString(PKG, "ExecSQLMeta.Exception.UnableToLoadStepInfoFromXML"), e); //$NON-NLS-1$
}
}
public void setDefault()
{
databaseMeta = null;
sql = ""; //$NON-NLS-1$
arguments = new String[0];
}
public void getFields(RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space) throws KettleStepException
{
RowMetaAndData add = ExecSQL.getResultRow(new Result(), getUpdateField(), getInsertField(), getDeleteField(),
getReadField());
r.mergeRowMeta(add.getRowMeta());
}
public String getXML()
{
StringBuffer retval = new StringBuffer(300);
retval.append(" ").append(XMLHandler.addTagValue("connection", databaseMeta == null ? "" : databaseMeta.getName())); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
retval.append(" ").append(XMLHandler.addTagValue("execute_each_row", executedEachInputRow)); //$NON-NLS-1$ //$NON-NLS-2$
retval.append(" ").append(XMLHandler.addTagValue("single_statement", singleStatement)); //$NON-NLS-1$ //$NON-NLS-2$
retval.append(" ").append(XMLHandler.addTagValue("replace_variables", replaceVariables));
retval.append(" ").append(XMLHandler.addTagValue("sql", sql)); //$NON-NLS-1$ //$NON-NLS-2$
retval.append(" ").append(XMLHandler.addTagValue("insert_field", insertField)); //$NON-NLS-1$ //$NON-NLS-2$
retval.append(" ").append(XMLHandler.addTagValue("update_field", updateField)); //$NON-NLS-1$ //$NON-NLS-2$
retval.append(" ").append(XMLHandler.addTagValue("delete_field", deleteField)); //$NON-NLS-1$ //$NON-NLS-2$
retval.append(" ").append(XMLHandler.addTagValue("read_field", readField)); //$NON-NLS-1$ //$NON-NLS-2$
retval.append(" <arguments>").append(Const.CR); //$NON-NLS-1$
for (int i = 0; i < arguments.length; i++)
{
retval.append(" <argument>").append(XMLHandler.addTagValue("name", arguments[i], false)).append("</argument>").append(Const.CR); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
}
retval.append(" </arguments>").append(Const.CR); //$NON-NLS-1$
return retval.toString();
}
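	// Sketch of the XML fragment written by getXML() above; the field values are
	// hypothetical, only the tag names come from the addTagValue calls:
	//   <connection>my_db</connection>
	//   <execute_each_row>Y</execute_each_row>
	//   <single_statement>N</single_statement>
	//   <replace_variables>Y</replace_variables>
	//   <sql>DELETE FROM log_table WHERE id = ?</sql>
	//   <arguments>
	//     <argument><name>id</name></argument>
	//   </arguments>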
public void readRep(Repository rep, ObjectId id_step, List<DatabaseMeta> databases, Map<String, Counter> counters)
throws KettleException
{
try
{
databaseMeta = rep.loadDatabaseMetaFromStepAttribute(id_step, "id_connection", databases);
executedEachInputRow = rep.getStepAttributeBoolean(id_step, "execute_each_row"); //$NON-NLS-1$
singleStatement = rep.getStepAttributeBoolean(id_step, "single_statement"); //$NON-NLS-1$
replaceVariables = rep.getStepAttributeBoolean(id_step, "replace_variables"); //$NON-NLS-1$
sql = rep.getStepAttributeString(id_step, "sql"); //$NON-NLS-1$
insertField = rep.getStepAttributeString(id_step, "insert_field"); //$NON-NLS-1$
updateField = rep.getStepAttributeString(id_step, "update_field"); //$NON-NLS-1$
deleteField = rep.getStepAttributeString(id_step, "delete_field"); //$NON-NLS-1$
readField = rep.getStepAttributeString(id_step, "read_field"); //$NON-NLS-1$
int nrargs = rep.countNrStepAttributes(id_step, "arg_name"); //$NON-NLS-1$
allocate(nrargs);
for (int i = 0; i < nrargs; i++)
{
arguments[i] = rep.getStepAttributeString(id_step, i, "arg_name"); //$NON-NLS-1$
}
} catch (Exception e)
{
throw new KettleException(BaseMessages.getString(PKG, "ExecSQLMeta.Exception.UnexpectedErrorReadingStepInfo"), e); //$NON-NLS-1$
}
}
public void saveRep(Repository rep, ObjectId id_transformation, ObjectId id_step) throws KettleException
{
try
{
rep.saveDatabaseMetaStepAttribute(id_transformation, id_step, "id_connection", databaseMeta);
rep.saveStepAttribute(id_transformation, id_step, "sql", sql); //$NON-NLS-1$
rep.saveStepAttribute(id_transformation, id_step, "execute_each_row", executedEachInputRow); //$NON-NLS-1$
rep.saveStepAttribute(id_transformation, id_step, "single_statement", singleStatement); //$NON-NLS-1$
rep.saveStepAttribute(id_transformation, id_step, "replace_variables", replaceVariables); //$NON-NLS-1$
rep.saveStepAttribute(id_transformation, id_step, "insert_field", insertField); //$NON-NLS-1$
rep.saveStepAttribute(id_transformation, id_step, "update_field", updateField); //$NON-NLS-1$
rep.saveStepAttribute(id_transformation, id_step, "delete_field", deleteField); //$NON-NLS-1$
rep.saveStepAttribute(id_transformation, id_step, "read_field", readField); //$NON-NLS-1$
// Also, save the step-database relationship!
if (databaseMeta != null)
rep.insertStepDatabase(id_transformation, id_step, databaseMeta.getObjectId());
for (int i = 0; i < arguments.length; i++)
{
rep.saveStepAttribute(id_transformation, id_step, i, "arg_name", arguments[i]); //$NON-NLS-1$
}
} catch (Exception e)
{
throw new KettleException(
BaseMessages.getString(PKG, "ExecSQLMeta.Exception.UnableToSaveStepInfo") + id_step, e); //$NON-NLS-1$
}
}
public void check(List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String input[], String output[], RowMetaInterface info)
{
CheckResult cr;
if (databaseMeta != null)
{
cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(PKG, "ExecSQLMeta.CheckResult.ConnectionExists"), stepMeta); //$NON-NLS-1$
remarks.add(cr);
Database db = new Database(loggingObject, databaseMeta);
db.shareVariablesWith(transMeta);
databases = new Database[] { db }; // keep track of it for
// cancelling purposes...
try
{
db.connect();
cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(PKG, "ExecSQLMeta.CheckResult.DBConnectionOK"), stepMeta); //$NON-NLS-1$
remarks.add(cr);
if (sql != null && sql.length() != 0)
{
cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(PKG, "ExecSQLMeta.CheckResult.SQLStatementEntered"), stepMeta); //$NON-NLS-1$
remarks.add(cr);
} else
{
cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "ExecSQLMeta.CheckResult.SQLStatementMissing"), stepMeta); //$NON-NLS-1$
remarks.add(cr);
}
} catch (KettleException e)
{
cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "ExecSQLMeta.CheckResult.ErrorOccurred") + e.getMessage(), stepMeta); //$NON-NLS-1$
remarks.add(cr);
} finally
{
db.disconnect();
}
} else
{
cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "ExecSQLMeta.CheckResult.ConnectionNeeded"), stepMeta); //$NON-NLS-1$
remarks.add(cr);
}
// If it's executed each row, make sure we have input
if (executedEachInputRow)
{
if (input.length > 0)
{
cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(PKG, "ExecSQLMeta.CheckResult.StepReceivingInfoOK"), stepMeta); //$NON-NLS-1$
remarks.add(cr);
} else
{
cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "ExecSQLMeta.CheckResult.NoInputReceivedError"), stepMeta); //$NON-NLS-1$
remarks.add(cr);
}
} else
{
if (input.length > 0)
{
cr = new CheckResult(CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString(PKG, "ExecSQLMeta.CheckResult.SQLOnlyExecutedOnce"), stepMeta); //$NON-NLS-1$
remarks.add(cr);
} else
{
cr = new CheckResult(CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString(PKG, "ExecSQLMeta.CheckResult.InputReceivedOKForSQLOnlyExecuteOnce"), stepMeta); //$NON-NLS-1$
remarks.add(cr);
}
}
}
public StepInterface getStep(StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr,
TransMeta transMeta, Trans trans)
{
return new ExecSQL(stepMeta, stepDataInterface, cnr, transMeta, trans);
}
public StepDataInterface getStepData()
{
return new ExecSQLData();
}
public void analyseImpact(List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepMeta, RowMeta prev,
String input[], String output[], RowMeta info) throws KettleStepException
{
DatabaseImpact ii = new DatabaseImpact(DatabaseImpact.TYPE_IMPACT_READ_WRITE, transMeta.getName(),
stepMeta.getName(), databaseMeta.getDatabaseName(),
BaseMessages.getString(PKG, "ExecSQLMeta.DatabaseMeta.Unknown.Label"), //$NON-NLS-1$
BaseMessages.getString(PKG, "ExecSQLMeta.DatabaseMeta.Unknown2.Label"), //$NON-NLS-1$
BaseMessages.getString(PKG, "ExecSQLMeta.DatabaseMeta.Unknown3.Label"), //$NON-NLS-1$
stepMeta.getName(), sql, BaseMessages.getString(PKG, "ExecSQLMeta.DatabaseMeta.Title") //$NON-NLS-1$
);
impact.add(ii);
}
public DatabaseMeta[] getUsedDatabaseConnections()
{
if (databaseMeta != null)
{
return new DatabaseMeta[] { databaseMeta };
} else
{
return super.getUsedDatabaseConnections();
}
}
/**
* @return Returns the variableReplacementActive.
*/
public boolean isReplaceVariables()
{
return replaceVariables;
}
/**
* @param variableReplacementActive The variableReplacementActive to set.
*/
public void setVariableReplacementActive(boolean variableReplacementActive)
{
this.replaceVariables = variableReplacementActive;
}
public boolean supportsErrorHandling()
{
return true;
}
/**
* @return the singleStatement
*/
public boolean isSingleStatement() {
return singleStatement;
}
/**
* @param singleStatement the singleStatement to set
*/
public void setSingleStatement(boolean singleStatement) {
this.singleStatement = singleStatement;
}
}
| apache-2.0 |
jterry75/Docker.DotNet | src/Docker.DotNet/Models/PluginConfig.Generated.cs | 1738 | using System.Collections.Generic;
using System.Runtime.Serialization;
namespace Docker.DotNet.Models
{
[DataContract]
public class PluginConfig // (types.PluginConfig)
{
[DataMember(Name = "Args", EmitDefaultValue = false)]
public PluginConfigArgs Args { get; set; }
[DataMember(Name = "Description", EmitDefaultValue = false)]
public string Description { get; set; }
[DataMember(Name = "Documentation", EmitDefaultValue = false)]
public string Documentation { get; set; }
[DataMember(Name = "Entrypoint", EmitDefaultValue = false)]
public IList<string> Entrypoint { get; set; }
[DataMember(Name = "Env", EmitDefaultValue = false)]
public IList<PluginEnv> Env { get; set; }
[DataMember(Name = "Interface", EmitDefaultValue = false)]
public PluginConfigInterface Interface { get; set; }
[DataMember(Name = "Linux", EmitDefaultValue = false)]
public PluginConfigLinux Linux { get; set; }
[DataMember(Name = "Mounts", EmitDefaultValue = false)]
public IList<PluginMount> Mounts { get; set; }
[DataMember(Name = "Network", EmitDefaultValue = false)]
public PluginConfigNetwork Network { get; set; }
[DataMember(Name = "PropagatedMount", EmitDefaultValue = false)]
public string PropagatedMount { get; set; }
[DataMember(Name = "User", EmitDefaultValue = false)]
public PluginConfigUser User { get; set; }
[DataMember(Name = "WorkDir", EmitDefaultValue = false)]
public string WorkDir { get; set; }
[DataMember(Name = "rootfs", EmitDefaultValue = false)]
public PluginConfigRootfs Rootfs { get; set; }
}
}
| apache-2.0 |
corestoreio/csfw | net/geoip/backendgeoip/doc.go | 747 | // Copyright 2015-present, Cyrill @ Schumacher.fm and the CoreStore contributors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package backendgeoip defines the backend configuration options and element slices.
package backendgeoip
| apache-2.0 |
cexbrayat/camel | camel-core/src/main/java/org/apache/camel/management/mbean/ManagedConsumer.java | 1774 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.management.mbean;
import org.apache.camel.CamelContext;
import org.apache.camel.Consumer;
import org.apache.camel.api.management.ManagedAttribute;
import org.apache.camel.api.management.ManagedResource;
/**
* @version
*/
@ManagedResource(description = "Managed Consumer")
public class ManagedConsumer extends ManagedService {
private final Consumer consumer;
public ManagedConsumer(CamelContext context, Consumer consumer) {
super(context, consumer);
this.consumer = consumer;
}
public Consumer getConsumer() {
return consumer;
}
@ManagedAttribute(description = "Endpoint Uri")
public String getEndpointUri() {
return consumer.getEndpoint().getEndpointUri();
}
@ManagedAttribute(description = "Current number of inflight Exchanges")
public Integer getInflightExchanges() {
return getContext().getInflightRepository().size(consumer.getEndpoint());
}
}
| apache-2.0 |
dbflute-test/dbflute-test-dbms-oracle | src/main/java/org/docksidestage/oracle/dbflute/bsbhv/loader/LoaderOfSummaryMemberPurchase.java | 2487 | package org.docksidestage.oracle.dbflute.bsbhv.loader;
import java.util.List;
import org.dbflute.bhv.*;
import org.docksidestage.oracle.dbflute.exbhv.*;
import org.docksidestage.oracle.dbflute.exentity.*;
/**
 * The referrer loader of (member purchase summary materialized view) SUMMARY_MEMBER_PURCHASE as TABLE. <br>
* <pre>
* [primary key]
*
*
* [column]
* MEMBER_ID, ALLSUM_PURCHASE_PRICE, LATEST_PURCHASE_DATETIME
*
* [sequence]
*
*
* [identity]
*
*
* [version-no]
*
*
* [foreign table]
*
*
* [referrer table]
*
*
* [foreign property]
*
*
* [referrer property]
*
* </pre>
* @author oracleman
*/
public class LoaderOfSummaryMemberPurchase {
// ===================================================================================
// Attribute
// =========
protected List<SummaryMemberPurchase> _selectedList;
protected BehaviorSelector _selector;
protected SummaryMemberPurchaseBhv _myBhv; // lazy-loaded
// ===================================================================================
// Ready for Loading
// =================
public LoaderOfSummaryMemberPurchase ready(List<SummaryMemberPurchase> selectedList, BehaviorSelector selector)
{ _selectedList = selectedList; _selector = selector; return this; }
protected SummaryMemberPurchaseBhv myBhv()
{ if (_myBhv != null) { return _myBhv; } else { _myBhv = _selector.select(SummaryMemberPurchaseBhv.class); return _myBhv; } }
// ===================================================================================
// Pull out Foreign
// ================
// ===================================================================================
// Accessor
// ========
public List<SummaryMemberPurchase> getSelectedList() { return _selectedList; }
public BehaviorSelector getSelector() { return _selector; }
}
| apache-2.0 |
TomRoush/PdfBox-Android | library/src/main/java/com/tom_roush/pdfbox/rendering/Type1Glyph2D.java | 2897 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.tom_roush.pdfbox.rendering;
import android.graphics.Path;
import android.util.Log;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import com.tom_roush.pdfbox.pdmodel.font.PDSimpleFont;
/**
* Glyph to Path conversion for Type 1 PFB and CFF, and TrueType fonts with a 'post' table.
*/
final class Type1Glyph2D implements Glyph2D
{
private final Map<Integer, Path> cache = new HashMap<Integer, Path>();
private final PDSimpleFont font;
/**
* Constructor.
*
* @param font PDF Type1 font.
*/
Type1Glyph2D(PDSimpleFont font)
{
this.font = font;
}
@Override
public Path getPathForCharacterCode(int code)
{
// cache
Path path = cache.get(code);
if (path == null)
{
// fetch
try
{
String name = font.getEncoding().getName(code);
if (!font.hasGlyph(name))
{
Log.w("PdfBox-Android", "No glyph for code " + code + " (" + name + ") in font " + font.getName());
if (code == 10 && font.isStandard14())
{
// PDFBOX-4001 return empty path for line feed on std14
path = new Path();
cache.put(code, path);
return path;
}
}
// todo: can this happen? should it be encapsulated?
path = font.getPath(name);
if (path == null)
{
path = font.getPath(".notdef");
}
// cache.put(code, path); TODO: PdfBox-Android
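                // Note: apart from the std14 line-feed case above, nothing is
                // added to `cache` while this cache.put stays commented out, so
                // repeated lookups for the same code re-run font.getPath(...).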
return path;
}
catch (IOException e)
{
// todo: escalate this error?
Log.e("PdfBox-Android", "Glyph rendering failed", e);
path = new Path();
}
}
return path;
}
@Override
public void dispose()
{
cache.clear();
}
}
| apache-2.0 |
khadgarmage/khadgarmage.github.io | site_config/site.js | 5888 | // 全局的一些配置
export default {
rootPath: '/web_site', // 发布到服务器的根目录,需以/开头但不能有尾/,如果只有/,请填写空字符串
port: 8080, // 本地开发服务器的启动端口
domain: 'khadgarmage.github.io', // 站点部署域名,无需协议和path等
defaultSearch: 'google', // 默认搜索引擎,baidu或者google
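  // Illustrative note (assumption, not part of the original config): with the
  // rootPath and domain above, a built page such as /en-us/index.html would be
  // served from https://khadgarmage.github.io/web_site/en-us/index.html.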
defaultLanguage: 'en-us',
'en-us': {
pageMenu: [
{
        key: 'home', // used to mark the selected item in the top menu
text: 'HOME',
link: '/en-us/index.html',
},
{
key: 'docs',
text: 'DOCS',
link: '/en-us/docs/user_doc/quick-start.html',
},
// {
// key: 'download',
// text: 'DOWNLOAD',
// link: 'https://github.com/apache/incubator-dolphinscheduler/releases',
// target: '_blank',
// },
{
key: 'blog',
text: 'BLOG',
link: '/en-us/blog/index.html',
},
{
key: 'development',
text: 'DEVELOPMENT',
link: '/en-us/docs/development/developers.html',
},
{
key: 'community',
text: 'COMMUNITY',
link: '/en-us/community/index.html',
}
],
disclaimer: {
title: 'Disclaimer',
content: 'Apache DolphinScheduler (incubating) is an effort undergoing incubation at The Apache Software Foundation (ASF), sponsored by Incubator. \n' +
'Incubation is required of all newly accepted projects until a further review indicates \n' +
'that the infrastructure, communications, and decision making process have stabilized in a manner consistent with other successful ASF projects. \n' +
'While incubation status is not necessarily a reflection of the completeness or stability of the code, \n' +
'it does indicate that the project has yet to be fully endorsed by the ASF.',
},
documentation: {
title: 'Documentation',
list: [
{
text: 'Overview',
link: '/en-us/docs/developer_guide/architecture-design.html',
},
{
text: 'Quick start',
link: '/en-us/docs/user_doc/quick-start.html',
},
{
text: 'Developer guide',
link: '/en-us/docs/development/developers.html',
},
],
},
asf: {
title: 'ASF',
list: [
{
text: 'Foundation',
link: 'http://www.apache.org',
},
{
text: 'License',
link: 'http://www.apache.org/licenses/',
},
{
text: 'Events',
link: 'http://www.apache.org/events/current-event',
},
{
text: 'Sponsorship',
link: 'http://www.apache.org/foundation/sponsorship.html',
},
{
text: 'Thanks',
link: 'http://www.apache.org/foundation/thanks.html',
},
],
},
copyright: 'Copyright © 2018-2019 The Apache Software Foundation. Apache DolphinScheduler, DolphinScheduler, and its feather logo are trademarks of The Apache Software Foundation.',
},
'zh-cn': {
pageMenu: [
{
key: 'home',
text: '首页',
link: '/zh-cn/index.html',
},
{
key: 'docs',
text: '文档',
link: '/zh-cn/docs/user_doc/quick-start.html',
},
// {
// key: 'download',
// text: '下载',
// link: 'https://github.com/apache/incubator-dolphinscheduler/releases',
// target: '_blank',
// },
{
key: 'blog',
text: '博客',
link: '/zh-cn/blog/index.html',
},
{
key: 'development',
text: '开发者',
link: '/zh-cn/docs/development/developers.html',
},
{
key: 'community',
text: '社区',
link: '/zh-cn/community/index.html',
      }
],
disclaimer: {
title: 'Disclaimer',
content: 'Apache DolphinScheduler (incubating) is an effort undergoing incubation at The Apache Software Foundation (ASF), sponsored by Incubator. \n' +
'Incubation is required of all newly accepted projects until a further review indicates \n' +
'that the infrastructure, communications, and decision making process have stabilized in a manner consistent with other successful ASF projects. \n' +
'While incubation status is not necessarily a reflection of the completeness or stability of the code, \n' +
'it does indicate that the project has yet to be fully endorsed by the ASF.',
},
documentation: {
title: '文档',
list: [
{
text: '概览',
link: '/zh-cn/docs/developer_guide/architecture-design.html',
},
{
text: '快速开始',
link: '/zh-cn/docs/user_doc/quick-start.html',
},
{
text: '开发者指南',
link: '/zh-cn/docs/development/developers.html',
},
],
},
asf: {
title: 'ASF',
list: [
{
text: '基金会',
link: 'http://www.apache.org',
},
{
text: '证书',
link: 'http://www.apache.org/licenses/',
},
{
text: '事件',
link: 'http://www.apache.org/events/current-event',
},
{
text: '赞助',
link: 'http://www.apache.org/foundation/sponsorship.html',
},
{
text: '致谢',
link: 'http://www.apache.org/foundation/thanks.html',
},
],
},
copyright: 'Copyright © 2018-2019 The Apache Software Foundation. Apache DolphinScheduler, DolphinScheduler, and its feather logo are trademarks of The Apache Software Foundation.',
},
};
| apache-2.0 |
tokings/loganalysis | loganalysis.webapp-0.1.1/src/main/webapp/js/page/history/txnplte.js | 9436 | /**
* @author tdz
 * @date 2017-03-20 16:42:32
*
*/
var db = CONSTANTS.stat.indice || 'stat_info';
var table = CONSTANTS.stat.table || 'STAT_DATA';
var pageNo = CONSTANTS.pageNo || 1;
var pageSize = CONSTANTS.pageSize || 15;
var queryRecentDataMinutes = CONSTANTS.queryRecentDataMinutes || 10;
$(document).ready(function() {
var now = new Date();
$('#endTime').val(parseDateTimeMillisWithFMT(now.getTime(), "yyyy-MM-dd HH:mm:ss"));
now.setTime(now.getTime() - (queryRecentDataMinutes * 60 * 1000));
$('#startTime').val(parseDateTimeMillisWithFMT(now.getTime(), "yyyy-MM-dd HH:mm:ss"));
$('.query-tab').hide();
initEvent();
doChartDataSearch();
});
function initEvent() {
$('#showChartTab').click(function() {
$('.chart-tab').show();
$('.query-tab').hide();
doChartDataSearch();
});
$('#showQueryTab').click(function() {
$('.chart-tab').hide();
$('.query-tab').show();
doQueryDataSearch();
});
$('.input-daterange input').datetimepicker({
language: "zh-CN",
format: 'yyyy-mm-dd hh:ii:ss',
todayBtn: true,
// clearBtn: true,
minuteStep: 5,
keyboardNavigation: false,
forceParse: false,
autoclose: true
});
$('body').on('click', '.quick-select', function() {
$(this).addClass('btn-warning').removeClass('btn-link')
.siblings('.btn-warning').removeClass('btn-warning').addClass('btn-link');
var minutes = $(this).attr('val');
var now = new Date();
$('#endTime').val(parseDateTimeMillisWithFMT(now.getTime(), "yyyy-MM-dd HH:mm:ss"));
now.setTime(now.getTime() - (Number(minutes)) * 60 * 1000);
$('#startTime').val(parseDateTimeMillisWithFMT(now.getTime(), "yyyy-MM-dd HH:mm:ss"));
pageNo = 1;
doQueryDataSearch();
});
$('body').on('click', '.sort', function() {
var sort = $(this).attr('sort');
var role = $(this).attr('role');
$('#sortField').val(role);
var ascCls = 'glyphicon-sort-by-alphabet';
var descCls = 'glyphicon-sort-by-alphabet-alt';
switch (sort) {
case 'asc':
$('.sort').removeClass(descCls).removeClass('text-warning font-bold').addClass(ascCls);
$(this).removeClass(ascCls).addClass(descCls).addClass('text-warning font-bold').attr({'sort':'desc','title':'降序'});
$('#sort').val('desc');
break;
case 'desc':
$('.sort').removeClass(descCls).removeClass('text-warning font-bold').addClass(ascCls);
$(this).removeClass(descCls).addClass(ascCls).addClass('text-warning font-bold').attr({'sort':'asc','title':'升序'});
$('#sort').val('asc');
break;
default:
break;
}
doQueryDataSearch();
});
$('body').on('keypress', '.form-group-sm input', function(event) {
if(event.keyCode == 13 || event.which == 13) {
pageNo = 1;
doQueryDataSearch();
}
});
$('body').on('change', '.form-group-sm select', function(event) {
pageNo = 1;
doQueryDataSearch();
});
$('#query').click(function() {
pageNo = 1;
$('.quick-select').removeClass('btn-warning').addClass('btn-link');
doQueryDataSearch();
});
$('#saveAsSample').click(function() {
saveAsSampleData();
});
$('.next-page').click(function() {
var totalNum = $('.total-size').text();
if(pageNo * pageSize > Number(totalNum)) {
warning('已经到了最后一页,请缩小范围查询!', '提示' ,true);
return false;
}
pageNo = pageNo + 1;
doQueryDataSearch();
});
$('.pre-page').click(function() {
if(pageNo <= 1) {
warning('已经到了第一页!', '提示' ,true);
return false;
}
pageNo = pageNo - 1;
doQueryDataSearch();
});
}
function doChartDataSearch() {
UI.remoteCall('/loganalysis/txnplteLog/historyStatData.do', {}, 'get', 'json', function(data) {
UI.debug(data);
if(!data) {
warning('没有查询到数据!','提示' ,true);
return;
}
initDayBasedReqChart(data.TXNPLTE_REQ_CNT_DAY);
$('#historyAvgDuration').html(tmpl('historyAvgDurationTmpl', data.TXNPLTE_DURATION_AVG_TOP10_BY_TXNCOD.reverse()));
$('#historyTxnCodCount').html(tmpl('historyTxnCodCountTmpl', data.TXNPLTE_TOTAL_TOP10_BY_TXNCOD.reverse()));
$('#historyTxnCodErrorRatio').html(tmpl('historyTxnCodErrorRatioTmpl', data.TXNPLTE_ERROR_RATIO_TOP10_BY_TXNCOD.reverse()));
});
}
function initDayBasedReqChart(reqCnt) {
if (reqCnt) {
var totalCntxAxis = [];
var legendData = [ '日交易量' ];
var totalCntData = [];
var errorRatioData = [];
var series = [ {
name : '日交易量',
type : 'line',
barWidth : '40%',
itemStyle : {
normal : {
// color : 'red'
}
},
areaStyle : {
normal : {
opacity : 0.8,
shadowColor : 'rgb(128, 128, 128)',
shadowBlur : 10,
color : new echarts.graphic.LinearGradient(0, 0, 0, 1, [ {
offset : 0,
						color : '#f8ac59' // color at the 100% stop
}, {
offset : 1,
						color : '#fff' // color at the 0% stop
} ], false)
}
},
symbol : 'circle',
symbolSize : 4,
showSymbol : true,
showAllSymbol : false,
data : totalCntData
} ];
for ( var i in reqCnt) {
totalCntxAxis.push(i);
totalCntData.push({
value : reqCnt[i],
extData : {
timePoint: i,
value: reqCnt[i]
}
});
}
var reqCntOption = {
title : {
show : false,
left : 'center',
top : 'bottom',
text : ''
},
grid : {
top : 30,
right : 30,
bottom : 20
},
legend : {
show : false,
left : 'right',
top : 30,
data : legendData
},
color : [ '#f8ac59', '#1ab394' ],
tooltip : {
trigger : 'axis',
				axisPointer : { // axis pointer indicator; takes effect when triggered by the axis
					type : 'shadow' // defaults to a straight line; options are 'line' | 'shadow'
}
},
xAxis : [ {
boundaryGap : false,
type : 'category',
data : totalCntxAxis,
axisTick : {
alignWithLabel : true
}
} ],
yAxis : [ {
type : 'value',
name : '交易量(条)',
axisLabel : {
formatter : '{value}'
}
} ],
dataZoom : [ {
type : 'inside'
} ],
toolbox : {
right : 80,
top : 0,
feature : {
dataView : {
show : true,
readOnly : false
},
magicType : {
show : true,
type : [ 'line', 'bar' ]
},
restore : {
show : true
},
saveAsImage : {
show : true
}
}
},
series : series
};
echarts.init($('#daybased-dashboard-chart')[0]).setOption(reqCntOption);
}
}
function doQueryDataSearch() {
var params = {
db: db,
table: table,
pageNo : pageNo,
pageSize : pageSize,
nodeId : $('#nodeId').val().toLowerCase(),
regionId : $('#regionId').val().toLowerCase(),
txnCod : $('#txnCod').val().toLowerCase(),
msgCd : $('#msgCd').val().toLowerCase(),
statType : $('#statType').val(),
startTime : $('#startTime').val(),
endTime : $('#endTime').val(),
sortField : $('#sortField').val(),
sort : $('#sort').val()
};
UI.remoteCall('/loganalysis/txnplteLog/historyQueryData.do', params, 'get', 'json', function(data) {
UI.debug(data);
if(!data || !data.records || data.records.length < 1) {
warning('没有查询到数据!','提示' ,true);
return;
}
$('#data-body').html(tmpl('data-tmpl', data.records));
$('.total-size').text(data.count);
$('.use-time').text(data.usetime);
}, function(XMLHttpRequest, textStatus, errorThrown) {
error('加载失败!' + 'textStatus:' + textStatus + ',error:' + errorThrown.toString(), '加载数据' ,true);
}, false);
}
function saveAsSampleData() {
var params = {
db: db,
table: table,
pageNo : 1,
pageSize : 10000,
nodeId : $('#nodeId').val().toLowerCase(),
regionId : $('#regionId').val().toLowerCase(),
txnCod : $('#txnCod').val().toLowerCase(),
msgCd : $('#msgCd').val().toLowerCase(),
statType : $('#statType').val(),
startTime : $('#startTime').val(),
endTime : $('#endTime').val(),
sortField : $('#sortField').val(),
sort : $('#sort').val()
};
if(params.pageSize > 10000) {
error('参数错误', '参照样本数据集大小不能超过10000万!', true);
return false;
}
if(params.statType != 1000 && params.statType != 1001) {
error('参数错误', '参照样本数据集【统计维度】只能为Region或者Node维度!', true);
return false;
}
UI.remoteCall('/loganalysis/txnplteLog/saveAsSampleData.do', params, 'get', 'json', function(data) {
UI.debug(data);
if(data) {
switch (Number(data.result)) {
case -1:
error('设置参照样本数据集', data.msg, true);
break;
case 0:
info('设置参照样本数据集', data.msg, true);
break;
default:
warning('设置参照样本数据集', data.msg, true);
break;
}
}
}, function(XMLHttpRequest, textStatus, errorThrown) {
error('加载失败!' + 'textStatus:' + textStatus + ',error:' + errorThrown.toString(), '加载数据' ,true);
}, false);
}
function parseStatType(type) {
var ret = '';
switch (Number(type)) {
case 1000:
ret = 'Region';
break;
case 1001:
ret = 'RegionNode';
break;
case 1002:
ret = 'RegNodTxnCod';
break;
case 1003:
ret = 'RegNodTxnCdMmsgCd';
break;
case 1004:
ret = 'Node';
break;
case 1005:
ret = 'TxnCod';
break;
default:
ret = '未知';
break;
}
return ret;
}
function parseRankCls(rank) {
var ret = '';
switch (Number(rank)) {
case 0:
ret = 'label label-warning';
break;
case 1:
ret = 'label label-warning';
break;
case 2:
ret = 'label label-warning';
break;
}
return ret;
}
| apache-2.0 |
nandakishorm/batatebeta | src/test/java/com/kishor/batatebeta/BatatebetaApplicationTests.java | 513 | package com.kishor.batatebeta;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.test.context.web.WebAppConfiguration;
import org.springframework.boot.test.SpringApplicationConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
@RunWith(SpringJUnit4ClassRunner.class)
@SpringApplicationConfiguration(classes = BatatebetaApplication.class)
@WebAppConfiguration
public class BatatebetaApplicationTests {
@Test
public void contextLoads() {
}
}
| apache-2.0 |
tempbottle/copycat | server/src/main/java/net/kuujo/copycat/raft/protocol/request/LeaveRequest.java | 4096 | /*
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.kuujo.copycat.raft.protocol.request;
import net.kuujo.copycat.util.BuilderPool;
import net.kuujo.copycat.raft.Member;
import net.kuujo.copycat.io.BufferInput;
import net.kuujo.copycat.io.BufferOutput;
import net.kuujo.copycat.io.serializer.SerializeWith;
import net.kuujo.copycat.io.serializer.Serializer;
import net.kuujo.copycat.util.ReferenceManager;
import java.util.Objects;
/**
* Protocol leave request.
*
* @author <a href="http://github.com/kuujo">Jordan Halterman</a>
*/
@SerializeWith(id=264)
public class LeaveRequest extends AbstractRequest<LeaveRequest> {
/**
* The unique identifier for the leave request type.
*/
public static final byte TYPE = 0x0D;
private static final BuilderPool<Builder, LeaveRequest> POOL = new BuilderPool<>(Builder::new);
/**
* Returns a new leave request builder.
*
* @return A new leave request builder.
*/
public static Builder builder() {
return POOL.acquire();
}
/**
* Returns an leave request builder for an existing request.
*
* @param request The request to build.
* @return The leave request builder.
*/
public static Builder builder(LeaveRequest request) {
return POOL.acquire(request);
}
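  // Illustrative builder usage (sketch; `member` is a hypothetical Member instance):
  //   LeaveRequest request = LeaveRequest.builder()
  //       .withMember(member)
  //       .build();
  // build() throws NullPointerException when no member has been set, mirroring
  // the check in Builder.build() below.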
private Member member;
private LeaveRequest(ReferenceManager<LeaveRequest> referenceManager) {
super(referenceManager);
}
@Override
public byte type() {
return TYPE;
}
/**
* Returns the leaving member.
*
* @return The leaving member.
*/
public Member member() {
return member;
}
@Override
public void writeObject(BufferOutput buffer, Serializer serializer) {
serializer.writeObject(member, buffer);
}
@Override
public void readObject(BufferInput buffer, Serializer serializer) {
member = serializer.readObject(buffer);
}
@Override
public int hashCode() {
return Objects.hash(getClass(), member);
}
@Override
public boolean equals(Object object) {
if (object instanceof LeaveRequest) {
LeaveRequest request = (LeaveRequest) object;
return request.member.equals(member);
}
return false;
}
@Override
public String toString() {
return String.format("%s[member=%s]", getClass().getSimpleName(), member);
}
/**
* Leave request builder.
*/
public static class Builder extends AbstractRequest.Builder<Builder, LeaveRequest> {
protected Builder(BuilderPool<Builder, LeaveRequest> pool) {
super(pool, LeaveRequest::new);
}
@Override
public void reset() {
super.reset();
request.member = null;
}
/**
* Sets the request member.
*
* @param member The request member.
* @return The request builder.
*/
public Builder withMember(Member member) {
if (member == null)
throw new NullPointerException("member cannot be null");
request.member = member;
return this;
}
@Override
public LeaveRequest build() {
super.build();
if (request.member == null)
throw new NullPointerException("member cannot be null");
return request;
}
@Override
public int hashCode() {
return Objects.hash(request);
}
@Override
public boolean equals(Object object) {
return object instanceof Builder && ((Builder) object).request.equals(request);
}
@Override
public String toString() {
return String.format("%s[request=%s]", getClass().getCanonicalName(), request);
}
}
}
| apache-2.0 |
lesaint/experimenting-annotation-processing | experimenting-rounds/massive-count-of-annotated-classes/src/main/java/fr/javatronic/blog/massive/annotation1/sub1/Class_4125.java | 151 | package fr.javatronic.blog.massive.annotation1.sub1;
import fr.javatronic.blog.processor.Annotation_001;
@Annotation_001
public class Class_4125 {
}
| apache-2.0 |
OpenXIP/xip-libraries | src/database/core/SoXipLazyGroup.cpp | 1132 | /*
Copyright (c) 2011, Siemens Corporate Research a Division of Siemens Corporation
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#include <Inventor/actions/SoGLRenderAction.h>
#include "SoXipLazyGroup.h"
SO_NODE_SOURCE( SoXipLazyGroup );
SoXipLazyGroup::SoXipLazyGroup()
{
SO_NODE_CONSTRUCTOR(SoXipLazyGroup);
mNodeID = 0;
}
void SoXipLazyGroup::initClass()
{
SO_NODE_INIT_CLASS(SoXipLazyGroup, SoGroup, "Group")
}
void SoXipLazyGroup::GLRender(SoGLRenderAction *action)
{
if(mNodeID != getNodeId())
{
mNodeID = getNodeId();
SoGroup::GLRender(action); // traverse children...
}
}
| apache-2.0 |
Naeregwen/games-librarian | src/commons/enums/DumpMode.java | 1959 | /**
* Copyright 2012-2014 Naeregwen
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package commons.enums;
import javax.swing.ImageIcon;
import commons.GamesLibrarianIcons;
import commons.enums.interfaces.GamesLibrarianActionEnum;
/**
* @author Naeregwen
*
*/
public enum DumpMode implements GamesLibrarianActionEnum {
Text ("dumpModeTextLabel", "dumpModeTextMnemonic", "dumpModeTextAccelerator", GamesLibrarianIcons.dumpModeTextIcon),
XML ("dumpModeXMLLabel", "dumpModeXMLMnemonic", "dumpModeXMLAccelerator", GamesLibrarianIcons.dumpModeXMLIcon),
Both ("dumpModeBothLabel", "dumpModeBothMnemonic", "dumpModeBothAccelerator", GamesLibrarianIcons.dumpModeBothIcon);
String labelKey;
String mnemonicKey;
String acceleratorKey;
ImageIcon icon;
DumpMode(String labelKey, String mnemonicKey, String acceleratorKey, ImageIcon icon) {
this.labelKey = labelKey;
this.mnemonicKey = mnemonicKey;
this.acceleratorKey = acceleratorKey;
this.icon = icon;
}
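	// For example, DumpMode.Text.getLabelKey() returns "dumpModeTextLabel"; the
	// label, mnemonic and accelerator keys are presumably resolved against the
	// application's resource bundle by the UI layer (not shown in this file).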
/**
* @return the labelKey
*/
@Override
public String getLabelKey() {
return labelKey;
}
/**
* @return the mnemonicKey
*/
public String getMnemonicKey() {
return mnemonicKey;
}
/**
* @return the acceleratorKey
*/
public String getAcceleratorKey() {
return acceleratorKey;
}
/**
* @return the icon
*/
@Override
public ImageIcon getIcon() {
return icon;
}
}
| apache-2.0 |
eaglelzy/myglide | library/src/main/java/com/lizy/myglide/load/engine/bitmap_recycle/Poolable.java | 141 | package com.lizy.myglide.load.engine.bitmap_recycle;
/**
* Created by lizy on 16-4-21.
*/
public interface Poolable {
void offer();
}
| apache-2.0 |
mufaddalq/cloudstack-datera-driver | ui/scripts/ui-custom/zoneChart.js | 19593 | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
(function($, cloudStack) {
/**
* Zone details chart
*/
cloudStack.uiCustom.systemChart = function(chartID) {
/**
* Make view all button
*/
var viewAllButton = function(args) {
var $viewAll = $('<div>').addClass('button view-all');
var $label = $('<span>').addClass('view-all-label').html(args.label ? args.label : 'View all');
var $browser = args.$browser;
var action = args.action;
// Launch a list view
//var $multiple-click=$viewAll.data('multiple-click',false);
$viewAll.click(function() {
if ($viewAll.data('multiple-click')) return false;
//@pranav-handling the multiple clicks by using a flag variable
$viewAll.data('multiple-click', true);
$browser.cloudBrowser('addPanel', {
title: args.title,
maximizeIfSelected: true,
complete: function($newPanel) {
$viewAll.data('multiple-click', false);
action({
$panel: $newPanel
});
}
});
});
$viewAll.append($label);
return $viewAll;
};
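        // Illustrative args shape for viewAllButton (key names taken from the
        // calls below; the concrete values are hypothetical):
        //   { label: 'View all', title: 'Hosts', $browser: $('#browser .container'),
        //     action: actions.listView('hosts', context), context: context }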
/**
* Chart button action generators
*/
var actions = {
/**
* Makes a list view from given zone sub-section
*/
listView: function(targetID, context) {
return function(args) {
var $elem = args.$panel;
var listView = cloudStack.sections.system.subsections[targetID];
$elem.listView($.extend(true, {}, listView, {
context: context
}));
};
},
providerListView: function(context) {
return function(args) {
var $elem = args.$panel;
var listViewArgs = cloudStack.sections.system.naas.providerListView;
$elem.listView({
context: context,
listView: listViewArgs
});
};
},
/**
* Makes details for a given traffic type
*/
trafficTypeDetails: function(targetID, context) {
return function(args) {
var $elem = args.$panel;
var detailViewArgs = cloudStack.sections.system.naas.mainNetworks[targetID].detailView;
$elem.detailView($.extend(true, {}, detailViewArgs, {
$browser: $('#browser .container'),
context: context
}));
};
}
};
/**
* Chart generators
*/
var charts = {
/**
* Compute tab
*/
compute: function(args) {
var $chart = $('<div>');
var $browser = $('#browser .container');
var context = args.context;
// Resource items
var computeResources = {
zone: {
label: 'Zone'
},
pods: {
label: 'Pods',
viewAll: {
action: actions.listView('pods', context)
}
},
clusters: {
label: 'Clusters',
viewAll: {
action: actions.listView('clusters', context)
}
},
hosts: {
label: 'Hosts',
viewAll: {
action: actions.listView('hosts', context)
}
},
primaryStorage: {
label: 'Primary Storage',
viewAll: {
action: actions.listView('primary-storage', context)
}
},
ucs: {
label: 'UCS',
viewAll: {
action: actions.listView('ucs', context)
}
},
secondaryStorage: {
label: 'Secondary Storage',
viewAll: {
action: actions.listView('secondary-storage', context)
}
}
};
var $computeResources = $('<ul>').addClass('resources');
// Make resource items
$.each(computeResources, function(id, resource) {
var $li = $('<li>');
var $label = $('<span>').addClass('label');
$li.addClass(id);
$label.html(resource.label);
$label.appendTo($li);
// View all
if (resource.viewAll) {
viewAllButton($.extend(resource.viewAll, {
title: resource.label,
$browser: $browser,
context: context
})).appendTo($li);
}
$li.appendTo($computeResources);
});
$chart.append($computeResources);
return $chart;
},
network: function(args) {
var $chart = $('<div>');
var $browser = $('#browser .container');
var $loading = $('<div>').addClass('loading-overlay');
var context = args.context;
var networkDataProvider = cloudStack.sections.system.naas.networks.dataProvider;
var trafficTypeDataProvider = cloudStack.sections.system.naas.trafficTypes.dataProvider;
$loading.appendTo($chart);
var renderChart = function(args) {
var $targetChart = args.$chart ? args.$chart : $chart;
var targetContext = $.extend(true, {}, context, {
physicalNetworks: [args.data]
});
// Get traffic type data
trafficTypeDataProvider({
context: targetContext,
response: {
success: function(args) {
var $networkChart = $('<div>').addClass('system-network-chart');
var $trafficTypes = $('<ul>').addClass('resources traffic-types');
$loading.remove();
var trafficTypes = {
'public': {
label: _l('label.public'),
configure: {
action: actions.trafficTypeDetails('public', targetContext)
}
},
'guest': {
label: _l('label.guest'),
configure: {
action: actions.trafficTypeDetails('guest', targetContext)
}
},
'management': {
label: _l('label.management'),
configure: {
action: actions.trafficTypeDetails('management', targetContext)
}
},
'storage': {
label: _l('label.storage'),
configure: {
action: actions.trafficTypeDetails('storage', targetContext)
}
},
'providers': {
label: _l('label.network.service.providers'),
ignoreChart: true,
dependsOn: 'guest',
configure: {
action: actions.providerListView(targetContext)
}
}
};
var validTrafficTypes = $.map(args.data, function(trafficType) {
return trafficType.name.toLowerCase();
});
// Make traffic type elems
$.each(trafficTypes, function(id, trafficType) {
if ($.inArray(id, validTrafficTypes) == -1) { //if it is not a valid traffic type
if (trafficType.dependsOn != null && trafficType.dependsOn.length > 0) { //if it has dependsOn
if ($.inArray(trafficType.dependsOn, validTrafficTypes) == -1) { //if its dependsOn is not a valid traffic type, either
return true; //skip this item
}
//else, if its dependsOn is a valid traffic type, continue to Make list item (e.g. providers.dependsOn is 'guest')
} else {
return true; //if it doesn't have dependsOn, skip this item
}
}
// Make list item
var $li = $('<li>').addClass(id);
var $label = $('<span>').addClass('label').html(trafficType.label);
var $configureButton = viewAllButton($.extend(trafficType.configure, {
label: _l('label.configure'),
title: trafficType.label,
$browser: $browser,
targetContext: targetContext
}));
$li.append($label, $configureButton);
$li.appendTo($trafficTypes);
// Make chart
if (trafficType.ignoreChart)
return true;
var $targetChartItem = $('<div>').addClass('network-chart-item').addClass(id);
$targetChartItem.appendTo($networkChart);
});
var $switchIcon = $('<div>').addClass('network-switch-icon').append(
$('<span>').html('L2/L3 switch')
);
var $circleIcon = $('<div>').addClass('base-circle-icon');
$targetChart.append($trafficTypes, $switchIcon, $networkChart, $circleIcon);
}
}
});
};
// Get network data
networkDataProvider({
context: context,
response: {
success: function(args) {
var data = args.data;
var actionFilter = args.actionFilter;
$chart.listView({
listView: $.extend(true, {}, cloudStack.sections.system.naas.networks.listView, {
dataProvider: function(args) {
args.response.success({
actionFilter: actionFilter,
data: data
});
},
detailView: {
noCompact: true,
tabs: {
network: {
title: 'Network',
custom: function(args) {
var $chart = $('<div>').addClass('system-chart network');
renderChart({
$chart: $chart,
data: args.context.physicalNetworks[0]
});
return $chart;
}
}
}
}
})
});
$loading.remove();
}
}
});
return $chart;
},
resources: function(args) {
var $chart = $('<div>').addClass('dashboard admin');
var $chartItems = $('<ul>');
var $stats = $('<div>').addClass('stats');
var $container = $('<div>').addClass('dashboard-container head');
var $top = $('<div>').addClass('top');
var $title = $('<div>').addClass('title').append($('<span>').html(_l('label.system.wide.capacity')));
var chartItems = {
// The keys are based on the internal type ID associated with each capacity
0: {
name: _l('label.memory')
},
1: {
name: _l('label.cpu')
},
2: {
name: _l('label.storage')
},
3: {
name: _l('label.primary.allocated')
},
6: {
name: _l('label.secondary.storage')
},
9: {
name: _l('label.local.storage')
},
4: {
name: _l('label.public.ips')
},
5: {
name: _l('label.management.ips')
},
8: {
name: _l('label.direct.ips')
},
7: {
name: _l('label.vlan')
}
};
$top.append($title);
$container.append($top, $stats.append($chartItems));
$chart.append($container);
var $loading = $('<div>').addClass('loading-overlay').prependTo($chart);
cloudStack.sections.system.zoneDashboard({
context: args.context,
response: {
success: function(args) {
$loading.remove();
$.each(chartItems, function(id, chartItem) {
var data = args.data[id] ? args.data[id] : {
used: 0,
total: 0,
percent: 0
};
var $item = $('<li>');
var $name = $('<div>').addClass('name').html(chartItem.name);
var $value = $('<div>').addClass('value');
var $content = $('<div>').addClass('content').html('Allocated: ');
var $allocatedValue = $('<span>').addClass('allocated').html(data.used);
var $totalValue = $('<span>').addClass('total').html(data.total);
var $chart = $('<div>').addClass('chart');
var $chartLine = $('<div>').addClass('chart-line')
.css({
width: '0%'
})
.animate({
width: data.percent + '%'
});
var $percent = $('<div>').addClass('percentage');
                            var $percentValue = $('<span>').addClass('value').html(data.percent);
$chartItems.append(
$item.append(
$name,
$value.append(
$content.append(
$allocatedValue,
' / ',
$totalValue
)
),
$chart.append($chartLine),
$percent.append($percentValue, '%')
)
);
});
}
}
});
return $chart;
}
};
return function(args) {
// Fix zone context naming
args.context.zones = args.context.physicalResources;
var $chart = charts[chartID](args).addClass('system-chart').addClass(chartID);
return $chart;
};
};
})(jQuery, cloudStack);
| apache-2.0 |
aws/aws-sdk-java | aws-java-sdk-codepipeline/src/main/java/com/amazonaws/services/codepipeline/model/transform/ListActionExecutionsRequestProtocolMarshaller.java | 2783 | /*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.codepipeline.model.transform;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.Request;
import com.amazonaws.http.HttpMethodName;
import com.amazonaws.services.codepipeline.model.*;
import com.amazonaws.transform.Marshaller;
import com.amazonaws.protocol.*;
import com.amazonaws.protocol.Protocol;
import com.amazonaws.annotation.SdkInternalApi;
/**
* ListActionExecutionsRequest Marshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class ListActionExecutionsRequestProtocolMarshaller implements Marshaller<Request<ListActionExecutionsRequest>, ListActionExecutionsRequest> {
private static final OperationInfo SDK_OPERATION_BINDING = OperationInfo.builder().protocol(Protocol.AWS_JSON).requestUri("/")
.httpMethodName(HttpMethodName.POST).hasExplicitPayloadMember(false).hasPayloadMembers(true)
.operationIdentifier("CodePipeline_20150709.ListActionExecutions").serviceName("AWSCodePipeline").build();
private final com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory;
public ListActionExecutionsRequestProtocolMarshaller(com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory) {
this.protocolFactory = protocolFactory;
}
public Request<ListActionExecutionsRequest> marshall(ListActionExecutionsRequest listActionExecutionsRequest) {
if (listActionExecutionsRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
final ProtocolRequestMarshaller<ListActionExecutionsRequest> protocolMarshaller = protocolFactory.createProtocolMarshaller(SDK_OPERATION_BINDING,
listActionExecutionsRequest);
protocolMarshaller.startMarshalling();
ListActionExecutionsRequestMarshaller.getInstance().marshall(listActionExecutionsRequest, protocolMarshaller);
return protocolMarshaller.finishMarshalling();
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
}
}
| apache-2.0 |
GerHobbelt/lovefield | tests/index/nullable_index_test.js | 4786 | /**
* @license
* Copyright 2015 The Lovefield Project Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
goog.setTestOnly();
goog.require('goog.testing.jsunit');
goog.require('lf.Order');
goog.require('lf.index.BTree');
goog.require('lf.index.NullableIndex');
goog.require('lf.index.SimpleComparator');
goog.require('lf.index.SingleKeyRange');
goog.require('lf.testing.index.TestSingleRowNumericalKey');
goog.require('lf.testing.index.TestSingleRowStringKey');
goog.require('lf.testing.util');
/** @type {!lf.index.NullableIndex} */
var index;
function setUp() {
index = new lf.index.NullableIndex(
new lf.index.BTree(
'test',
new lf.index.SimpleComparator(lf.Order.ASC),
/* opt_unique */ false));
}
function testIndex() {
assertEquals('test', index.getName());
index.add(1, 2);
index.add(1, 3);
index.add(2, 4);
index.add(3, 5);
index.add(null, 7);
index.add(null, 8);
assertArrayEquals([2, 3], index.get(1));
assertArrayEquals([7, 8], index.get(null));
assertEquals(1, index.min()[0]);
assertEquals(3, index.max()[0]);
assertEquals(4, index.cost());
assertArrayEquals([4, 5], index.getRange(
[new lf.index.SingleKeyRange(2, 3, false, false)]));
assertArrayEquals([2, 3, 4, 5, 7, 8], index.getRange());
index.remove(2);
assertArrayEquals([], index.get(2));
index.remove(null, 7);
assertArrayEquals([8], index.get(null));
index.set(1, 10);
assertArrayEquals([10], index.get(1));
index.set(null, 9);
assertArrayEquals([9], index.get(null));
assertTrue(index.containsKey(1));
assertTrue(index.containsKey(null));
index.remove(null);
assertFalse(index.containsKey(null));
assertArrayEquals([10, 5], index.getRange());
index.set(null, 9);
assertArrayEquals([9], index.get(null));
index.clear();
assertFalse(index.containsKey(1));
assertFalse(index.containsKey(null));
}
function testSingleRow_NumericalKey() {
var test = new lf.testing.index.TestSingleRowNumericalKey(function() {
return new lf.index.NullableIndex(
new lf.index.BTree(
'test',
new lf.index.SimpleComparator(lf.Order.ASC),
/* opt_unique */ false));
});
test.run();
}
function testSingleRow_StringKey() {
var test = new lf.testing.index.TestSingleRowStringKey(function() {
return new lf.index.NullableIndex(
new lf.index.BTree(
'test',
new lf.index.SimpleComparator(lf.Order.ASC),
/* opt_unique */ false));
});
test.run();
}
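/**
 * Serialization round-trip: NullableIndex.deserialize() is given a factory
 * function for rebuilding the wrapped index, so BTree.deserialize is
 * pre-bound below with the same comparator, name and uniqueness flag used in
 * setUp().
 */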
function testSerialize() {
var deserializeFn = lf.index.BTree.deserialize.bind(
undefined,
new lf.index.SimpleComparator(lf.Order.ASC),
'test',
false);
index.add(null, 1);
index.add(null, 2);
index.add(1, 3);
index.add(1, 4);
index.add(2, 5);
var rows = index.serialize();
var index2 = lf.index.NullableIndex.deserialize(deserializeFn, rows);
assertArrayEquals([3, 4, 5, 1, 2], index2.getRange());
assertArrayEquals([1, 2], index2.get(null));
assertArrayEquals([3, 4], index2.get(1));
assertArrayEquals([5], index2.get(2));
}
function testUnique() {
index = new lf.index.NullableIndex(
new lf.index.BTree(
'test',
new lf.index.SimpleComparator(lf.Order.ASC),
/* opt_unique */ true));
index.add(null, 1);
index.add(1, 2);
// 201: Duplicate keys are not allowed.
lf.testing.util.assertThrowsError(201, function() { index.add(1, 3); });
lf.testing.util.assertThrowsError(201, function() { index.add(null, 2); });
}
function testStats() {
index.add(null, 1);
index.add(null, 2);
index.add(null, 7);
index.add(1, 3);
index.add(1, 4);
index.add(1, 8);
index.add(2, 5);
assertEquals(7, index.stats().totalRows);
index.remove(null, 2);
assertEquals(6, index.stats().totalRows);
index.remove(null);
assertEquals(4, index.stats().totalRows);
index.set(null, 22);
assertEquals(5, index.stats().totalRows);
index.add(null, 33);
assertEquals(6, index.stats().totalRows);
index.remove(null);
assertEquals(4, index.stats().totalRows);
index.remove(1, 3);
assertEquals(3, index.stats().totalRows);
index.remove(1);
assertEquals(1, index.stats().totalRows);
index.clear();
assertEquals(0, index.stats().totalRows);
}
| apache-2.0 |
GroupeStageSPPP/Mission2 | EntretienSPPP/EntretienSPPP.WF/Properties/Settings.Designer.cs | 1126 | //------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
// Runtime version: 4.0.30319.34014
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace EntretienSPPP.WinForm.Properties {
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "11.0.0.0")]
internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase {
private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));
public static Settings Default {
get {
return defaultInstance;
}
}
}
}
| apache-2.0 |
rhtconsulting/fuse-quickstarts | eap/amq_mdb/src/main/java/com/redhat/consulting/fusequickstarts/eap/amq/mdb/ConsumerRoute.java | 619 | package com.redhat.consulting.fusequickstarts.eap.amq.mdb;
import javax.ejb.Startup;
import javax.enterprise.context.ApplicationScoped;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.cdi.ContextName;
/*
* This consumer route receives messages sent from the Producer Template in the MDB
*/
@Startup
@ApplicationScoped
@ContextName("amq-mdb")
public class ConsumerRoute extends RouteBuilder {
@Override
public void configure() throws Exception {
from("direct:consumer")
.routeId("consumer")
.log("Received Message: ${body.text}");
}
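/*
 * For context, a minimal sketch of the producing side described in the class
 * comment above. It is illustrative only and not the quickstart's actual MDB;
 * the class name and queue name are assumptions.
 *
 *   @MessageDriven(activationConfig = {
 *       @ActivationConfigProperty(propertyName = "destination", propertyValue = "queue/ExampleQueue") })
 *   public class ExampleMDB implements MessageListener {
 *
 *       @Inject
 *       @Uri("direct:consumer")
 *       private ProducerTemplate producer;
 *
 *       public void onMessage(Message message) {
 *           // forward the raw JMS message so the route above can log ${body.text}
 *           producer.sendBody(message);
 *       }
 *   }
 */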
} | apache-2.0 |
vanilladb/vanillacore | src/test/java/org/vanilladb/core/QueryTestSuite.java | 2203 | /*******************************************************************************
* Copyright 2016, 2017 vanilladb.org contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package org.vanilladb.core;
import org.junit.runner.RunWith;
import org.junit.runners.Suite.SuiteClasses;
import org.vanilladb.core.IsolatedClassLoaderSuite.IsolationRoot;
import org.vanilladb.core.query.algebra.BasicQueryTest;
import org.vanilladb.core.query.algebra.index.MultiKeyIndexAlgebraTest;
import org.vanilladb.core.query.algebra.materialize.MaterializeTest;
import org.vanilladb.core.query.parse.ParseTest;
import org.vanilladb.core.query.planner.BasicQueryPlannerTest;
import org.vanilladb.core.query.planner.VerifierTest;
import org.vanilladb.core.query.planner.index.IndexUpdatePlannerTest;
import org.vanilladb.core.query.planner.opt.HeuristicQueryPlannerTest;
import org.vanilladb.core.query.planner.opt.MultiKeyIndexPlanningTest;
import org.vanilladb.core.query.planner.opt.SelingerQueryPlannerTest;
import org.vanilladb.core.server.VanillaDb;
@RunWith(IsolatedClassLoaderSuite.class)
@SuiteClasses({
// query.parse
ParseTest.class,
// query.planner
VerifierTest.class, BasicQueryPlannerTest.class,
// query.algebra
BasicQueryTest.class, MultiKeyIndexAlgebraTest.class,
// query.algebra.materialize
MaterializeTest.class,
// query.planner
IndexUpdatePlannerTest.class, HeuristicQueryPlannerTest.class,
MultiKeyIndexPlanningTest.class, SelingerQueryPlannerTest.class
})
@IsolationRoot(VanillaDb.class)
public class QueryTestSuite {
}
| apache-2.0 |
aeffrig/adventofcode15 | src/main/scala/day1/DayofCode1.scala | 396 | package day1
import scala.io.Source
object DayofCode1 {
def main(args: Array[String]) {
val answerPart1 = Source.fromFile("day1input").mkString.map { case '(' => 1
case ')' => -1 }.reduce(_ + _)
println(answerPart1)
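// scanLeft(0)(_ + _) yields the running sum after each character, starting with the
// initial 0 at index 0, so indexOf(-1) is directly the 1-based position of the character
// that first drives the running sum below zero. For example, "()())" scans to
// List(0, 1, 0, 1, 0, -1) and indexOf(-1) == 5.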
val answerPart2 = Source.fromFile("day1input").mkString.map { case '(' => 1
case ')' => -1 }.scanLeft(0)(_ + _).indexOf(-1)
println(answerPart2)
}
}
| apache-2.0 |
couchbase/CouchbaseMock | src/main/java/com/couchbase/mock/memcached/UnknownCommandExecutor.java | 1137 | /*
* Copyright 2017 Couchbase, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.couchbase.mock.memcached;
import com.couchbase.mock.memcached.protocol.BinaryCommand;
import com.couchbase.mock.memcached.protocol.BinaryResponse;
import com.couchbase.mock.memcached.protocol.ErrorCode;
/**
* @author Trond Norbye
*/
public class UnknownCommandExecutor implements CommandExecutor {
@Override
public BinaryResponse execute(BinaryCommand cmd, MemcachedServer server, MemcachedConnection client) {
return new BinaryResponse(cmd, ErrorCode.UNKNOWN_COMMAND);
}
}
| apache-2.0 |
ReliefZk/minor-rpc | src/main/java/com/zk/rpc/common/client/AbstractClient.java | 1816 | package com.zk.rpc.common.client;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import com.zk.rpc.bean.RpcRequest;
import com.zk.rpc.bean.RpcResponse;
import com.zk.rpc.common.common.TransportURL;
public abstract class AbstractClient implements Client {
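// Correlates asynchronous responses with their requests: invoke() tags each request with a
// unique id (the "magic" field), getResponse() blocks up to 30 seconds on a per-request
// queue, and whoever receives the reply (typically the transport handler) completes it by
// calling setResponse() with the same id.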
private AtomicInteger requestId = new AtomicInteger(0);
protected String host;
protected int port;
protected String beanName;
protected ConcurrentHashMap<Integer, ArrayBlockingQueue<RpcResponse>> responseMap = new ConcurrentHashMap<Integer, ArrayBlockingQueue<RpcResponse>>();
public AbstractClient(TransportURL url) {
this.host = url.getHost();
this.port = url.getPort();
this.beanName = url.getPath();
}
@Override
public String getBeanName() {
return this.beanName;
}
@Override
public void setResponse(Integer requestIndex, RpcResponse response) {
ArrayBlockingQueue<RpcResponse> responseQueue = responseMap.get(requestIndex);
if(responseQueue == null){
responseQueue = new ArrayBlockingQueue<RpcResponse>(1);
}
responseQueue.add(response);
responseMap.putIfAbsent(requestIndex, responseQueue);
}
@Override
public RpcResponse getResponse(Integer requestIndex) {
ArrayBlockingQueue<RpcResponse> responseQueue = responseMap.get(requestIndex);
if (responseQueue != null) {
try {
RpcResponse response = responseQueue.poll(30*1000,TimeUnit.MILLISECONDS);
return response;
} catch (InterruptedException e) {
// restore the interrupt flag instead of silently swallowing it; control falls through to return null
Thread.currentThread().interrupt();
} finally {
responseMap.remove(requestIndex);
}
}
return null;
}
public Object invoke(RpcRequest request) {
int rid = requestId.getAndDecrement();
request.setMagic(rid);
sendRequest(request);
return getResponse(rid).getResponse();
}
}
| apache-2.0 |
nhoHQ/SSMIX2DataTestingTool | src/lib/checkFilePath.py | 1754 | # coding: UTF-8
import os
import codecs
locationNG = []
# Enumerate files whose file name and directory path are inconsistent
def checkFilePath(rootDir, fileName):
try:
#root = 0/1/2/3/4/5/6
#0: other
#1: ss-mix standard storage root
#2: patient id 0,1,2
#3: patient id 3,4,5
#4: patient id full
#5: - or medication date
#6: file type
f = fileName.split('_')
#file = 0_1_2_3_4_5_6_7
#0: patient id
#1: - or medication date
#2: file type
#3: order number
#4: file time stamp
#5: department
#6: condition flag
l = os.path.join(f[0][0:3], f[0][3:6], f[0], f[1], f[2])
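# Example: a fileName such as "0000001_20000401_ADT-31_..." must live under the
# directory suffix 000/000/0000001/20000401/ADT-31 (3-digit id prefixes, full
# patient id, date, data type), which is what the endswith() check below verifies.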
if rootDir.endswith(l) == False:
locationNG.append(os.path.join(rootDir,fileName))
except Exception as e:
print('exception @ %s\%s' % (rootDir, fileName))
locationNG.append('%s\%s' % (rootDir, fileName))
print(str(e))
return
# Output the results
def outputResults(outputFile):
try:
fout = codecs.open(outputFile, 'a', 'utf-8')
fout.write('\r\n#####checkFilePath result####\r\n')
fout.write('%d files failure\r\n\r\n' % len(locationNG))
for n in locationNG:
fout.write('%s\r\n' % n)
fout.write('\r\n')
fout.close()
except Exception as e:
print(str(e))
return
if __name__ == "__main__":
import sys
checkFilePath('..\\sampleData\\000\\000\\0000001\\20000401\\ADT-31\\', '..\\sampleData\\000\\000\\0000001\\20000401\\9999013_20110630_OMP-11_123456789012345_20110701113813225_01_1')
outputResults('%s.out' % os.getpid())
| apache-2.0 |
svn2github/scalatest | src/test/scala/org/scalatest/ShouldIncludeRegexSpec.scala | 42351 | /*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalatest.prop.Checkers
import org.scalacheck._
import Arbitrary._
import Prop._
import org.scalatest.exceptions.TestFailedException
class ShouldIncludeRegexSpec extends Spec with Matchers with Checkers with ReturnsNormallyThrowsAssertion {
/*
s should include substring t
s should include regex t
s should startWith substring t
s should startWith regex t
s should endWith substring t
s should endWith regex t
s should fullyMatch regex t
*/
object `The include regex syntax` {
val decimal = """(-)?(\d+)(\.\d*)?"""
val decimalRegex = """(-)?(\d+)(\.\d*)?""".r
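// Both patterns match an optional minus sign, one or more digits, and an optional
// fractional part, e.g. "-1.8", "8", "1." and the "1.7" embedded in "a1.7b".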
object `(when the regex is specified by a string)` {
def `should do nothing if the string includes substring that matched regex specified as a string` {
"1.78" should include regex ("1.7")
"21.7" should include regex ("1.7")
"21.78" should include regex ("1.7")
"1.7" should include regex (decimal)
"21.7" should include regex (decimal)
"1.78" should include regex (decimal)
"a -1.8 difference" should include regex (decimal)
"b8" should include regex (decimal)
"8x" should include regex (decimal)
"1.x" should include regex (decimal)
// The remaining are full matches, which should also work with "include"
"1.7" should include regex ("1.7")
"1.7" should include regex (decimal)
"-1.8" should include regex (decimal)
"8" should include regex (decimal)
"1." should include regex (decimal)
}
def `should do nothing if the string does not include substring that matched regex specified as a string when used with not` {
"eight" should not { include regex (decimal) }
"one.eight" should not { include regex (decimal) }
"eight" should not include regex (decimal)
"one.eight" should not include regex (decimal)
}
def `should do nothing if the string does not include substring that matched regex specified as a string when used in a logical-and expression` {
"a1.7" should (include regex (decimal) and (include regex (decimal)))
"a1.7" should (include regex (decimal) and (include regex (decimal)))
"a1.7" should (include regex (decimal) and (include regex (decimal)))
"1.7b" should ((include regex (decimal)) and (include regex (decimal)))
"1.7b" should ((include regex (decimal)) and (include regex (decimal)))
"1.7b" should ((include regex (decimal)) and (include regex (decimal)))
"a1.7b" should (include regex (decimal) and include regex (decimal))
"a1.7b" should (include regex (decimal) and include regex (decimal))
"a1.7b" should (include regex (decimal) and include regex (decimal))
"1.7" should (include regex (decimal) and (include regex (decimal)))
"1.7" should ((include regex (decimal)) and (include regex (decimal)))
"1.7" should (include regex (decimal) and include regex (decimal))
}
def `should do nothing if the string does not include substring that matched regex specified as a string when used in a logical-or expression` {
"a1.7" should (include regex ("hello") or (include regex (decimal)))
"a1.7" should (include regex ("hello") or (include regex (decimal)))
"a1.7" should (include regex ("hello") or (include regex (decimal)))
"1.7b" should ((include regex ("hello")) or (include regex (decimal)))
"1.7b" should ((include regex ("hello")) or (include regex (decimal)))
"a1.7b" should ((include regex ("hello")) or (include regex (decimal)))
"a1.7b" should (include regex ("hello") or include regex (decimal))
"a1.7b" should (include regex ("hello") or include regex (decimal))
"a1.7b" should (include regex ("hello") or include regex (decimal))
"1.7" should (include regex ("hello") or (include regex (decimal)))
"1.7" should ((include regex ("hello")) or (include regex (decimal)))
"1.7" should (include regex ("hello") or include regex (decimal))
}
def `should do nothing if the string does not include substring that matched regex specified as a string when used in a logical-and expression with not` {
"fred" should (not (include regex ("bob")) and not (include regex (decimal)))
"fred" should ((not include regex ("bob")) and (not include regex (decimal)))
"fred" should (not include regex ("bob") and not include regex (decimal))
}
def `should do nothing if the string does not include substring that matched regex specified as a string when used in a logical-or expression with not` {
"fred" should (not (include regex ("fred")) or not (include regex (decimal)))
"fred" should ((not include regex ("fred")) or (not include regex (decimal)))
"fred" should (not include regex ("fred") or not include regex (decimal))
}
def `should throw TestFailedException if the string does not match substring that matched regex specified as a string` {
val caught1 = intercept[TestFailedException] {
"1.7" should include regex ("1.78")
}
assert(caught1.getMessage === "\"1.7\" did not include substring that matched regex 1.78")
val caught2 = intercept[TestFailedException] {
"1.7" should include regex ("21.7")
}
assert(caught2.getMessage === "\"1.7\" did not include substring that matched regex 21.7")
val caught3 = intercept[TestFailedException] {
"-one.eight" should include regex (decimal)
}
assert(caught3.getMessage === "\"-one.eight\" did not include substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught6 = intercept[TestFailedException] {
"eight" should include regex (decimal)
}
assert(caught6.getMessage === "\"eight\" did not include substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught7 = intercept[TestFailedException] {
"one.eight" should include regex (decimal)
}
assert(caught7.getMessage === "\"one.eight\" did not include substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught8 = intercept[TestFailedException] {
"onedoteight" should include regex (decimal)
}
assert(caught8.getMessage === "\"onedoteight\" did not include substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught9 = intercept[TestFailedException] {
"***" should include regex (decimal)
}
assert(caught9.getMessage === "\"***\" did not include substring that matched regex (-)?(\\d+)(\\.\\d*)?")
}
def `should throw TestFailedException if the string matches substring that matched regex specified as a string when used with not` {
val caught1 = intercept[TestFailedException] {
"1.7" should not { include regex ("1.7") }
}
assert(caught1.getMessage === "\"1.7\" included substring that matched regex 1.7")
val caught2 = intercept[TestFailedException] {
"1.7" should not { include regex (decimal) }
}
assert(caught2.getMessage === "\"1.7\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught3 = intercept[TestFailedException] {
"-1.8" should not { include regex (decimal) }
}
assert(caught3.getMessage === "\"-1.8\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught4 = intercept[TestFailedException] {
"8" should not { include regex (decimal) }
}
assert(caught4.getMessage === "\"8\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught5 = intercept[TestFailedException] {
"1." should not { include regex (decimal) }
}
assert(caught5.getMessage === "\"1.\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught11 = intercept[TestFailedException] {
"1.7" should not include regex ("1.7")
}
assert(caught11.getMessage === "\"1.7\" included substring that matched regex 1.7")
val caught12 = intercept[TestFailedException] {
"1.7" should not include regex (decimal)
}
assert(caught12.getMessage === "\"1.7\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught13 = intercept[TestFailedException] {
"-1.8" should not include regex (decimal)
}
assert(caught13.getMessage === "\"-1.8\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught14 = intercept[TestFailedException] {
"8" should not include regex (decimal)
}
assert(caught14.getMessage === "\"8\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught15 = intercept[TestFailedException] {
"1." should not include regex (decimal)
}
assert(caught15.getMessage === "\"1.\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
// The rest are non-exact matches
val caught21 = intercept[TestFailedException] {
"a1.7" should not { include regex ("1.7") }
}
assert(caught21.getMessage === "\"a1.7\" included substring that matched regex 1.7")
val caught22 = intercept[TestFailedException] {
"1.7b" should not { include regex (decimal) }
}
assert(caught22.getMessage === "\"1.7b\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught23 = intercept[TestFailedException] {
"a-1.8b" should not { include regex (decimal) }
}
assert(caught23.getMessage === "\"a-1.8b\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
}
def `should throw TestFailedException if the string includes substring that matched regex specified as a string when used in a logical-and expression` {
val caught1 = intercept[TestFailedException] {
"1.7" should (include regex (decimal) and (include regex ("1.8")))
}
assert(caught1.getMessage === "\"1.7\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?, but \"1.7\" did not include substring that matched regex 1.8")
val caught2 = intercept[TestFailedException] {
"1.7" should ((include regex (decimal)) and (include regex ("1.8")))
}
assert(caught2.getMessage === "\"1.7\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?, but \"1.7\" did not include substring that matched regex 1.8")
val caught3 = intercept[TestFailedException] {
"1.7" should (include regex (decimal) and include regex ("1.8"))
}
assert(caught3.getMessage === "\"1.7\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?, but \"1.7\" did not include substring that matched regex 1.8")
// Check to make sure the error message "short circuits" (i.e., just reports the left side's failure)
val caught4 = intercept[TestFailedException] {
"one.eight" should (include regex (decimal) and (include regex ("1.8")))
}
assert(caught4.getMessage === "\"one.eight\" did not include substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught5 = intercept[TestFailedException] {
"one.eight" should ((include regex (decimal)) and (include regex ("1.8")))
}
assert(caught5.getMessage === "\"one.eight\" did not include substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught6 = intercept[TestFailedException] {
"one.eight" should (include regex (decimal) and include regex ("1.8"))
}
assert(caught6.getMessage === "\"one.eight\" did not include substring that matched regex (-)?(\\d+)(\\.\\d*)?")
}
def `should throw TestFailedException if the string includes substring that matched regex specified as a string when used in a logical-or expression` {
val caught1 = intercept[TestFailedException] {
"one.seven" should (include regex (decimal) or (include regex ("1.8")))
}
assert(caught1.getMessage === "\"one.seven\" did not include substring that matched regex (-)?(\\d+)(\\.\\d*)?, and \"one.seven\" did not include substring that matched regex 1.8")
val caught2 = intercept[TestFailedException] {
"one.seven" should ((include regex (decimal)) or (include regex ("1.8")))
}
assert(caught2.getMessage === "\"one.seven\" did not include substring that matched regex (-)?(\\d+)(\\.\\d*)?, and \"one.seven\" did not include substring that matched regex 1.8")
val caught3 = intercept[TestFailedException] {
"one.seven" should (include regex (decimal) or include regex ("1.8"))
}
assert(caught3.getMessage === "\"one.seven\" did not include substring that matched regex (-)?(\\d+)(\\.\\d*)?, and \"one.seven\" did not include substring that matched regex 1.8")
}
def `should throw TestFailedException if the string includes substring that matched regex specified as a string when used in a logical-and expression used with not` {
val caught1 = intercept[TestFailedException] {
"1.7" should (not include regex ("1.8") and (not include regex (decimal)))
}
assert(caught1.getMessage === "\"1.7\" did not include substring that matched regex 1.8, but \"1.7\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught2 = intercept[TestFailedException] {
"1.7" should ((not include regex ("1.8")) and (not include regex (decimal)))
}
assert(caught2.getMessage === "\"1.7\" did not include substring that matched regex 1.8, but \"1.7\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught3 = intercept[TestFailedException] {
"1.7" should (not include regex ("1.8") and not include regex (decimal))
}
assert(caught3.getMessage === "\"1.7\" did not include substring that matched regex 1.8, but \"1.7\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught4 = intercept[TestFailedException] {
"a1.7" should (not include regex ("1.8") and (not include regex (decimal)))
}
assert(caught4.getMessage === "\"a1.7\" did not include substring that matched regex 1.8, but \"a1.7\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught5 = intercept[TestFailedException] {
"1.7b" should ((not include regex ("1.8")) and (not include regex (decimal)))
}
assert(caught5.getMessage === "\"1.7b\" did not include substring that matched regex 1.8, but \"1.7b\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught6 = intercept[TestFailedException] {
"a1.7b" should (not include regex ("1.8") and not include regex (decimal))
}
assert(caught6.getMessage === "\"a1.7b\" did not include substring that matched regex 1.8, but \"a1.7b\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
}
def `should throw TestFailedException if the string includes substring that matched regex specified as a string when used in a logical-or expression used with not` {
val caught1 = intercept[TestFailedException] {
"1.7" should (not include regex (decimal) or (not include regex ("1.7")))
}
assert(caught1.getMessage === "\"1.7\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?, and \"1.7\" included substring that matched regex 1.7")
val caught2 = intercept[TestFailedException] {
"1.7" should ((not include regex (decimal)) or (not include regex ("1.7")))
}
assert(caught2.getMessage === "\"1.7\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?, and \"1.7\" included substring that matched regex 1.7")
val caught3 = intercept[TestFailedException] {
"1.7" should (not include regex (decimal) or not include regex ("1.7"))
}
assert(caught3.getMessage === "\"1.7\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?, and \"1.7\" included substring that matched regex 1.7")
val caught4 = intercept[TestFailedException] {
"1.7" should (not (include regex (decimal)) or not (include regex ("1.7")))
}
assert(caught4.getMessage === "\"1.7\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?, and \"1.7\" included substring that matched regex 1.7")
val caught5 = intercept[TestFailedException] {
"a1.7" should (not include regex (decimal) or (not include regex ("1.7")))
}
assert(caught5.getMessage === "\"a1.7\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?, and \"a1.7\" included substring that matched regex 1.7")
val caught6 = intercept[TestFailedException] {
"1.7b" should ((not include regex (decimal)) or (not include regex ("1.7")))
}
assert(caught6.getMessage === "\"1.7b\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?, and \"1.7b\" included substring that matched regex 1.7")
val caught7 = intercept[TestFailedException] {
"a1.7b" should (not include regex (decimal) or not include regex ("1.7"))
}
assert(caught7.getMessage === "\"a1.7b\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?, and \"a1.7b\" included substring that matched regex 1.7")
val caught8 = intercept[TestFailedException] {
"a1.7b" should (not (include regex (decimal)) or not (include regex ("1.7")))
}
assert(caught8.getMessage === "\"a1.7b\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?, and \"a1.7b\" included substring that matched regex 1.7")
}
}
object `(when the regex is specified by an actual Regex)` {
def `should do nothing if the string includes substring that matched regex specified as a string` {
"1.78" should include regex ("1.7")
"21.7" should include regex ("1.7")
"21.78" should include regex ("1.7")
"1.7" should include regex (decimalRegex)
"21.7" should include regex (decimalRegex)
"1.78" should include regex (decimalRegex)
"a -1.8 difference" should include regex (decimalRegex)
"b8" should include regex (decimalRegex)
"8x" should include regex (decimalRegex)
"1.x" should include regex (decimalRegex)
// The remaining are full matches, which should also work with "include"
"1.7" should include regex ("1.7")
"1.7" should include regex (decimalRegex)
"-1.8" should include regex (decimalRegex)
"8" should include regex (decimalRegex)
"1." should include regex (decimalRegex)
}
def `should do nothing if the string does not include substring that matched regex specified as a string when used with not` {
"eight" should not { include regex (decimalRegex) }
"one.eight" should not { include regex (decimalRegex) }
"eight" should not include regex (decimalRegex)
"one.eight" should not include regex (decimalRegex)
}
def `should do nothing if the string does not include substring that matched regex specified as a string when used in a logical-and expression` {
"a1.7" should (include regex (decimalRegex) and (include regex (decimalRegex)))
"1.7b" should (include regex (decimalRegex) and (include regex (decimalRegex)))
"a1.7b" should (include regex (decimalRegex) and (include regex (decimalRegex)))
"a1.7" should ((include regex (decimalRegex)) and (include regex (decimalRegex)))
"1.7b" should ((include regex (decimalRegex)) and (include regex (decimalRegex)))
"a1.7b" should ((include regex (decimalRegex)) and (include regex (decimalRegex)))
"a1.7" should (include regex (decimalRegex) and include regex (decimalRegex))
"1.7b" should (include regex (decimalRegex) and include regex (decimalRegex))
"a1.7b" should (include regex (decimalRegex) and include regex (decimalRegex))
"1.7" should (include regex (decimalRegex) and (include regex (decimalRegex)))
"1.7" should ((include regex (decimalRegex)) and (include regex (decimalRegex)))
"1.7" should (include regex (decimalRegex) and include regex (decimalRegex))
}
def `should do nothing if the string does not include substring that matched regex specified as a string when used in a logical-or expression` {
"a1.7" should (include regex ("hello") or (include regex (decimalRegex)))
"1.7b" should (include regex ("hello") or (include regex (decimalRegex)))
"a1.7b" should (include regex ("hello") or (include regex (decimalRegex)))
"a1.7" should ((include regex ("hello")) or (include regex (decimalRegex)))
"1.7b" should ((include regex ("hello")) or (include regex (decimalRegex)))
"a1.7b" should ((include regex ("hello")) or (include regex (decimalRegex)))
"a1.7" should (include regex ("hello") or include regex (decimalRegex))
"1.7b" should (include regex ("hello") or include regex (decimalRegex))
"a1.7b" should (include regex ("hello") or include regex (decimalRegex))
"1.7" should (include regex ("hello") or (include regex (decimalRegex)))
"1.7" should ((include regex ("hello")) or (include regex (decimalRegex)))
"1.7" should (include regex ("hello") or include regex (decimalRegex))
}
def `should do nothing if the string does not include substring that matched regex specified as a string when used in a logical-and expression with not` {
"fred" should (not (include regex ("bob")) and not (include regex (decimalRegex)))
"fred" should ((not include regex ("bob")) and (not include regex (decimalRegex)))
"fred" should (not include regex ("bob") and not include regex (decimalRegex))
}
def `should do nothing if the string does not include substring that matched regex specified as a string when used in a logical-or expression with not` {
"fred" should (not (include regex ("fred")) or not (include regex (decimalRegex)))
"fred" should ((not include regex ("fred")) or (not include regex (decimalRegex)))
"fred" should (not include regex ("fred") or not include regex (decimalRegex))
}
def `should throw TestFailedException if the string does not match substring that matched regex specified as a string` {
val caught1 = intercept[TestFailedException] {
"1.7" should include regex ("1.78")
}
assert(caught1.getMessage === "\"1.7\" did not include substring that matched regex 1.78")
val caught2 = intercept[TestFailedException] {
"1.7" should include regex ("21.7")
}
assert(caught2.getMessage === "\"1.7\" did not include substring that matched regex 21.7")
val caught3 = intercept[TestFailedException] {
"-one.eight" should include regex (decimalRegex)
}
assert(caught3.getMessage === "\"-one.eight\" did not include substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught6 = intercept[TestFailedException] {
"eight" should include regex (decimalRegex)
}
assert(caught6.getMessage === "\"eight\" did not include substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught7 = intercept[TestFailedException] {
"one.eight" should include regex (decimalRegex)
}
assert(caught7.getMessage === "\"one.eight\" did not include substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught8 = intercept[TestFailedException] {
"onedoteight" should include regex (decimalRegex)
}
assert(caught8.getMessage === "\"onedoteight\" did not include substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught9 = intercept[TestFailedException] {
"***" should include regex (decimalRegex)
}
assert(caught9.getMessage === "\"***\" did not include substring that matched regex (-)?(\\d+)(\\.\\d*)?")
}
def `should throw TestFailedException if the string matches substring that matched regex specified as a string when used with not` {
val caught1 = intercept[TestFailedException] {
"1.7" should not { include regex ("1.7") }
}
assert(caught1.getMessage === "\"1.7\" included substring that matched regex 1.7")
val caught2 = intercept[TestFailedException] {
"1.7" should not { include regex (decimalRegex) }
}
assert(caught2.getMessage === "\"1.7\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught3 = intercept[TestFailedException] {
"-1.8" should not { include regex (decimalRegex) }
}
assert(caught3.getMessage === "\"-1.8\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught4 = intercept[TestFailedException] {
"8" should not { include regex (decimalRegex) }
}
assert(caught4.getMessage === "\"8\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught5 = intercept[TestFailedException] {
"1." should not { include regex (decimalRegex) }
}
assert(caught5.getMessage === "\"1.\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught11 = intercept[TestFailedException] {
"1.7" should not include regex ("1.7")
}
assert(caught11.getMessage === "\"1.7\" included substring that matched regex 1.7")
val caught12 = intercept[TestFailedException] {
"1.7" should not include regex (decimalRegex)
}
assert(caught12.getMessage === "\"1.7\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught13 = intercept[TestFailedException] {
"-1.8" should not include regex (decimalRegex)
}
assert(caught13.getMessage === "\"-1.8\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught14 = intercept[TestFailedException] {
"8" should not include regex (decimalRegex)
}
assert(caught14.getMessage === "\"8\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught15 = intercept[TestFailedException] {
"1." should not include regex (decimalRegex)
}
assert(caught15.getMessage === "\"1.\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
// The rest are non-exact matches
val caught21 = intercept[TestFailedException] {
"a1.7" should not { include regex ("1.7") }
}
assert(caught21.getMessage === "\"a1.7\" included substring that matched regex 1.7")
val caught22 = intercept[TestFailedException] {
"1.7b" should not { include regex (decimalRegex) }
}
assert(caught22.getMessage === "\"1.7b\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught23 = intercept[TestFailedException] {
"a-1.8b" should not { include regex (decimalRegex) }
}
assert(caught23.getMessage === "\"a-1.8b\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
}
def `should throw TestFailedException if the string includes substring that matched regex specified as a string when used in a logical-and expression` {
val caught1 = intercept[TestFailedException] {
"1.7" should (include regex (decimalRegex) and (include regex ("1.8")))
}
assert(caught1.getMessage === "\"1.7\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?, but \"1.7\" did not include substring that matched regex 1.8")
val caught2 = intercept[TestFailedException] {
"1.7" should ((include regex (decimalRegex)) and (include regex ("1.8")))
}
assert(caught2.getMessage === "\"1.7\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?, but \"1.7\" did not include substring that matched regex 1.8")
val caught3 = intercept[TestFailedException] {
"1.7" should (include regex (decimalRegex) and include regex ("1.8"))
}
assert(caught3.getMessage === "\"1.7\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?, but \"1.7\" did not include substring that matched regex 1.8")
// Check to make sure the error message "short circuits" (i.e., just reports the left side's failure)
val caught4 = intercept[TestFailedException] {
"one.eight" should (include regex (decimalRegex) and (include regex ("1.8")))
}
assert(caught4.getMessage === "\"one.eight\" did not include substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught5 = intercept[TestFailedException] {
"one.eight" should ((include regex (decimalRegex)) and (include regex ("1.8")))
}
assert(caught5.getMessage === "\"one.eight\" did not include substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught6 = intercept[TestFailedException] {
"one.eight" should (include regex (decimalRegex) and include regex ("1.8"))
}
assert(caught6.getMessage === "\"one.eight\" did not include substring that matched regex (-)?(\\d+)(\\.\\d*)?")
}
def `should throw TestFailedException if the string includes substring that matched regex specified as a string when used in a logical-or expression` {
val caught1 = intercept[TestFailedException] {
"one.seven" should (include regex (decimalRegex) or (include regex ("1.8")))
}
assert(caught1.getMessage === "\"one.seven\" did not include substring that matched regex (-)?(\\d+)(\\.\\d*)?, and \"one.seven\" did not include substring that matched regex 1.8")
val caught2 = intercept[TestFailedException] {
"one.seven" should ((include regex (decimalRegex)) or (include regex ("1.8")))
}
assert(caught2.getMessage === "\"one.seven\" did not include substring that matched regex (-)?(\\d+)(\\.\\d*)?, and \"one.seven\" did not include substring that matched regex 1.8")
val caught3 = intercept[TestFailedException] {
"one.seven" should (include regex (decimalRegex) or include regex ("1.8"))
}
assert(caught3.getMessage === "\"one.seven\" did not include substring that matched regex (-)?(\\d+)(\\.\\d*)?, and \"one.seven\" did not include substring that matched regex 1.8")
}
def `should throw TestFailedException if the string includes substring that matched regex specified as a string when used in a logical-and expression used with not` {
val caught1 = intercept[TestFailedException] {
"1.7" should (not include regex ("1.8") and (not include regex (decimalRegex)))
}
assert(caught1.getMessage === "\"1.7\" did not include substring that matched regex 1.8, but \"1.7\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught2 = intercept[TestFailedException] {
"1.7" should ((not include regex ("1.8")) and (not include regex (decimalRegex)))
}
assert(caught2.getMessage === "\"1.7\" did not include substring that matched regex 1.8, but \"1.7\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught3 = intercept[TestFailedException] {
"1.7" should (not include regex ("1.8") and not include regex (decimalRegex))
}
assert(caught3.getMessage === "\"1.7\" did not include substring that matched regex 1.8, but \"1.7\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught4 = intercept[TestFailedException] {
"a1.7" should (not include regex ("1.8") and (not include regex (decimalRegex)))
}
assert(caught4.getMessage === "\"a1.7\" did not include substring that matched regex 1.8, but \"a1.7\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught5 = intercept[TestFailedException] {
"1.7b" should ((not include regex ("1.8")) and (not include regex (decimalRegex)))
}
assert(caught5.getMessage === "\"1.7b\" did not include substring that matched regex 1.8, but \"1.7b\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught6 = intercept[TestFailedException] {
"a1.7b" should (not include regex ("1.8") and not include regex (decimalRegex))
}
assert(caught6.getMessage === "\"a1.7b\" did not include substring that matched regex 1.8, but \"a1.7b\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
}
def `should throw TestFailedException if the string includes substring that matched regex specified as a string when used in a logical-or expression used with not` {
val caught1 = intercept[TestFailedException] {
"1.7" should (not include regex (decimalRegex) or (not include regex ("1.7")))
}
assert(caught1.getMessage === "\"1.7\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?, and \"1.7\" included substring that matched regex 1.7")
val caught2 = intercept[TestFailedException] {
"1.7" should ((not include regex (decimalRegex)) or (not include regex ("1.7")))
}
assert(caught2.getMessage === "\"1.7\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?, and \"1.7\" included substring that matched regex 1.7")
val caught3 = intercept[TestFailedException] {
"1.7" should (not include regex (decimalRegex) or not include regex ("1.7"))
}
assert(caught3.getMessage === "\"1.7\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?, and \"1.7\" included substring that matched regex 1.7")
val caught4 = intercept[TestFailedException] {
"1.7" should (not (include regex (decimalRegex)) or not (include regex ("1.7")))
}
assert(caught4.getMessage === "\"1.7\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?, and \"1.7\" included substring that matched regex 1.7")
val caught5 = intercept[TestFailedException] {
"a1.7" should (not include regex (decimalRegex) or (not include regex ("1.7")))
}
assert(caught5.getMessage === "\"a1.7\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?, and \"a1.7\" included substring that matched regex 1.7")
val caught6 = intercept[TestFailedException] {
"1.7b" should ((not include regex (decimalRegex)) or (not include regex ("1.7")))
}
assert(caught6.getMessage === "\"1.7b\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?, and \"1.7b\" included substring that matched regex 1.7")
val caught7 = intercept[TestFailedException] {
"a1.7b" should (not include regex (decimalRegex) or not include regex ("1.7"))
}
assert(caught7.getMessage === "\"a1.7b\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?, and \"a1.7b\" included substring that matched regex 1.7")
val caught8 = intercept[TestFailedException] {
"a1.7b" should (not (include regex (decimalRegex)) or not (include regex ("1.7")))
}
assert(caught8.getMessage === "\"a1.7b\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?, and \"a1.7b\" included substring that matched regex 1.7")
}
}
}
}
/*
def `should do nothing if the string includes substring that matched regex specified as a string` {
"1.78" should include regex ("1.7")
"21.7" should include regex ("1.7")
"21.78" should include regex ("1.7")
"1.7" should include regex (decimalRegex)
"21.7" should include regex (decimalRegex)
"1.78" should include regex (decimalRegex)
"a -1.8 difference" should include regex (decimalRegex)
"b8" should include regex (decimalRegex)
"8x" should include regex (decimalRegex)
"1.x" should include regex (decimalRegex)
}
def `should do nothing if the string does not include substring that matched regex specified as a string when used in a logical-and expression` {
"a1.7" should (include regex (decimalRegex) and (include regex (decimalRegex)))
"1.7b" should (include regex (decimalRegex) and (include regex (decimalRegex)))
"a1.7b" should (include regex (decimalRegex) and (include regex (decimalRegex)))
"a1.7" should ((include regex (decimalRegex)) and (include regex (decimalRegex)))
"1.7b" should ((include regex (decimalRegex)) and (include regex (decimalRegex)))
"a1.7b" should ((include regex (decimalRegex)) and (include regex (decimalRegex)))
"a1.7" should (include regex (decimalRegex) and include regex (decimalRegex))
"1.7b" should (include regex (decimalRegex) and include regex (decimalRegex))
"a1.7b" should (include regex (decimalRegex) and include regex (decimalRegex))
}
def `should do nothing if the string does not include substring that matched regex specified as a string when used in a logical-or expression` {
"a1.7" should (include regex ("hello") or (include regex (decimalRegex)))
"1.7b" should (include regex ("hello") or (include regex (decimalRegex)))
"a1.7b" should (include regex ("hello") or (include regex (decimalRegex)))
"a1.7" should ((include regex ("hello")) or (include regex (decimalRegex)))
"1.7b" should ((include regex ("hello")) or (include regex (decimalRegex)))
"a1.7b" should ((include regex ("hello")) or (include regex (decimalRegex)))
"a1.7" should (include regex ("hello") or include regex (decimalRegex))
"1.7b" should (include regex ("hello") or include regex (decimalRegex))
"a1.7b" should (include regex ("hello") or include regex (decimalRegex))
}
def `should throw TestFailedException if the string does matches substring that matched regex specified as a string when used with not` {
val caught1 = intercept[TestFailedException] {
"a1.7" should not { include regex ("1.7") }
}
assert(caught1.getMessage === "\"a1.7\" included substring that matched regex 1.7")
val caught2 = intercept[TestFailedException] {
"1.7b" should not { include regex (decimalRegex) }
}
assert(caught2.getMessage === "\"1.7b\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught3 = intercept[TestFailedException] {
"a-1.8b" should not { include regex (decimalRegex) }
}
assert(caught3.getMessage === "\"a-1.8b\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
}
def `should throw TestFailedException if the string includes substring that matched regex specified as a string when used in a logical-and expression used with not` {
val caught1 = intercept[TestFailedException] {
"a1.7" should (not include regex ("1.8") and (not include regex (decimalRegex)))
}
assert(caught1.getMessage === "\"a1.7\" did not include substring that matched regex 1.8, but \"a1.7\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught2 = intercept[TestFailedException] {
"1.7b" should ((not include regex ("1.8")) and (not include regex (decimalRegex)))
}
assert(caught2.getMessage === "\"1.7b\" did not include substring that matched regex 1.8, but \"1.7b\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
val caught3 = intercept[TestFailedException] {
"a1.7b" should (not include regex ("1.8") and not include regex (decimalRegex))
}
assert(caught3.getMessage === "\"a1.7b\" did not include substring that matched regex 1.8, but \"a1.7b\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?")
}
def `should throw TestFailedException if the string includes substring that matched regex specified as a string when used in a logical-or expression used with not` {
val caught1 = intercept[TestFailedException] {
"a1.7" should (not include regex (decimalRegex) or (not include regex ("1.7")))
}
assert(caught1.getMessage === "\"a1.7\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?, and \"a1.7\" included substring that matched regex 1.7")
val caught2 = intercept[TestFailedException] {
"1.7b" should ((not include regex (decimalRegex)) or (not include regex ("1.7")))
}
assert(caught2.getMessage === "\"1.7b\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?, and \"1.7b\" included substring that matched regex 1.7")
val caught3 = intercept[TestFailedException] {
"a1.7b" should (not include regex (decimalRegex) or not include regex ("1.7"))
}
assert(caught3.getMessage === "\"a1.7b\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?, and \"a1.7b\" included substring that matched regex 1.7")
val caught4 = intercept[TestFailedException] {
"a1.7b" should (not (include regex (decimalRegex)) or not (include regex ("1.7")))
}
assert(caught4.getMessage === "\"a1.7b\" included substring that matched regex (-)?(\\d+)(\\.\\d*)?, and \"a1.7b\" included substring that matched regex 1.7")
}
*/
| apache-2.0 |
Sage-Bionetworks/Synapse-Repository-Services | services/repository-managers/src/main/java/org/sagebionetworks/repo/manager/table/metadata/providers/DatasetMetadataIndexProvider.java | 3840 | package org.sagebionetworks.repo.manager.table.metadata.providers;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import org.sagebionetworks.repo.manager.NodeManager;
import org.sagebionetworks.repo.manager.table.metadata.DefaultColumnModel;
import org.sagebionetworks.repo.manager.table.metadata.MetadataIndexProvider;
import org.sagebionetworks.repo.model.NodeDAO;
import org.sagebionetworks.repo.model.ObjectType;
import org.sagebionetworks.repo.model.UserInfo;
import org.sagebionetworks.repo.model.annotation.v2.Annotations;
import org.sagebionetworks.repo.model.jdo.KeyFactory;
import org.sagebionetworks.repo.model.table.ColumnModel;
import org.sagebionetworks.repo.model.table.ColumnType;
import org.sagebionetworks.repo.model.table.DatasetItem;
import org.sagebionetworks.repo.model.table.ReplicationType;
import org.sagebionetworks.repo.model.table.SubType;
import org.sagebionetworks.repo.model.table.ViewObjectType;
import org.sagebionetworks.table.cluster.view.filter.FlatIdAndVersionFilter;
import org.sagebionetworks.table.cluster.view.filter.FlatIdsFilter;
import org.sagebionetworks.table.cluster.view.filter.IdVersionPair;
import org.sagebionetworks.table.cluster.view.filter.ViewFilter;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.google.common.collect.Sets;
@Service
public class DatasetMetadataIndexProvider implements MetadataIndexProvider {
private final NodeDAO nodeDao;
private final NodeManager nodeManager;
static final DefaultColumnModel DATASET_FILE_COLUMNS = DefaultColumnModel.builder(ViewObjectType.DATASET)
.withObjectField(Constants.FILE_DEFAULT_COLUMNS).build();
@Autowired
public DatasetMetadataIndexProvider(NodeDAO nodeDao, NodeManager nodeManager) {
super();
this.nodeDao = nodeDao;
this.nodeManager = nodeManager;
}
@Override
public ViewObjectType getObjectType() {
return ViewObjectType.DATASET;
}
@Override
public ColumnType getIdColumnType() {
return ColumnType.ENTITYID;
}
@Override
public ColumnType getParentIdColumnType() {
return ColumnType.ENTITYID;
}
@Override
public ColumnType getBenefactorIdColumnType() {
return ColumnType.ENTITYID;
}
@Override
public DefaultColumnModel getDefaultColumnModel(Long viewTypeMask) {
return DATASET_FILE_COLUMNS;
}
@Override
public Optional<Annotations> getAnnotations(UserInfo userInfo, String objectId) {
return Optional.ofNullable(nodeManager.getUserAnnotations(userInfo, objectId));
}
@Override
public void updateAnnotations(UserInfo userInfo, String objectId, Annotations annotations) {
nodeManager.updateUserAnnotations(userInfo, objectId, annotations);
}
@Override
public boolean canUpdateAnnotation(ColumnModel model) {
return true;
}
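// Builds the replication filter from the dataset's explicit item list: each DatasetItem is
// mapped to an (entity id, version) pair, and only file sub-types are included (see getSubTypes()).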
@Override
public ViewFilter getViewFilter(Long viewId) {
List<DatasetItem> items = nodeDao.getDatasetItems(viewId);
Set<IdVersionPair> scope = items.stream().map(i -> new IdVersionPair()
.setId(KeyFactory.stringToKey(i.getEntityId())).setVersion(i.getVersionNumber()))
.collect(Collectors.toSet());
return new FlatIdAndVersionFilter(ReplicationType.ENTITY, getSubTypes(), scope);
}
@Override
public ViewFilter getViewFilter(Long viewTypeMask, Set<Long> containerIds) {
return new FlatIdsFilter(ReplicationType.ENTITY, getSubTypes(), containerIds);
}
Set<SubType> getSubTypes() {
// currently only files are supported.
return Sets.newHashSet(SubType.file);
}
@Override
public void validateScopeAndType(Long typeMask, Set<Long> scopeIds, int maxContainersPerView) {
if (scopeIds != null && scopeIds.size() > maxContainersPerView) {
throw new IllegalArgumentException(
String.format("Maximum of %,d items in a dataset exceeded.", maxContainersPerView));
}
}
}
| apache-2.0 |
hortonworks/cloudbreak | freeipa/src/main/java/com/sequenceiq/freeipa/converter/network/NetworkRequestToNetworkConverter.java | 1987 | package com.sequenceiq.freeipa.converter.network;
import java.util.Map;
import java.util.Optional;
import javax.inject.Inject;
import org.springframework.core.convert.converter.Converter;
import org.springframework.stereotype.Component;
import com.sequenceiq.cloudbreak.common.converter.MissingResourceNameGenerator;
import com.sequenceiq.cloudbreak.common.exception.BadRequestException;
import com.sequenceiq.cloudbreak.common.json.Json;
import com.sequenceiq.cloudbreak.common.mappable.ProviderParameterCalculator;
import com.sequenceiq.cloudbreak.common.type.APIResourceType;
import com.sequenceiq.common.api.type.OutboundInternetTraffic;
import com.sequenceiq.freeipa.api.v1.freeipa.stack.model.common.network.NetworkRequest;
import com.sequenceiq.freeipa.entity.Network;
@Component
public class NetworkRequestToNetworkConverter implements Converter<NetworkRequest, Network> {
@Inject
private MissingResourceNameGenerator missingResourceNameGenerator;
@Inject
private ProviderParameterCalculator providerParameterCalculator;
@Override
public Network convert(NetworkRequest source) {
Network network = new Network();
network.setName(missingResourceNameGenerator.generateName(APIResourceType.NETWORK));
network.setOutboundInternetTraffic(getOutboundInternetTraffic(source));
network.setNetworkCidrs(source.getNetworkCidrs());
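// Provider-specific settings from the request are flattened into a map and stored as the
// network's JSON attributes; a serialization failure is surfaced as a bad request.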
Map<String, Object> parameters = providerParameterCalculator.get(source).asMap();
if (parameters != null) {
try {
network.setAttributes(new Json(parameters));
} catch (IllegalArgumentException ignored) {
throw new BadRequestException("Invalid parameters");
}
}
return network;
}
private OutboundInternetTraffic getOutboundInternetTraffic(NetworkRequest network) {
return Optional.ofNullable(network.getOutboundInternetTraffic()).orElse(OutboundInternetTraffic.ENABLED);
}
}
| apache-2.0 |
betterplace/ablass | spec/controllers/sin_projects_controller_spec.rb | 512 | require 'spec_helper'
describe SinProjectsController, type: :controller do
describe "GET 'index'" do
let :sin do FactoryGirl.create(:sin) end
it "returns http success" do
get 'index', sin_id: sin
expect(response).to be_success
end
end
describe "GET 'show'" do
let :sin_project do
FactoryGirl.create(:sin_project)
end
it "returns http success" do
get 'show', sin_id: sin_project.sin, id: sin_project
expect(response).to be_success
end
end
end
| apache-2.0 |
zy4kamu/Coda | src/nlp-stack/Dictionary/Core/DictionaryTrie/DictionaryTrieBinaryFile.cpp | 35504 | /**
* DictionaryTrieBinaryFileCreator.cpp
*/
#include "DictionaryTrieBinaryFile.h"
#include "Dictionary.h"
#define LINK_FILE_PATH "2015_03_23_links.txt"
#define MAX_BUFFER_SIZE 15000000
/**
* Constructor of DictionaryTrieBinaryFileCreator
*/
DictionaryTrieBinaryFileCreator::DictionaryTrieBinaryFileCreator(Dictionary* _dic) : DictionaryTrieBuild(_dic)
{
}
/**
* Destructor of DictionaryTrieBinaryFileCreator
*/
DictionaryTrieBinaryFileCreator::~DictionaryTrieBinaryFileCreator(void)
{
dic = NULL;
}
void DictionaryTrieBinaryFileCreator::writeToBinaryFile(string _filePath)
{
wcout << "Write Trie to binary file ..." << endl;
ofstream f(_filePath.c_str(), ios::out|ios::binary);
f.write((char *)&buffer[0], bufferSize);
f.close();
wcout << "Write Trie to binary file done!" << endl;
}
vector<unsigned char> DictionaryTrieBinaryFileCreator::binaryDictionaryNode(DictionaryNode* _node, int _parentId)
{
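// Node record layout: 1 byte for the node's character, then the parent node id packed
// big-endian into 3 bytes (e.g. 70000 -> {0x01, 0x11, 0x70}).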
vector<unsigned char> _result;
_result.push_back(getDictionary()->getDictionaryTools()->wcharToChar(_node->getCharacter()));
_result.push_back((unsigned char) (_parentId / 65536));
_parentId %= 65536;
_result.push_back((unsigned char) (_parentId / 256));
_result.push_back((unsigned char) (_parentId % 256));
return _result;
}
vector<unsigned char> DictionaryTrieBinaryFileCreator::binaryDictionaryNodeModel(DictionaryNodeModel * _nodeModel, int _nodeId)
{
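// NodeModel record layout: model id in 2 bytes, lemma id in 3 bytes, and the owning
// node id in 3 bytes, all big-endian.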
vector<unsigned char> _result;
int _modelId = _nodeModel->getModelId();
_result.push_back((unsigned char) (_modelId / 256));
_result.push_back((unsigned char) (_modelId % 256));
int _lemmaId = _nodeModel->getLemmaId();
_result.push_back((unsigned char) (_lemmaId / 65536));
_lemmaId %= 65536;
_result.push_back((unsigned char) (_lemmaId / 256));
_result.push_back((unsigned char) (_lemmaId % 256));
_result.push_back((unsigned char) (_nodeId / 65536));
_nodeId %= 65536;
_result.push_back((unsigned char) (_nodeId / 256));
_result.push_back((unsigned char) (_nodeId % 256));
return _result;
}
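/**
 * Append a vector of bytes to the in-memory buffer
 */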
void DictionaryTrieBinaryFileCreator::writeToBuffer(vector<unsigned char> charVector)
{
for (int i = 0; i < (int) charVector.size(); ++i)
{
buffer[bufferSize] = charVector.at(i);
bufferSize++;
}
}
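/**
 * Write all trie nodes to the buffer in breadth-first order;
 * a node's id is its position in this traversal, so children reference their parent by that id
 */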
void DictionaryTrieBinaryFileCreator::addAllDictionaryNodeToBuffer(void)
{
wcout << "Add all DictionaryNode to buffer ..." << endl;
wcout << "numberOfNodes = " << numberOfNodes << endl;
// write number of nodes
vector<unsigned char> binaryNumberOfNodes = getDictionary()->getDictionaryTools()->intToCharVector3(numberOfNodes);
writeToBuffer(binaryNumberOfNodes);
queue<DictionaryNode*> nodeQueue = queue<DictionaryNode*>();
nodeQueue.push(root);
int currentNodeId = 0;
vector<unsigned char> _binNode = binaryDictionaryNode(root, -1);
// write root
writeToBuffer(_binNode);
while (!nodeQueue.empty())
{
DictionaryNode* currentNode = nodeQueue.front();
nodeQueue.pop();
vector<DictionaryNode*> childrenNodes = currentNode->getChildren();
for (int i = 0; i < (int) childrenNodes.size(); ++i)
{
DictionaryNode* childNode = childrenNodes.at(i);
nodeQueue.push(childNode);
_binNode = binaryDictionaryNode(childNode, currentNodeId);
writeToBuffer(_binNode);
}
currentNodeId++;
}
wcout << "Add DictionaryNode done! currentNodeId = " << currentNodeId << endl;
}
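/**
 * Write all DictionaryNodeModels to the buffer, traversing the trie in the same breadth-first order
 * so that each model is stored together with the id of the node it belongs to
 */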
void DictionaryTrieBinaryFileCreator::addAllDictionaryNodeModelToBuffer(void)
{
wcout << "Add all DictionaryNodeModel to buffer ... nodeModelMap.size() = " << nodeModelMap.size() << endl;
wcout << "......................................... numberOfNodeModels = " << numberOfNodeModels << endl;
    // write number of node models
vector<unsigned char> binaryNumberOfNodeModels = getDictionary()->getDictionaryTools()->intToCharVector3((int) nodeModelMap.size());
writeToBuffer(binaryNumberOfNodeModels);
queue<DictionaryNode*> nodeQueue = queue<DictionaryNode*>();
nodeQueue.push(root);
int currentNodeId = 0;
int nodeModelCount = 0;
while (!nodeQueue.empty())
{
DictionaryNode* currentNode = nodeQueue.front();
nodeQueue.pop();
vector<DictionaryNodeModel*> * _nodeModels = currentNode->getNodeModels();
for (int i = 0; i < (int) _nodeModels->size(); ++i)
{
DictionaryNodeModel* _nodeModel = _nodeModels->at(i);
vector<unsigned char> _binNodeModel = binaryDictionaryNodeModel(_nodeModel, currentNodeId);
writeToBuffer(_binNodeModel);
nodeModelCount++;
}
vector<DictionaryNode*> childrenNodes = currentNode->getChildren();
for (int i = 0; i < (int) childrenNodes.size(); ++i)
{
DictionaryNode* childNode = childrenNodes.at(i);
nodeQueue.push(childNode);
}
currentNodeId++;
}
wcout << "Add NodeModels done! nodeModelCount = " << nodeModelCount << endl;
}
/*
* add a link between 2 lemmas by lemmaIds
*/
void DictionaryTrieBinaryFileCreator::addLinkByLemmaIdsToBuffer(int fromLemmaId, int toLemmaId)
{
    // if fromLemmaId or toLemmaId is out of range then return
// @todo : maxLemmaId, min = 0 or 1 ?
if (fromLemmaId <= 0 || toLemmaId <= 0)
{
return;
}
vector<unsigned char> binaryLemmaId = getDictionary()->getDictionaryTools()->intToCharVector3(fromLemmaId);
writeToBuffer(binaryLemmaId);
binaryLemmaId = getDictionary()->getDictionaryTools()->intToCharVector3(toLemmaId);
writeToBuffer(binaryLemmaId);
countLink++;
}
/*
* add a link between 2 lemmas
* eg : 1#2
*/
void DictionaryTrieBinaryFileCreator::addLinkToBuffer(wstring _line)
{
// find first occurrence of '#'
int sharpPos = _line.find(L"#");
if (sharpPos >= 0)
{
// convert 2 strings to int
wstring fromLemmaIndexStr = _line.substr(0, sharpPos);
//int _fromLemmaId = _wtoi(fromLemmaIndexStr.c_str());
int _fromLemmaId = atoi(this->getDictionary()->getDictionaryTools()->wstring2string(fromLemmaIndexStr).c_str());
wstring toLemmaIndexStr = _line.substr(sharpPos + 1);
//int _toLemmaId = _wtoi(toLemmaIndexStr.c_str());
int _toLemmaId = atoi(this->getDictionary()->getDictionaryTools()->wstring2string(toLemmaIndexStr).c_str());
addLinkByLemmaIdsToBuffer(_fromLemmaId, _toLemmaId);
}
}
/**
* Read links from file
*/
void DictionaryTrieBinaryFileCreator::readLinksFromFileToBuffer(string _filePath)
{
// open file
wifstream fin(_filePath.c_str());
    // set encoding to UTF-8
#ifdef MSVC
fin.imbue(locale(fin.getloc(), new codecvt_utf8<wchar_t>));
#else
//fin.imbue(std::locale("ru_RU.UTF-8"));
fin.imbue(std::locale("en_US.UTF-8"));
#endif
wstring line;
// read number of links
getline(fin, line);
numberOfLinks = atoi(this->getDictionary()->getDictionaryTools()->wstring2string(line).c_str());
vector<unsigned char> binaryNumberOfLinks = getDictionary()->getDictionaryTools()->intToCharVector3(numberOfLinks);
writeToBuffer(binaryNumberOfLinks);
wcout << "Read links to buffer... numberOfLinks = " << numberOfLinks << endl;
countLink = 0;
// read links
while(getline(fin, line))
{
// must have length >= 3, minimum 1#2
if (line.length() >= 3)
{
addLinkToBuffer(line);
}
}
fin.close();
wcout << "Read links to buffer done! countLink = " << countLink << endl;
}
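/**
 * Write all DictionaryTrieModels to the buffer: for each model, the number of elements
 * followed by each element's suffix, begin-with-PO flag and feature list id
 */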
void DictionaryTrieBinaryFileCreator::addAllDictionaryTrieModelToBuffer(void)
{
wcout << "addAllDictionaryTrieModelToBuffer ... getNumberOfModels() = " << trieModelMap.size() << endl;
vector<unsigned char> binaryNumberOfTrieModel = getDictionary()->getDictionaryTools()->intToCharVector2(trieModelMap.size());
writeToBuffer(binaryNumberOfTrieModel);
int trieModelCount = 0;
    // write trieModelMap in order
for (int ind = 1; ind <= (int) trieModelMap.size(); ++ind)
{
if (trieModelMap.count(ind) == 0)
{
wcout << "ERROR 3 : DictionaryTrieBinaryFileCreator::addAllDictionaryTrieModelToBuffer : ind = " << ind << endl;
}
DictionaryTrieModel* _trieModel = trieModelMap.at(ind);
vector<DictionaryTrieModelElement*> * _trieModelElements = _trieModel->getDictionaryTrieModelElements();
//wstring _str = L"";
vector<unsigned char> binNumberOfTrieModelElement = getDictionary()->getDictionaryTools()->intToCharVector1((int) _trieModelElements->size());
writeToBuffer(binNumberOfTrieModelElement);
for (int i = 0; i < (int) _trieModelElements->size(); ++i)
{
DictionaryTrieModelElement* _trieModelElement = _trieModelElements->at(i);
vector<unsigned char> binSuffixLen = getDictionary()->getDictionaryTools()->intToCharVector1((int) _trieModelElement->getSuffix()->size());
writeToBuffer(binSuffixLen);
vector<unsigned char> binSuffix = getDictionary()->getDictionaryTools()->wstringToCharVector(*(_trieModelElement->getSuffix()));
writeToBuffer(binSuffix);
vector<unsigned char> binBeginWithPo = _trieModelElement->isBeginWithPo() ? getDictionary()->getDictionaryTools()->intToCharVector1(1) : getDictionary()->getDictionaryTools()->intToCharVector1(0);
writeToBuffer(binBeginWithPo);
vector<unsigned char> binFeatureListId = getDictionary()->getDictionaryTools()->intToCharVector2(_trieModelElement->getFeatureListId());
writeToBuffer(binFeatureListId);
}
trieModelCount++;
}
wcout << "addAllTrieModelToBuffer done! trieModelCount = " << trieModelCount << endl;
}
// add featureListMap to buffer
void DictionaryTrieBinaryFileCreator::addFeatureListMapToBuffer(void)
{
wcout << "addFeatureListMapToBuffer ... featureListMap.size() = " << featureListMap.size() << endl;
vector<unsigned char> binNumberOfFeatureList = getDictionary()->getDictionaryTools()->intToCharVector2((int) featureListMap.size());
writeToBuffer(binNumberOfFeatureList);
int count = 0;
// save featureListMap in order
for (int iFeatureList = 1; iFeatureList <= (int) featureListMap.size(); ++iFeatureList)
{
if (featureListMap.count(iFeatureList) == 0)
{
wcout << "ERROR 1 : addFeatureListMapToBuffer : " << iFeatureList << endl;
continue;
}
vector<int> featureList = featureListMap.at(iFeatureList);
vector<unsigned char> binNumberOfFeature = getDictionary()->getDictionaryTools()->intToCharVector1((int) featureList.size());
writeToBuffer(binNumberOfFeature);
for (int i = 0; i < (int) featureList.size(); ++i)
{
vector<unsigned char> binFeatureId = getDictionary()->getDictionaryTools()->intToCharVector1((int) featureList.at(i));
writeToBuffer(binFeatureId);
}
count++;
}
wcout << "addFeatureListMapToBuffer DONE ! ... count = " << count << endl;
}
// add featureMap to buffer
void DictionaryTrieBinaryFileCreator::addFeatureMapToBuffer(void)
{
wcout << "addFeatureMapToBuffer ... featureMap.size() = " << featureMap.size() << endl;
vector<unsigned char> binNumberOfFeature = getDictionary()->getDictionaryTools()->intToCharVector1((int) featureMap.size());
writeToBuffer(binNumberOfFeature);
int count = 0;
// save featureMap in order
for (int iFeature = 1; iFeature <= (int) featureMap.size(); ++iFeature)
{
if (featureMap.count(iFeature) == 0)
{
wcout << "ERROR 2 : addFeatureMapToBuffer : " << iFeature << endl;
continue;
}
wstring feature = featureMap.at(iFeature);
vector<unsigned char> binFeatureLength = getDictionary()->getDictionaryTools()->intToCharVector1((int) feature.size());
writeToBuffer(binFeatureLength);
vector<unsigned char> binFeature = getDictionary()->getDictionaryTools()->wstringToCharVector(feature);
writeToBuffer(binFeature);
count++;
}
wcout << "addFeatureMapToBuffer DONE ! ... count = " << count << endl;
}
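/**
 * Build the binary buffer: nodes, node models, links, trie models, feature lists and features, in that order
 */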
void DictionaryTrieBinaryFileCreator::buildBuffer(void)
{
wcout << "Build buffer ..." << endl;
buffer = new unsigned char[MAX_BUFFER_SIZE];
bufferSize = 0;
addAllDictionaryNodeToBuffer();
addAllDictionaryNodeModelToBuffer();
readLinksFromFileToBuffer(this->getDictionary()->getPathToData() + LINK_FILE_PATH);
addAllDictionaryTrieModelToBuffer();
addFeatureListMapToBuffer();
addFeatureMapToBuffer();
wcout << "Build done" << endl;
}
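/**
 * Build the binary buffer from a plain-text dictionary dump, writing the same sections
 * in the same order as buildBuffer
 */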
void DictionaryTrieBinaryFileCreator::buildBufferFromTextFile(string _filePath)
{
wcout << "Build buffer from text file..." << endl;
buffer = new unsigned char[MAX_BUFFER_SIZE];
bufferSize = 0;
wifstream fi(_filePath.c_str());
    // set encoding to UTF-8
#ifdef MSVC
fi.imbue(locale(fi.getloc(), new codecvt_utf8<wchar_t>));
#else
//fin.imbue(std::locale("ru_RU.UTF-8"));
fi.imbue(std::locale("en_US.UTF-8"));
#endif
wstring line;
// Node
getline(fi, line);
int _number_of_nodes = atoi(this->getDictionary()->getDictionaryTools()->wstring2string(line).c_str());
vector<unsigned char> _binary_number_of_nodes = getDictionary()->getDictionaryTools()->intToCharVector3(_number_of_nodes);
writeToBuffer(_binary_number_of_nodes);
for (int i = 0; i < _number_of_nodes; ++i)
{
getline(fi, line);
/*
if (i == 35627)
{
wcout << "NodeId = " << i << " Char = " << line;
}
*/
vector<unsigned char> _binary_node_char;
_binary_node_char.push_back(getDictionary()->getDictionaryTools()->wcharToChar(line.at(0)));
writeToBuffer(_binary_node_char);
getline(fi, line);
/*
if (i == 35627)
{
wcout << " CharId = " << (int) getDictionary()->getDictionaryTools()->wcharToChar(line.at(0)) << " ParentId = " << endl;
}
*/
int _node_parent_id = atoi(this->getDictionary()->getDictionaryTools()->wstring2string(line).c_str());
vector<unsigned char> _binary_node_parent_id = getDictionary()->getDictionaryTools()->intToCharVector3(_node_parent_id);
writeToBuffer(_binary_node_parent_id);
}
wcout << "#build : Node ok: " << _number_of_nodes << endl;
// NodeModel
getline(fi, line);
int _number_of_node_models = atoi(this->getDictionary()->getDictionaryTools()->wstring2string(line).c_str());
vector<unsigned char> _binary_number_of_node_models = getDictionary()->getDictionaryTools()->intToCharVector3(_number_of_node_models);
writeToBuffer(_binary_number_of_node_models);
for (int i = 0; i < _number_of_node_models; ++i)
{
getline(fi, line);
int _model_id = atoi(this->getDictionary()->getDictionaryTools()->wstring2string(line).c_str());
vector<unsigned char> _binary_model_id = getDictionary()->getDictionaryTools()->intToCharVector2(_model_id);
writeToBuffer(_binary_model_id);
getline(fi, line);
int _lemma_id = atoi(this->getDictionary()->getDictionaryTools()->wstring2string(line).c_str());
vector<unsigned char> _binary_lemma_id = getDictionary()->getDictionaryTools()->intToCharVector3(_lemma_id);
writeToBuffer(_binary_lemma_id);
getline(fi, line);
int _node_id = atoi(this->getDictionary()->getDictionaryTools()->wstring2string(line).c_str());
vector<unsigned char> _binary_node_id = getDictionary()->getDictionaryTools()->intToCharVector3(_node_id);
writeToBuffer(_binary_node_id);
/*
if (_model_id == 872)
{
wcout << "ModelId = " << _model_id << " LemmaId = " << _lemma_id << " NodeId = " << _node_id << endl;
}
*/
}
wcout << "#build : NodeModel ok: " << _number_of_node_models << endl;
// Links
getline(fi, line);
int _number_of_links = atoi(this->getDictionary()->getDictionaryTools()->wstring2string(line).c_str());
vector<unsigned char> _binary_number_of_links = getDictionary()->getDictionaryTools()->intToCharVector3(_number_of_links);
writeToBuffer(_binary_number_of_links);
for (int i = 0; i < _number_of_links; ++i)
{
getline(fi, line);
int _from_lemma_id = atoi(this->getDictionary()->getDictionaryTools()->wstring2string(line).c_str());
vector<unsigned char> _binary_from_lemma_id = getDictionary()->getDictionaryTools()->intToCharVector3(_from_lemma_id);
writeToBuffer(_binary_from_lemma_id);
getline(fi, line);
int _to_lemma_id = atoi(this->getDictionary()->getDictionaryTools()->wstring2string(line).c_str());
vector<unsigned char> _binary_to_lemma_id = getDictionary()->getDictionaryTools()->intToCharVector3(_to_lemma_id);
writeToBuffer(_binary_to_lemma_id);
}
wcout << "#build : Links ok: " << _number_of_links << endl;
// TrieModel
getline(fi, line);
int _number_of_trie_models = atoi(this->getDictionary()->getDictionaryTools()->wstring2string(line).c_str());
wcout << "#build: _number_of_trie_models = " << _number_of_trie_models << endl;
vector<unsigned char> _binary_number_of_trie_models = getDictionary()->getDictionaryTools()->intToCharVector2(_number_of_trie_models);
writeToBuffer(_binary_number_of_trie_models);
for (int i = 1; i <= _number_of_trie_models; ++i)
{
getline(fi, line);
int _number_of_trie_model_elements = atoi(this->getDictionary()->getDictionaryTools()->wstring2string(line).c_str());
vector<unsigned char> _binary_number_of_trie_model_elements = getDictionary()->getDictionaryTools()->intToCharVector1(_number_of_trie_model_elements);
writeToBuffer(_binary_number_of_trie_model_elements);
// wcout << "#WriteTM#" << i << ": elements=" << _number_of_trie_model_elements << endl;
for (int j = 0; j < _number_of_trie_model_elements; ++j)
{
// suffix
getline(fi, line);
// wcout << "_suffix = " << line;
int _suffix_length = (int) line.length();
vector<unsigned char> _binary_suffix_length = getDictionary()->getDictionaryTools()->intToCharVector1(_suffix_length);
writeToBuffer(_binary_suffix_length);
vector<unsigned char> _binary_suffix = getDictionary()->getDictionaryTools()->wstringToCharVector(line);
writeToBuffer(_binary_suffix);
// begin with PO
getline(fi, line);
int _begin_with_po = atoi(this->getDictionary()->getDictionaryTools()->wstring2string(line).c_str());
vector<unsigned char> _binary_begin_with_po = getDictionary()->getDictionaryTools()->intToCharVector1(_begin_with_po);
writeToBuffer(_binary_begin_with_po);
// featureListId
getline(fi, line);
int _feature_list_id = atoi(this->getDictionary()->getDictionaryTools()->wstring2string(line).c_str());
vector<unsigned char> _binary_feature_list_id = getDictionary()->getDictionaryTools()->intToCharVector2(_feature_list_id);
writeToBuffer(_binary_feature_list_id);
// wcout << " PO = " << _begin_with_po << " featureListId = " << _feature_list_id << endl;
}
}
wcout << "#build : TrieModels ok: " << _number_of_trie_models << endl;
// Feature lists
getline(fi, line);
int _number_of_feature_lists = atoi(this->getDictionary()->getDictionaryTools()->wstring2string(line).c_str());
wcout << "_number_of_feature_lists = " << _number_of_feature_lists << endl;
vector<unsigned char> _binary_number_of_feature_lists = getDictionary()->getDictionaryTools()->intToCharVector2(_number_of_feature_lists);
writeToBuffer(_binary_number_of_feature_lists);
for (int i = 0; i < _number_of_feature_lists; ++i)
{
getline(fi, line);
int _number_of_features = atoi(this->getDictionary()->getDictionaryTools()->wstring2string(line).c_str());
// wcout << " _number_of_features = " << _number_of_features << ": ";
vector<unsigned char> _binary_number_of_features = getDictionary()->getDictionaryTools()->intToCharVector1(_number_of_features);
writeToBuffer(_binary_number_of_features);
for (int j = 0; j < _number_of_features; ++j)
{
getline(fi, line);
int _feature_id = atoi(this->getDictionary()->getDictionaryTools()->wstring2string(line).c_str());
// wcout << _feature_id << "; ";
vector<unsigned char> _binary_feature_id = getDictionary()->getDictionaryTools()->intToCharVector1(_feature_id);
writeToBuffer(_binary_feature_id);
}
// wcout << endl;
}
wcout << "#build : Feature list ok: " << _number_of_feature_lists << endl;
// Features map
getline(fi, line);
int _number_of_features = atoi(this->getDictionary()->getDictionaryTools()->wstring2string(line).c_str());
vector<unsigned char> _binary_number_of_features = getDictionary()->getDictionaryTools()->intToCharVector1(_number_of_features);
writeToBuffer(_binary_number_of_features);
for (int i = 1; i <= _number_of_features; ++i)
{
//wcout << "@feature #" << i << endl;
// short feature
getline(fi, line);
int _short_feature_length = (int) line.length();
//wcout << _short_feature_length << endl;
//wcout << line << endl;
vector<unsigned char> _short_binary_feature_length = getDictionary()->getDictionaryTools()->intToCharVector1(_short_feature_length);
writeToBuffer(_short_binary_feature_length);
vector<unsigned char> _short_binary_feature = getDictionary()->getDictionaryTools()->wstringToCharVector(line);
writeToBuffer(_short_binary_feature);
// long feature
getline(fi, line);
int _long_feature_length = (int) line.length();
//wcout << _long_feature_length << endl;
//wcout << line << endl;
vector<unsigned char> _long_binary_feature_length = getDictionary()->getDictionaryTools()->intToCharVector1(_long_feature_length);
writeToBuffer(_long_binary_feature_length);
vector<unsigned char> _long_binary_feature = getDictionary()->getDictionaryTools()->wstringToCharVector(line);
writeToBuffer(_long_binary_feature);
}
wcout << "#build : Feature Map ok: " << _number_of_features << endl;
fi.close();
}
/**
* Constructor of DictionaryTrieBinaryFileReader
*/
DictionaryTrieBinaryFileReader::DictionaryTrieBinaryFileReader(Dictionary* _dic, string dictionaryFile) : DictionaryTrieBinaryFileCreator(_dic)
{
try
{
loadFromBinaryFile(dic->getPathToData() + dictionaryFile);
}
catch (int e)
{
if (e == FILE_NOT_FOUND_ERROR_CODE)
{
wcout << "Error : Dictionary binary data file not found : " << endl;
#ifdef MSVC
_getch();
#endif
exit(0);
}
}
}
/**
* Destructor of DictionaryTrieBinaryFileReader
*/
DictionaryTrieBinaryFileReader::~DictionaryTrieBinaryFileReader(void)
{
for (int i = 0; i < (int) DictionaryNodeList.size(); ++i)
{
DictionaryNode* dictionaryNode = DictionaryNodeList.at(i);
if (dictionaryNode)
{
delete dictionaryNode;
}
}
DictionaryNodeList.clear();
}
/**
* Load dictionary from binary file
*/
void DictionaryTrieBinaryFileReader::loadFromBinaryFile(string _filePath)
{
bool debug = false;
// wcout << "loadFromBinaryFile" << endl;
ifstream f(_filePath.c_str(), ios::in|ios::binary|ios::ate);
unsigned char* buffer;
if (f.is_open())
{
// get size of file
int size = (int) f.tellg();
// jump to begin of file
f.seekg(0, ios::beg);
//============= Read NodeVer3s ============================================================================================
// allocate buffer
buffer = new unsigned char[size];
// read file
f.read((char*) buffer, size);
// close file
f.close();
// set offset begin of buffer
int offset = 0;
// convert 3 bytes to number of NodeVer3
numberOfNodes = buffer[offset] * 65536 + buffer[offset + 1] * 256 + buffer[offset + 2];
if (debug)
{
wcout << "numberOfNodes = " << numberOfNodes << endl;
}
offset += 3;
// read list of NodeVer3
DictionaryNodeList = vector<DictionaryNode*>();
for (int _nodeId = 0; _nodeId < numberOfNodes; ++_nodeId)
{
// convert first byte to wchar_t
wchar_t _character = getDictionary()->getDictionaryTools()->charToWchar(buffer[offset]);
// convert 3 remaining bytes to _parentId
int _parentId = buffer[offset + 1] * 65536 + buffer[offset + 2] * 256 + buffer[offset + 3];
offset += 4;
// if (_nodeId == 35627)
// {
// wcout << "NodeId = " << _nodeId << " Char = " << _character << " ParentId = " << _parentId << endl;
// }
// create new NodeVer3
DictionaryNode* _node = new DictionaryNode();
_node->setCharacter(_character);
if (_parentId < 0 || _parentId >= _nodeId)
{
//wcout << "### Error ### : loadFromBinaryFile -> id = " << _nodeId << " parentId = " << _parentId << endl;
_node->setParentNode(NULL); // root
}
else
{
_node->setParentNode(DictionaryNodeList.at(_parentId));
DictionaryNodeList.at(_parentId)->addChild(_node);
}
DictionaryNodeList.push_back(_node);
}
root = DictionaryNodeList.at(0);
root->setParentNode(NULL);
if (debug)
{
wcout << endl << "OK 1 : numberOfNodes = " << numberOfNodes << endl;
}
//================ Read NodeModels =========================================================================================
// read number of NodeModel
// convert 3 bytes to number of NodeModel
int numberOfNodeModel = buffer[offset] * 65536 + buffer[offset + 1] * 256 + buffer[offset + 2];
if (debug)
{
wcout << "numberOfNodeModel = " << numberOfNodeModel << endl;
}
offset += 3;
// read list of NodeModel
nodeModelMap = map<int, DictionaryNodeModel*>();
for (int i = 0; i < numberOfNodeModel; ++i)
{
// convert first 2 bytes to modelId
int _modelId = buffer[offset] * 256 + buffer[offset + 1];
// convert the next 3 bytes to _lemmaId
int _lemmaId = buffer[offset + 2] * 65536 + buffer[offset + 3] * 256 + buffer[offset + 4];
// convert 3 remaining bytes to _nodeId
int _nodeVer3Id = buffer[offset + 5] * 65536 + buffer[offset + 6] * 256 + buffer[offset + 7];
offset += 8;
// create new NodeModel
if (nodeModelMap.count(_lemmaId))
{
//wcout << "##### TrieVer5::loadFromBinaryFile -> Duplicate : lemmaId = " << _lemmaId << " modelId = " << _modelId << endl;
}
else
{
// create a NodeModel
DictionaryNodeModel* _nodeModel = new DictionaryNodeModel(_modelId, _lemmaId);
// add NodeModel to NodeVer3
DictionaryNodeList.at(_nodeVer3Id)->addNodeModel(_nodeModel);
// map lemmaId to NodeModel
nodeModelMap.insert(pair<int, DictionaryNodeModel*>(_lemmaId, _nodeModel));
// if (_modelId == 872)
// {
// wcout << "NodeId == " << _nodeVer3Id << " ModelId = " << _modelId << endl;
// }
}
}
if (debug)
{
wcout << "OK 2 : numberOfNodeModel = " << numberOfNodeModel << endl;
}
//================ Read links =========================================================================================
// read number of links
// convert 3 bytes to number of links
int numberOfLinks = buffer[offset] * 65536 + buffer[offset + 1] * 256 + buffer[offset + 2];
offset += 3;
if (debug)
{
wcout << "numberOfLinks = " << numberOfLinks << endl;
}
// read links
for (int i = 0; i < numberOfLinks; ++i)
{
// convert the first 3 bytes to _fromLemmaId
int _fromLemmaId = buffer[offset] * 65536 + buffer[offset + 1] * 256 + buffer[offset + 2];
// convert the 3 remaining bytes to _toLemmaId
int _toLemmaId = buffer[offset + 3] * 65536 + buffer[offset + 4] * 256 + buffer[offset + 5];
offset += 6;
addLinkByLemmaIds(_fromLemmaId, _toLemmaId);
}
if (debug)
{
wcout << "OK 3" << endl;
}
//================ Read TrieModels =========================================================================================
// read number of TrieModel
// convert 2 bytes to number of TrieModel
int numberOfModels = buffer[offset] * 256 + buffer[offset + 1];
offset += 2;
if (debug)
{
wcout << "Number of TrieModels = " << numberOfModels << endl;
}
// read TrieModels
for (int i = 1; i <= numberOfModels; ++i)
{
// create a TrieModel from string sufixesAttrs
DictionaryTrieModel* _trieModel = new DictionaryTrieModel();
            // convert 1 byte to numberOfTrieModelElement
int numberOfTrieModelElement = buffer[offset];
offset += 1;
// wcout << "TM#" << i << ": elements=" << numberOfTrieModelElement << endl;
for (int j = 0; j < numberOfTrieModelElement; ++j)
{
DictionaryTrieModelElement* _modelElement = new DictionaryTrieModelElement();
// convert 1 byte to suffix's length
int suffixLength = buffer[offset];
offset += 1;
wstring _suffix = L"";
// read suffix
for (int k = 0; k < suffixLength; ++k)
{
_suffix.push_back(getDictionary()->getDictionaryTools()->charToWchar(buffer[offset + k]));
}
offset += suffixLength;
// wcout << "_suffix = " << _suffix << " ";
// set suffix for DictionaryTrieModelElement
_modelElement->setSuffix(_suffix);
// convert 1 byte to beginWithPo
int _beginWithPo = buffer[offset];
offset += 1;
// wcout << "Po = " << _beginWithPo << " ";
// set beginWithPo for DictionaryTrieModelElement
_modelElement->setBeginWithPo(_beginWithPo == 1);
                // convert 2 bytes to featureListId
int _featureListId = buffer[offset] * 256 + buffer[offset + 1];
offset += 2;
// wcout << "_featureListId = " << _featureListId << endl;
// set featureListId for DictionaryTrieModelElement
_modelElement->setFeatureListId(_featureListId);
// add DictionaryTrieModelElement to DictionaryTrieModel
_trieModel->addDictionaryTrieModelElement(_modelElement);
}
// map modelIndex to trieModel
mapModelIndexToTrieModel(i, _trieModel);
}
if (debug)
{
wcout << "OK 4" << endl;
}
//================ Read featureListMap =========================================================================================
// read number of FeatureList
// convert 2 bytes to number of FeatureList
int numberOfFeatureList = buffer[offset] * 256 + buffer[offset + 1];
if (debug)
{
wcout << "numberOfFeatureList = " << numberOfFeatureList << endl;
}
offset += 2;
for (int i = 1; i <= numberOfFeatureList; ++i)
{
// read number of features in list
int numberOfFeature = buffer[offset];
// wcout << " numberOfFeature = " << numberOfFeature << ": ";
offset += 1;
vector<int> featureIdList = vector<int>();
// read features
for (int j = 0; j < numberOfFeature; ++j)
{
// convert 1 byte to featureId
int featureId = buffer[offset];
// wcout << featureId << "; ";
offset += 1;
// add featureId to featureIdList
featureIdList.push_back(featureId);
}
// wcout << endl;
// insert featureIdList to featureListMap
featureListMap.insert(pair<int, vector<int> >(i, featureIdList));
}
if (debug)
{
wcout << "OK 5" << endl;
}
//================ Read featureMap =========================================================================================
// read number of features
        // convert 1 byte to number of features
featureIdMap.clear();
idFeatureMap.clear();
int _numberOfFeature = buffer[offset];
if (debug)
{
wcout << "_numberOfFeature = " << _numberOfFeature << endl;
}
offset += 1;
for (int i = 1; i <= _numberOfFeature; ++i)
{
// short feature
// convert 1 byte to feature's length
int _short_feature_length = buffer[offset];
offset += 1;
// read feature
wstring _short_feature = L"";
for (int j = 0; j < _short_feature_length; ++j)
{
//wcout << buffer[offset + j] << "; ";
_short_feature.push_back(getDictionary()->getDictionaryTools()->charToWchar(buffer[offset + j]));
}
//wcout << endl;
offset += _short_feature_length;
// insert _feature to featureMap
featureIdMap.insert(pair<wstring, int>(_short_feature, i));
idFeatureMap.insert(pair<int, wstring>(i, _short_feature));
if (debug)
{
wcout << "Short feature (BIN) #" << i << ": (" << _short_feature_length << ") " << _short_feature << endl;
}
// // long feature
// // convert 1 byte to feature's length
// int _long_feature_length = buffer[offset];
// offset += 1;
// // read feature
// wstring _long_feature = L"";
// for (int j = 0; j < _long_feature_length; ++j)
// {
// //wcout << buffer[offset + j] << "; ";
// _long_feature.push_back(getDictionary()->getDictionaryTools()->charToWchar(buffer[offset + j]));
// }
// //wcout << endl;
// offset += _long_feature_length;
// // insert _feature to featureMap
// featureMap.insert(pair<int, wstring>(i, _long_feature));
// if (debug)
// {
// wcout << "Long feature (BIN) #" << i << ": (" << _long_feature_length << ") " << _long_feature << endl;
// }
//m.kudinov: fixing strange bug. looked like no short version was provided
featureMap.insert(pair<int, wstring>(i, _short_feature));
}
if (debug)
{
wcout << "OK 6" << endl;
}
//================ Loading done =========================================================================================
delete[] buffer;
buffer = NULL;
if (debug)
{
wcout << "... loadFromBinaryFile done!" << endl;
}
}
else
{
        wcout << "### Error ### : loadFromBinaryFile -> Unable to open file" << endl;
        throw FILE_NOT_FOUND_ERROR_CODE;
}
}
| apache-2.0 |
distribuitech/datos | datos-vfs/src/main/java/com/datos/vfs/provider/sftp/SftpStreamProxy.java | 5826 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datos.vfs.provider.sftp;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.Socket;
import com.datos.vfs.FileSystemOptions;
import com.jcraft.jsch.ChannelExec;
import com.jcraft.jsch.Proxy;
import com.jcraft.jsch.Session;
import com.jcraft.jsch.SocketFactory;
/**
* Stream based proxy for JSch.
*
* <p>
* Use a command on the proxy that will forward the SSH stream to the target host and port.
* </p>
*
* @since 2.1
*/
public class SftpStreamProxy implements Proxy
{
/**
* Command format using bash built-in TCP stream.
*/
    public static final String BASH_TCP_COMMAND = "/bin/bash -c 'exec 3<>/dev/tcp/%s/%d; cat <&3 & cat >&3; kill $!'";
/**
* Command format using netcat command.
*/
public static final String NETCAT_COMMAND = "nc -q 0 %s %d";
private ChannelExec channel;
/**
* Command pattern to execute on the proxy host.
* <p>
* When run, the command output should be forwarded to the target host and port, and its input should be forwarded
* from the target host and port.
* <p>
* The command will be created for each host/port pair by using {@linkplain String#format(String, Object...)} with
* two objects: the target host name ({@linkplain String}) and the target port ({@linkplain Integer}).
* <p>
* Here are two examples (that can be easily used by using the static members of this class):
* <ul>
* <li><code>nc -q 0 %s %d</code> to use the netcat command ({@linkplain #NETCAT_COMMAND})</li>
 * <li><code>/bin/bash -c 'exec 3<>/dev/tcp/%s/%d; cat <&3 & cat >&3; kill $!'</code> will use bash built-in TCP
* stream, which can be useful when there is no netcat available. ({@linkplain #BASH_TCP_COMMAND})</li>
* </ul>
*/
private final String commandFormat;
/**
* Hostname used to connect to the proxy host.
*/
private final String proxyHost;
/**
* The options for connection.
*/
private final FileSystemOptions proxyOptions;
/**
* The password to be used for connection.
*/
private final String proxyPassword;
/**
* Port used to connect to the proxy host.
*/
private final int proxyPort;
/**
* Username used to connect to the proxy host.
*/
private final String proxyUser;
private Session session;
/**
* Creates a stream proxy.
*
* @param commandFormat
* A format string that will be used to create the command to execute on the proxy host using
* {@linkplain String#format(String, Object...)}. Two parameters are given to the format command, the
* target host name (String) and port (Integer).
* @param proxyUser
* The proxy user
* @param proxyPassword
* The proxy password
* @param proxyHost
* The proxy host
* @param proxyPort
* The port to connect to on the proxy
* @param proxyOptions
* Options used when connecting to the proxy
*/
public SftpStreamProxy(final String commandFormat, final String proxyUser, final String proxyHost,
final int proxyPort, final String proxyPassword, final FileSystemOptions proxyOptions)
{
this.proxyHost = proxyHost;
this.proxyPort = proxyPort;
this.proxyUser = proxyUser;
this.proxyPassword = proxyPassword;
this.commandFormat = commandFormat;
this.proxyOptions = proxyOptions;
}
@Override
public void close()
{
if (channel != null)
{
channel.disconnect();
}
if (session != null)
{
session.disconnect();
}
}
@Override
public void connect(final SocketFactory socketFactory, final String targetHost,
final int targetPort, final int timeout)
throws Exception
{
session = SftpClientFactory.createConnection(proxyHost, proxyPort, proxyUser.toCharArray(),
proxyPassword.toCharArray(), proxyOptions);
channel = (ChannelExec) session.openChannel("exec");
channel.setCommand(String.format(commandFormat, targetHost, targetPort));
channel.connect(timeout);
}
@Override
public InputStream getInputStream()
{
try
{
return channel.getInputStream();
}
catch (final IOException e)
{
throw new IllegalStateException("IOException getting the SSH proxy input stream", e);
}
}
@Override
public OutputStream getOutputStream()
{
try
{
return channel.getOutputStream();
}
catch (final IOException e)
{
throw new IllegalStateException("IOException getting the SSH proxy output stream", e);
}
}
@Override
public Socket getSocket()
{
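        // This proxy is purely stream-based: the SSH exec channel supplies the streams, so no underlying socket is exposed.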
return null;
}
}
| apache-2.0 |
RealTimeWeb/wikisite | MoinMoin/parser/text_python.py | 845 | # -*- coding: iso-8859-1 -*-
"""
MoinMoin - Python Source Parser
DEPRECATED compatibility wrapper calling the highlight parser.
This is to support (deprecated) existing syntax like:
{{{#!python ...
...
}}}
It is equivalent to the new way to highlight code:
{{{#!highlight python ...
...
}}}
@copyright: 2008 MoinMoin:ThomasWaldmann
@license: GNU GPL, see COPYING for details.
"""
from MoinMoin.parser.highlight import Parser as HighlightParser
from MoinMoin.parser.highlight import Dependencies
class Parser(HighlightParser):
parsername = 'python' # Lexer name pygments recognizes
extensions = [] # this is only a compatibility wrapper, we have declared
# support for this extension in the HighlightParser, so
# moin will call that directly
| apache-2.0 |
aws/aws-sdk-java | aws-java-sdk-connect/src/main/java/com/amazonaws/services/connect/model/transform/ListSecurityKeysRequestProtocolMarshaller.java | 2689 | /*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.connect.model.transform;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.Request;
import com.amazonaws.http.HttpMethodName;
import com.amazonaws.services.connect.model.*;
import com.amazonaws.transform.Marshaller;
import com.amazonaws.protocol.*;
import com.amazonaws.protocol.Protocol;
import com.amazonaws.annotation.SdkInternalApi;
/**
* ListSecurityKeysRequest Marshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class ListSecurityKeysRequestProtocolMarshaller implements Marshaller<Request<ListSecurityKeysRequest>, ListSecurityKeysRequest> {
private static final OperationInfo SDK_OPERATION_BINDING = OperationInfo.builder().protocol(Protocol.REST_JSON)
.requestUri("/instance/{InstanceId}/security-keys").httpMethodName(HttpMethodName.GET).hasExplicitPayloadMember(false).hasPayloadMembers(false)
.serviceName("AmazonConnect").build();
private final com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory;
public ListSecurityKeysRequestProtocolMarshaller(com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory) {
this.protocolFactory = protocolFactory;
}
public Request<ListSecurityKeysRequest> marshall(ListSecurityKeysRequest listSecurityKeysRequest) {
if (listSecurityKeysRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
final ProtocolRequestMarshaller<ListSecurityKeysRequest> protocolMarshaller = protocolFactory.createProtocolMarshaller(SDK_OPERATION_BINDING,
listSecurityKeysRequest);
protocolMarshaller.startMarshalling();
ListSecurityKeysRequestMarshaller.getInstance().marshall(listSecurityKeysRequest, protocolMarshaller);
return protocolMarshaller.finishMarshalling();
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
}
}
| apache-2.0 |
radishgz/ui-1.5.9 | app/admin-tab/processes/list/controller.js | 2161 | import Ember from 'ember';
export default Ember.Controller.extend({
prefs: Ember.inject.service(),
queryParams: ['which','sortBy','descending'],
which: 'running',
sortBy: 'id',
descending: false,
actions: {
replay(process) {
if ( process.hasAction('replay') ) {
process.doAction('replay');
}
}
},
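  // Table columns depend on 'which': delayed and completed lists add an exit-reason column,
  // completed adds an end-time column, and delayed shows "run after" instead of run time.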
headers: function() {
let which = this.get('which');
let out = [
{
name: 'id',
translationKey: 'generic.id',
sort: ['id:desc'],
width: '75px',
},
{
name: 'processName',
translationKey: 'generic.name',
sort: ['processName','id:desc'],
},
{
translationKey: 'processesPage.list.table.resource',
name: 'resource',
sort: ['resourceType','id:desc'],
searchField: ['typeAndId', 'resourceType','resourceId'],
}
];
if ( which === 'delayed' || which === 'completed' ) {
out.push({
translationKey: 'processesPage.list.table.exitReason',
name: 'exitReason',
sort: ['exitReason','id:desc'],
width: '150px',
});
}
out.push({
translationKey: 'processesPage.list.table.startTime',
name: 'startTime',
sort: ['startTime:desc','id:desc'],
width: '120px',
searchField: false,
});
if ( which === 'completed' ) {
out.push({
translationKey: 'processesPage.list.table.endTime',
name: 'endTime',
sort: ['endTime:desc','id:desc'],
width: '120px',
searchField: false,
});
}
if ( which === 'delayed' ) {
out.push({
translationKey: 'processesPage.list.table.runAfter',
name: 'runAfter',
sort: ['runAfter:desc','id:desc'],
width: '120px',
searchField: false,
});
} else {
out.push({
translationKey: 'processesPage.list.table.runTime',
name: 'runTime',
sort: ['runTime:desc','id:desc'],
width: '100px',
searchField: false,
});
}
out.push({
isActions: true,
width: '40px',
});
return out;
}.property('which'),
});
| apache-2.0 |
macchina-io/macchina.io | platform/JS/V8/v8/src/builtins/arm64/builtins-arm64.cc | 109227 | // Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#if V8_TARGET_ARCH_ARM64
#include "src/arm64/macro-assembler-arm64-inl.h"
#include "src/codegen.h"
#include "src/counters.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/frame-constants.h"
#include "src/frames.h"
#include "src/objects-inl.h"
#include "src/runtime/runtime.h"
namespace v8 {
namespace internal {
#define __ ACCESS_MASM(masm)
// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
// Load the InternalArray function from the native context.
__ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, result);
}
// Load the built-in InternalArray function from the current context.
static void GenerateLoadInternalArrayFunction(MacroAssembler* masm,
Register result) {
// Load the InternalArray function from the native context.
__ LoadNativeContextSlot(Context::INTERNAL_ARRAY_FUNCTION_INDEX, result);
}
void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
ExitFrameType exit_frame_type) {
__ Mov(x5, ExternalReference(address, masm->isolate()));
if (exit_frame_type == BUILTIN_EXIT) {
__ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
RelocInfo::CODE_TARGET);
} else {
DCHECK(exit_frame_type == EXIT);
__ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithExitFrame),
RelocInfo::CODE_TARGET);
}
}
namespace {
void AdaptorWithExitFrameType(MacroAssembler* masm,
Builtins::ExitFrameType exit_frame_type) {
// ----------- S t a t e -------------
// -- x0 : number of arguments excluding receiver
// -- x1 : target
// -- x3 : new target
// -- x5 : entry point
// -- sp[0] : last argument
// -- ...
// -- sp[4 * (argc - 1)] : first argument
// -- sp[4 * argc] : receiver
// -----------------------------------
__ AssertFunction(x1);
// Make sure we operate in the context of the called function (for example
// ConstructStubs implemented in C++ will be run in the context of the caller
// instead of the callee, due to the way that [[Construct]] is defined for
// ordinary functions).
__ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));
// CEntryStub expects x0 to contain the number of arguments including the
// receiver and the extra arguments.
const int num_extra_args = 3;
__ Add(x0, x0, num_extra_args + 1);
// Insert extra arguments.
__ SmiTag(x0);
__ Push(x0, x1, x3);
__ SmiUntag(x0);
// Jump to the C entry runtime stub directly here instead of using
// JumpToExternalReference. We have already loaded entry point to x5
// in Generate_adaptor.
__ mov(x1, x5);
CEntryStub stub(masm->isolate(), 1, kDontSaveFPRegs, kArgvOnStack,
exit_frame_type == Builtins::BUILTIN_EXIT);
__ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
}
} // namespace
void Builtins::Generate_AdaptorWithExitFrame(MacroAssembler* masm) {
AdaptorWithExitFrameType(masm, EXIT);
}
void Builtins::Generate_AdaptorWithBuiltinExitFrame(MacroAssembler* masm) {
AdaptorWithExitFrameType(masm, BUILTIN_EXIT);
}
void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x0 : number of arguments
// -- lr : return address
// -- sp[...]: constructor arguments
// -----------------------------------
ASM_LOCATION("Builtins::Generate_InternalArrayConstructor");
Label generic_array_code;
// Get the InternalArray function.
GenerateLoadInternalArrayFunction(masm, x1);
if (FLAG_debug_code) {
// Initial map for the builtin InternalArray functions should be maps.
__ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset));
__ Tst(x10, kSmiTagMask);
__ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction);
__ CompareObjectType(x10, x11, x12, MAP_TYPE);
__ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction);
}
// Run the native code for the InternalArray function called as a normal
// function.
InternalArrayConstructorStub stub(masm->isolate());
__ TailCallStub(&stub);
}
void Builtins::Generate_ArrayConstructor(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x0 : number of arguments
// -- lr : return address
// -- sp[...]: constructor arguments
// -----------------------------------
ASM_LOCATION("Builtins::Generate_ArrayConstructor");
Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
// Get the Array function.
GenerateLoadArrayFunction(masm, x1);
if (FLAG_debug_code) {
// Initial map for the builtin Array functions should be maps.
__ Ldr(x10, FieldMemOperand(x1, JSFunction::kPrototypeOrInitialMapOffset));
__ Tst(x10, kSmiTagMask);
__ Assert(ne, kUnexpectedInitialMapForArrayFunction);
__ CompareObjectType(x10, x11, x12, MAP_TYPE);
__ Assert(eq, kUnexpectedInitialMapForArrayFunction);
}
// Run the native code for the Array function called as a normal function.
__ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
__ Mov(x3, x1);
ArrayConstructorStub stub(masm->isolate());
__ TailCallStub(&stub);
}
// static
void Builtins::Generate_NumberConstructor(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x0 : number of arguments
// -- x1 : constructor function
// -- cp : context
// -- lr : return address
// -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
// -- sp[argc * 8] : receiver
// -----------------------------------
ASM_LOCATION("Builtins::Generate_NumberConstructor");
// 1. Load the first argument into x0.
Label no_arguments;
{
__ Cbz(x0, &no_arguments);
__ Mov(x2, x0); // Store argc in x2.
__ Sub(x0, x0, 1);
__ Ldr(x0, MemOperand(jssp, x0, LSL, kPointerSizeLog2));
}
// 2a. Convert first argument to number.
{
FrameScope scope(masm, StackFrame::MANUAL);
__ SmiTag(x2);
__ EnterBuiltinFrame(cp, x1, x2);
__ Call(BUILTIN_CODE(masm->isolate(), ToNumber), RelocInfo::CODE_TARGET);
__ LeaveBuiltinFrame(cp, x1, x2);
__ SmiUntag(x2);
}
{
// Drop all arguments.
__ Drop(x2);
}
// 2b. No arguments, return +0 (already in x0).
__ Bind(&no_arguments);
__ Drop(1);
__ Ret();
}
// static
void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x0 : number of arguments
// -- x1 : constructor function
// -- x3 : new target
// -- cp : context
// -- lr : return address
// -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
// -- sp[argc * 8] : receiver
// -----------------------------------
ASM_LOCATION("Builtins::Generate_NumberConstructor_ConstructStub");
// 1. Make sure we operate in the context of the called function.
__ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));
// 2. Load the first argument into x2.
{
Label no_arguments, done;
__ Move(x6, x0); // Store argc in x6.
__ Cbz(x0, &no_arguments);
__ Sub(x0, x0, 1);
__ Ldr(x2, MemOperand(jssp, x0, LSL, kPointerSizeLog2));
__ B(&done);
__ Bind(&no_arguments);
__ Mov(x2, Smi::kZero);
__ Bind(&done);
}
// 3. Make sure x2 is a number.
{
Label done_convert;
__ JumpIfSmi(x2, &done_convert);
__ JumpIfObjectType(x2, x4, x4, HEAP_NUMBER_TYPE, &done_convert, eq);
{
FrameScope scope(masm, StackFrame::MANUAL);
__ SmiTag(x6);
__ EnterBuiltinFrame(cp, x1, x6);
__ Push(x3);
__ Move(x0, x2);
__ Call(BUILTIN_CODE(masm->isolate(), ToNumber), RelocInfo::CODE_TARGET);
__ Move(x2, x0);
__ Pop(x3);
__ LeaveBuiltinFrame(cp, x1, x6);
__ SmiUntag(x6);
}
__ Bind(&done_convert);
}
// 4. Check if new target and constructor differ.
Label drop_frame_and_ret, new_object;
__ Cmp(x1, x3);
__ B(ne, &new_object);
// 5. Allocate a JSValue wrapper for the number.
__ AllocateJSValue(x0, x1, x2, x4, x5, &new_object);
__ B(&drop_frame_and_ret);
// 6. Fallback to the runtime to create new object.
__ bind(&new_object);
{
FrameScope scope(masm, StackFrame::MANUAL);
__ SmiTag(x6);
__ EnterBuiltinFrame(cp, x1, x6);
__ Push(x2); // first argument
__ Call(BUILTIN_CODE(masm->isolate(), FastNewObject),
RelocInfo::CODE_TARGET);
__ Pop(x2);
__ LeaveBuiltinFrame(cp, x1, x6);
__ SmiUntag(x6);
}
__ Str(x2, FieldMemOperand(x0, JSValue::kValueOffset));
__ bind(&drop_frame_and_ret);
{
__ Drop(x6);
__ Drop(1);
__ Ret();
}
}
// static
void Builtins::Generate_StringConstructor(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x0 : number of arguments
// -- x1 : constructor function
// -- cp : context
// -- lr : return address
// -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
// -- sp[argc * 8] : receiver
// -----------------------------------
ASM_LOCATION("Builtins::Generate_StringConstructor");
// 1. Load the first argument into x0.
Label no_arguments;
{
__ Cbz(x0, &no_arguments);
__ Mov(x2, x0); // Store argc in x2.
__ Sub(x0, x0, 1);
__ Ldr(x0, MemOperand(jssp, x0, LSL, kPointerSizeLog2));
}
// 2a. At least one argument, return x0 if it's a string, otherwise
// dispatch to appropriate conversion.
Label drop_frame_and_ret, to_string, symbol_descriptive_string;
{
__ JumpIfSmi(x0, &to_string);
STATIC_ASSERT(FIRST_NONSTRING_TYPE == SYMBOL_TYPE);
__ CompareObjectType(x0, x3, x3, FIRST_NONSTRING_TYPE);
__ B(hi, &to_string);
__ B(eq, &symbol_descriptive_string);
__ b(&drop_frame_and_ret);
}
// 2b. No arguments, return the empty string (and pop the receiver).
__ Bind(&no_arguments);
{
__ LoadRoot(x0, Heap::kempty_stringRootIndex);
__ Drop(1);
__ Ret();
}
// 3a. Convert x0 to a string.
__ Bind(&to_string);
{
FrameScope scope(masm, StackFrame::MANUAL);
__ SmiTag(x2);
__ EnterBuiltinFrame(cp, x1, x2);
__ Call(BUILTIN_CODE(masm->isolate(), ToString), RelocInfo::CODE_TARGET);
__ LeaveBuiltinFrame(cp, x1, x2);
__ SmiUntag(x2);
}
__ b(&drop_frame_and_ret);
// 3b. Convert symbol in x0 to a string.
__ Bind(&symbol_descriptive_string);
{
__ Drop(x2);
__ Drop(1);
__ Push(x0);
__ TailCallRuntime(Runtime::kSymbolDescriptiveString);
}
__ bind(&drop_frame_and_ret);
{
__ Drop(x2);
__ Drop(1);
__ Ret();
}
}
// static
void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x0 : number of arguments
// -- x1 : constructor function
// -- x3 : new target
// -- cp : context
// -- lr : return address
// -- sp[(argc - n - 1) * 8] : arg[n] (zero based)
// -- sp[argc * 8] : receiver
// -----------------------------------
ASM_LOCATION("Builtins::Generate_StringConstructor_ConstructStub");
// 1. Make sure we operate in the context of the called function.
__ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));
// 2. Load the first argument into x2.
{
Label no_arguments, done;
__ mov(x6, x0); // Store argc in x6.
__ Cbz(x0, &no_arguments);
__ Sub(x0, x0, 1);
__ Ldr(x2, MemOperand(jssp, x0, LSL, kPointerSizeLog2));
__ B(&done);
__ Bind(&no_arguments);
__ LoadRoot(x2, Heap::kempty_stringRootIndex);
__ Bind(&done);
}
// 3. Make sure x2 is a string.
{
Label convert, done_convert;
__ JumpIfSmi(x2, &convert);
__ JumpIfObjectType(x2, x4, x4, FIRST_NONSTRING_TYPE, &done_convert, lo);
__ Bind(&convert);
{
FrameScope scope(masm, StackFrame::MANUAL);
__ SmiTag(x6);
__ EnterBuiltinFrame(cp, x1, x6);
__ Push(x3);
__ Move(x0, x2);
__ Call(BUILTIN_CODE(masm->isolate(), ToString), RelocInfo::CODE_TARGET);
__ Move(x2, x0);
__ Pop(x3);
__ LeaveBuiltinFrame(cp, x1, x6);
__ SmiUntag(x6);
}
__ Bind(&done_convert);
}
// 4. Check if new target and constructor differ.
Label drop_frame_and_ret, new_object;
__ Cmp(x1, x3);
__ B(ne, &new_object);
// 5. Allocate a JSValue wrapper for the string.
__ AllocateJSValue(x0, x1, x2, x4, x5, &new_object);
__ B(&drop_frame_and_ret);
// 6. Fallback to the runtime to create new object.
__ bind(&new_object);
{
FrameScope scope(masm, StackFrame::MANUAL);
__ SmiTag(x6);
__ EnterBuiltinFrame(cp, x1, x6);
__ Push(x2); // first argument
__ Call(BUILTIN_CODE(masm->isolate(), FastNewObject),
RelocInfo::CODE_TARGET);
__ Pop(x2);
__ LeaveBuiltinFrame(cp, x1, x6);
__ SmiUntag(x6);
}
__ Str(x2, FieldMemOperand(x0, JSValue::kValueOffset));
__ bind(&drop_frame_and_ret);
{
__ Drop(x6);
__ Drop(1);
__ Ret();
}
}
static void GenerateTailCallToSharedCode(MacroAssembler* masm) {
__ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
__ Ldr(x2, FieldMemOperand(x2, SharedFunctionInfo::kCodeOffset));
__ Add(x2, x2, Code::kHeaderSize - kHeapObjectTag);
__ Br(x2);
}
static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
Runtime::FunctionId function_id) {
// ----------- S t a t e -------------
// -- x0 : argument count (preserved for callee)
// -- x1 : target function (preserved for callee)
// -- x3 : new target (preserved for callee)
// -----------------------------------
{
FrameScope scope(masm, StackFrame::INTERNAL);
// Push a copy of the target function and the new target.
// Push another copy as a parameter to the runtime call.
__ SmiTag(x0);
__ Push(x0, x1, x3, x1);
__ CallRuntime(function_id, 1);
__ Move(x2, x0);
// Restore target function and new target.
__ Pop(x3, x1, x0);
__ SmiUntag(x0);
}
__ Add(x2, x2, Code::kHeaderSize - kHeapObjectTag);
__ Br(x2);
}
namespace {
void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
Label post_instantiation_deopt_entry;
// ----------- S t a t e -------------
// -- x0 : number of arguments
// -- x1 : constructor function
// -- x3 : new target
// -- cp : context
// -- lr : return address
// -- sp[...]: constructor arguments
// -----------------------------------
ASM_LOCATION("Builtins::Generate_JSConstructStubHelper");
// Enter a construct frame.
{
FrameScope scope(masm, StackFrame::CONSTRUCT);
// Preserve the incoming parameters on the stack.
__ SmiTag(x0);
__ Push(cp, x0);
__ SmiUntag(x0);
__ PushRoot(Heap::kTheHoleValueRootIndex);
// Set up pointer to last argument.
__ Add(x2, fp, StandardFrameConstants::kCallerSPOffset);
// Copy arguments and receiver to the expression stack.
// Copy 2 values every loop to use ldp/stp.
// Compute pointer behind the first argument.
__ Add(x4, x2, Operand(x0, LSL, kPointerSizeLog2));
Label loop, entry, done_copying_arguments;
// ----------- S t a t e -------------
// -- x0: number of arguments (untagged)
// -- x1: constructor function
// -- x3: new target
// -- x2: pointer to last argument (caller sp)
// -- x4: pointer to argument last copied
// -- sp[0*kPointerSize]: the hole (receiver)
// -- sp[1*kPointerSize]: number of arguments (tagged)
// -- sp[2*kPointerSize]: context
// -----------------------------------
__ B(&entry);
__ Bind(&loop);
__ Ldp(x10, x11, MemOperand(x4, -2 * kPointerSize, PreIndex));
__ Push(x11, x10);
__ Bind(&entry);
__ Cmp(x4, x2);
__ B(gt, &loop);
// Because we copied values 2 by 2 we may have copied one extra value.
// Drop it if that is the case.
__ B(eq, &done_copying_arguments);
__ Drop(1);
__ Bind(&done_copying_arguments);
// Call the function.
// x0: number of arguments
// x1: constructor function
// x3: new target
ParameterCount actual(x0);
__ InvokeFunction(x1, x3, actual, CALL_FUNCTION);
// Restore the context from the frame.
__ Ldr(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
// Restore smi-tagged arguments count from the frame.
__ Peek(x1, 0);
// Leave construct frame.
}
// Remove caller arguments from the stack and return.
__ DropBySMI(x1);
__ Drop(1);
__ Ret();
}
// The construct stub for ES5 constructor functions and ES6 class constructors.
void Generate_JSConstructStubGeneric(MacroAssembler* masm,
bool restrict_constructor_return) {
// ----------- S t a t e -------------
// -- x0 : number of arguments
// -- x1 : constructor function
// -- x3 : new target
// -- lr : return address
// -- cp : context pointer
// -- sp[...]: constructor arguments
// -----------------------------------
ASM_LOCATION("Builtins::Generate_JSConstructStubHelper");
// Enter a construct frame.
{
FrameScope scope(masm, StackFrame::CONSTRUCT);
Label post_instantiation_deopt_entry, not_create_implicit_receiver;
// Preserve the incoming parameters on the stack.
__ SmiTag(x0);
__ Push(cp, x0, x1, x3);
// ----------- S t a t e -------------
// -- sp[0*kPointerSize]: new target
// -- x1 and sp[1*kPointerSize]: constructor function
// -- sp[2*kPointerSize]: number of arguments (tagged)
// -- sp[3*kPointerSize]: context
// -----------------------------------
__ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
__ Ldr(w4, FieldMemOperand(x4, SharedFunctionInfo::kCompilerHintsOffset));
__ tst(w4, Operand(SharedFunctionInfo::kDerivedConstructorMask));
__ B(ne, ¬_create_implicit_receiver);
// If not derived class constructor: Allocate the new receiver object.
__ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1,
x4, x5);
__ Call(BUILTIN_CODE(masm->isolate(), FastNewObject),
RelocInfo::CODE_TARGET);
__ B(&post_instantiation_deopt_entry);
// Else: use TheHoleValue as receiver for constructor call
__ bind(¬_create_implicit_receiver);
__ LoadRoot(x0, Heap::kTheHoleValueRootIndex);
// ----------- S t a t e -------------
// -- x0: receiver
// -- Slot 3 / sp[0*kPointerSize]: new target
// -- Slot 2 / sp[1*kPointerSize]: constructor function
// -- Slot 1 / sp[2*kPointerSize]: number of arguments (tagged)
// -- Slot 0 / sp[3*kPointerSize]: context
// -----------------------------------
// Deoptimizer enters here.
masm->isolate()->heap()->SetConstructStubCreateDeoptPCOffset(
masm->pc_offset());
__ bind(&post_instantiation_deopt_entry);
// Restore new target.
__ Pop(x3);
// Push the allocated receiver to the stack. We need two copies
// because we may have to return the original one and the calling
// conventions dictate that the called function pops the receiver.
__ Push(x0, x0);
// ----------- S t a t e -------------
// -- x3: new target
// -- sp[0*kPointerSize]: implicit receiver
// -- sp[1*kPointerSize]: implicit receiver
// -- sp[2*kPointerSize]: constructor function
// -- sp[3*kPointerSize]: number of arguments (tagged)
// -- sp[4*kPointerSize]: context
// -----------------------------------
// Restore constructor function and argument count.
__ Ldr(x1, MemOperand(fp, ConstructFrameConstants::kConstructorOffset));
__ Ldr(x0, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
__ SmiUntag(x0);
// Set up pointer to last argument.
__ Add(x2, fp, StandardFrameConstants::kCallerSPOffset);
// Copy arguments and receiver to the expression stack.
// Copy 2 values every loop to use ldp/stp.
// Compute pointer behind the first argument.
__ Add(x4, x2, Operand(x0, LSL, kPointerSizeLog2));
Label loop, entry, done_copying_arguments;
// ----------- S t a t e -------------
// -- x0: number of arguments (untagged)
// -- x3: new target
// -- x2: pointer to last argument (caller sp)
// -- x4: pointer to argument last copied
// -- sp[0*kPointerSize]: implicit receiver
// -- sp[1*kPointerSize]: implicit receiver
// -- x1 and sp[2*kPointerSize]: constructor function
// -- sp[3*kPointerSize]: number of arguments (tagged)
// -- sp[4*kPointerSize]: context
// -----------------------------------
__ B(&entry);
__ Bind(&loop);
__ Ldp(x10, x11, MemOperand(x4, -2 * kPointerSize, PreIndex));
__ Push(x11, x10);
__ Bind(&entry);
__ Cmp(x4, x2);
__ B(gt, &loop);
// Because we copied values 2 by 2 we may have copied one extra value.
// Drop it if that is the case.
__ B(eq, &done_copying_arguments);
__ Drop(1);
__ Bind(&done_copying_arguments);
// Call the function.
ParameterCount actual(x0);
__ InvokeFunction(x1, x3, actual, CALL_FUNCTION);
// ----------- S t a t e -------------
// -- x0: constructor result
// -- sp[0*kPointerSize]: implicit receiver
// -- sp[1*kPointerSize]: constructor function
// -- sp[2*kPointerSize]: number of arguments
// -- sp[3*kPointerSize]: context
// -----------------------------------
// Store offset of return address for deoptimizer.
masm->isolate()->heap()->SetConstructStubInvokeDeoptPCOffset(
masm->pc_offset());
// Restore the context from the frame.
__ Ldr(cp, MemOperand(fp, ConstructFrameConstants::kContextOffset));
// If the result is an object (in the ECMA sense), we should get rid
// of the receiver and use the result; see ECMA-262 section 13.2.2-7
// on page 74.
Label use_receiver, do_throw, other_result, leave_frame;
// If the result is undefined, we jump out to using the implicit receiver.
__ CompareRoot(x0, Heap::kUndefinedValueRootIndex);
__ B(eq, &use_receiver);
// Otherwise we do a smi check and fall through to check if the return value
// is a valid receiver.
// If the result is a smi, it is *not* an object in the ECMA sense.
__ JumpIfSmi(x0, &other_result);
// If the type of the result (stored in its map) is less than
// FIRST_JS_RECEIVER_TYPE, it is not an object in the ECMA sense.
STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
__ JumpIfObjectType(x0, x4, x5, FIRST_JS_RECEIVER_TYPE, &leave_frame, ge);
// The result is now neither undefined nor an object.
__ Bind(&other_result);
__ Ldr(x4, MemOperand(fp, ConstructFrameConstants::kConstructorOffset));
__ Ldr(x4, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
__ Ldr(w4, FieldMemOperand(x4, SharedFunctionInfo::kCompilerHintsOffset));
__ tst(w4, Operand(SharedFunctionInfo::kClassConstructorMask));
if (restrict_constructor_return) {
// Throw if constructor function is a class constructor
__ B(eq, &use_receiver);
} else {
__ B(ne, &use_receiver);
__ CallRuntime(
Runtime::kIncrementUseCounterConstructorReturnNonUndefinedPrimitive);
__ B(&use_receiver);
}
__ Bind(&do_throw);
__ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
// Throw away the result of the constructor invocation and use the
// on-stack receiver as the result.
__ Bind(&use_receiver);
__ Peek(x0, 0 * kPointerSize);
__ CompareRoot(x0, Heap::kTheHoleValueRootIndex);
__ B(eq, &do_throw);
__ Bind(&leave_frame);
// Restore smi-tagged arguments count from the frame.
__ Ldr(x1, MemOperand(fp, ConstructFrameConstants::kLengthOffset));
// Leave construct frame.
}
// Remove caller arguments from the stack and return.
__ DropBySMI(x1);
__ Drop(1);
__ Ret();
}
} // namespace
void Builtins::Generate_JSConstructStubGenericRestrictedReturn(
MacroAssembler* masm) {
Generate_JSConstructStubGeneric(masm, true);
}
void Builtins::Generate_JSConstructStubGenericUnrestrictedReturn(
MacroAssembler* masm) {
Generate_JSConstructStubGeneric(masm, false);
}
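// A note on the two variants above (as implemented by the checks in
// Generate_JSConstructStubGeneric): when a constructor returns a value that
// is neither undefined nor a JS receiver, the restricted variant falls
// through to the throw path for class constructors, while the unrestricted
// variant keeps using the on-stack implicit receiver and merely bumps a use
// counter for ordinary (non-class) constructors. Illustrative JS only:
//
//   class A { constructor() { return 42; } }
//   new A();  // restricted: throws; unrestricted: yields the implicit `this`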
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
Generate_JSBuiltinsConstructStubHelper(masm);
}
void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
Generate_JSBuiltinsConstructStubHelper(masm);
}
void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(x1);
__ CallRuntime(Runtime::kThrowConstructedNonConstructable);
}
// static
void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x0 : the value to pass to the generator
// -- x1 : the JSGeneratorObject to resume
// -- x2 : the resume mode (tagged)
// -- lr : return address
// -----------------------------------
__ AssertGeneratorObject(x1);
// Store input value into generator object.
__ Str(x0, FieldMemOperand(x1, JSGeneratorObject::kInputOrDebugPosOffset));
__ RecordWriteField(x1, JSGeneratorObject::kInputOrDebugPosOffset, x0, x3,
kLRHasNotBeenSaved, kDontSaveFPRegs);
// Store resume mode into generator object.
__ Str(x2, FieldMemOperand(x1, JSGeneratorObject::kResumeModeOffset));
// Load suspended function and context.
__ Ldr(x4, FieldMemOperand(x1, JSGeneratorObject::kFunctionOffset));
__ Ldr(cp, FieldMemOperand(x4, JSFunction::kContextOffset));
// Flood function if we are stepping.
Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
Label stepping_prepared;
ExternalReference debug_hook =
ExternalReference::debug_hook_on_function_call_address(masm->isolate());
__ Mov(x10, Operand(debug_hook));
__ Ldrsb(x10, MemOperand(x10));
__ CompareAndBranch(x10, Operand(0), ne, &prepare_step_in_if_stepping);
// Flood function if we need to continue stepping in the suspended generator.
ExternalReference debug_suspended_generator =
ExternalReference::debug_suspended_generator_address(masm->isolate());
__ Mov(x10, Operand(debug_suspended_generator));
__ Ldr(x10, MemOperand(x10));
__ CompareAndBranch(x10, Operand(x1), eq,
&prepare_step_in_suspended_generator);
__ Bind(&stepping_prepared);
// Push receiver.
__ Ldr(x5, FieldMemOperand(x1, JSGeneratorObject::kReceiverOffset));
__ Push(x5);
// ----------- S t a t e -------------
// -- x1 : the JSGeneratorObject to resume
// -- x2 : the resume mode (tagged)
// -- x4 : generator function
// -- cp : generator context
// -- lr : return address
// -- jssp[0] : generator receiver
// -----------------------------------
// Push holes for arguments to generator function. Since the parser forced
// context allocation for any variables in generators, the actual argument
// values have already been copied into the context and these dummy values
// will never be used.
__ Ldr(x10, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
__ Ldr(w10,
FieldMemOperand(x10, SharedFunctionInfo::kFormalParameterCountOffset));
__ LoadRoot(x11, Heap::kTheHoleValueRootIndex);
__ PushMultipleTimes(x11, w10);
// Underlying function needs to have bytecode available.
if (FLAG_debug_code) {
__ Ldr(x3, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
__ Ldr(x3, FieldMemOperand(x3, SharedFunctionInfo::kFunctionDataOffset));
__ CompareObjectType(x3, x3, x3, BYTECODE_ARRAY_TYPE);
__ Assert(eq, kMissingBytecodeArray);
}
// Resume (Ignition/TurboFan) generator object.
{
__ Ldr(x0, FieldMemOperand(x4, JSFunction::kSharedFunctionInfoOffset));
__ Ldr(w0, FieldMemOperand(
x0, SharedFunctionInfo::kFormalParameterCountOffset));
// We abuse new.target both to indicate that this is a resume call and to
// pass in the generator object. In ordinary calls, new.target is always
// undefined because generator functions are non-constructable.
__ Move(x3, x1);
__ Move(x1, x4);
__ Ldr(x5, FieldMemOperand(x1, JSFunction::kCodeOffset));
__ Add(x5, x5, Code::kHeaderSize - kHeapObjectTag);
__ Jump(x5);
}
__ Bind(&prepare_step_in_if_stepping);
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(x1, x2, x4);
__ CallRuntime(Runtime::kDebugOnFunctionCall);
__ Pop(x2, x1);
__ Ldr(x4, FieldMemOperand(x1, JSGeneratorObject::kFunctionOffset));
}
__ B(&stepping_prepared);
__ Bind(&prepare_step_in_suspended_generator);
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(x1, x2);
__ CallRuntime(Runtime::kDebugPrepareStepInSuspendedGenerator);
__ Pop(x2, x1);
__ Ldr(x4, FieldMemOperand(x1, JSGeneratorObject::kFunctionOffset));
}
__ B(&stepping_prepared);
}
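// Illustrative JS-level view of the resume implemented above (example code
// only; the identifiers are not part of the generated builtin):
//
//   function* gen() { const v = yield 1; return v; }
//   const it = gen();
//   it.next();    // suspends at `yield 1`
//   it.next(42);  // re-enters here with x0 = 42, x1 = the generator object,
//                 // x2 = the resume mode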
enum IsTagged { kArgcIsSmiTagged, kArgcIsUntaggedInt };
// Clobbers x10, x15; preserves all other registers.
static void Generate_CheckStackOverflow(MacroAssembler* masm, Register argc,
IsTagged argc_is_tagged) {
// Check the stack for overflow.
// We are not trying to catch interruptions (e.g. debug break and
// preemption) here, so the "real stack limit" is checked.
Label enough_stack_space;
__ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
// Make x10 the space we have left. The stack might already be overflowed
// here which will cause x10 to become negative.
// TODO(jbramley): Check that the stack usage here is safe.
__ Sub(x10, jssp, x10);
// Check if the arguments will overflow the stack.
if (argc_is_tagged == kArgcIsSmiTagged) {
__ Cmp(x10, Operand::UntagSmiAndScale(argc, kPointerSizeLog2));
} else {
DCHECK(argc_is_tagged == kArgcIsUntaggedInt);
__ Cmp(x10, Operand(argc, LSL, kPointerSizeLog2));
}
__ B(gt, &enough_stack_space);
__ CallRuntime(Runtime::kThrowStackOverflow);
// We should never return from the APPLY_OVERFLOW builtin.
if (__ emit_debug_code()) {
__ Unreachable();
}
__ Bind(&enough_stack_space);
}
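// Worked example of the check above, assuming 8-byte pointers: for
// argc == 1000 untagged arguments the frame needs
// 1000 << kPointerSizeLog2 == 8000 bytes of headroom between jssp and the
// real stack limit; anything less takes the Runtime::kThrowStackOverflow path.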
// Input:
// x0: new.target.
// x1: function.
// x2: receiver.
// x3: argc.
// x4: argv.
// Output:
// x0: result.
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
bool is_construct) {
// Called from JSEntryStub::GenerateBody().
Register new_target = x0;
Register function = x1;
Register receiver = x2;
Register argc = x3;
Register argv = x4;
Register scratch = x10;
ProfileEntryHookStub::MaybeCallEntryHook(masm);
{
// Enter an internal frame.
FrameScope scope(masm, StackFrame::INTERNAL);
// Setup the context (we need to use the caller context from the isolate).
__ Mov(scratch, Operand(ExternalReference(IsolateAddressId::kContextAddress,
masm->isolate())));
__ Ldr(cp, MemOperand(scratch));
__ InitializeRootRegister();
// Push the function and the receiver onto the stack.
__ Push(function, receiver);
// Check if we have enough stack space to push all arguments.
// The argument count is in argc (x3); Generate_CheckStackOverflow clobbers
// x10 and x15.
Generate_CheckStackOverflow(masm, argc, kArgcIsUntaggedInt);
// Copy arguments to the stack in a loop, in reverse order.
// x3: argc.
// x4: argv.
Label loop, entry;
// Compute the copy end address.
__ Add(scratch, argv, Operand(argc, LSL, kPointerSizeLog2));
__ B(&entry);
__ Bind(&loop);
__ Ldr(x11, MemOperand(argv, kPointerSize, PostIndex));
__ Ldr(x12, MemOperand(x11)); // Dereference the handle.
__ Push(x12); // Push the argument.
__ Bind(&entry);
__ Cmp(scratch, argv);
__ B(ne, &loop);
__ Mov(scratch, argc);
__ Mov(argc, new_target);
__ Mov(new_target, scratch);
// x0: argc.
// x3: new.target.
// Initialize all JavaScript callee-saved registers, since they will be seen
// by the garbage collector as part of handlers.
// The original values have been saved in JSEntryStub::GenerateBody().
__ LoadRoot(x19, Heap::kUndefinedValueRootIndex);
__ Mov(x20, x19);
__ Mov(x21, x19);
__ Mov(x22, x19);
__ Mov(x23, x19);
__ Mov(x24, x19);
__ Mov(x25, x19);
// Don't initialize the reserved registers.
// x26 : root register (root).
// x27 : context pointer (cp).
// x28 : JS stack pointer (jssp).
// x29 : frame pointer (fp).
Handle<Code> builtin = is_construct
? BUILTIN_CODE(masm->isolate(), Construct)
: masm->isolate()->builtins()->Call();
__ Call(builtin, RelocInfo::CODE_TARGET);
// Exit the JS internal frame and remove the parameters (except function),
// and return.
}
// Result is in x0. Return.
__ Ret();
}
void Builtins::Generate_JSEntryTrampoline(MacroAssembler* masm) {
Generate_JSEntryTrampolineHelper(masm, false);
}
void Builtins::Generate_JSConstructEntryTrampoline(MacroAssembler* masm) {
Generate_JSEntryTrampolineHelper(masm, true);
}
static void ReplaceClosureCodeWithOptimizedCode(
MacroAssembler* masm, Register optimized_code, Register closure,
Register scratch1, Register scratch2, Register scratch3) {
Register native_context = scratch1;
// Store code entry in the closure.
__ Str(optimized_code, FieldMemOperand(closure, JSFunction::kCodeOffset));
__ Mov(scratch1, optimized_code); // Write barrier clobbers scratch1 below.
__ RecordWriteField(closure, JSFunction::kCodeOffset, scratch1, scratch2,
kLRHasNotBeenSaved, kDontSaveFPRegs, OMIT_REMEMBERED_SET,
OMIT_SMI_CHECK);
// Link the closure into the optimized function list.
__ Ldr(native_context, NativeContextMemOperand());
__ Ldr(scratch2,
ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
__ Str(scratch2,
FieldMemOperand(closure, JSFunction::kNextFunctionLinkOffset));
__ RecordWriteField(closure, JSFunction::kNextFunctionLinkOffset, scratch2,
scratch3, kLRHasNotBeenSaved, kDontSaveFPRegs,
EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
const int function_list_offset =
Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST);
__ Str(closure,
ContextMemOperand(native_context, Context::OPTIMIZED_FUNCTIONS_LIST));
__ Mov(scratch2, closure);
__ RecordWriteContextSlot(native_context, function_list_offset, scratch2,
scratch3, kLRHasNotBeenSaved, kDontSaveFPRegs);
}
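// Note on the stores above: the closure and the native context are heap
// objects, so each pointer written into them (the optimized code object, the
// list link, and the closure itself) has to be reported to the garbage
// collector; that is what the RecordWriteField / RecordWriteContextSlot
// write barriers do.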
static void LeaveInterpreterFrame(MacroAssembler* masm, Register scratch) {
Register args_count = scratch;
// Get the arguments + receiver count.
__ ldr(args_count,
MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
__ Ldr(args_count.W(),
FieldMemOperand(args_count, BytecodeArray::kParameterSizeOffset));
// Leave the frame (also dropping the register file).
__ LeaveFrame(StackFrame::JAVA_SCRIPT);
// Drop receiver + arguments.
__ Drop(args_count, 1);
}
// Tail-call |function_id| if |smi_entry| == |marker|
static void TailCallRuntimeIfMarkerEquals(MacroAssembler* masm,
Register smi_entry,
OptimizationMarker marker,
Runtime::FunctionId function_id) {
Label no_match;
__ CompareAndBranch(smi_entry, Operand(Smi::FromEnum(marker)), ne, &no_match);
GenerateTailCallToReturnedCode(masm, function_id);
__ bind(&no_match);
}
static void MaybeTailCallOptimizedCodeSlot(MacroAssembler* masm,
Register feedback_vector,
Register scratch1, Register scratch2,
Register scratch3) {
// ----------- S t a t e -------------
// -- x0 : argument count (preserved for callee if needed, and caller)
// -- x3 : new target (preserved for callee if needed, and caller)
// -- x1 : target function (preserved for callee if needed, and caller)
// -- feedback vector (preserved for caller if needed)
// -----------------------------------
DCHECK(
!AreAliased(feedback_vector, x0, x1, x3, scratch1, scratch2, scratch3));
Label optimized_code_slot_is_cell, fallthrough;
Register closure = x1;
Register optimized_code_entry = scratch1;
__ Ldr(
optimized_code_entry,
FieldMemOperand(feedback_vector, FeedbackVector::kOptimizedCodeOffset));
// Check if the code entry is a Smi. If yes, we interpret it as an
// optimization marker. Otherwise, interpret it as a weak cell to a code
// object.
__ JumpIfNotSmi(optimized_code_entry, &optimized_code_slot_is_cell);
{
// Optimized code slot is a Smi optimization marker.
// Fall through if no optimization trigger.
__ CompareAndBranch(optimized_code_entry,
Operand(Smi::FromEnum(OptimizationMarker::kNone)), eq,
&fallthrough);
TailCallRuntimeIfMarkerEquals(masm, optimized_code_entry,
OptimizationMarker::kCompileOptimized,
Runtime::kCompileOptimized_NotConcurrent);
TailCallRuntimeIfMarkerEquals(
masm, optimized_code_entry,
OptimizationMarker::kCompileOptimizedConcurrent,
Runtime::kCompileOptimized_Concurrent);
{
// Otherwise, the marker is InOptimizationQueue, so fall through hoping
// that an interrupt will eventually update the slot with optimized code.
if (FLAG_debug_code) {
__ Cmp(
optimized_code_entry,
Operand(Smi::FromEnum(OptimizationMarker::kInOptimizationQueue)));
__ Assert(eq, kExpectedOptimizationSentinel);
}
__ B(&fallthrough);
}
}
{
// Optimized code slot is a WeakCell.
__ bind(&optimized_code_slot_is_cell);
__ Ldr(optimized_code_entry,
FieldMemOperand(optimized_code_entry, WeakCell::kValueOffset));
__ JumpIfSmi(optimized_code_entry, &fallthrough);
// Check if the optimized code is marked for deopt. If it is, call the
// runtime to clear it.
Label found_deoptimized_code;
__ Ldr(scratch2, FieldMemOperand(optimized_code_entry,
Code::kKindSpecificFlags1Offset));
__ TestAndBranchIfAnySet(scratch2, 1 << Code::kMarkedForDeoptimizationBit,
&found_deoptimized_code);
// Optimized code is good, get it into the closure and link the closure into
// the optimized functions list, then tail call the optimized code.
// The feedback vector is no longer used, so re-use it as a scratch
// register.
ReplaceClosureCodeWithOptimizedCode(masm, optimized_code_entry, closure,
scratch2, scratch3, feedback_vector);
__ Add(optimized_code_entry, optimized_code_entry,
Operand(Code::kHeaderSize - kHeapObjectTag));
__ Jump(optimized_code_entry);
// Optimized code slot contains deoptimized code, evict it and re-enter the
// closure's code.
__ bind(&found_deoptimized_code);
GenerateTailCallToReturnedCode(masm, Runtime::kEvictOptimizedCodeSlot);
}
// Fall-through if the optimized code cell is clear and there is no
// optimization marker.
__ bind(&fallthrough);
}
// Advance the current bytecode offset. This simulates what all bytecode
// handlers do upon completion of the underlying operation.
static void AdvanceBytecodeOffset(MacroAssembler* masm, Register bytecode_array,
Register bytecode_offset, Register bytecode,
Register scratch1) {
Register bytecode_size_table = scratch1;
DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
bytecode));
__ Mov(
bytecode_size_table,
Operand(ExternalReference::bytecode_size_table_address(masm->isolate())));
// Check if the bytecode is a Wide or ExtraWide prefix bytecode.
Label load_size, extra_wide;
STATIC_ASSERT(0 == static_cast<int>(interpreter::Bytecode::kWide));
STATIC_ASSERT(1 == static_cast<int>(interpreter::Bytecode::kExtraWide));
__ Cmp(bytecode, Operand(0x1));
__ B(hi, &load_size);
__ B(eq, &extra_wide);
// Load the next bytecode and update table to the wide scaled table.
__ Add(bytecode_offset, bytecode_offset, Operand(1));
__ Ldrb(bytecode, MemOperand(bytecode_array, bytecode_offset));
__ Add(bytecode_size_table, bytecode_size_table,
Operand(kIntSize * interpreter::Bytecodes::kBytecodeCount));
__ B(&load_size);
__ Bind(&extra_wide);
// Load the next bytecode and update table to the extra wide scaled table.
__ Add(bytecode_offset, bytecode_offset, Operand(1));
__ Ldrb(bytecode, MemOperand(bytecode_array, bytecode_offset));
__ Add(bytecode_size_table, bytecode_size_table,
Operand(2 * kIntSize * interpreter::Bytecodes::kBytecodeCount));
__ B(&load_size);
// Load the size of the current bytecode.
__ Bind(&load_size);
__ Ldr(scratch1.W(), MemOperand(bytecode_size_table, bytecode, LSL, 2));
__ Add(bytecode_offset, bytecode_offset, scratch1);
}
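// Sketch of the size-table indexing used above (layout inferred from the
// offsets in this function): the table stores kBytecodeCount int entries per
// operand scale, back to back, so
//   base + 0 * kIntSize * kBytecodeCount  -> single-width operand sizes
//   base + 1 * kIntSize * kBytecodeCount  -> Wide (prefix value 0) sizes
//   base + 2 * kIntSize * kBytecodeCount  -> ExtraWide (prefix value 1) sizes
// and the final load indexes the selected sub-table with the (possibly
// re-read) bytecode value.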
// Generate code for entering a JS function with the interpreter.
// On entry to the function the receiver and arguments have been pushed on the
// stack left to right. The actual argument count matches the formal parameter
// count expected by the function.
//
// The live registers are:
// - x1: the JS function object being called.
// - x3: the incoming new target or generator object
// - cp: our context.
// - fp: our caller's frame pointer.
// - jssp: stack pointer.
// - lr: return address.
//
// The function builds an interpreter frame. See InterpreterFrameConstants in
// frames.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
ProfileEntryHookStub::MaybeCallEntryHook(masm);
Register closure = x1;
Register feedback_vector = x2;
// Load the feedback vector from the closure.
__ Ldr(feedback_vector,
FieldMemOperand(closure, JSFunction::kFeedbackVectorOffset));
__ Ldr(feedback_vector, FieldMemOperand(feedback_vector, Cell::kValueOffset));
// Read off the optimized code slot in the feedback vector, and if there
// is optimized code or an optimization marker, call that instead.
MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, x7, x4, x5);
// Open a frame scope to indicate that there is a frame on the stack. The
// MANUAL indicates that the scope shouldn't actually generate code to set up
// the frame (that is done below).
FrameScope frame_scope(masm, StackFrame::MANUAL);
__ Push(lr, fp, cp, closure);
__ Add(fp, jssp, StandardFrameConstants::kFixedFrameSizeFromFp);
// Get the bytecode array from the function object (or from the DebugInfo if
// it is present) and load it into kInterpreterBytecodeArrayRegister.
Label maybe_load_debug_bytecode_array, bytecode_array_loaded;
__ Ldr(x0, FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
__ Ldr(kInterpreterBytecodeArrayRegister,
FieldMemOperand(x0, SharedFunctionInfo::kFunctionDataOffset));
__ Ldr(x11, FieldMemOperand(x0, SharedFunctionInfo::kDebugInfoOffset));
__ JumpIfNotSmi(x11, &maybe_load_debug_bytecode_array);
__ Bind(&bytecode_array_loaded);
// Increment invocation count for the function.
__ Ldr(x11, FieldMemOperand(closure, JSFunction::kFeedbackVectorOffset));
__ Ldr(x11, FieldMemOperand(x11, Cell::kValueOffset));
__ Ldr(w10, FieldMemOperand(x11, FeedbackVector::kInvocationCountOffset));
__ Add(w10, w10, Operand(1));
__ Str(w10, FieldMemOperand(x11, FeedbackVector::kInvocationCountOffset));
// Check function data field is actually a BytecodeArray object.
if (FLAG_debug_code) {
__ AssertNotSmi(kInterpreterBytecodeArrayRegister,
kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
__ CompareObjectType(kInterpreterBytecodeArrayRegister, x0, x0,
BYTECODE_ARRAY_TYPE);
__ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
}
// Reset code age.
__ Mov(x10, Operand(BytecodeArray::kNoAgeBytecodeAge));
__ Strb(x10, FieldMemOperand(kInterpreterBytecodeArrayRegister,
BytecodeArray::kBytecodeAgeOffset));
// Load the initial bytecode offset.
__ Mov(kInterpreterBytecodeOffsetRegister,
Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
// Push bytecode array and Smi tagged bytecode array offset.
__ SmiTag(x0, kInterpreterBytecodeOffsetRegister);
__ Push(kInterpreterBytecodeArrayRegister, x0);
// Allocate the local and temporary register file on the stack.
{
// Load frame size from the BytecodeArray object.
__ Ldr(w11, FieldMemOperand(kInterpreterBytecodeArrayRegister,
BytecodeArray::kFrameSizeOffset));
// Do a stack check to ensure we don't go over the limit.
Label ok;
DCHECK(jssp.Is(__ StackPointer()));
__ Sub(x10, jssp, Operand(x11));
__ CompareRoot(x10, Heap::kRealStackLimitRootIndex);
__ B(hs, &ok);
__ CallRuntime(Runtime::kThrowStackOverflow);
__ Bind(&ok);
// If ok, push undefined as the initial value for all register file entries.
// Note: there should always be at least one stack slot for the return
// register in the register file.
Label loop_header;
__ LoadRoot(x10, Heap::kUndefinedValueRootIndex);
// TODO(rmcilroy): Ensure we always have an even number of registers to
// allow stack to be 16-byte aligned (and remove need for jssp).
__ Lsr(x11, x11, kPointerSizeLog2);
__ PushMultipleTimes(x10, x11);
__ Bind(&loop_header);
}
// If the bytecode array has a valid incoming new target or generator object
// register, initialize it with the incoming value that was passed in x3.
Label no_incoming_new_target_or_generator_register;
__ Ldrsw(x10,
FieldMemOperand(
kInterpreterBytecodeArrayRegister,
BytecodeArray::kIncomingNewTargetOrGeneratorRegisterOffset));
__ Cbz(x10, &no_incoming_new_target_or_generator_register);
__ Str(x3, MemOperand(fp, x10, LSL, kPointerSizeLog2));
__ Bind(&no_incoming_new_target_or_generator_register);
// Load accumulator with undefined.
__ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
// Load the dispatch table into a register and dispatch to the bytecode
// handler at the current bytecode offset.
Label do_dispatch;
__ bind(&do_dispatch);
__ Mov(kInterpreterDispatchTableRegister,
Operand(ExternalReference::interpreter_dispatch_table_address(
masm->isolate())));
__ Ldrb(x1, MemOperand(kInterpreterBytecodeArrayRegister,
kInterpreterBytecodeOffsetRegister));
__ Mov(x1, Operand(x1, LSL, kPointerSizeLog2));
__ Ldr(ip0, MemOperand(kInterpreterDispatchTableRegister, x1));
__ Call(ip0);
masm->isolate()->heap()->SetInterpreterEntryReturnPCOffset(masm->pc_offset());
// Any returns to the entry trampoline are either due to the return bytecode
// or the interpreter tail calling a builtin and then a dispatch.
// Get bytecode array and bytecode offset from the stack frame.
__ Ldr(kInterpreterBytecodeArrayRegister,
MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
__ Ldr(kInterpreterBytecodeOffsetRegister,
MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
__ SmiUntag(kInterpreterBytecodeOffsetRegister);
// Check if we should return.
Label do_return;
__ Ldrb(x1, MemOperand(kInterpreterBytecodeArrayRegister,
kInterpreterBytecodeOffsetRegister));
__ Cmp(x1, Operand(static_cast<int>(interpreter::Bytecode::kReturn)));
__ B(&do_return, eq);
// Advance to the next bytecode and dispatch.
AdvanceBytecodeOffset(masm, kInterpreterBytecodeArrayRegister,
kInterpreterBytecodeOffsetRegister, x1, x2);
__ B(&do_dispatch);
__ bind(&do_return);
// The return value is in x0.
LeaveInterpreterFrame(masm, x2);
__ Ret();
// Load debug copy of the bytecode array if it exists.
// kInterpreterBytecodeArrayRegister is already loaded with
// SharedFunctionInfo::kFunctionDataOffset.
__ Bind(&maybe_load_debug_bytecode_array);
__ Ldrsw(x10, UntagSmiFieldMemOperand(x11, DebugInfo::kFlagsOffset));
__ TestAndBranchIfAllClear(x10, DebugInfo::kHasBreakInfo,
&bytecode_array_loaded);
__ Ldr(kInterpreterBytecodeArrayRegister,
FieldMemOperand(x11, DebugInfo::kDebugBytecodeArrayOffset));
__ B(&bytecode_array_loaded);
}
static void Generate_StackOverflowCheck(MacroAssembler* masm, Register num_args,
Register scratch,
Label* stack_overflow) {
// Check the stack for overflow.
// We are not trying to catch interruptions (e.g. debug break and
// preemption) here, so the "real stack limit" is checked.
Label enough_stack_space;
__ LoadRoot(scratch, Heap::kRealStackLimitRootIndex);
// Make scratch the space we have left. The stack might already be overflowed
// here which will cause scratch to become negative.
__ Sub(scratch, jssp, scratch);
// Check if the arguments will overflow the stack.
__ Cmp(scratch, Operand(num_args, LSL, kPointerSizeLog2));
__ B(le, stack_overflow);
}
static void Generate_InterpreterPushArgs(MacroAssembler* masm,
Register num_args, Register index,
Register last_arg, Register stack_addr,
Register scratch) {
__ Mov(scratch, num_args);
__ lsl(scratch, scratch, kPointerSizeLog2);
__ sub(last_arg, index, scratch);
// Set stack pointer and where to stop.
__ Mov(stack_addr, jssp);
__ Claim(scratch, 1);
// Push the arguments.
Label loop_header, loop_check;
__ B(&loop_check);
__ Bind(&loop_header);
// TODO(rmcilroy): Push two at a time once we ensure we keep stack aligned.
__ Ldr(scratch, MemOperand(index, -kPointerSize, PostIndex));
__ Str(scratch, MemOperand(stack_addr, -kPointerSize, PreIndex));
__ Bind(&loop_check);
__ Cmp(index, last_arg);
__ B(gt, &loop_header);
}
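// Sketch of the copy above, as derived from the loop: `last_arg` is set to
// index - num_args * kPointerSize, num_args slots are Claim()ed, and each
// iteration moves one value from the interpreter's argument area
// (post-decrementing `index`) into the newly claimed slots (pre-decrementing
// `stack_addr`), stopping once `index` reaches `last_arg`.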
// static
void Builtins::Generate_InterpreterPushArgsThenCallImpl(
MacroAssembler* masm, ConvertReceiverMode receiver_mode,
InterpreterPushArgsMode mode) {
// ----------- S t a t e -------------
// -- x0 : the number of arguments (not including the receiver)
// -- x2 : the address of the first argument to be pushed. Subsequent
// arguments should be consecutive above this, in the same order as
// they are to be pushed onto the stack.
// -- x1 : the target to call (can be any Object).
// -----------------------------------
Label stack_overflow;
// Add one for the receiver.
__ add(x3, x0, Operand(1));
// Add a stack check before pushing arguments.
Generate_StackOverflowCheck(masm, x3, x6, &stack_overflow);
// Push "undefined" as the receiver arg if we need to.
if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
__ PushRoot(Heap::kUndefinedValueRootIndex);
__ Mov(x3, x0); // Argument count is correct.
}
// Push the arguments. x2, x4, x5, x6 will be modified.
Generate_InterpreterPushArgs(masm, x3, x2, x4, x5, x6);
if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
__ Pop(x2); // Pass the spread in a register
__ Sub(x0, x0, 1); // Subtract one for spread
}
// Call the target.
if (mode == InterpreterPushArgsMode::kJSFunction) {
__ Jump(
masm->isolate()->builtins()->CallFunction(ConvertReceiverMode::kAny),
RelocInfo::CODE_TARGET);
} else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
__ Jump(BUILTIN_CODE(masm->isolate(), CallWithSpread),
RelocInfo::CODE_TARGET);
} else {
__ Jump(masm->isolate()->builtins()->Call(ConvertReceiverMode::kAny),
RelocInfo::CODE_TARGET);
}
__ bind(&stack_overflow);
{
__ TailCallRuntime(Runtime::kThrowStackOverflow);
__ Unreachable();
}
}
// static
void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
MacroAssembler* masm, InterpreterPushArgsMode mode) {
// ----------- S t a t e -------------
// -- x0 : argument count (not including receiver)
// -- x3 : new target
// -- x1 : constructor to call
// -- x2 : allocation site feedback if available, undefined otherwise
// -- x4 : address of the first argument
// -----------------------------------
Label stack_overflow;
// Push a slot for the receiver.
__ Push(xzr);
// Add a stack check before pushing arguments.
Generate_StackOverflowCheck(masm, x0, x7, &stack_overflow);
// Push the arguments. x5, x4, x6, x7 will be modified.
Generate_InterpreterPushArgs(masm, x0, x4, x5, x6, x7);
if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
__ Pop(x2); // Pass the spread in a register
__ Sub(x0, x0, 1); // Subtract one for spread
} else {
__ AssertUndefinedOrAllocationSite(x2, x6);
}
if (mode == InterpreterPushArgsMode::kJSFunction) {
__ AssertFunction(x1);
// Tail call to the function-specific construct stub (still in the caller
// context at this point).
__ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
__ Ldr(x4, FieldMemOperand(x4, SharedFunctionInfo::kConstructStubOffset));
__ Add(x4, x4, Code::kHeaderSize - kHeapObjectTag);
__ Br(x4);
} else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
// Call the constructor with x0, x1, and x3 unmodified.
__ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
RelocInfo::CODE_TARGET);
} else {
DCHECK_EQ(InterpreterPushArgsMode::kOther, mode);
// Call the constructor with x0, x1, and x3 unmodified.
__ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
}
__ bind(&stack_overflow);
{
__ TailCallRuntime(Runtime::kThrowStackOverflow);
__ Unreachable();
}
}
static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
// Set the return address to the correct point in the interpreter entry
// trampoline.
Smi* interpreter_entry_return_pc_offset(
masm->isolate()->heap()->interpreter_entry_return_pc_offset());
DCHECK_NE(interpreter_entry_return_pc_offset, Smi::kZero);
__ LoadObject(x1, BUILTIN_CODE(masm->isolate(), InterpreterEntryTrampoline));
__ Add(lr, x1, Operand(interpreter_entry_return_pc_offset->value() +
Code::kHeaderSize - kHeapObjectTag));
// Initialize the dispatch table register.
__ Mov(kInterpreterDispatchTableRegister,
Operand(ExternalReference::interpreter_dispatch_table_address(
masm->isolate())));
// Get the bytecode array pointer from the frame.
__ Ldr(kInterpreterBytecodeArrayRegister,
MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
if (FLAG_debug_code) {
// Check function data field is actually a BytecodeArray object.
__ AssertNotSmi(kInterpreterBytecodeArrayRegister,
kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
__ CompareObjectType(kInterpreterBytecodeArrayRegister, x1, x1,
BYTECODE_ARRAY_TYPE);
__ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
}
// Get the target bytecode offset from the frame.
__ Ldr(kInterpreterBytecodeOffsetRegister,
MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
__ SmiUntag(kInterpreterBytecodeOffsetRegister);
// Dispatch to the target bytecode.
__ Ldrb(x1, MemOperand(kInterpreterBytecodeArrayRegister,
kInterpreterBytecodeOffsetRegister));
__ Mov(x1, Operand(x1, LSL, kPointerSizeLog2));
__ Ldr(ip0, MemOperand(kInterpreterDispatchTableRegister, x1));
__ Jump(ip0);
}
void Builtins::Generate_InterpreterEnterBytecodeAdvance(MacroAssembler* masm) {
// Get bytecode array and bytecode offset from the stack frame.
__ ldr(kInterpreterBytecodeArrayRegister,
MemOperand(fp, InterpreterFrameConstants::kBytecodeArrayFromFp));
__ ldr(kInterpreterBytecodeOffsetRegister,
MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
__ SmiUntag(kInterpreterBytecodeOffsetRegister);
// Load the current bytecode.
__ Ldrb(x1, MemOperand(kInterpreterBytecodeArrayRegister,
kInterpreterBytecodeOffsetRegister));
// Advance to the next bytecode.
AdvanceBytecodeOffset(masm, kInterpreterBytecodeArrayRegister,
kInterpreterBytecodeOffsetRegister, x1, x2);
// Convert new bytecode offset to a Smi and save in the stackframe.
__ SmiTag(x2, kInterpreterBytecodeOffsetRegister);
__ Str(x2, MemOperand(fp, InterpreterFrameConstants::kBytecodeOffsetFromFp));
Generate_InterpreterEnterBytecode(masm);
}
void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
Generate_InterpreterEnterBytecode(masm);
}
void Builtins::Generate_CheckOptimizationMarker(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x0 : argument count (preserved for callee)
// -- x3 : new target (preserved for callee)
// -- x1 : target function (preserved for callee)
// -----------------------------------
Register closure = x1;
// Get the feedback vector.
Register feedback_vector = x2;
__ Ldr(feedback_vector,
FieldMemOperand(closure, JSFunction::kFeedbackVectorOffset));
__ Ldr(feedback_vector, FieldMemOperand(feedback_vector, Cell::kValueOffset));
// The feedback vector must be defined.
if (FLAG_debug_code) {
__ CompareRoot(feedback_vector, Heap::kUndefinedValueRootIndex);
__ Assert(ne, BailoutReason::kExpectedFeedbackVector);
}
// Is there an optimization marker or optimized code in the feedback vector?
MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, x7, x4, x5);
// Otherwise, tail call the SFI code.
GenerateTailCallToSharedCode(masm);
}
void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x0 : argument count (preserved for callee)
// -- x3 : new target (preserved for callee)
// -- x1 : target function (preserved for callee)
// -----------------------------------
// First, look up the code; maybe we don't need to compile.
Label gotta_call_runtime;
Register closure = x1;
Register feedback_vector = x2;
// Do we have a valid feedback vector?
__ Ldr(feedback_vector,
FieldMemOperand(closure, JSFunction::kFeedbackVectorOffset));
__ Ldr(feedback_vector, FieldMemOperand(feedback_vector, Cell::kValueOffset));
__ JumpIfRoot(feedback_vector, Heap::kUndefinedValueRootIndex,
&gotta_call_runtime);
// Is there an optimization marker or optimized code in the feedback vector?
MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, x7, x4, x5);
// We found no optimized code.
Register entry = x7;
__ Ldr(entry,
FieldMemOperand(closure, JSFunction::kSharedFunctionInfoOffset));
// If SFI points to anything other than CompileLazy, install that.
__ Ldr(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
__ Move(x5, masm->CodeObject());
__ Cmp(entry, x5);
__ B(eq, &gotta_call_runtime);
// Install the SFI's code entry.
__ Str(entry, FieldMemOperand(closure, JSFunction::kCodeOffset));
__ Mov(x10, entry); // Write barrier clobbers x10 below.
__ RecordWriteField(closure, JSFunction::kCodeOffset, x10, x5,
kLRHasNotBeenSaved, kDontSaveFPRegs, OMIT_REMEMBERED_SET,
OMIT_SMI_CHECK);
__ Add(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
__ Jump(entry);
__ Bind(&gotta_call_runtime);
GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
}
void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x0 : argument count (preserved for callee)
// -- x1 : new target (preserved for callee)
// -- x3 : target function (preserved for callee)
// -----------------------------------
Label failed;
{
FrameScope scope(masm, StackFrame::INTERNAL);
// Preserve argument count for later compare.
__ Move(x4, x0);
// Push a copy of the target function and the new target.
__ SmiTag(x0);
// Push another copy as a parameter to the runtime call.
__ Push(x0, x1, x3, x1);
// Copy arguments from caller (stdlib, foreign, heap).
Label args_done;
for (int j = 0; j < 4; ++j) {
Label over;
if (j < 3) {
__ cmp(x4, Operand(j));
__ B(ne, &over);
}
for (int i = j - 1; i >= 0; --i) {
__ ldr(x4, MemOperand(fp, StandardFrameConstants::kCallerSPOffset +
i * kPointerSize));
__ push(x4);
}
for (int i = 0; i < 3 - j; ++i) {
__ PushRoot(Heap::kUndefinedValueRootIndex);
}
if (j < 3) {
__ jmp(&args_done);
__ bind(&over);
}
}
__ bind(&args_done);
// Call the runtime; on success, unwind this frame and the parent frame.
__ CallRuntime(Runtime::kInstantiateAsmJs, 4);
// A smi 0 is returned on failure, an object on success.
__ JumpIfSmi(x0, &failed);
__ Drop(2);
__ pop(x4);
__ SmiUntag(x4);
scope.GenerateLeaveFrame();
__ add(x4, x4, Operand(1));
__ Drop(x4);
__ Ret();
__ bind(&failed);
// Restore target function and new target.
__ Pop(x3, x1, x0);
__ SmiUntag(x0);
}
// On failure, tail call back to regular JS by re-calling the function,
// which has been reset to the CompileLazy builtin.
__ Ldr(x4, FieldMemOperand(x1, JSFunction::kCodeOffset));
__ Add(x4, x4, Operand(Code::kHeaderSize - kHeapObjectTag));
__ Jump(x4);
}
void Builtins::Generate_NotifyBuiltinContinuation(MacroAssembler* masm) {
{
FrameScope scope(masm, StackFrame::INTERNAL);
// Preserve possible return result from lazy deopt.
__ Push(x0);
// Pass the function and deoptimization type to the runtime system.
__ CallRuntime(Runtime::kNotifyStubFailure, false);
__ Pop(x0);
}
// Ignore state (pushed by Deoptimizer::EntryGenerator::Generate).
__ Drop(1);
// Jump to the ContinueToBuiltin stub. Deoptimizer::EntryGenerator::Generate
// loads this into lr before it jumps here.
__ Br(lr);
}
namespace {
void Generate_ContinueToBuiltinHelper(MacroAssembler* masm,
bool java_script_builtin,
bool with_result) {
const RegisterConfiguration* config(RegisterConfiguration::Default());
int allocatable_register_count = config->num_allocatable_general_registers();
if (with_result) {
// Overwrite the hole inserted by the deoptimizer with the return value from
// the LAZY deopt point.
__ Str(x0, MemOperand(
jssp,
config->num_allocatable_general_registers() * kPointerSize +
BuiltinContinuationFrameConstants::kFixedFrameSize));
}
for (int i = allocatable_register_count - 1; i >= 0; --i) {
int code = config->GetAllocatableGeneralCode(i);
__ Pop(Register::from_code(code));
if (java_script_builtin && code == kJavaScriptCallArgCountRegister.code()) {
__ SmiUntag(Register::from_code(code));
}
}
__ ldr(fp,
MemOperand(jssp,
BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
__ Pop(ip0);
__ Add(jssp, jssp,
Operand(BuiltinContinuationFrameConstants::kFixedFrameSizeFromFp));
__ Pop(lr);
__ Add(ip0, ip0, Operand(Code::kHeaderSize - kHeapObjectTag));
__ Br(ip0);
}
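// Note on the epilogue above: the allocatable general registers are popped in
// reverse allocation order (untagging the Smi argument count for JavaScript
// builtins), fp is reloaded from the continuation frame, the continuation's
// Code object is popped into ip0, the frame and lr are restored, and control
// transfers to the first instruction past the Code header.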
} // namespace
void Builtins::Generate_ContinueToCodeStubBuiltin(MacroAssembler* masm) {
Generate_ContinueToBuiltinHelper(masm, false, false);
}
void Builtins::Generate_ContinueToCodeStubBuiltinWithResult(
MacroAssembler* masm) {
Generate_ContinueToBuiltinHelper(masm, false, true);
}
void Builtins::Generate_ContinueToJavaScriptBuiltin(MacroAssembler* masm) {
Generate_ContinueToBuiltinHelper(masm, true, false);
}
void Builtins::Generate_ContinueToJavaScriptBuiltinWithResult(
MacroAssembler* masm) {
Generate_ContinueToBuiltinHelper(masm, true, true);
}
static void Generate_NotifyDeoptimizedHelper(MacroAssembler* masm,
Deoptimizer::BailoutType type) {
{
FrameScope scope(masm, StackFrame::INTERNAL);
// Pass the deoptimization type to the runtime system.
__ Mov(x0, Smi::FromInt(static_cast<int>(type)));
__ Push(x0);
__ CallRuntime(Runtime::kNotifyDeoptimized);
}
// Get the full codegen state from the stack and untag it.
Register state = x6;
__ Peek(state, 0);
__ SmiUntag(state);
// Switch on the state.
Label with_tos_register, unknown_state;
__ CompareAndBranch(state,
static_cast<int>(Deoptimizer::BailoutState::NO_REGISTERS),
ne, &with_tos_register);
__ Drop(1); // Remove state.
__ Ret();
__ Bind(&with_tos_register);
// Reload TOS register.
DCHECK_EQ(kInterpreterAccumulatorRegister.code(), x0.code());
__ Peek(x0, kPointerSize);
__ CompareAndBranch(state,
static_cast<int>(Deoptimizer::BailoutState::TOS_REGISTER),
ne, &unknown_state);
__ Drop(2); // Remove state and TOS.
__ Ret();
__ Bind(&unknown_state);
__ Abort(kInvalidFullCodegenState);
}
void Builtins::Generate_NotifyDeoptimized(MacroAssembler* masm) {
Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::EAGER);
}
void Builtins::Generate_NotifyLazyDeoptimized(MacroAssembler* masm) {
Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::LAZY);
}
void Builtins::Generate_NotifySoftDeoptimized(MacroAssembler* masm) {
Generate_NotifyDeoptimizedHelper(masm, Deoptimizer::SOFT);
}
static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
bool has_handler_frame) {
// Lookup the function in the JavaScript frame.
if (has_handler_frame) {
__ Ldr(x0, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
__ Ldr(x0, MemOperand(x0, JavaScriptFrameConstants::kFunctionOffset));
} else {
__ Ldr(x0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
}
{
FrameScope scope(masm, StackFrame::INTERNAL);
// Pass function as argument.
__ Push(x0);
__ CallRuntime(Runtime::kCompileForOnStackReplacement);
}
// If the code object is null, just return to the caller.
Label skip;
__ CompareAndBranch(x0, Smi::kZero, ne, &skip);
__ Ret();
__ Bind(&skip);
// Drop any potential handler frame that may be sitting on top of the actual
// JavaScript frame. This is the case when OSR is triggered from bytecode.
if (has_handler_frame) {
__ LeaveFrame(StackFrame::STUB);
}
// Load deoptimization data from the code object.
// <deopt_data> = <code>[#deoptimization_data_offset]
__ Ldr(x1, MemOperand(x0, Code::kDeoptimizationDataOffset - kHeapObjectTag));
// Load the OSR entrypoint offset from the deoptimization data.
// <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
__ Ldrsw(w1, UntagSmiFieldMemOperand(
x1, FixedArray::OffsetOfElementAt(
DeoptimizationInputData::kOsrPcOffsetIndex)));
// Compute the target address = code_obj + header_size + osr_offset
// <entry_addr> = <code_obj> + #header_size + <osr_offset>
__ Add(x0, x0, x1);
__ Add(lr, x0, Code::kHeaderSize - kHeapObjectTag);
// And "return" to the OSR entry point of the function.
__ Ret();
}
void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
Generate_OnStackReplacementHelper(masm, false);
}
void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
Generate_OnStackReplacementHelper(masm, true);
}
// static
void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x0 : argc
// -- jssp[0] : argArray (if argc == 2)
// -- jssp[8] : thisArg (if argc >= 1)
// -- jssp[16] : receiver
// -----------------------------------
ASM_LOCATION("Builtins::Generate_FunctionPrototypeApply");
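// JS-level behaviour implemented here (illustration only):
//   f.apply(thisArg, argArray)
// e.g. Math.max.apply(null, [1, 2, 3]) calls Math.max with arguments 1, 2, 3.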
Register argc = x0;
Register arg_array = x2;
Register receiver = x1;
Register this_arg = x0;
Register undefined_value = x3;
Register null_value = x4;
__ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
__ LoadRoot(null_value, Heap::kNullValueRootIndex);
// 1. Load receiver into x1, argArray into x2 (if present), remove all
// arguments from the stack (including the receiver), and push thisArg (if
// present) instead.
{
// Claim (2 - argc) dummy arguments from the stack, to put the stack in a
// consistent state for a simple pop operation.
__ Claim(2);
__ Drop(argc);
// ----------- S t a t e -------------
// -- x0 : argc
// -- jssp[0] : argArray (dummy value if argc <= 1)
// -- jssp[8] : thisArg (dummy value if argc == 0)
// -- jssp[16] : receiver
// -----------------------------------
__ Cmp(argc, 1);
__ Pop(arg_array, this_arg); // Overwrites argc.
__ CmovX(this_arg, undefined_value, lo); // undefined if argc == 0.
__ CmovX(arg_array, undefined_value, ls); // undefined if argc <= 1.
__ Peek(receiver, 0);
__ Poke(this_arg, 0);
}
// ----------- S t a t e -------------
// -- x2 : argArray
// -- x1 : receiver
// -- jssp[0] : thisArg
// -----------------------------------
// 2. We don't need to check explicitly for callable receiver here,
// since that's the first thing the Call/CallWithArrayLike builtins
// will do.
// 3. Tail call with no arguments if argArray is null or undefined.
Label no_arguments;
__ Cmp(arg_array, null_value);
__ Ccmp(arg_array, undefined_value, ZFlag, ne);
__ B(eq, &no_arguments);
// 4a. Apply the receiver to the given argArray.
__ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
RelocInfo::CODE_TARGET);
// 4b. The argArray is either null or undefined, so we tail call without any
// arguments to the receiver.
__ Bind(&no_arguments);
{
__ Mov(x0, 0);
DCHECK(receiver.Is(x1));
__ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
}
// static
void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
Register argc = x0;
Register function = x1;
Register scratch1 = x10;
Register scratch2 = x11;
ASM_LOCATION("Builtins::Generate_FunctionPrototypeCall");
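// JS-level behaviour implemented here (illustration only; the names are
// hypothetical):
//   f.call(thisArg, ...args)
// e.g. greet.call(obj, "hi") invokes greet with receiver obj and argument "hi".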
// 1. Make sure we have at least one argument.
{
Label done;
__ Cbnz(argc, &done);
__ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex);
__ Push(scratch1);
__ Mov(argc, 1);
__ Bind(&done);
}
// 2. Get the callable to call (passed as receiver) from the stack.
__ Peek(function, Operand(argc, LSL, kXRegSizeLog2));
// 3. Shift arguments and return address one slot down on the stack
// (overwriting the original receiver). Adjust argument count to make
// the original first argument the new receiver.
{
Label loop;
// Calculate the copy start address (destination). Copy end address is jssp.
__ Add(scratch2, jssp, Operand(argc, LSL, kPointerSizeLog2));
__ Sub(scratch1, scratch2, kPointerSize);
__ Bind(&loop);
__ Ldr(x12, MemOperand(scratch1, -kPointerSize, PostIndex));
__ Str(x12, MemOperand(scratch2, -kPointerSize, PostIndex));
__ Cmp(scratch1, jssp);
__ B(ge, &loop);
// Adjust the actual number of arguments and remove the top element
// (which is a copy of the last argument).
__ Sub(argc, argc, 1);
__ Drop(1);
}
// 4. Call the callable.
__ Jump(masm->isolate()->builtins()->Call(), RelocInfo::CODE_TARGET);
}
void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x0 : argc
// -- jssp[0] : argumentsList (if argc == 3)
// -- jssp[8] : thisArgument (if argc >= 2)
// -- jssp[16] : target (if argc >= 1)
// -- jssp[24] : receiver
// -----------------------------------
ASM_LOCATION("Builtins::Generate_ReflectApply");
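// JS-level behaviour implemented here (illustration only):
//   Reflect.apply(target, thisArgument, argumentsList)
// e.g. Reflect.apply(Math.max, undefined, [1, 2, 3]) === 3.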
Register argc = x0;
Register arguments_list = x2;
Register target = x1;
Register this_argument = x4;
Register undefined_value = x3;
__ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
// 1. Load target into x1 (if present), argumentsList into x2 (if present),
// remove all arguments from the stack (including the receiver), and push
// thisArgument (if present) instead.
{
// Claim (3 - argc) dummy arguments from the stack, to put the stack in a
// consistent state for a simple pop operation.
__ Claim(3);
__ Drop(argc);
// ----------- S t a t e -------------
// -- x0 : argc
// -- jssp[0] : argumentsList (dummy value if argc <= 2)
// -- jssp[8] : thisArgument (dummy value if argc <= 1)
// -- jssp[16] : target (dummy value if argc == 0)
// -- jssp[24] : receiver
// -----------------------------------
__ Adds(x10, argc, 0); // Preserve argc, and set the Z flag if it is zero.
__ Pop(arguments_list, this_argument, target); // Overwrites argc.
__ CmovX(target, undefined_value, eq); // undefined if argc == 0.
__ Cmp(x10, 2);
__ CmovX(this_argument, undefined_value, lo); // undefined if argc <= 1.
__ CmovX(arguments_list, undefined_value, ls); // undefined if argc <= 2.
__ Poke(this_argument, 0); // Overwrite receiver.
}
// ----------- S t a t e -------------
// -- x2 : argumentsList
// -- x1 : target
// -- jssp[0] : thisArgument
// -----------------------------------
// 2. We don't need to check explicitly for callable target here,
// since that's the first thing the Call/CallWithArrayLike builtins
// will do.
// 3. Apply the target to the given argumentsList.
__ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
RelocInfo::CODE_TARGET);
}
void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x0 : argc
// -- jssp[0] : new.target (optional)
// -- jssp[8] : argumentsList
// -- jssp[16] : target
// -- jssp[24] : receiver
// -----------------------------------
ASM_LOCATION("Builtins::Generate_ReflectConstruct");
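// JS-level behaviour implemented here (illustration only):
//   Reflect.construct(target, argumentsList[, newTarget])
// e.g. Reflect.construct(Date, [2000, 0, 1]) behaves like new Date(2000, 0, 1);
// supplying newTarget changes the prototype of the created object.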
Register argc = x0;
Register arguments_list = x2;
Register target = x1;
Register new_target = x3;
Register undefined_value = x4;
__ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
// 1. Load target into x1 (if present), argumentsList into x2 (if present),
// new.target into x3 (if present, otherwise use target), remove all
// arguments from the stack (including the receiver), and push thisArgument
// (if present) instead.
{
// Claim (3 - argc) dummy arguments from the stack, to put the stack in a
// consistent state for a simple pop operation.
__ Claim(3);
__ Drop(argc);
// ----------- S t a t e -------------
// -- x0 : argc
// -- jssp[0] : new.target (dummy value if argc <= 2)
// -- jssp[8] : argumentsList (dummy value if argc <= 1)
// -- jssp[16] : target (dummy value if argc == 0)
// -- jssp[24] : receiver
// -----------------------------------
__ Adds(x10, argc, 0); // Preserve argc, and set the Z flag if it is zero.
__ Pop(new_target, arguments_list, target); // Overwrites argc.
__ CmovX(target, undefined_value, eq); // undefined if argc == 0.
__ Cmp(x10, 2);
__ CmovX(arguments_list, undefined_value, lo); // undefined if argc <= 1.
__ CmovX(new_target, target, ls); // target if argc <= 2.
__ Poke(undefined_value, 0); // Overwrite receiver.
}
// ----------- S t a t e -------------
// -- x2 : argumentsList
// -- x1 : target
// -- x3 : new.target
// -- jssp[0] : receiver (undefined)
// -----------------------------------
// 2. We don't need to check explicitly for constructor target here,
// since that's the first thing the Construct/ConstructWithArrayLike
// builtins will do.
// 3. We don't need to check explicitly for constructor new.target here,
// since that's the second thing the Construct/ConstructWithArrayLike
// builtins will do.
// 4. Construct the target with the given new.target and argumentsList.
__ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithArrayLike),
RelocInfo::CODE_TARGET);
}
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
__ Push(lr, fp);
__ Mov(x11, StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR));
__ Push(x11, x1); // x1: function
// We do not yet push the number of arguments, to maintain a 16-byte aligned
// stack pointer. This is done in step (3) in
// Generate_ArgumentsAdaptorTrampoline.
__ Add(fp, jssp, StandardFrameConstants::kFixedFrameSizeFromFp);
}
static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x0 : result being passed through
// -----------------------------------
// Get the number of arguments passed (as a smi), tear down the frame and
// then drop the parameters and the receiver.
__ Ldr(x10, MemOperand(fp, -(StandardFrameConstants::kFixedFrameSizeFromFp +
kPointerSize)));
__ Mov(jssp, fp);
__ Pop(fp, lr);
// Drop actual parameters and receiver.
// TODO(all): This will need to be rounded up to a multiple of two when using
// the CSP, as we will have claimed an even number of slots in total for the
// parameters.
__ DropBySMI(x10, kXRegSize);
__ Drop(1);
}
// static
void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
Handle<Code> code) {
// ----------- S t a t e -------------
// -- x1 : target
// -- x0 : number of parameters on the stack (not including the receiver)
// -- x2 : arguments list (a FixedArray)
// -- x4 : len (number of elements to push from args)
// -- x3 : new.target (for [[Construct]])
// -----------------------------------
__ AssertFixedArray(x2);
Register arguments_list = x2;
Register argc = x0;
Register len = x4;
// Check for stack overflow.
{
// Check the stack for overflow. We are not trying to catch interruptions
// (e.g. debug break and preemption) here, so check the "real stack limit".
Label done;
__ LoadRoot(x10, Heap::kRealStackLimitRootIndex);
// Make x10 the space we have left. The stack might already be overflowed
// here which will cause x10 to become negative.
__ Sub(x10, masm->StackPointer(), x10);
// Check if the arguments will overflow the stack.
__ Cmp(x10, Operand(len, LSL, kPointerSizeLog2));
__ B(gt, &done); // Signed comparison.
__ TailCallRuntime(Runtime::kThrowStackOverflow);
__ Bind(&done);
}
// Push arguments onto the stack (thisArgument is already on the stack).
{
Label done, push, loop;
Register src = x5;
__ Add(src, arguments_list, FixedArray::kHeaderSize - kHeapObjectTag);
__ Add(argc, argc, len); // The 'len' argument for Call() or Construct().
__ Cbz(len, &done);
Register the_hole_value = x11;
Register undefined_value = x12;
// We do not use the CompareRoot macro as it would do a LoadRoot behind the
// scenes and we want to avoid that in a loop.
__ LoadRoot(the_hole_value, Heap::kTheHoleValueRootIndex);
__ LoadRoot(undefined_value, Heap::kUndefinedValueRootIndex);
__ Claim(len);
__ Bind(&loop);
__ Sub(len, len, 1);
__ Ldr(x10, MemOperand(src, kPointerSize, PostIndex));
__ Cmp(x10, the_hole_value);
__ Csel(x10, x10, undefined_value, ne);
__ Poke(x10, Operand(len, LSL, kPointerSizeLog2));
__ Cbnz(len, &loop);
__ Bind(&done);
}
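// Note on the loop above: elements of the arguments FixedArray that hold
// the_hole (e.g. holes from sparse arrays) are replaced with undefined while
// being pushed, so the callee never observes a hole value.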
// Tail-call to the actual Call or Construct builtin.
__ Jump(code, RelocInfo::CODE_TARGET);
}
// static
void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
CallOrConstructMode mode,
Handle<Code> code) {
// ----------- S t a t e -------------
// -- x0 : the number of arguments (not including the receiver)
// -- x3 : the new.target (for [[Construct]] calls)
// -- x1 : the target to call (can be any Object)
// -- x2 : start index (to support rest parameters)
// -----------------------------------
// Check if new.target has a [[Construct]] internal method.
if (mode == CallOrConstructMode::kConstruct) {
Label new_target_constructor, new_target_not_constructor;
__ JumpIfSmi(x3, &new_target_not_constructor);
__ Ldr(x5, FieldMemOperand(x3, HeapObject::kMapOffset));
__ Ldrb(x5, FieldMemOperand(x5, Map::kBitFieldOffset));
__ TestAndBranchIfAnySet(x5, 1 << Map::kIsConstructor,
&new_target_constructor);
__ Bind(&new_target_not_constructor);
{
FrameScope scope(masm, StackFrame::MANUAL);
__ EnterFrame(StackFrame::INTERNAL);
__ Push(x3);
__ CallRuntime(Runtime::kThrowNotConstructor);
}
__ Bind(&new_target_constructor);
}
// Check if we have an arguments adaptor frame below the function frame.
Label arguments_adaptor, arguments_done;
__ Ldr(x5, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
__ Ldr(x4, MemOperand(x5, CommonFrameConstants::kContextOrFrameTypeOffset));
__ Cmp(x4, StackFrame::TypeToMarker(StackFrame::ARGUMENTS_ADAPTOR));
__ B(eq, &arguments_adaptor);
{
__ Ldr(x6, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
__ Ldr(x6, FieldMemOperand(x6, JSFunction::kSharedFunctionInfoOffset));
__ Ldrsw(x6, FieldMemOperand(
x6, SharedFunctionInfo::kFormalParameterCountOffset));
__ Mov(x5, fp);
}
__ B(&arguments_done);
__ Bind(&arguments_adaptor);
{
// Just load the length from ArgumentsAdaptorFrame.
__ Ldrsw(x6, UntagSmiMemOperand(
x5, ArgumentsAdaptorFrameConstants::kLengthOffset));
}
__ Bind(&arguments_done);
Label stack_done, stack_overflow;
__ Subs(x6, x6, x2);
__ B(le, &stack_done);
{
// Check for stack overflow.
Generate_StackOverflowCheck(masm, x6, x2, &stack_overflow);
// Forward the arguments from the caller frame.
{
Label loop;
__ Add(x5, x5, kPointerSize);
__ Add(x0, x0, x6);
__ bind(&loop);
{
__ Ldr(x4, MemOperand(x5, x6, LSL, kPointerSizeLog2));
__ Push(x4);
__ Subs(x6, x6, 1);
__ B(ne, &loop);
}
}
}
__ B(&stack_done);
__ Bind(&stack_overflow);
__ TailCallRuntime(Runtime::kThrowStackOverflow);
__ Bind(&stack_done);
__ Jump(code, RelocInfo::CODE_TARGET);
}
// static
void Builtins::Generate_CallFunction(MacroAssembler* masm,
ConvertReceiverMode mode) {
ASM_LOCATION("Builtins::Generate_CallFunction");
// ----------- S t a t e -------------
// -- x0 : the number of arguments (not including the receiver)
// -- x1 : the function to call (checked to be a JSFunction)
// -----------------------------------
__ AssertFunction(x1);
// See ES6 section 9.2.1 [[Call]] ( thisArgument, argumentsList)
// Check that function is not a "classConstructor".
Label class_constructor;
__ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
__ Ldr(w3, FieldMemOperand(x2, SharedFunctionInfo::kCompilerHintsOffset));
__ TestAndBranchIfAnySet(w3, SharedFunctionInfo::kClassConstructorMask,
&class_constructor);
// Enter the context of the function; ToObject has to run in the function
// context, and we also need to take the global proxy from the function
// context in case of conversion.
__ Ldr(cp, FieldMemOperand(x1, JSFunction::kContextOffset));
// We need to convert the receiver for non-native sloppy mode functions.
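  // (A null or undefined receiver is replaced with the global proxy; other
  // primitive receivers are boxed via the ToObject builtin called below.)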
Label done_convert;
__ TestAndBranchIfAnySet(w3,
SharedFunctionInfo::IsNativeBit::kMask |
SharedFunctionInfo::IsStrictBit::kMask,
&done_convert);
{
// ----------- S t a t e -------------
// -- x0 : the number of arguments (not including the receiver)
// -- x1 : the function to call (checked to be a JSFunction)
// -- x2 : the shared function info.
// -- cp : the function context.
// -----------------------------------
if (mode == ConvertReceiverMode::kNullOrUndefined) {
// Patch receiver to global proxy.
__ LoadGlobalProxy(x3);
} else {
Label convert_to_object, convert_receiver;
__ Peek(x3, Operand(x0, LSL, kXRegSizeLog2));
__ JumpIfSmi(x3, &convert_to_object);
STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
__ CompareObjectType(x3, x4, x4, FIRST_JS_RECEIVER_TYPE);
__ B(hs, &done_convert);
if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
Label convert_global_proxy;
__ JumpIfRoot(x3, Heap::kUndefinedValueRootIndex,
&convert_global_proxy);
__ JumpIfNotRoot(x3, Heap::kNullValueRootIndex, &convert_to_object);
__ Bind(&convert_global_proxy);
{
// Patch receiver to global proxy.
__ LoadGlobalProxy(x3);
}
__ B(&convert_receiver);
}
__ Bind(&convert_to_object);
{
// Convert receiver using ToObject.
// TODO(bmeurer): Inline the allocation here to avoid building the frame
// in the fast case? (fall back to AllocateInNewSpace?)
FrameScope scope(masm, StackFrame::INTERNAL);
__ SmiTag(x0);
__ Push(x0, x1);
__ Mov(x0, x3);
__ Push(cp);
__ Call(BUILTIN_CODE(masm->isolate(), ToObject),
RelocInfo::CODE_TARGET);
__ Pop(cp);
__ Mov(x3, x0);
__ Pop(x1, x0);
__ SmiUntag(x0);
}
__ Ldr(x2, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
__ Bind(&convert_receiver);
}
__ Poke(x3, Operand(x0, LSL, kXRegSizeLog2));
}
__ Bind(&done_convert);
// ----------- S t a t e -------------
// -- x0 : the number of arguments (not including the receiver)
// -- x1 : the function to call (checked to be a JSFunction)
// -- x2 : the shared function info.
// -- cp : the function context.
// -----------------------------------
__ Ldrsw(
x2, FieldMemOperand(x2, SharedFunctionInfo::kFormalParameterCountOffset));
ParameterCount actual(x0);
ParameterCount expected(x2);
__ InvokeFunctionCode(x1, no_reg, expected, actual, JUMP_FUNCTION);
// The function is a "classConstructor", need to raise an exception.
__ bind(&class_constructor);
{
FrameScope frame(masm, StackFrame::INTERNAL);
__ Push(x1);
__ CallRuntime(Runtime::kThrowConstructorNonCallableError);
}
}
namespace {
void Generate_PushBoundArguments(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x0 : the number of arguments (not including the receiver)
// -- x1 : target (checked to be a JSBoundFunction)
// -- x3 : new.target (only in case of [[Construct]])
// -----------------------------------
// Load [[BoundArguments]] into x2 and length of that into x4.
Label no_bound_arguments;
__ Ldr(x2, FieldMemOperand(x1, JSBoundFunction::kBoundArgumentsOffset));
__ Ldrsw(x4, UntagSmiFieldMemOperand(x2, FixedArray::kLengthOffset));
__ Cmp(x4, 0);
__ B(eq, &no_bound_arguments);
{
// ----------- S t a t e -------------
// -- x0 : the number of arguments (not including the receiver)
// -- x1 : target (checked to be a JSBoundFunction)
// -- x2 : the [[BoundArguments]] (implemented as FixedArray)
// -- x3 : new.target (only in case of [[Construct]])
// -- x4 : the number of [[BoundArguments]]
// -----------------------------------
// Reserve stack space for the [[BoundArguments]].
{
Label done;
__ Claim(x4);
// Check the stack for overflow. We are not trying to catch interruptions
// (i.e. debug break and preemption) here, so check the "real stack
// limit".
__ CompareRoot(jssp, Heap::kRealStackLimitRootIndex);
__ B(gt, &done); // Signed comparison.
// Restore the stack pointer.
__ Drop(x4);
{
FrameScope scope(masm, StackFrame::MANUAL);
__ EnterFrame(StackFrame::INTERNAL);
__ CallRuntime(Runtime::kThrowStackOverflow);
}
__ Bind(&done);
}
// Relocate arguments down the stack.
{
Label loop, done_loop;
__ Mov(x5, 0);
__ Bind(&loop);
__ Cmp(x5, x0);
__ B(gt, &done_loop);
__ Peek(x10, Operand(x4, LSL, kPointerSizeLog2));
__ Poke(x10, Operand(x5, LSL, kPointerSizeLog2));
__ Add(x4, x4, 1);
__ Add(x5, x5, 1);
__ B(&loop);
__ Bind(&done_loop);
}
// Copy [[BoundArguments]] to the stack (below the arguments).
{
Label loop;
__ Ldrsw(x4, UntagSmiFieldMemOperand(x2, FixedArray::kLengthOffset));
__ Add(x2, x2, FixedArray::kHeaderSize - kHeapObjectTag);
__ Bind(&loop);
__ Sub(x4, x4, 1);
__ Ldr(x10, MemOperand(x2, x4, LSL, kPointerSizeLog2));
__ Poke(x10, Operand(x0, LSL, kPointerSizeLog2));
__ Add(x0, x0, 1);
__ Cmp(x4, 0);
__ B(gt, &loop);
}
}
__ Bind(&no_bound_arguments);
}
} // namespace
// static
void Builtins::Generate_CallBoundFunctionImpl(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x0 : the number of arguments (not including the receiver)
// -- x1 : the function to call (checked to be a JSBoundFunction)
// -----------------------------------
__ AssertBoundFunction(x1);
// Patch the receiver to [[BoundThis]].
__ Ldr(x10, FieldMemOperand(x1, JSBoundFunction::kBoundThisOffset));
__ Poke(x10, Operand(x0, LSL, kPointerSizeLog2));
// Push the [[BoundArguments]] onto the stack.
Generate_PushBoundArguments(masm);
// Call the [[BoundTargetFunction]] via the Call builtin.
__ Ldr(x1, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
__ Jump(BUILTIN_CODE(masm->isolate(), Call_ReceiverIsAny),
RelocInfo::CODE_TARGET);
}
// static
void Builtins::Generate_Call(MacroAssembler* masm, ConvertReceiverMode mode) {
// ----------- S t a t e -------------
// -- x0 : the number of arguments (not including the receiver)
// -- x1 : the target to call (can be any Object).
// -----------------------------------
Label non_callable, non_function, non_smi;
__ JumpIfSmi(x1, &non_callable);
__ Bind(&non_smi);
__ CompareObjectType(x1, x4, x5, JS_FUNCTION_TYPE);
__ Jump(masm->isolate()->builtins()->CallFunction(mode),
RelocInfo::CODE_TARGET, eq);
__ Cmp(x5, JS_BOUND_FUNCTION_TYPE);
__ Jump(BUILTIN_CODE(masm->isolate(), CallBoundFunction),
RelocInfo::CODE_TARGET, eq);
// Check if target has a [[Call]] internal method.
__ Ldrb(x4, FieldMemOperand(x4, Map::kBitFieldOffset));
__ TestAndBranchIfAllClear(x4, 1 << Map::kIsCallable, &non_callable);
// Check if target is a proxy and call CallProxy external builtin
__ Cmp(x5, JS_PROXY_TYPE);
__ B(ne, &non_function);
__ Jump(BUILTIN_CODE(masm->isolate(), CallProxy), RelocInfo::CODE_TARGET);
// 2. Call to something else, which might have a [[Call]] internal method (if
// not we raise an exception).
__ Bind(&non_function);
// Overwrite the original receiver with the (original) target.
__ Poke(x1, Operand(x0, LSL, kXRegSizeLog2));
// Let the "call_as_function_delegate" take care of the rest.
__ LoadNativeContextSlot(Context::CALL_AS_FUNCTION_DELEGATE_INDEX, x1);
__ Jump(masm->isolate()->builtins()->CallFunction(
ConvertReceiverMode::kNotNullOrUndefined),
RelocInfo::CODE_TARGET);
// 3. Call to something that is not callable.
__ bind(&non_callable);
{
FrameScope scope(masm, StackFrame::INTERNAL);
__ Push(x1);
__ CallRuntime(Runtime::kThrowCalledNonCallable);
}
}
// static
void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x0 : the number of arguments (not including the receiver)
// -- x1 : the constructor to call (checked to be a JSFunction)
// -- x3 : the new target (checked to be a constructor)
// -----------------------------------
__ AssertFunction(x1);
// Calling convention for function specific ConstructStubs require
// x2 to contain either an AllocationSite or undefined.
__ LoadRoot(x2, Heap::kUndefinedValueRootIndex);
// Tail call to the function-specific construct stub (still in the caller
// context at this point).
__ Ldr(x4, FieldMemOperand(x1, JSFunction::kSharedFunctionInfoOffset));
__ Ldr(x4, FieldMemOperand(x4, SharedFunctionInfo::kConstructStubOffset));
__ Add(x4, x4, Code::kHeaderSize - kHeapObjectTag);
__ Br(x4);
}
// static
void Builtins::Generate_ConstructBoundFunction(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x0 : the number of arguments (not including the receiver)
// -- x1 : the function to call (checked to be a JSBoundFunction)
// -- x3 : the new target (checked to be a constructor)
// -----------------------------------
__ AssertBoundFunction(x1);
// Push the [[BoundArguments]] onto the stack.
Generate_PushBoundArguments(masm);
// Patch new.target to [[BoundTargetFunction]] if new.target equals target.
{
Label done;
__ Cmp(x1, x3);
__ B(ne, &done);
__ Ldr(x3,
FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
__ Bind(&done);
}
// Construct the [[BoundTargetFunction]] via the Construct builtin.
__ Ldr(x1, FieldMemOperand(x1, JSBoundFunction::kBoundTargetFunctionOffset));
__ Jump(BUILTIN_CODE(masm->isolate(), Construct), RelocInfo::CODE_TARGET);
}
// static
void Builtins::Generate_Construct(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- x0 : the number of arguments (not including the receiver)
// -- x1 : the constructor to call (can be any Object)
// -- x3 : the new target (either the same as the constructor or
// the JSFunction on which new was invoked initially)
// -----------------------------------
// Check if target is a Smi.
Label non_constructor, non_proxy;
__ JumpIfSmi(x1, &non_constructor);
// Dispatch based on instance type.
__ CompareObjectType(x1, x4, x5, JS_FUNCTION_TYPE);
__ Jump(BUILTIN_CODE(masm->isolate(), ConstructFunction),
RelocInfo::CODE_TARGET, eq);
// Check if target has a [[Construct]] internal method.
__ Ldrb(x2, FieldMemOperand(x4, Map::kBitFieldOffset));
__ TestAndBranchIfAllClear(x2, 1 << Map::kIsConstructor, &non_constructor);
// Only dispatch to bound functions after checking whether they are
// constructors.
__ Cmp(x5, JS_BOUND_FUNCTION_TYPE);
__ Jump(BUILTIN_CODE(masm->isolate(), ConstructBoundFunction),
RelocInfo::CODE_TARGET, eq);
// Only dispatch to proxies after checking whether they are constructors.
__ Cmp(x5, JS_PROXY_TYPE);
__ B(ne, &non_proxy);
__ Jump(BUILTIN_CODE(masm->isolate(), ConstructProxy),
RelocInfo::CODE_TARGET);
// Called Construct on an exotic Object with a [[Construct]] internal method.
__ bind(&non_proxy);
{
// Overwrite the original receiver with the (original) target.
__ Poke(x1, Operand(x0, LSL, kXRegSizeLog2));
// Let the "call_as_constructor_delegate" take care of the rest.
__ LoadNativeContextSlot(Context::CALL_AS_CONSTRUCTOR_DELEGATE_INDEX, x1);
__ Jump(masm->isolate()->builtins()->CallFunction(),
RelocInfo::CODE_TARGET);
}
// Called Construct on an Object that doesn't have a [[Construct]] internal
// method.
__ bind(&non_constructor);
__ Jump(BUILTIN_CODE(masm->isolate(), ConstructedNonConstructable),
RelocInfo::CODE_TARGET);
}
// static
void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
ASM_LOCATION("Builtins::Generate_AllocateInNewSpace");
// ----------- S t a t e -------------
// -- x1 : requested object size (untagged)
// -- lr : return address
// -----------------------------------
__ SmiTag(x1);
__ Push(x1);
__ Move(cp, Smi::kZero);
__ TailCallRuntime(Runtime::kAllocateInNewSpace);
}
// static
void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
ASM_LOCATION("Builtins::Generate_AllocateInOldSpace");
// ----------- S t a t e -------------
// -- x1 : requested object size (untagged)
// -- lr : return address
// -----------------------------------
__ SmiTag(x1);
__ Move(x2, Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
__ Push(x1, x2);
__ Move(cp, Smi::kZero);
__ TailCallRuntime(Runtime::kAllocateInTargetSpace);
}
// static
void Builtins::Generate_Abort(MacroAssembler* masm) {
ASM_LOCATION("Builtins::Generate_Abort");
// ----------- S t a t e -------------
// -- x1 : message_id as Smi
// -- lr : return address
// -----------------------------------
MacroAssembler::NoUseRealAbortsScope no_use_real_aborts(masm);
__ Push(x1);
__ Move(cp, Smi::kZero);
__ TailCallRuntime(Runtime::kAbort);
}
// static
void Builtins::Generate_AbortJS(MacroAssembler* masm) {
ASM_LOCATION("Builtins::Generate_AbortJS");
// ----------- S t a t e -------------
// -- x1 : message as String object
// -- lr : return address
// -----------------------------------
MacroAssembler::NoUseRealAbortsScope no_use_real_aborts(masm);
__ Push(x1);
__ Move(cp, Smi::kZero);
__ TailCallRuntime(Runtime::kAbortJS);
}
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
ASM_LOCATION("Builtins::Generate_ArgumentsAdaptorTrampoline");
// ----------- S t a t e -------------
// -- x0 : actual number of arguments
// -- x1 : function (passed through to callee)
// -- x2 : expected number of arguments
// -- x3 : new target (passed through to callee)
// -----------------------------------
// The frame we are about to construct will look like:
//
// slot Adaptor frame
// +-----------------+--------------------------------
// -n-1 | receiver | ^
// | (parameter 0) | |
// |- - - - - - - - -| |
// -n | | Caller
// ... | ... | frame slots --> actual args
// -2 | parameter n-1 | |
// |- - - - - - - - -| |
// -1 | parameter n | v
// -----+-----------------+--------------------------------
// 0 | return addr | ^
// |- - - - - - - - -| |
// 1 | saved frame ptr | <-- frame ptr |
// |- - - - - - - - -| |
// 2 |Frame Type Marker| |
// |- - - - - - - - -| |
// 3 | function | Callee
// |- - - - - - - - -| frame slots
// 4 | num of | |
// | actual args | |
// |- - - - - - - - -| |
// [5] | [padding] | |
// |-----------------+---- |
// 5+pad | receiver | ^ |
// | (parameter 0) | | |
// |- - - - - - - - -| | |
// 6+pad | parameter 1 | | |
// |- - - - - - - - -| Frame slots ----> expected args
// 7+pad | parameter 2 | | |
// |- - - - - - - - -| | |
// | | | |
// ... | ... | | |
// | parameter m | | |
// |- - - - - - - - -| | |
// | [undefined] | | |
// |- - - - - - - - -| | |
// | | | |
// | ... | | |
// | [undefined] | v <-- stack ptr v
// -----+-----------------+---------------------------------
//
// There is an optional slot of padding to ensure stack alignment.
// If the number of expected arguments is larger than the number of actual
// arguments, the remaining expected slots will be filled with undefined.
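  // For example, with 1 actual argument and 3 expected arguments the adaptor
  // claims 6 slots (3 expected arguments + receiver + actual-argument-count
  // slot = 5, rounded up to an even 6 with one padding slot); two of the
  // expected argument slots are filled with undefined and one receives the
  // forwarded actual argument before the argument count and receiver are
  // stored.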
Register argc_actual = x0; // Excluding the receiver.
Register argc_expected = x2; // Excluding the receiver.
Register function = x1;
Register code_entry = x10;
Label dont_adapt_arguments, stack_overflow;
Label enough_arguments;
__ Cmp(argc_expected, SharedFunctionInfo::kDontAdaptArgumentsSentinel);
__ B(eq, &dont_adapt_arguments);
EnterArgumentsAdaptorFrame(masm);
Register copy_from = x10;
Register copy_end = x11;
Register copy_to = x12;
Register argc_to_copy = x13;
Register argc_unused_actual = x14;
Register scratch1 = x15, scratch2 = x16;
// We need slots for the expected arguments, with two extra slots for the
// number of actual arguments and the receiver.
__ RecordComment("-- Stack check --");
__ Add(scratch1, argc_expected, 2);
Generate_StackOverflowCheck(masm, scratch1, scratch2, &stack_overflow);
// Round up number of slots to be even, to maintain stack alignment.
__ RecordComment("-- Allocate callee frame slots --");
__ Add(scratch1, scratch1, 1);
__ Bic(scratch1, scratch1, 1);
__ Claim(scratch1, kPointerSize);
__ Mov(copy_to, jssp);
// Preparing the expected arguments is done in four steps, the order of
// which is chosen so we can use LDP/STP and avoid conditional branches as
// much as possible.
// (1) If we don't have enough arguments, fill the remaining expected
// arguments with undefined, otherwise skip this step.
__ Subs(scratch1, argc_actual, argc_expected);
__ Csel(argc_unused_actual, xzr, scratch1, lt);
__ Csel(argc_to_copy, argc_expected, argc_actual, ge);
__ B(ge, &enough_arguments);
// Fill the remaining expected arguments with undefined.
__ RecordComment("-- Fill slots with undefined --");
__ Sub(copy_end, copy_to, Operand(scratch1, LSL, kPointerSizeLog2));
__ LoadRoot(scratch1, Heap::kUndefinedValueRootIndex);
Label fill;
__ Bind(&fill);
__ Stp(scratch1, scratch1, MemOperand(copy_to, 2 * kPointerSize, PostIndex));
// We might write one slot extra, but that is ok because we'll overwrite it
// below.
__ Cmp(copy_end, copy_to);
__ B(hi, &fill);
// Correct copy_to, for the case where we wrote one additional slot.
__ Mov(copy_to, copy_end);
__ Bind(&enough_arguments);
// (2) Copy all of the actual arguments, or as many as we need.
__ RecordComment("-- Copy actual arguments --");
__ Add(copy_end, copy_to, Operand(argc_to_copy, LSL, kPointerSizeLog2));
__ Add(copy_from, fp, 2 * kPointerSize);
// Adjust for difference between actual and expected arguments.
__ Add(copy_from, copy_from,
Operand(argc_unused_actual, LSL, kPointerSizeLog2));
// Copy arguments. We use load/store pair instructions, so we might overshoot
// by one slot, but since we copy the arguments starting from the last one, if
// we do overshoot, the extra slot will be overwritten later by the receiver.
Label copy_2_by_2;
__ Bind(©_2_by_2);
__ Ldp(scratch1, scratch2,
MemOperand(copy_from, 2 * kPointerSize, PostIndex));
__ Stp(scratch1, scratch2, MemOperand(copy_to, 2 * kPointerSize, PostIndex));
__ Cmp(copy_end, copy_to);
__ B(hi, ©_2_by_2);
// (3) Store number of actual arguments and padding. The padding might be
// unnecessary, in which case it will be overwritten by the receiver.
__ RecordComment("-- Store number of args and padding --");
__ SmiTag(scratch1, argc_actual);
__ Stp(xzr, scratch1, MemOperand(fp, -4 * kPointerSize));
// (4) Store receiver. Calculate target address from jssp to avoid checking
// for padding. Storing the receiver will overwrite either the extra slot
// we copied with the actual arguments, if we did copy one, or the padding we
// stored above.
__ RecordComment("-- Store receiver --");
__ Add(copy_from, fp, 2 * kPointerSize);
__ Ldr(scratch1, MemOperand(copy_from, argc_actual, LSL, kPointerSizeLog2));
__ Str(scratch1, MemOperand(jssp, argc_expected, LSL, kPointerSizeLog2));
// Arguments have been adapted. Now call the entry point.
__ RecordComment("-- Call entry point --");
__ Mov(argc_actual, argc_expected);
// x0 : expected number of arguments
// x1 : function (passed through to callee)
// x3 : new target (passed through to callee)
__ Ldr(code_entry, FieldMemOperand(function, JSFunction::kCodeOffset));
__ Add(code_entry, code_entry, Operand(Code::kHeaderSize - kHeapObjectTag));
__ Call(code_entry);
// Store offset of return address for deoptimizer.
masm->isolate()->heap()->SetArgumentsAdaptorDeoptPCOffset(masm->pc_offset());
// Exit frame and return.
LeaveArgumentsAdaptorFrame(masm);
__ Ret();
// Call the entry point without adapting the arguments.
__ RecordComment("-- Call without adapting args --");
__ Bind(&dont_adapt_arguments);
__ Ldr(code_entry, FieldMemOperand(function, JSFunction::kCodeOffset));
__ Add(code_entry, code_entry, Operand(Code::kHeaderSize - kHeapObjectTag));
__ Jump(code_entry);
__ Bind(&stack_overflow);
__ RecordComment("-- Stack overflow --");
{
FrameScope frame(masm, StackFrame::MANUAL);
__ CallRuntime(Runtime::kThrowStackOverflow);
__ Unreachable();
}
}
void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
  // Wasm code uses the csp. This builtin expects to use the jssp.
// Thus, move csp to jssp when entering this builtin (called from wasm).
DCHECK(masm->StackPointer().is(jssp));
__ Move(jssp, csp);
{
FrameScope scope(masm, StackFrame::INTERNAL);
// Save all parameter registers (see wasm-linkage.cc). They might be
// overwritten in the runtime call below. We don't have any callee-saved
// registers in wasm, so no need to store anything else.
const RegList gp_regs = x0.bit() | x1.bit() | x2.bit() | x3.bit() |
x4.bit() | x5.bit() | x6.bit() | x7.bit();
const RegList fp_regs = d0.bit() | d1.bit() | d2.bit() | d3.bit() |
d4.bit() | d5.bit() | d6.bit() | d7.bit();
__ PushXRegList(gp_regs);
__ PushDRegList(fp_regs);
// Initialize cp register with kZero, CEntryStub will use it to set the
// current context on the isolate.
__ Move(cp, Smi::kZero);
__ CallRuntime(Runtime::kWasmCompileLazy);
// Store returned instruction start in x8.
__ Add(x8, x0, Code::kHeaderSize - kHeapObjectTag);
// Restore registers.
__ PopDRegList(fp_regs);
__ PopXRegList(gp_regs);
}
// Move back to csp land. jssp now has the same value as when entering this
// function, but csp might have changed in the runtime call.
__ Move(csp, jssp);
// Now jump to the instructions of the returned code object.
__ Jump(x8);
}
#undef __
} // namespace internal
} // namespace v8
#endif // V8_TARGET_ARCH_ARM64
| apache-2.0 |
jaege/Cpp-Primer-5th-Exercises | ch7/7.21.cpp | 2175 | #include <string>
#include <iostream>
class Sales_data;
std::istream &read(std::istream &, Sales_data &);
class Sales_data {
friend Sales_data add(const Sales_data &, const Sales_data &);
friend std::istream &read(std::istream &, Sales_data &);
friend std::ostream &print(std::ostream &, const Sales_data &);
public:
Sales_data() : bookNo(""), units_sold(0), revenue(0.0) {}
Sales_data(const std::string &no) : bookNo(no) {}
Sales_data(const std::string &no, unsigned us, double price)
: bookNo(no), units_sold(us), revenue(price * us) {}
  Sales_data(std::istream &is) {
read(is, *this);
}
std::string isbn() const { return bookNo; }
Sales_data &combine(const Sales_data &);
private:
std::string bookNo;
unsigned units_sold = 0;
double revenue = 0.0;
};
Sales_data &Sales_data::combine(const Sales_data &rhs) {
units_sold += rhs.units_sold;
revenue += rhs.revenue;
return *this;
}
Sales_data add(const Sales_data &lhs, const Sales_data &rhs) {
Sales_data sum = lhs; // Use default copy constructor
sum.combine(rhs);
return sum;
}
std::istream &read(std::istream &is, Sales_data &item) {
double price;
is >> item.bookNo >> item.units_sold >> price;
item.revenue = item.units_sold * price;
return is;
}
std::ostream &print(std::ostream &os, const Sales_data &item) {
os << item.isbn() << " " << item.units_sold << " " << item.revenue;
return os;
}
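// Sample input for the transaction loop below (each record is: ISBN units_sold price):
//   0-201-78345-X 3 20.00
//   0-201-78345-X 2 25.00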
int main() {
Sales_data d1;
Sales_data d2("0-201-78345-X");
Sales_data d3("0-201-78345-X", 5, 2.5);
Sales_data d4(std::cin);
print(std::cout, d1) << std::endl;
print(std::cout, d2) << std::endl;
print(std::cout, d3) << std::endl;
print(std::cout, d4) << std::endl;
Sales_data total(std::cin);
if (std::cin) {
Sales_data trans(std::cin);
while (std::cin) {
if (total.isbn() == trans.isbn()) {
total.combine(trans);
} else {
print(std::cout, total) << std::endl;
      total = trans; // Use default copy-assignment operator
}
read(std::cin, trans);
}
print(std::cout, total) << std::endl;
} else {
std::cerr << "No data!" << std::endl;
return -1;
}
return 0;
}
| apache-2.0 |
alex-charos/mailer | src/main/java/gr/charos/mailer/model/CommandResult.java | 92 | package gr.charos.mailer.model;
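/**
 * Outcome of executing a mailer command: the command succeeded, failed, or
 * requested that the application exit.
 */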
public enum CommandResult {
success, failure, exit
}
| apache-2.0 |
jentfoo/aws-sdk-java | aws-java-sdk-comprehend/src/main/java/com/amazonaws/services/comprehend/model/transform/ListEntityRecognizersResultJsonUnmarshaller.java | 3303 | /*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.comprehend.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.comprehend.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
* ListEntityRecognizersResult JSON Unmarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListEntityRecognizersResultJsonUnmarshaller implements Unmarshaller<ListEntityRecognizersResult, JsonUnmarshallerContext> {
public ListEntityRecognizersResult unmarshall(JsonUnmarshallerContext context) throws Exception {
ListEntityRecognizersResult listEntityRecognizersResult = new ListEntityRecognizersResult();
int originalDepth = context.getCurrentDepth();
String currentParentElement = context.getCurrentParentElement();
int targetDepth = originalDepth + 1;
JsonToken token = context.getCurrentToken();
if (token == null)
token = context.nextToken();
if (token == VALUE_NULL) {
return listEntityRecognizersResult;
}
while (true) {
if (token == null)
break;
if (token == FIELD_NAME || token == START_OBJECT) {
if (context.testExpression("EntityRecognizerPropertiesList", targetDepth)) {
context.nextToken();
listEntityRecognizersResult.setEntityRecognizerPropertiesList(new ListUnmarshaller<EntityRecognizerProperties>(
EntityRecognizerPropertiesJsonUnmarshaller.getInstance()).unmarshall(context));
}
if (context.testExpression("NextToken", targetDepth)) {
context.nextToken();
listEntityRecognizersResult.setNextToken(context.getUnmarshaller(String.class).unmarshall(context));
}
} else if (token == END_ARRAY || token == END_OBJECT) {
if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
if (context.getCurrentDepth() <= originalDepth)
break;
}
}
token = context.nextToken();
}
return listEntityRecognizersResult;
}
private static ListEntityRecognizersResultJsonUnmarshaller instance;
public static ListEntityRecognizersResultJsonUnmarshaller getInstance() {
if (instance == null)
instance = new ListEntityRecognizersResultJsonUnmarshaller();
return instance;
}
}
| apache-2.0 |
berkaybasoz/algo_trade | Engine/DataFeeds/SubscriptionDataReader.cs | 28421 | /*
* QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
* Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using QuantConnect.Configuration;
using QuantConnect.Data;
using QuantConnect.Data.Auxiliary;
using QuantConnect.Data.Custom;
using QuantConnect.Data.Market;
using QuantConnect.Interfaces;
using QuantConnect.Lean.Engine.Results;
using QuantConnect.Logging;
using QuantConnect.Util;
namespace QuantConnect.Lean.Engine.DataFeeds
{
/// <summary>
/// Subscription data reader is a wrapper on the stream reader class to download, unpack and iterate over a data file.
/// </summary>
    /// <remarks>The class accepts any subscription configuration and automatically makes it available to enumerate</remarks>
public class SubscriptionDataReader : IEnumerator<BaseData>
{
// Source string to create memory stream:
private SubscriptionDataSource _source;
private bool _endOfStream;
private IEnumerator<BaseData> _subscriptionFactoryEnumerator;
/// Configuration of the data-reader:
private readonly SubscriptionDataConfig _config;
/// true if we can find a scale factor file for the security of the form: ..\Lean\Data\equity\market\factor_files\{SYMBOL}.csv
private readonly bool _hasScaleFactors;
// Symbol Mapping:
private string _mappedSymbol = "";
// Location of the datafeed - the type of this data.
// Create a single instance to invoke all Type Methods:
private readonly BaseData _dataFactory;
//Start finish times of the backtest:
private readonly DateTime _periodStart;
private readonly DateTime _periodFinish;
private readonly FactorFile _factorFile;
private readonly MapFile _mapFile;
// we set the price factor ratio when we encounter a dividend in the factor file
// and on the next trading day we use this data to produce the dividend instance
private decimal? _priceFactorRatio;
// we set the split factor when we encounter a split in the factor file
// and on the next trading day we use this data to produce the split instance
private decimal? _splitFactor;
// we'll use these flags to denote we've already fired off the DelistedType.Warning
// and a DelistedType.Delisted Delisting object, the _delistingType object is save here
// since we need to wait for the next trading day before emitting
private bool _delisted;
private bool _delistedWarning;
// true if we're in live mode, false otherwise
private readonly bool _isLiveMode;
private readonly bool _includeAuxilliaryData;
private BaseData _previous;
private readonly Queue<BaseData> _auxiliaryData;
private readonly IResultHandler _resultHandler;
private readonly IEnumerator<DateTime> _tradeableDates;
// used when emitting aux data from within while loop
private bool _emittedAuxilliaryData;
private BaseData _lastInstanceBeforeAuxilliaryData;
/// <summary>
/// Last read BaseData object from this type and source
/// </summary>
public BaseData Current
{
get;
private set;
}
/// <summary>
/// Explicit Interface Implementation for Current
/// </summary>
object IEnumerator.Current
{
get { return Current; }
}
/// <summary>
/// Subscription data reader takes a subscription request, loads the type, accepts the data source and enumerate on the results.
/// </summary>
/// <param name="config">Subscription configuration object</param>
/// <param name="periodStart">Start date for the data request/backtest</param>
/// <param name="periodFinish">Finish date for the data request/backtest</param>
/// <param name="resultHandler">Result handler used to push error messages and perform sampling on skipped days</param>
/// <param name="mapFileResolver">Used for resolving the correct map files</param>
/// <param name="factorFileProvider">Used for getting factor files</param>
/// <param name="tradeableDates">Defines the dates for which we'll request data, in order, in the security's exchange time zone</param>
/// <param name="isLiveMode">True if we're in live mode, false otherwise</param>
/// <param name="includeAuxilliaryData">True if we want to emit aux data, false to only emit price data</param>
public SubscriptionDataReader(SubscriptionDataConfig config,
DateTime periodStart,
DateTime periodFinish,
IResultHandler resultHandler,
MapFileResolver mapFileResolver,
IFactorFileProvider factorFileProvider,
IEnumerable<DateTime> tradeableDates,
bool isLiveMode,
bool includeAuxilliaryData = true)
{
//Save configuration of data-subscription:
_config = config;
_auxiliaryData = new Queue<BaseData>();
//Save Start and End Dates:
_periodStart = periodStart;
_periodFinish = periodFinish;
//Save access to securities
_isLiveMode = isLiveMode;
_includeAuxilliaryData = includeAuxilliaryData;
//Save the type of data we'll be getting from the source.
//Create the dynamic type-activators:
var objectActivator = ObjectActivator.GetActivator(config.Type);
_resultHandler = resultHandler;
_tradeableDates = tradeableDates.GetEnumerator();
if (objectActivator == null)
{
_resultHandler.ErrorMessage("Custom data type '" + config.Type.Name + "' missing parameterless constructor E.g. public " + config.Type.Name + "() { }");
_endOfStream = true;
return;
}
//Create an instance of the "Type":
var userObj = objectActivator.Invoke(new object[] {});
_dataFactory = userObj as BaseData;
            //If it's quandl, set the access token in data factory:
var quandl = _dataFactory as Quandl;
if (quandl != null)
{
if (!Quandl.IsAuthCodeSet)
{
Quandl.SetAuthCode(Config.Get("quandl-auth-token"));
}
}
_factorFile = new FactorFile(config.Symbol.Value, new List<FactorFileRow>());
_mapFile = new MapFile(config.Symbol.Value, new List<MapFileRow>());
// load up the map and factor files for equities
if (!config.IsCustomData && config.SecurityType == SecurityType.Equity)
{
try
{
var mapFile = mapFileResolver.ResolveMapFile(config.Symbol.ID.Symbol, config.Symbol.ID.Date);
// only take the resolved map file if it has data, otherwise we'll use the empty one we defined above
if (mapFile.Any()) _mapFile = mapFile;
var factorFile = factorFileProvider.Get(_config.Symbol);
_hasScaleFactors = factorFile != null;
if (_hasScaleFactors)
{
_factorFile = factorFile;
}
}
catch (Exception err)
{
Log.Error(err, "Fetching Price/Map Factors: " + config.Symbol.ID + ": ");
}
}
_subscriptionFactoryEnumerator = ResolveDataEnumerator(true);
}
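        // Enumeration flow: MoveNext first drains any queued auxiliary data
        // (symbol changes, dividends, splits, delistings), then pulls price data
        // from the current day's source enumerator; when data crosses into a new
        // tradeable date the underlying source is refreshed via
        // ResolveDataEnumerator and the map/factor file state is updated.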
/// <summary>
/// Advances the enumerator to the next element of the collection.
/// </summary>
/// <returns>
/// true if the enumerator was successfully advanced to the next element; false if the enumerator has passed the end of the collection.
/// </returns>
/// <exception cref="T:System.InvalidOperationException">The collection was modified after the enumerator was created. </exception><filterpriority>2</filterpriority>
public bool MoveNext()
{
if (_endOfStream)
{
return false;
}
if (Current != null && Current.DataType != MarketDataType.Auxiliary)
{
// only save previous price data
_previous = Current;
}
if (_subscriptionFactoryEnumerator == null)
{
                // in live mode the tradeable dates will eventually advance to the next date
if (_isLiveMode)
{
// HACK attack -- we don't want to block in live mode
Current = null;
return true;
}
_endOfStream = true;
return false;
}
do
{
// check for aux data first
if (HasAuxDataBefore(_lastInstanceBeforeAuxilliaryData))
{
// check for any auxilliary data before reading a line, but make sure
// it should be going ahead of '_lastInstanceBeforeAuxilliaryData'
Current = _auxiliaryData.Dequeue();
return true;
}
if (_emittedAuxilliaryData)
{
_emittedAuxilliaryData = false;
Current = _lastInstanceBeforeAuxilliaryData;
_lastInstanceBeforeAuxilliaryData = null;
return true;
}
// keep enumerating until we find something that is within our time frame
while (_subscriptionFactoryEnumerator.MoveNext())
{
var instance = _subscriptionFactoryEnumerator.Current;
if (instance == null)
{
// keep reading until we get valid data
continue;
}
                // prevent emitting past data, this can happen when switching symbols on daily data;
                // tick data skips this check and may therefore repeat end times
                if (_previous != null && _config.Resolution != Resolution.Tick)
                {
                    // all non-tick resolutions don't allow duplicate end times
                    if (instance.EndTime <= _previous.EndTime) continue;
                }
if (instance.EndTime < _periodStart)
{
// keep reading until we get a value on or after the start
_previous = instance;
continue;
}
if (instance.Time > _periodFinish)
{
// stop reading when we get a value after the end
_endOfStream = true;
return false;
}
// if we move past our current 'date' then we need to do daily things, such
// as updating factors and symbol mapping as well as detecting aux data
if (instance.EndTime.Date > _tradeableDates.Current)
{
// this is fairly hacky and could be solved by removing the aux data from this class
// the case is with coarse data files which have many daily sized data points for the
// same date,
if (!_config.IsInternalFeed)
{
// this will advance the date enumerator and determine if a new
// instance of the subscription enumerator is required
_subscriptionFactoryEnumerator = ResolveDataEnumerator(false);
}
// we produce auxiliary data on date changes, but make sure our current instance
// isn't before it in time
if (HasAuxDataBefore(instance))
{
// since we're emitting this here we need to save off the instance for next time
Current = _auxiliaryData.Dequeue();
_emittedAuxilliaryData = true;
_lastInstanceBeforeAuxilliaryData = instance;
return true;
}
}
                // we've made it past all of our filters, we're within the requested start/end of the subscription,
// we've satisfied user and market hour filters, so this data is good to go as current
Current = instance;
return true;
}
// we've ended the enumerator, time to refresh
_subscriptionFactoryEnumerator = ResolveDataEnumerator(true);
}
while (_subscriptionFactoryEnumerator != null);
_endOfStream = true;
return false;
}
private bool HasAuxDataBefore(BaseData instance)
{
// this function is always used to check for aux data, as such, we'll implement the
// feature of whether to include or not here so if other aux data is added we won't
// need to remember this feature. this is mostly here until aux data gets moved into
// its own subscription class
if (!_includeAuxilliaryData) _auxiliaryData.Clear();
if (_auxiliaryData.Count == 0) return false;
if (instance == null) return true;
return _auxiliaryData.Peek().EndTime < instance.EndTime;
}
/// <summary>
/// Resolves the next enumerator to be used in <see cref="MoveNext"/>
/// </summary>
private IEnumerator<BaseData> ResolveDataEnumerator(bool endOfEnumerator)
{
do
{
// always advance the date enumerator, this function is intended to be
// called on date changes, never return null for live mode, we'll always
// just keep trying to refresh the subscription
DateTime date;
if (!TryGetNextDate(out date) && !_isLiveMode)
{
// if we run out of dates then we're finished with this subscription
return null;
}
// fetch the new source, using the data time zone for the date
var dateInDataTimeZone = date.ConvertTo(_config.ExchangeTimeZone, _config.DataTimeZone);
var newSource = _dataFactory.GetSource(_config, dateInDataTimeZone, _isLiveMode);
// check if we should create a new subscription factory
var sourceChanged = _source != newSource && newSource.Source != "";
var liveRemoteFile = _isLiveMode && (_source == null || _source.TransportMedium == SubscriptionTransportMedium.RemoteFile);
if (sourceChanged || liveRemoteFile)
{
// dispose of the current enumerator before creating a new one
if (_subscriptionFactoryEnumerator != null)
{
_subscriptionFactoryEnumerator.Dispose();
}
// save off for comparison next time
_source = newSource;
var subscriptionFactory = CreateSubscriptionFactory(newSource);
return subscriptionFactory.Read(newSource).GetEnumerator();
}
// if there's still more in the enumerator and we received the same source from the GetSource call
// above, then just keep using the same enumerator as we were before
if (!endOfEnumerator) // && !sourceChanged is always true here
{
return _subscriptionFactoryEnumerator;
}
// keep churning until we find a new source or run out of tradeable dates
// in live mode tradeable dates won't advance beyond today's date, but
// TryGetNextDate will return false if it's already at today
}
while (true);
}
private ISubscriptionFactory CreateSubscriptionFactory(SubscriptionDataSource source)
{
switch (source.Format)
{
case FileFormat.Csv:
return HandleCsvFileFormat(source);
case FileFormat.Binary:
throw new NotSupportedException("Binary file format is not supported");
default:
throw new ArgumentOutOfRangeException();
}
}
private ISubscriptionFactory HandleCsvFileFormat(SubscriptionDataSource source)
{
// convert the date to the data time zone
var dateInDataTimeZone = _tradeableDates.Current.ConvertTo(_config.ExchangeTimeZone, _config.DataTimeZone).Date;
var factory = SubscriptionFactory.ForSource(source, _config, dateInDataTimeZone, _isLiveMode);
// handle missing files
factory.InvalidSource += (sender, args) =>
{
switch (args.Source.TransportMedium)
{
case SubscriptionTransportMedium.LocalFile:
                        // the local uri doesn't exist, write an error and return null so we don't try to get data for today
Log.Trace(string.Format("SubscriptionDataReader.GetReader(): Could not find QC Data, skipped: {0}", source));
_resultHandler.SamplePerformance(_tradeableDates.Current, 0);
break;
case SubscriptionTransportMedium.RemoteFile:
_resultHandler.ErrorMessage(string.Format("Error downloading custom data source file, skipped: {0} Error: {1}", source, args.Exception.Message), args.Exception.StackTrace);
_resultHandler.SamplePerformance(_tradeableDates.Current.Date, 0);
break;
case SubscriptionTransportMedium.Rest:
break;
default:
throw new ArgumentOutOfRangeException();
}
};
if (factory is TextSubscriptionFactory)
{
// handle empty files/instantiation errors
var textSubscriptionFactory = (TextSubscriptionFactory)factory;
textSubscriptionFactory.CreateStreamReaderError += (sender, args) =>
{
Log.Error(string.Format("Failed to get StreamReader for data source({0}), symbol({1}). Skipping date({2}). Reader is null.", args.Source.Source, _mappedSymbol, args.Date.ToShortDateString()));
if (_config.IsCustomData)
{
_resultHandler.ErrorMessage(string.Format("We could not fetch the requested data. This may not be valid data, or a failed download of custom data. Skipping source ({0}).", args.Source.Source));
}
};
// handle parser errors
textSubscriptionFactory.ReaderError += (sender, args) =>
{
_resultHandler.RuntimeError(string.Format("Error invoking {0} data reader. Line: {1} Error: {2}", _config.Symbol, args.Line, args.Exception.Message), args.Exception.StackTrace);
};
}
return factory;
}
/// <summary>
/// Iterates the tradeable dates enumerator
/// </summary>
/// <param name="date">The next tradeable date</param>
/// <returns>True if we got a new date from the enumerator, false if it's exhausted, or in live mode if we're already at today</returns>
private bool TryGetNextDate(out DateTime date)
{
if (_isLiveMode && _tradeableDates.Current >= DateTime.Today)
{
// special behavior for live mode, don't advance past today
date = _tradeableDates.Current;
return false;
}
while (_tradeableDates.MoveNext())
{
date = _tradeableDates.Current;
CheckForDelisting(date);
if (!_mapFile.HasData(date))
{
continue;
}
                // don't do other checks if we haven't gotten data for this date yet
if (_previous != null && _previous.EndTime > _tradeableDates.Current)
{
continue;
}
// check for dividends and split for this security
CheckForDividend(date);
CheckForSplit(date);
// if we have factor files check to see if we need to update the scale factors
if (_hasScaleFactors)
{
// check to see if the symbol was remapped
var newSymbol = _mapFile.GetMappedSymbol(date);
if (_mappedSymbol != "" && newSymbol != _mappedSymbol)
{
var changed = new SymbolChangedEvent(_config.Symbol, date, _mappedSymbol, newSymbol);
_auxiliaryData.Enqueue(changed);
}
_config.MappedSymbol = _mappedSymbol = newSymbol;
// update our price scaling factors in light of the normalization mode
UpdateScaleFactors(date);
}
                // we've passed initial checks, now go get data for this date!
return true;
}
// no more tradeable dates, we've exhausted the enumerator
date = DateTime.MaxValue.Date;
return false;
}
/// <summary>
/// For backwards adjusted data the price is adjusted by a scale factor which is a combination of splits and dividends.
/// This backwards adjusted price is used by default and fed as the current price.
/// </summary>
/// <param name="date">Current date of the backtest.</param>
private void UpdateScaleFactors(DateTime date)
{
switch (_config.DataNormalizationMode)
{
case DataNormalizationMode.Raw:
return;
case DataNormalizationMode.TotalReturn:
case DataNormalizationMode.SplitAdjusted:
_config.PriceScaleFactor = _factorFile.GetSplitFactor(date);
break;
case DataNormalizationMode.Adjusted:
_config.PriceScaleFactor = _factorFile.GetPriceScaleFactor(date);
break;
default:
throw new ArgumentOutOfRangeException();
}
}
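        // Example of the effect of the scale factors above (see GetRawClose for the
        // inverse): with a raw close of 100 and a price scale factor of 0.5, Adjusted
        // and SplitAdjusted modes emit 50, TotalReturn emits 50 plus the accumulated
        // SumOfDividends, and Raw emits 100 unchanged.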
/// <summary>
/// Reset the IEnumeration
/// </summary>
/// <remarks>Not used</remarks>
public void Reset()
{
throw new NotImplementedException("Reset method not implemented. Assumes loop will only be used once.");
}
/// <summary>
/// Check for dividends and emit them into the aux data queue
/// </summary>
private void CheckForSplit(DateTime date)
{
if (_splitFactor != null)
{
var close = GetRawClose();
var split = new Split(_config.Symbol, date, close, _splitFactor.Value);
_auxiliaryData.Enqueue(split);
_splitFactor = null;
}
decimal splitFactor;
if (_factorFile.HasSplitEventOnNextTradingDay(date, out splitFactor))
{
_splitFactor = splitFactor;
}
}
/// <summary>
/// Check for dividends and emit them into the aux data queue
/// </summary>
private void CheckForDividend(DateTime date)
{
if (_priceFactorRatio != null)
{
var close = GetRawClose();
var dividend = new Dividend(_config.Symbol, date, close, _priceFactorRatio.Value);
// let the config know about it for normalization
_config.SumOfDividends += dividend.Distribution;
_auxiliaryData.Enqueue(dividend);
_priceFactorRatio = null;
}
// check the factor file to see if we have a dividend event tomorrow
decimal priceFactorRatio;
if (_factorFile.HasDividendEventOnNextTradingDay(date, out priceFactorRatio))
{
_priceFactorRatio = priceFactorRatio;
}
}
/// <summary>
/// Check for delistings and emit them into the aux data queue
/// </summary>
private void CheckForDelisting(DateTime date)
{
// these ifs set flags to tell us to produce a delisting instance
if (!_delistedWarning && date >= _mapFile.DelistingDate)
{
_delistedWarning = true;
var price = _previous != null ? _previous.Price : 0;
_auxiliaryData.Enqueue(new Delisting(_config.Symbol, date, price, DelistingType.Warning));
}
else if (!_delisted && date > _mapFile.DelistingDate)
{
_delisted = true;
var price = _previous != null ? _previous.Price : 0;
// delisted at EOD
_auxiliaryData.Enqueue(new Delisting(_config.Symbol, _mapFile.DelistingDate.AddDays(1), price, DelistingType.Delisted));
}
}
/// <summary>
/// Un-normalizes the Previous.Value
/// </summary>
private decimal GetRawClose()
{
if (_previous == null) return 0m;
var close = _previous.Value;
switch (_config.DataNormalizationMode)
{
case DataNormalizationMode.Raw:
break;
case DataNormalizationMode.SplitAdjusted:
case DataNormalizationMode.Adjusted:
// we need to 'unscale' the price
close = close / _config.PriceScaleFactor;
break;
case DataNormalizationMode.TotalReturn:
// we need to remove the dividends since we've been accumulating them in the price
close = (close - _config.SumOfDividends) / _config.PriceScaleFactor;
break;
default:
throw new ArgumentOutOfRangeException();
}
return close;
}
/// <summary>
/// Dispose of the Stream Reader and close out the source stream and file connections.
/// </summary>
public void Dispose()
{
if (_subscriptionFactoryEnumerator != null)
{
_subscriptionFactoryEnumerator.Dispose();
}
}
}
} | apache-2.0 |
yukuai518/gobblin | gobblin-cluster/src/main/java/gobblin/cluster/NoopReplyHandler.java | 676 | package gobblin.cluster;
import org.apache.helix.messaging.AsyncCallback;
import org.apache.helix.model.Message;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Class that handles Helix messaging response via no-op.
*
* @author Abhishek Tiwari
*/
public class NoopReplyHandler extends AsyncCallback {
private static final Logger LOGGER = LoggerFactory.getLogger(NoopReplyHandler.class);
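  // Note: the fields below are not currently used by this handler.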
private String bootstrapUrl;
private String bootstrapTime;
public NoopReplyHandler() {
}
  @Override
  public void onTimeOut() {
LOGGER.error("Timed out");
}
  @Override
  public void onReplyMessage(Message message) {
LOGGER.info("Received reply: " + message);
}
}
| apache-2.0 |
hs-web/hsweb-framework | hsweb-system/hsweb-system-authorization/hsweb-system-authorization-oauth2/src/main/java/org/hswebframework/web/oauth2/service/InDBOAuth2ClientManager.java | 708 | package org.hswebframework.web.oauth2.service;
import lombok.AllArgsConstructor;
import org.hswebframework.web.oauth2.entity.OAuth2ClientEntity;
import org.hswebframework.web.oauth2.server.OAuth2Client;
import org.hswebframework.web.oauth2.server.OAuth2ClientManager;
import reactor.core.publisher.Mono;
@AllArgsConstructor
public class InDBOAuth2ClientManager implements OAuth2ClientManager {
private final OAuth2ClientService clientService;
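    /**
     * Looks up an OAuth2 client by id from the backing service; disabled or
     * unknown clients result in an empty {@link Mono}.
     */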
@Override
public Mono<OAuth2Client> getClient(String clientId) {
return clientService
.findById(clientId)
.filter(OAuth2ClientEntity::enabled)
.map(OAuth2ClientEntity::toOAuth2Client);
}
}
| apache-2.0 |
CruGlobal/MissionHub-iOS-V1 | Resources/mh/ui/login/login.js | 7194 | /*!
* MissionHub Login Window
* https://www.missionhub.com
*
* Copyright 2011, Campus Crusade for Christ International
*
* Description: Builds and Controls Login Window and Processes
* Author: Chris Roemmich <[email protected]>
* Date: Wed, 29 Jun 2011 14:29:42 -0400
*/
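/*
 * Authentication flow implemented below: show() opens a WebView at mh.auth.wvUrl.
 * When the page redirects with an `authorization` parameter, grantAccess() is
 * called and its returned code is exchanged via getTokenFromCode(); a redirect
 * carrying a `code` parameter goes straight to the token exchange. On success the
 * access token is stored with mh.auth.oauth.setToken(), the signed-in person is
 * saved via mh.app.setPerson(), and the caller's callback is invoked.
 */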
(function() {
mh.ui.login = {};
mh.ui.login.window = function() {
var loginWindow, authWebView, signingIn, callback, indicator;
var show = function(cb) {
debug('running mh.ui.login.window.show');
if (signingIn !== true) {
signingIn = true;
callback = cb;
loginWindow = Ti.UI.createWindow({
navBarHidden: true
});
loginWindow.addEventListener('android:back', function() {
destroy();
});
authWebView = Ti.UI.createWebView({
url: mh.auth.wvUrl,
top: 0,
zIndex: 99,
autoDetect: [ Ti.UI.AUTODETECT_NONE ],
canGoBack:false,
canGoForward:false
});
			// Start the auth view scaled to zero; the open animation below scales it back up
var t = Ti.UI.create2DMatrix().scale(0);
var authView = Ti.UI.createView({
top: 5,
left: 5,
width: Ti.Platform.displayCaps.platformWidth-10,
height: Ti.Platform.displayCaps.platformHeight-30,
border: 5,
backgroundColor: '#e4e4e4',
borderColor: '#333',
borderRadius: 5,
borderWidth: 5,
zIndex: -1,
transform: t
});
loginWindow.add(authView);
// Activity indicator AJAX
indicator = Ti.UI.createActivityIndicator({
backgroundColor: "black",
borderRadius: 4,
height: 50,
width: 50,
zIndex: 90,
style:Ti.UI.iPhone.ActivityIndicatorStyle.PLAIN,
visible: false
});
authWebView.add(indicator);
//Close button
var btn_close = Titanium.UI.createButton({
backgroundImage: 'images/close5.png',
width: 20,
height: 20,
top: 14,
right: 12,
zIndex: 100,
visible: true
});
authView.add(btn_close);
authView.add(authWebView);
loginWindow.open();
authWebView.addEventListener("beforeload", function(e) {
indicator.show();
});
authWebView.addEventListener('load', webViewOnLoad);
authWebView.addEventListener('error', webViewOnError);
// Creating the Open Transition
// create first transform to go beyond normal size
var t1 = Titanium.UI.create2DMatrix();
t1 = t1.scale(1.1);
var a = Titanium.UI.createAnimation();
a.transform = t1;
a.duration = 200;
// when this animation completes, scale to normal size
a.addEventListener('complete', function() {
var t2 = Titanium.UI.create2DMatrix();
t2 = t2.scale(1.0);
authView.animate({
transform:t2,
duration:200
});
});
// Starts the Animation
authView.animate(a);
// Closes the Authentication Window
btn_close.addEventListener('click', destroy);
}
};
var destroy = function() {
debug('running mh.ui.login.window.destroy');
if (loginWindow === null) {
return;
}
try {
loginWindow.removeEventListener('load', webViewOnLoad);
loginWindow.removeEventListener('error', webViewOnError);
loginWindow.close();
signingIn = false;
} catch(ex) {
debug('Cannot destroy the authorize UI, ignoring. reason: '+ ex.message);
}
loginWindow = null;
authWebView = null;
};
var errorOptions = {
errorCallback: function(click) {
if (click.index === 0) {
authWebView.url = mh.auth.wvUrl;
}
if (click.index === 1) {
destroy();
}
},
buttonNames: [L('retry'),L('cancel')]
};
var webViewOnLoad = function (e) {
debug('running mh.ui.login.window.webViewOnLoad');
if (e.url) {
var params = mh.util.uriParams(e.url);
if (params.error) {
debug("params.error : " + params.error);
mh.error.handleError('', errorOptions, params.error);
return;
}
if (params.authorization) {
destroy();
mh.ui.main.showIndicator('grantAccess');
mh.auth.oauth.grantAccess(params.authorization, grantAccessOnLoad, grantAccessOnError);
} else if (params.code) {
destroy();
mh.ui.main.showIndicator('getToken');
mh.auth.oauth.getTokenFromCode(params.code, getTokenOnLoad, getTokenOnError);
}
}
else {
mh.error.handleError('', errorOptions, 'unknown');
}
indicator.hide();
mh.ui.main.hideIndicator('webViewLoad');
};
var webViewOnError = function(e) {
debug('running mh.ui.login.window.webViewOnError');
indicator.hide();
mh.ui.main.hideIndicator('webViewLoad');
mh.error.handleError('', errorOptions, 'no_data');
};
var grantAccessOnLoad = function (e) {
debug('running mh.ui.login.window.grantAccessOnLoad');
var options = {
errorCallback: function(click) {
if (click.index === 0) {
mh.auth.oauth.grantAccess(e.authorization, grantAccessOnLoad, grantAccessOnError);
}
},
buttonNames: [L('retry'),L('cancel')]
};
var response = mh.util.makeValid(e.response);
if (response.error || !response.code) {
if (response.error) {
mh.error.handleError(response.error, options);
}
else {
mh.error.handleError('', options, 'authentication');
}
} else {
mh.ui.main.showIndicator('getToken');
mh.auth.oauth.getTokenFromCode(response.code, getTokenOnLoad, getTokenOnError);
}
mh.ui.main.hideIndicator('grantAccess');
};
var grantAccessOnError = function (e) {
debug('running mh.ui.login.window.grantAccessOnError');
info(e);
var options = {
errorCallback: function(click) {
if (click.index === 0) {
mh.auth.oauth.grantAccess(e.authorization, grantAccessOnLoad, grantAccessOnError);
}
},
buttonNames: [L('retry'),L('cancel')]
};
mh.error.handleError('', options, 'authentication');
mh.ui.main.hideIndicator('grantAccess');
};
var getTokenOnLoad = function(e) {
debug('running mh.ui.login.window.getTokenOnLoad');
var options = {
errorCallback: function(click) {
if (click.index === 0) {
mh.auth.oauth.getTokenFromCode(e.code, getTokenOnLoad, getTokenOnError);
}
},
buttonNames: [L('retry'),L('cancel')]
};
var response = mh.util.makeValid(e.response);
if (response.error || !response.access_token) {
if (response.error) {
mh.error.handleError(response.error, options);
}
else {
mh.error.handleError('', options, 'authentication');
}
} else {
mh.auth.oauth.setToken(response.access_token);
mh.app.setPerson(response.person);
info("Logged in with access token: " + response.access_token);
destroy();
callback();
}
mh.ui.main.hideIndicator('getToken');
};
var getTokenOnError = function(e) {
debug('running mh.ui.login.window.getTokenOnError');
var options = {
errorCallback: function(click) {
if (click.index === 0) {
mh.auth.oauth.getTokenFromCode(e.code, getTokenOnLoad, getTokenOnError);
}
},
buttonNames: [L('retry'),L('cancel')]
};
info(e);
mh.error.handleError('', options, 'authentication');
mh.ui.main.hideIndicator('getToken');
};
return {
show: show
};
}();
})(); | apache-2.0 |
PhenixP2P/WebSDK | node/index.js | 1643 | /**
* Copyright 2020 Phenix Real Time Solutions, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var sdk = require('phenix-web-sdk/dist/phenix-node-sdk');
var adminApiProxyClient = new sdk.net.AdminApiProxyClient();
adminApiProxyClient.setBackendUri('https://demo.phenixrts.com/pcast');
var channelExpress = new sdk.express.ChannelExpress({adminApiProxyClient: adminApiProxyClient});
var streamUri = 'http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/ElephantsDream.mp4';
var capabilities = ['fhd'];
var channelAlias = 'channelNodeExample'; // https://phenixrts.com/channel/?m=r#channelNodeExample
channelExpress.publishToChannel({
streamUri: streamUri,
capabilities: capabilities,
screenName: 'Node Publisher',
channel: {
alias: channelAlias,
name: channelAlias
}
}, function publishCallback(error, response) {
if (error) {
console.error(error);
return;
}
if (response.status !== 'ok') {
console.warn(response.status);
return;
}
console.log('Success', response.publisher.getStreamId());
}); | apache-2.0 |
vmware/govmomi | vapi/namespace/internal/internal.go | 1189 | /*
Copyright (c) 2020 VMware, Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package internal
const (
// NamespaceClusterPath is the rest endpoint for the namespace cluster management API
NamespaceClusterPath = "/api/vcenter/namespace-management/clusters"
NamespaceDistributedSwitchCompatibility = "/api/vcenter/namespace-management/distributed-switch-compatibility"
NamespaceEdgeClusterCompatibility = "/api/vcenter/namespace-management/edge-cluster-compatibility"
SupervisorServicesPath = "/api/vcenter/namespace-management/supervisor-services"
)
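// SupportBundleToken wraps a wcp-support-bundle-token value returned by the API.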
type SupportBundleToken struct {
Value string `json:"wcp-support-bundle-token"`
}
| apache-2.0 |
ManishJayaswal/roslyn | src/Features/CSharp/EditAndContinue/CSharpEditAndContinueAnalyzer.cs | 123429 | // Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Composition;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CSharp.Extensions;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.Differencing;
using Microsoft.CodeAnalysis.EditAndContinue;
using Microsoft.CodeAnalysis.Host.Mef;
using Microsoft.CodeAnalysis.Text;
using Roslyn.Utilities;
using CompilerSyntaxUtilities = Microsoft.CodeAnalysis.CSharp.SyntaxUtilities;
namespace Microsoft.CodeAnalysis.CSharp.EditAndContinue
{
[ExportLanguageService(typeof(IEditAndContinueAnalyzer), LanguageNames.CSharp), Shared]
internal sealed class CSharpEditAndContinueAnalyzer : AbstractEditAndContinueAnalyzer
{
#region Syntax Analysis
private enum ConstructorPart
{
None = 0,
DefaultBaseConstructorCall = 1,
}
private enum BlockPart
{
None = 0,
OpenBrace = 1,
CloseBrace = 2,
}
private enum ForEachPart
{
None = 0,
ForEach = 1,
VariableDeclaration = 2,
In = 3,
Expression = 4,
}
/// <returns>
/// <see cref="BaseMethodDeclarationSyntax"/> for methods, operators, constructors, destructors and accessors.
/// <see cref="VariableDeclaratorSyntax"/> for field initializers.
/// <see cref="PropertyDeclarationSyntax"/> for property initializers and expression bodies.
/// <see cref="IndexerDeclarationSyntax"/> for indexer expression bodies.
/// </returns>
internal override SyntaxNode FindMemberDeclaration(SyntaxNode root, SyntaxNode node)
{
while (node != root)
{
switch (node.Kind())
{
case SyntaxKind.MethodDeclaration:
case SyntaxKind.ConversionOperatorDeclaration:
case SyntaxKind.OperatorDeclaration:
case SyntaxKind.SetAccessorDeclaration:
case SyntaxKind.AddAccessorDeclaration:
case SyntaxKind.RemoveAccessorDeclaration:
case SyntaxKind.GetAccessorDeclaration:
case SyntaxKind.ConstructorDeclaration:
case SyntaxKind.DestructorDeclaration:
return node;
case SyntaxKind.PropertyDeclaration:
// int P { get; } = [|initializer|];
Debug.Assert(((PropertyDeclarationSyntax)node).Initializer != null);
return node;
case SyntaxKind.FieldDeclaration:
case SyntaxKind.EventFieldDeclaration:
// Active statements encompassing modifiers or type correspond to the first initialized field.
// [|public static int F = 1|], G = 2;
return ((BaseFieldDeclarationSyntax)node).Declaration.Variables.First();
case SyntaxKind.VariableDeclarator:
// public static int F = 1, [|G = 2|];
Debug.Assert(node.Parent.IsKind(SyntaxKind.VariableDeclaration));
switch (node.Parent.Parent.Kind())
{
case SyntaxKind.FieldDeclaration:
case SyntaxKind.EventFieldDeclaration:
return node;
}
node = node.Parent;
break;
}
node = node.Parent;
}
return null;
}
/// <returns>
/// Given a node representing a declaration (<paramref name="isMember"/> = true) or a top-level match node (<paramref name="isMember"/> = false) returns:
/// - <see cref="BlockSyntax"/> for method-like member declarations with block bodies (methods, operators, constructors, destructors, accessors).
/// - <see cref="ExpressionSyntax"/> for variable declarators of fields, properties with an initializer expression, or
/// for method-like member declarations with expression bodies (methods, properties, indexers, operators)
///
/// A null reference otherwise.
/// </returns>
internal override SyntaxNode TryGetDeclarationBody(SyntaxNode node, bool isMember)
{
if (node.IsKind(SyntaxKind.VariableDeclarator))
{
return (((VariableDeclaratorSyntax)node).Initializer)?.Value;
}
return SyntaxUtilities.TryGetMethodDeclarationBody(node);
}
/// <returns>
/// If <paramref name="node"/> is a method, accessor, operator, destructor, or constructor without an initializer,
/// tokens of its block body, or tokens of the expression body if applicable.
///
/// If <paramref name="node"/> is an indexer declaration the tokens of its expression body.
///
/// If <paramref name="node"/> is a property declaration the tokens of its expression body or initializer.
///
/// If <paramref name="node"/> is a constructor with an initializer,
/// tokens of the initializer concatenated with tokens of the constructor body.
///
/// If <paramref name="node"/> is a variable declarator of a field with an initializer,
/// tokens of the field initializer.
///
/// Null reference otherwise.
/// </returns>
internal override IEnumerable<SyntaxToken> TryGetActiveTokens(SyntaxNode node)
{
if (node.IsKind(SyntaxKind.VariableDeclarator))
{
// TODO: The logic is similar to BreakpointSpans.TryCreateSpanForVariableDeclaration. Can we abstract it?
var declarator = node;
var fieldDeclaration = (BaseFieldDeclarationSyntax)declarator.Parent.Parent;
var variableDeclaration = fieldDeclaration.Declaration;
if (fieldDeclaration.Modifiers.Any(SyntaxKind.ConstKeyword))
{
return null;
}
if (variableDeclaration.Variables.Count == 1)
{
if (variableDeclaration.Variables[0].Initializer == null)
{
return null;
}
return fieldDeclaration.Modifiers.Concat(variableDeclaration.DescendantTokens()).Concat(fieldDeclaration.SemicolonToken);
}
if (declarator == variableDeclaration.Variables[0])
{
return fieldDeclaration.Modifiers.Concat(variableDeclaration.Type.DescendantTokens()).Concat(node.DescendantTokens());
}
return declarator.DescendantTokens();
}
if (node.IsKind(SyntaxKind.ConstructorDeclaration))
{
var ctor = (ConstructorDeclarationSyntax)node;
if (ctor.Initializer != null)
{
return ctor.Initializer.DescendantTokens().Concat(ctor.Body.DescendantTokens());
}
return ctor.Body.DescendantTokens();
}
return SyntaxUtilities.TryGetMethodDeclarationBody(node)?.DescendantTokens();
}
protected override SyntaxNode GetEncompassingAncestorImpl(SyntaxNode bodyOrMatchRoot)
{
            // A constructor may contain active nodes outside of its body (constructor initializer),
// but within the body of the member declaration (the parent).
if (bodyOrMatchRoot.Parent.IsKind(SyntaxKind.ConstructorDeclaration))
{
return bodyOrMatchRoot.Parent;
}
// Field initializer match root -- an active statement may include the modifiers
// and type specification of the field declaration.
if (bodyOrMatchRoot.IsKind(SyntaxKind.EqualsValueClause) &&
bodyOrMatchRoot.Parent.IsKind(SyntaxKind.VariableDeclarator) &&
bodyOrMatchRoot.Parent.Parent.IsKind(SyntaxKind.FieldDeclaration))
{
return bodyOrMatchRoot.Parent.Parent;
}
// Field initializer body -- an active statement may include the modifiers
// and type specification of the field declaration.
if (bodyOrMatchRoot.Parent.IsKind(SyntaxKind.EqualsValueClause) &&
bodyOrMatchRoot.Parent.Parent.IsKind(SyntaxKind.VariableDeclarator) &&
bodyOrMatchRoot.Parent.Parent.Parent.IsKind(SyntaxKind.FieldDeclaration))
{
return bodyOrMatchRoot.Parent.Parent.Parent;
}
// otherwise all active statements are covered by the body/match root itself:
return bodyOrMatchRoot;
}
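        // Finds the innermost labeled statement (or lambda body) in the declaration body that contains the given
        // position, together with the statement part (block brace, foreach part, constructor initializer) the
        // position falls into. When a partner declaration body is supplied, the corresponding node in that body
        // is returned as well.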
protected override SyntaxNode FindStatementAndPartner(SyntaxNode declarationBody, int position, SyntaxNode partnerDeclarationBodyOpt, out SyntaxNode partnerOpt, out int statementPart)
{
SyntaxUtilities.AssertIsBody(declarationBody, allowLambda: false);
if (position < declarationBody.SpanStart)
{
// Only constructors and the field initializers may have an [|active statement|] starting outside of the <<body>>.
// Constructor: [|public C()|] <<{ }>>
// Constructor initializer: public C() : [|base(expr)|] <<{ }>>
// Constructor initializer with lambda: public C() : base(() => { [|...|] }) <<{ }>>
// Field initializers: [|public int a = <<expr>>|], [|b = <<expr>>|];
                // No need to special-case property initializers here; the active statement always spans the initializer expression.
if (declarationBody.Parent.Kind() == SyntaxKind.ConstructorDeclaration)
{
var constructor = (ConstructorDeclarationSyntax)declarationBody.Parent;
var partnerConstructor = (ConstructorDeclarationSyntax)partnerDeclarationBodyOpt?.Parent;
if (constructor.Initializer == null || position < constructor.Initializer.ColonToken.SpanStart)
{
statementPart = (int)ConstructorPart.DefaultBaseConstructorCall;
partnerOpt = partnerConstructor;
return constructor;
}
declarationBody = constructor.Initializer;
partnerDeclarationBodyOpt = partnerConstructor?.Initializer;
}
else
{
Debug.Assert(!(declarationBody is BlockSyntax));
// let's find a labeled node that encompasses the body:
position = declarationBody.SpanStart;
}
}
SyntaxNode node;
if (partnerDeclarationBodyOpt != null)
{
SyntaxUtilities.FindLeafNodeAndPartner(declarationBody, position, partnerDeclarationBodyOpt, out node, out partnerOpt);
}
else
{
node = declarationBody.FindToken(position).Parent;
partnerOpt = null;
}
while (node != declarationBody && !StatementSyntaxComparer.HasLabel(node) && !SyntaxFacts.IsLambdaBody(node))
{
node = node.Parent;
if (partnerOpt != null)
{
partnerOpt = partnerOpt.Parent;
}
}
switch (node.Kind())
{
case SyntaxKind.Block:
statementPart = (int)GetStatementPart((BlockSyntax)node, position);
break;
case SyntaxKind.ForEachStatement:
statementPart = (int)GetStatementPart((ForEachStatementSyntax)node, position);
break;
case SyntaxKind.VariableDeclaration:
// VariableDeclaration ::= TypeSyntax CommaSeparatedList(VariableDeclarator)
//
// The compiler places sequence points after each local variable initialization.
// The TypeSyntax is considered to be part of the first sequence span.
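                    // For example, in 'int x = F(), y = G();' the first span covers 'int x = F()'
                    // and the second span covers only 'y = G()'.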
node = ((VariableDeclarationSyntax)node).Variables.First();
if (partnerOpt != null)
{
partnerOpt = ((VariableDeclarationSyntax)partnerOpt).Variables.First();
}
statementPart = 0;
break;
default:
statementPart = 0;
break;
}
return node;
}
private static BlockPart GetStatementPart(BlockSyntax node, int position)
{
return position < node.OpenBraceToken.Span.End ? BlockPart.OpenBrace : BlockPart.CloseBrace;
}
private static TextSpan GetActiveSpan(BlockSyntax node, BlockPart part)
{
switch (part)
{
case BlockPart.OpenBrace:
return node.OpenBraceToken.Span;
case BlockPart.CloseBrace:
return node.CloseBraceToken.Span;
default:
throw ExceptionUtilities.UnexpectedValue(part);
}
}
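        // For 'foreach (var x in expr) ...': positions before '(' map to ForEach, positions before 'in' to
        // VariableDeclaration, positions before the expression to In, and anything later to Expression.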
private static ForEachPart GetStatementPart(ForEachStatementSyntax node, int position)
{
return position < node.OpenParenToken.SpanStart ? ForEachPart.ForEach :
position < node.InKeyword.SpanStart ? ForEachPart.VariableDeclaration :
position < node.Expression.SpanStart ? ForEachPart.In :
ForEachPart.Expression;
}
private static TextSpan GetActiveSpan(ForEachStatementSyntax node, ForEachPart part)
{
switch (part)
{
case ForEachPart.ForEach:
return node.ForEachKeyword.Span;
case ForEachPart.VariableDeclaration:
return TextSpan.FromBounds(node.Type.SpanStart, node.Identifier.Span.End);
case ForEachPart.In:
return node.InKeyword.Span;
case ForEachPart.Expression:
return node.Expression.Span;
default:
throw ExceptionUtilities.UnexpectedValue(part);
}
}
internal override Func<SyntaxNode, SyntaxNode> CreateSyntaxMapForEquivalentNodes(SyntaxNode oldRoot, SyntaxNode newRoot)
{
Debug.Assert(SyntaxFactory.AreEquivalent(oldRoot, newRoot));
return newNode => SyntaxUtilities.FindPartner(newRoot, oldRoot, newNode);
}
protected override SyntaxNode FindEnclosingLambdaBody(SyntaxNode containerOpt, SyntaxNode node)
{
SyntaxNode root = GetEncompassingAncestor(containerOpt);
while (node != root)
{
if (SyntaxFacts.IsLambdaBody(node))
{
return node;
}
node = node.Parent;
}
return null;
}
protected override SyntaxNode GetPartnerLambdaBody(SyntaxNode oldBody, SyntaxNode newLambda)
{
return CompilerSyntaxUtilities.GetCorrespondingLambdaBody(oldBody, newLambda);
}
protected override Match<SyntaxNode> ComputeTopLevelMatch(SyntaxNode oldCompilationUnit, SyntaxNode newCompilationUnit)
{
return TopSyntaxComparer.Instance.ComputeMatch(oldCompilationUnit, newCompilationUnit);
}
protected override Match<SyntaxNode> ComputeBodyMatch(SyntaxNode oldBody, SyntaxNode newBody, IEnumerable<KeyValuePair<SyntaxNode, SyntaxNode>> knownMatches)
{
SyntaxUtilities.AssertIsBody(oldBody, allowLambda: true);
SyntaxUtilities.AssertIsBody(newBody, allowLambda: true);
if (oldBody is ExpressionSyntax || newBody is ExpressionSyntax)
{
Debug.Assert(oldBody is ExpressionSyntax || oldBody is BlockSyntax);
Debug.Assert(newBody is ExpressionSyntax || newBody is BlockSyntax);
// The matching algorithm requires the roots to match each other.
// Lambda bodies, field/property initializers, and method/property/indexer/operator expression-bodies may also be lambda expressions.
// Say we have oldBody 'x => x' and newBody 'F(x => x + 1)', then
// the algorithm would match 'x => x' to 'F(x => x + 1)' instead of
// matching 'x => x' to 'x => x + 1'.
// We use the parent node as a root:
// - for field/property initializers the root is EqualsValueClause.
// - for expression-bodies the root is ArrowExpressionClauseSyntax.
// - for block bodies the root is a method/operator/accessor declaration (only happens when matching expression body with a block body)
// - for lambdas the root is a LambdaExpression.
// - for query lambdas the root is the query clause containing the lambda (e.g. where).
return new StatementSyntaxComparer(oldBody, newBody).ComputeMatch(oldBody.Parent, newBody.Parent, knownMatches);
}
return StatementSyntaxComparer.Default.ComputeMatch(oldBody, newBody, knownMatches);
}
protected override bool TryMatchActiveStatement(
SyntaxNode oldStatement,
int statementPart,
SyntaxNode oldBody,
SyntaxNode newBody,
out SyntaxNode newStatement)
{
SyntaxUtilities.AssertIsBody(oldBody, allowLambda: true);
SyntaxUtilities.AssertIsBody(newBody, allowLambda: true);
switch (oldStatement.Kind())
{
case SyntaxKind.ThisConstructorInitializer:
case SyntaxKind.BaseConstructorInitializer:
case SyntaxKind.ConstructorDeclaration:
var newConstructor = (ConstructorDeclarationSyntax)newBody.Parent;
newStatement = (SyntaxNode)newConstructor.Initializer ?? newConstructor;
return true;
default:
// TODO: Consider mapping an expression body to an equivalent statement expression or return statement and vice versa.
// It would benefit transformations of expression bodies to block bodies of lambdas, methods, operators and properties.
// field initializer, lambda and query expressions:
if (oldStatement == oldBody && !newBody.IsKind(SyntaxKind.Block))
{
newStatement = newBody;
return true;
}
newStatement = null;
return false;
}
}
#endregion
#region Syntax and Semantic Utils
protected override IEnumerable<SequenceEdit> GetSyntaxSequenceEdits(ImmutableArray<SyntaxNode> oldNodes, ImmutableArray<SyntaxNode> newNodes)
{
return SyntaxComparer.GetSequenceEdits(oldNodes, newNodes);
}
internal override SyntaxNode EmptyCompilationUnit
{
get
{
return SyntaxFactory.CompilationUnit();
}
}
internal override bool ExperimentalFeaturesEnabled(SyntaxTree tree)
{
// there are no experimental features at this time.
return false;
}
protected override bool StatementLabelEquals(SyntaxNode node1, SyntaxNode node2)
{
return StatementSyntaxComparer.GetLabelImpl(node1) == StatementSyntaxComparer.GetLabelImpl(node2);
}
protected override bool TryGetEnclosingBreakpointSpan(SyntaxNode root, int position, out TextSpan span)
{
return root.TryGetClosestBreakpointSpan(position, out span);
}
protected override bool TryGetActiveSpan(SyntaxNode node, int statementPart, out TextSpan span)
{
switch (node.Kind())
{
case SyntaxKind.Block:
span = GetActiveSpan((BlockSyntax)node, (BlockPart)statementPart);
return true;
case SyntaxKind.ForEachStatement:
span = GetActiveSpan((ForEachStatementSyntax)node, (ForEachPart)statementPart);
return true;
case SyntaxKind.DoStatement:
// The active statement of DoStatement node is the while condition,
// which is lexically not the closest breakpoint span (the body is).
// do { ... } [|while (condition);|]
var doStatement = (DoStatementSyntax)node;
return node.TryGetClosestBreakpointSpan(doStatement.WhileKeyword.SpanStart, out span);
case SyntaxKind.PropertyDeclaration:
// The active span corresponding to a property declaration is the span corresponding to its initializer (if any),
                    // not the span corresponding to the accessor.
// int P { [|get;|] } = [|<initializer>|];
var propertyDeclaration = (PropertyDeclarationSyntax)node;
if (propertyDeclaration.Initializer != null &&
node.TryGetClosestBreakpointSpan(propertyDeclaration.Initializer.SpanStart, out span))
{
return true;
}
else
{
span = default(TextSpan);
return false;
}
default:
return node.TryGetClosestBreakpointSpan(node.SpanStart, out span);
}
}
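        // Enumerates candidate statements near the given statement where an active statement span could be placed:
        // following siblings first, then preceding siblings, then the enclosing node, recursively. For blocks and
        // foreach statements the corresponding brace or 'foreach' keyword part is yielded as well.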
protected override IEnumerable<KeyValuePair<SyntaxNode, int>> EnumerateNearStatements(SyntaxNode statement)
{
int direction = +1;
SyntaxNodeOrToken nodeOrToken = statement;
var fieldOrPropertyModifiers = SyntaxUtilities.TryGetFieldOrPropertyModifiers(statement);
while (true)
{
nodeOrToken = (direction < 0) ? nodeOrToken.GetPreviousSibling() : nodeOrToken.GetNextSibling();
if (nodeOrToken.RawKind == 0)
{
var parent = statement.Parent;
if (parent == null)
{
yield break;
}
if (parent.IsKind(SyntaxKind.Block))
{
yield return KeyValuePair.Create(parent, (int)(direction > 0 ? BlockPart.CloseBrace : BlockPart.OpenBrace));
}
else if (parent.IsKind(SyntaxKind.ForEachStatement))
{
yield return KeyValuePair.Create(parent, (int)ForEachPart.ForEach);
}
if (direction > 0)
{
nodeOrToken = statement;
direction = -1;
continue;
}
if (fieldOrPropertyModifiers.HasValue)
{
// We enumerated all members and none of them has an initializer.
                        // We don't have any better place to put the span than the initial field.
                        // Consider: in non-partial classes we could find a single constructor.
// Otherwise, it would be confusing to select one arbitrarily.
yield return KeyValuePair.Create(statement, -1);
}
nodeOrToken = statement = parent;
fieldOrPropertyModifiers = SyntaxUtilities.TryGetFieldOrPropertyModifiers(statement);
direction = +1;
yield return KeyValuePair.Create(nodeOrToken.AsNode(), 0);
}
else
{
var node = nodeOrToken.AsNode();
if (node == null)
{
continue;
}
if (fieldOrPropertyModifiers.HasValue)
{
var nodeModifiers = SyntaxUtilities.TryGetFieldOrPropertyModifiers(node);
if (!nodeModifiers.HasValue ||
nodeModifiers.Value.Any(SyntaxKind.StaticKeyword) != fieldOrPropertyModifiers.Value.Any(SyntaxKind.StaticKeyword))
{
continue;
}
}
if (node.IsKind(SyntaxKind.Block))
{
yield return KeyValuePair.Create(node, (int)(direction > 0 ? BlockPart.OpenBrace : BlockPart.CloseBrace));
}
else if (node.IsKind(SyntaxKind.ForEachStatement))
{
yield return KeyValuePair.Create(node, (int)ForEachPart.ForEach);
}
yield return KeyValuePair.Create(node, 0);
}
}
}
protected override bool AreEquivalentActiveStatements(SyntaxNode oldStatement, SyntaxNode newStatement, int statementPart)
{
if (oldStatement.Kind() != newStatement.Kind())
{
return false;
}
switch (oldStatement.Kind())
{
case SyntaxKind.Block:
Debug.Assert(statementPart != 0);
return true;
case SyntaxKind.ConstructorDeclaration:
Debug.Assert(statementPart != 0);
// The call could only change if the base type of the containing class changed.
return true;
case SyntaxKind.ForEachStatement:
Debug.Assert(statementPart != 0);
// only check the expression, edits in the body and the variable declaration are allowed:
return AreEquivalentActiveStatements((ForEachStatementSyntax)oldStatement, (ForEachStatementSyntax)newStatement);
case SyntaxKind.IfStatement:
// only check the condition, edits in the body are allowed:
return AreEquivalentActiveStatements((IfStatementSyntax)oldStatement, (IfStatementSyntax)newStatement);
case SyntaxKind.WhileStatement:
// only check the condition, edits in the body are allowed:
return AreEquivalentActiveStatements((WhileStatementSyntax)oldStatement, (WhileStatementSyntax)newStatement);
case SyntaxKind.DoStatement:
// only check the condition, edits in the body are allowed:
return AreEquivalentActiveStatements((DoStatementSyntax)oldStatement, (DoStatementSyntax)newStatement);
case SyntaxKind.SwitchStatement:
return AreEquivalentActiveStatements((SwitchStatementSyntax)oldStatement, (SwitchStatementSyntax)newStatement);
case SyntaxKind.LockStatement:
return AreEquivalentActiveStatements((LockStatementSyntax)oldStatement, (LockStatementSyntax)newStatement);
case SyntaxKind.UsingStatement:
return AreEquivalentActiveStatements((UsingStatementSyntax)oldStatement, (UsingStatementSyntax)newStatement);
// fixed and for statements don't need special handling since the active statement is a variable declaration
default:
return SyntaxFactory.AreEquivalent(oldStatement, newStatement);
}
}
private static bool AreEquivalentActiveStatements(IfStatementSyntax oldNode, IfStatementSyntax newNode)
{
// only check the condition, edits in the body are allowed:
return SyntaxFactory.AreEquivalent(oldNode.Condition, newNode.Condition);
}
private static bool AreEquivalentActiveStatements(WhileStatementSyntax oldNode, WhileStatementSyntax newNode)
{
// only check the condition, edits in the body are allowed:
return SyntaxFactory.AreEquivalent(oldNode.Condition, newNode.Condition);
}
private static bool AreEquivalentActiveStatements(DoStatementSyntax oldNode, DoStatementSyntax newNode)
{
// only check the condition, edits in the body are allowed:
return SyntaxFactory.AreEquivalent(oldNode.Condition, newNode.Condition);
}
private static bool AreEquivalentActiveStatements(SwitchStatementSyntax oldNode, SwitchStatementSyntax newNode)
{
// only check the expression, edits in the body are allowed:
return SyntaxFactory.AreEquivalent(oldNode.Expression, newNode.Expression);
}
private static bool AreEquivalentActiveStatements(LockStatementSyntax oldNode, LockStatementSyntax newNode)
{
// only check the expression, edits in the body are allowed:
return SyntaxFactory.AreEquivalent(oldNode.Expression, newNode.Expression);
}
private static bool AreEquivalentActiveStatements(FixedStatementSyntax oldNode, FixedStatementSyntax newNode)
{
return SyntaxFactory.AreEquivalent(oldNode.Declaration, newNode.Declaration);
}
private static bool AreEquivalentActiveStatements(UsingStatementSyntax oldNode, UsingStatementSyntax newNode)
{
// only check the expression/declaration, edits in the body are allowed:
return SyntaxFactory.AreEquivalent(
(SyntaxNode)oldNode.Declaration ?? oldNode.Expression,
(SyntaxNode)newNode.Declaration ?? newNode.Expression);
}
private static bool AreEquivalentActiveStatements(ForEachStatementSyntax oldNode, ForEachStatementSyntax newNode)
{
            // This is conservative; we might be able to allow changing the type.
return SyntaxFactory.AreEquivalent(oldNode.Type, newNode.Type)
&& SyntaxFactory.AreEquivalent(oldNode.Expression, newNode.Expression);
}
internal override bool IsMethod(SyntaxNode declaration)
{
return SyntaxUtilities.IsMethod(declaration);
}
internal override SyntaxNode TryGetContainingTypeDeclaration(SyntaxNode memberDeclaration)
{
return memberDeclaration.Parent.FirstAncestorOrSelf<TypeDeclarationSyntax>();
}
internal override bool HasBackingField(SyntaxNode propertyDeclaration)
{
return SyntaxUtilities.HasBackingField((PropertyDeclarationSyntax)propertyDeclaration);
}
internal override bool HasInitializer(SyntaxNode declaration, out bool isStatic)
{
switch (declaration.Kind())
{
case SyntaxKind.VariableDeclarator:
var fieldDeclaration = (VariableDeclaratorSyntax)declaration;
if (fieldDeclaration.Initializer != null)
{
isStatic = ((FieldDeclarationSyntax)declaration.Parent.Parent).Modifiers.Any(SyntaxKind.StaticKeyword);
return true;
}
isStatic = false;
return false;
case SyntaxKind.PropertyDeclaration:
var propertyDeclaration = (PropertyDeclarationSyntax)declaration;
if (propertyDeclaration.Initializer != null)
{
isStatic = propertyDeclaration.Modifiers.Any(SyntaxKind.StaticKeyword);
return true;
}
isStatic = false;
return false;
default:
isStatic = false;
return false;
}
}
internal override bool IncludesInitializers(SyntaxNode constructorDeclaration)
{
var ctor = (ConstructorDeclarationSyntax)constructorDeclaration;
return ctor.Initializer == null || ctor.Initializer.IsKind(SyntaxKind.BaseConstructorInitializer);
}
internal override bool IsPartial(INamedTypeSymbol type)
{
var syntaxRefs = type.DeclaringSyntaxReferences;
return syntaxRefs.Length > 1
|| ((TypeDeclarationSyntax)syntaxRefs.Single().GetSyntax()).Modifiers.Any(SyntaxKind.PartialKeyword);
}
protected override ISymbol GetSymbolForEdit(SemanticModel model, SyntaxNode node, EditKind editKind, Dictionary<SyntaxNode, EditKind> editMap, CancellationToken cancellationToken)
{
if (editKind == EditKind.Update)
{
if (node.IsKind(SyntaxKind.EnumDeclaration))
{
// Enum declaration update that removes/adds a trailing comma.
return null;
}
if (node.IsKind(SyntaxKind.IndexerDeclaration) || node.IsKind(SyntaxKind.PropertyDeclaration))
{
// The only legitimate update of an indexer/property declaration is an update of its expression body.
// The expression body itself may have been updated, replaced with an explicit getter, or added to replace an explicit getter.
// In any case, the update is to the property getter symbol.
var propertyOrIndexer = model.GetDeclaredSymbol(node, cancellationToken);
return ((IPropertySymbol)propertyOrIndexer).GetMethod;
}
}
if (IsGetterToExpressionBodyTransformation(editKind, node, editMap))
{
return null;
}
return model.GetDeclaredSymbol(node, cancellationToken);
}
protected override bool TryGetDeclarationBodyEdit(Edit<SyntaxNode> edit, Dictionary<SyntaxNode, EditKind> editMap, out SyntaxNode oldBody, out SyntaxNode newBody)
{
            // Detect a transition between a property/indexer with an expression body and one with an explicit getter.
// int P => old_body; <-> int P { get { new_body } }
// int this[args] => old_body; <-> int this[args] { get { new_body } }
// First, return getter or expression body for property/indexer update:
if (edit.Kind == EditKind.Update && (edit.OldNode.IsKind(SyntaxKind.PropertyDeclaration) || edit.OldNode.IsKind(SyntaxKind.IndexerDeclaration)))
{
oldBody = SyntaxUtilities.TryGetEffectiveGetterBody(edit.OldNode);
newBody = SyntaxUtilities.TryGetEffectiveGetterBody(edit.NewNode);
if (oldBody != null && newBody != null)
{
return true;
}
}
            // Second, ignore insertion or deletion of a getter that is part of such a transition:
if (IsGetterToExpressionBodyTransformation(edit.Kind, edit.OldNode ?? edit.NewNode, editMap))
{
oldBody = newBody = null;
return false;
}
return base.TryGetDeclarationBodyEdit(edit, editMap, out oldBody, out newBody);
}
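        // Returns true when an inserted or deleted getter is just one side of switching a property or indexer
        // between the accessor-list form and the expression-body form, e.g. 'int P { get { ... } }' <-> 'int P => ...;',
        // so the change is processed via the enclosing property/indexer update instead.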
private static bool IsGetterToExpressionBodyTransformation(EditKind editKind, SyntaxNode node, Dictionary<SyntaxNode, EditKind> editMap)
{
if ((editKind == EditKind.Insert || editKind == EditKind.Delete) && node.IsKind(SyntaxKind.GetAccessorDeclaration))
{
Debug.Assert(node.Parent.IsKind(SyntaxKind.AccessorList));
Debug.Assert(node.Parent.Parent.IsKind(SyntaxKind.PropertyDeclaration) || node.Parent.Parent.IsKind(SyntaxKind.IndexerDeclaration));
EditKind parentEdit;
return editMap.TryGetValue(node.Parent, out parentEdit) && parentEdit == editKind &&
editMap.TryGetValue(node.Parent.Parent, out parentEdit) && parentEdit == EditKind.Update;
}
return false;
}
internal override bool ContainsLambda(SyntaxNode declaration)
{
return declaration.DescendantNodes().Any(SyntaxUtilities.IsLambda);
}
internal override bool IsLambda(SyntaxNode node)
{
return SyntaxUtilities.IsLambda(node);
}
internal override bool TryGetLambdaBodies(SyntaxNode node, out SyntaxNode body1, out SyntaxNode body2)
{
return SyntaxUtilities.TryGetLambdaBodies(node, out body1, out body2);
}
#endregion
#region Diagnostic Info
protected override TextSpan GetDiagnosticSpan(SyntaxNode node, EditKind editKind)
{
return GetDiagnosticSpanImpl(node, editKind);
}
private static TextSpan GetDiagnosticSpanImpl(SyntaxNode node, EditKind editKind)
{
return GetDiagnosticSpanImpl(node.Kind(), node, editKind);
}
// internal for testing; kind is passed explicitly for testing as well
internal static TextSpan GetDiagnosticSpanImpl(SyntaxKind kind, SyntaxNode node, EditKind editKind)
{
switch (kind)
{
case SyntaxKind.CompilationUnit:
return default(TextSpan);
case SyntaxKind.GlobalStatement:
// TODO:
return default(TextSpan);
case SyntaxKind.ExternAliasDirective:
case SyntaxKind.UsingDirective:
return node.Span;
case SyntaxKind.NamespaceDeclaration:
var ns = (NamespaceDeclarationSyntax)node;
return TextSpan.FromBounds(ns.NamespaceKeyword.SpanStart, ns.Name.Span.End);
case SyntaxKind.ClassDeclaration:
case SyntaxKind.StructDeclaration:
case SyntaxKind.InterfaceDeclaration:
var typeDeclaration = (TypeDeclarationSyntax)node;
return GetDiagnosticSpan(typeDeclaration.Modifiers, typeDeclaration.Keyword,
typeDeclaration.TypeParameterList ?? (SyntaxNodeOrToken)typeDeclaration.Identifier);
case SyntaxKind.EnumDeclaration:
var enumDeclaration = (EnumDeclarationSyntax)node;
return GetDiagnosticSpan(enumDeclaration.Modifiers, enumDeclaration.EnumKeyword, enumDeclaration.Identifier);
case SyntaxKind.DelegateDeclaration:
var delegateDeclaration = (DelegateDeclarationSyntax)node;
return GetDiagnosticSpan(delegateDeclaration.Modifiers, delegateDeclaration.DelegateKeyword, delegateDeclaration.ParameterList);
case SyntaxKind.FieldDeclaration:
var fieldDeclaration = (BaseFieldDeclarationSyntax)node;
return GetDiagnosticSpan(fieldDeclaration.Modifiers, fieldDeclaration.Declaration, fieldDeclaration.Declaration);
case SyntaxKind.EventFieldDeclaration:
var eventFieldDeclaration = (EventFieldDeclarationSyntax)node;
return GetDiagnosticSpan(eventFieldDeclaration.Modifiers, eventFieldDeclaration.EventKeyword, eventFieldDeclaration.Declaration);
case SyntaxKind.VariableDeclaration:
return GetDiagnosticSpanImpl(node.Parent, editKind);
case SyntaxKind.VariableDeclarator:
return node.Span;
case SyntaxKind.MethodDeclaration:
var methodDeclaration = (MethodDeclarationSyntax)node;
return GetDiagnosticSpan(methodDeclaration.Modifiers, methodDeclaration.ReturnType, methodDeclaration.ParameterList);
case SyntaxKind.ConversionOperatorDeclaration:
var conversionOperatorDeclaration = (ConversionOperatorDeclarationSyntax)node;
return GetDiagnosticSpan(conversionOperatorDeclaration.Modifiers, conversionOperatorDeclaration.ImplicitOrExplicitKeyword, conversionOperatorDeclaration.ParameterList);
case SyntaxKind.OperatorDeclaration:
var operatorDeclaration = (OperatorDeclarationSyntax)node;
return GetDiagnosticSpan(operatorDeclaration.Modifiers, operatorDeclaration.ReturnType, operatorDeclaration.ParameterList);
case SyntaxKind.ConstructorDeclaration:
var constructorDeclaration = (ConstructorDeclarationSyntax)node;
return GetDiagnosticSpan(constructorDeclaration.Modifiers, constructorDeclaration.Identifier, constructorDeclaration.ParameterList);
case SyntaxKind.DestructorDeclaration:
var destructorDeclaration = (DestructorDeclarationSyntax)node;
return GetDiagnosticSpan(destructorDeclaration.Modifiers, destructorDeclaration.TildeToken, destructorDeclaration.ParameterList);
case SyntaxKind.PropertyDeclaration:
var propertyDeclaration = (PropertyDeclarationSyntax)node;
return GetDiagnosticSpan(propertyDeclaration.Modifiers, propertyDeclaration.Type, propertyDeclaration.Identifier);
case SyntaxKind.IndexerDeclaration:
var indexerDeclaration = (IndexerDeclarationSyntax)node;
return GetDiagnosticSpan(indexerDeclaration.Modifiers, indexerDeclaration.Type, indexerDeclaration.ParameterList);
case SyntaxKind.EventDeclaration:
var eventDeclaration = (EventDeclarationSyntax)node;
return GetDiagnosticSpan(eventDeclaration.Modifiers, eventDeclaration.EventKeyword, eventDeclaration.Identifier);
case SyntaxKind.EnumMemberDeclaration:
return node.Span;
case SyntaxKind.GetAccessorDeclaration:
case SyntaxKind.SetAccessorDeclaration:
case SyntaxKind.AddAccessorDeclaration:
case SyntaxKind.RemoveAccessorDeclaration:
case SyntaxKind.UnknownAccessorDeclaration:
var accessorDeclaration = (AccessorDeclarationSyntax)node;
return GetDiagnosticSpan(accessorDeclaration.Modifiers, accessorDeclaration.Keyword, accessorDeclaration.Keyword);
case SyntaxKind.TypeParameterConstraintClause:
var constraint = (TypeParameterConstraintClauseSyntax)node;
return TextSpan.FromBounds(constraint.WhereKeyword.SpanStart, constraint.Constraints.Last().Span.End);
case SyntaxKind.TypeParameter:
var typeParameter = (TypeParameterSyntax)node;
return typeParameter.Identifier.Span;
case SyntaxKind.AccessorList:
case SyntaxKind.TypeParameterList:
case SyntaxKind.ParameterList:
case SyntaxKind.BracketedParameterList:
if (editKind == EditKind.Delete)
{
return GetDiagnosticSpanImpl(node.Parent, editKind);
}
else
{
return node.Span;
}
case SyntaxKind.Parameter:
                    // We ignore anonymous methods and lambdas;
                    // we only care about parameters of member declarations.
var parameter = (ParameterSyntax)node;
return GetDiagnosticSpan(parameter.Modifiers, parameter.Type, parameter);
case SyntaxKind.AttributeList:
var attributeList = (AttributeListSyntax)node;
if (editKind == EditKind.Update)
{
return (attributeList.Target != null) ? attributeList.Target.Span : attributeList.Span;
}
else
{
return attributeList.Span;
}
case SyntaxKind.Attribute:
case SyntaxKind.ArrowExpressionClause:
return node.Span;
// We only need a diagnostic span if reporting an error for a child statement.
// The following statements may have child statements.
case SyntaxKind.Block:
return ((BlockSyntax)node).OpenBraceToken.Span;
case SyntaxKind.UsingStatement:
var usingStatement = (UsingStatementSyntax)node;
return TextSpan.FromBounds(usingStatement.UsingKeyword.SpanStart, usingStatement.CloseParenToken.Span.End);
case SyntaxKind.FixedStatement:
var fixedStatement = (FixedStatementSyntax)node;
return TextSpan.FromBounds(fixedStatement.FixedKeyword.SpanStart, fixedStatement.CloseParenToken.Span.End);
case SyntaxKind.LockStatement:
var lockStatement = (LockStatementSyntax)node;
return TextSpan.FromBounds(lockStatement.LockKeyword.SpanStart, lockStatement.CloseParenToken.Span.End);
case SyntaxKind.StackAllocArrayCreationExpression:
return ((StackAllocArrayCreationExpressionSyntax)node).StackAllocKeyword.Span;
case SyntaxKind.TryStatement:
return ((TryStatementSyntax)node).TryKeyword.Span;
case SyntaxKind.CatchClause:
return ((CatchClauseSyntax)node).CatchKeyword.Span;
case SyntaxKind.CatchFilterClause:
return node.Span;
case SyntaxKind.FinallyClause:
return ((FinallyClauseSyntax)node).FinallyKeyword.Span;
case SyntaxKind.IfStatement:
var ifStatement = (IfStatementSyntax)node;
return TextSpan.FromBounds(ifStatement.IfKeyword.SpanStart, ifStatement.CloseParenToken.Span.End);
case SyntaxKind.ElseClause:
return ((ElseClauseSyntax)node).ElseKeyword.Span;
case SyntaxKind.SwitchStatement:
var switchStatement = (SwitchStatementSyntax)node;
return TextSpan.FromBounds(switchStatement.SwitchKeyword.SpanStart, switchStatement.CloseParenToken.Span.End);
case SyntaxKind.SwitchSection:
return ((SwitchSectionSyntax)node).Labels.Last().Span;
case SyntaxKind.WhileStatement:
var whileStatement = (WhileStatementSyntax)node;
return TextSpan.FromBounds(whileStatement.WhileKeyword.SpanStart, whileStatement.CloseParenToken.Span.End);
case SyntaxKind.DoStatement:
return ((DoStatementSyntax)node).DoKeyword.Span;
case SyntaxKind.ForStatement:
var forStatement = (ForStatementSyntax)node;
return TextSpan.FromBounds(forStatement.ForKeyword.SpanStart, forStatement.CloseParenToken.Span.End);
case SyntaxKind.ForEachStatement:
var forEachStatement = (ForEachStatementSyntax)node;
return TextSpan.FromBounds(forEachStatement.ForEachKeyword.SpanStart, forEachStatement.CloseParenToken.Span.End);
case SyntaxKind.LabeledStatement:
return ((LabeledStatementSyntax)node).Identifier.Span;
case SyntaxKind.CheckedStatement:
case SyntaxKind.UncheckedStatement:
return ((CheckedStatementSyntax)node).Keyword.Span;
case SyntaxKind.UnsafeStatement:
return ((UnsafeStatementSyntax)node).UnsafeKeyword.Span;
case SyntaxKind.YieldBreakStatement:
case SyntaxKind.YieldReturnStatement:
case SyntaxKind.ReturnStatement:
case SyntaxKind.ThrowStatement:
case SyntaxKind.ExpressionStatement:
case SyntaxKind.LocalDeclarationStatement:
case SyntaxKind.GotoStatement:
case SyntaxKind.GotoCaseStatement:
case SyntaxKind.GotoDefaultStatement:
case SyntaxKind.BreakStatement:
case SyntaxKind.ContinueStatement:
return node.Span;
case SyntaxKind.AwaitExpression:
return ((AwaitExpressionSyntax)node).AwaitKeyword.Span;
case SyntaxKind.AnonymousObjectCreationExpression:
return ((AnonymousObjectCreationExpressionSyntax)node).NewKeyword.Span;
case SyntaxKind.ParenthesizedLambdaExpression:
return ((ParenthesizedLambdaExpressionSyntax)node).ParameterList.Span;
case SyntaxKind.SimpleLambdaExpression:
return ((SimpleLambdaExpressionSyntax)node).Parameter.Span;
case SyntaxKind.AnonymousMethodExpression:
return ((AnonymousMethodExpressionSyntax)node).DelegateKeyword.Span;
case SyntaxKind.QueryExpression:
return ((QueryExpressionSyntax)node).FromClause.FromKeyword.Span;
case SyntaxKind.FromClause:
return ((FromClauseSyntax)node).FromKeyword.Span;
case SyntaxKind.JoinClause:
return ((JoinClauseSyntax)node).JoinKeyword.Span;
case SyntaxKind.LetClause:
return ((LetClauseSyntax)node).LetKeyword.Span;
case SyntaxKind.WhereClause:
return ((WhereClauseSyntax)node).WhereKeyword.Span;
case SyntaxKind.AscendingOrdering:
case SyntaxKind.DescendingOrdering:
return node.Span;
case SyntaxKind.SelectClause:
return ((SelectClauseSyntax)node).SelectKeyword.Span;
case SyntaxKind.GroupClause:
return ((GroupClauseSyntax)node).GroupKeyword.Span;
default:
throw ExceptionUtilities.Unreachable;
}
}
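        // Builds a diagnostic span starting at the first modifier (if there is one) or at the given start node/token,
        // and ending at the end node/token.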
private static TextSpan GetDiagnosticSpan(SyntaxTokenList modifiers, SyntaxNodeOrToken start, SyntaxNodeOrToken end)
{
return TextSpan.FromBounds((modifiers.Count != 0) ? modifiers.First().SpanStart : start.SpanStart, end.Span.End);
}
protected override string GetTopLevelDisplayName(SyntaxNode node, EditKind editKind)
{
return GetTopLevelDisplayNameImpl(node, editKind);
}
protected override string GetStatementDisplayName(SyntaxNode node, EditKind editKind)
{
return GetStatementDisplayNameImpl(node);
}
protected override string GetLambdaDisplayName(SyntaxNode lambda)
{
return GetStatementDisplayNameImpl(lambda);
}
// internal for testing
internal static string GetTopLevelDisplayNameImpl(SyntaxNode node, EditKind editKind)
{
switch (node.Kind())
{
case SyntaxKind.GlobalStatement:
return "global statement";
case SyntaxKind.ExternAliasDirective:
return "using namespace";
case SyntaxKind.UsingDirective:
// Dev12 distinguishes using alias from using namespace and reports different errors for removing alias.
                    // None of these changes are allowed anyway, so let's keep it simple.
return "using directive";
case SyntaxKind.NamespaceDeclaration:
return "namespace";
case SyntaxKind.ClassDeclaration:
return "class";
case SyntaxKind.StructDeclaration:
return "struct";
case SyntaxKind.InterfaceDeclaration:
return "interface";
case SyntaxKind.EnumDeclaration:
return "enum";
case SyntaxKind.DelegateDeclaration:
return "delegate";
case SyntaxKind.FieldDeclaration:
return "field";
case SyntaxKind.EventFieldDeclaration:
return "event field";
case SyntaxKind.VariableDeclaration:
case SyntaxKind.VariableDeclarator:
return GetTopLevelDisplayNameImpl(node.Parent, editKind);
case SyntaxKind.MethodDeclaration:
return "method";
case SyntaxKind.ConversionOperatorDeclaration:
return "conversion operator";
case SyntaxKind.OperatorDeclaration:
return "operator";
case SyntaxKind.ConstructorDeclaration:
return "constructor";
case SyntaxKind.DestructorDeclaration:
return "destructor";
case SyntaxKind.PropertyDeclaration:
return SyntaxUtilities.HasBackingField((PropertyDeclarationSyntax)node) ? "auto-property" : "property";
case SyntaxKind.IndexerDeclaration:
return "indexer";
case SyntaxKind.EventDeclaration:
return "event";
case SyntaxKind.EnumMemberDeclaration:
return "enum value";
case SyntaxKind.GetAccessorDeclaration:
if (node.Parent.Parent.IsKind(SyntaxKind.PropertyDeclaration))
{
return "property getter";
}
else
{
Debug.Assert(node.Parent.Parent.IsKind(SyntaxKind.IndexerDeclaration));
return "indexer getter";
}
case SyntaxKind.SetAccessorDeclaration:
if (node.Parent.Parent.IsKind(SyntaxKind.PropertyDeclaration))
{
return "property setter";
}
else
{
Debug.Assert(node.Parent.Parent.IsKind(SyntaxKind.IndexerDeclaration));
return "indexer setter";
}
case SyntaxKind.AddAccessorDeclaration:
case SyntaxKind.RemoveAccessorDeclaration:
return "event accessor";
case SyntaxKind.TypeParameterConstraintClause:
return "type constraint";
case SyntaxKind.TypeParameterList:
case SyntaxKind.TypeParameter:
return "type parameter";
case SyntaxKind.Parameter:
return "parameter";
case SyntaxKind.AttributeList:
return (editKind == EditKind.Update) ? "attribute target" : "attribute";
case SyntaxKind.Attribute:
return "attribute";
default:
throw ExceptionUtilities.Unreachable;
}
}
// internal for testing
internal static string GetStatementDisplayNameImpl(SyntaxNode node)
{
switch (node.Kind())
{
case SyntaxKind.TryStatement:
return "try block";
case SyntaxKind.CatchClause:
return "catch clause";
case SyntaxKind.CatchFilterClause:
return "filter clause";
case SyntaxKind.FinallyClause:
return "finally clause";
case SyntaxKind.FixedStatement:
return "fixed statement";
case SyntaxKind.UsingStatement:
return "using statement";
case SyntaxKind.LockStatement:
return "lock statement";
case SyntaxKind.ForEachStatement:
return "foreach statement";
case SyntaxKind.CheckedStatement:
return "checked statement";
case SyntaxKind.UncheckedStatement:
return "unchecked statement";
case SyntaxKind.YieldBreakStatement:
case SyntaxKind.YieldReturnStatement:
return "yield statement";
case SyntaxKind.AwaitExpression:
return "await expression";
case SyntaxKind.ParenthesizedLambdaExpression:
case SyntaxKind.SimpleLambdaExpression:
return "lambda";
case SyntaxKind.AnonymousMethodExpression:
return "anonymous method";
case SyntaxKind.FromClause:
return "from clause";
case SyntaxKind.JoinClause:
return "join clause";
case SyntaxKind.LetClause:
return "let clause";
case SyntaxKind.WhereClause:
return "where clause";
case SyntaxKind.AscendingOrdering:
case SyntaxKind.DescendingOrdering:
return "orderby clause";
case SyntaxKind.SelectClause:
return "select clause";
case SyntaxKind.GroupClause:
return "groupby clause";
default:
throw ExceptionUtilities.Unreachable;
}
}
#endregion
#region Top-Level Syntactic Rude Edits
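        // Classifies a single top-level edit (insert, delete, update, move or reorder) and reports a rude-edit
        // diagnostic for any change that cannot be applied during Edit and Continue.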
private struct EditClassifier
{
private readonly CSharpEditAndContinueAnalyzer _analyzer;
private readonly List<RudeEditDiagnostic> _diagnostics;
private readonly Match<SyntaxNode> _match;
private readonly SyntaxNode _oldNode;
private readonly SyntaxNode _newNode;
private readonly EditKind _kind;
private readonly TextSpan? _span;
public EditClassifier(
CSharpEditAndContinueAnalyzer analyzer,
List<RudeEditDiagnostic> diagnostics,
SyntaxNode oldNode,
SyntaxNode newNode,
EditKind kind,
Match<SyntaxNode> match = null,
TextSpan? span = null)
{
_analyzer = analyzer;
_diagnostics = diagnostics;
_oldNode = oldNode;
_newNode = newNode;
_kind = kind;
_span = span;
_match = match;
}
private void ReportError(RudeEditKind kind, SyntaxNode spanNode = null, SyntaxNode displayNode = null)
{
var span = (spanNode != null) ? GetDiagnosticSpanImpl(spanNode, _kind) : GetSpan();
var node = displayNode ?? _newNode ?? _oldNode;
var displayName = (displayNode != null) ? GetTopLevelDisplayNameImpl(displayNode, _kind) : GetDisplayName();
_diagnostics.Add(new RudeEditDiagnostic(kind, span, node, arguments: new[] { displayName }));
}
private string GetDisplayName()
{
return GetTopLevelDisplayNameImpl(_newNode ?? _oldNode, _kind);
}
private TextSpan GetSpan()
{
if (_span.HasValue)
{
return _span.Value;
}
if (_newNode == null)
{
return _analyzer.GetDeletedNodeDiagnosticSpan(_match, _oldNode);
}
else
{
return GetDiagnosticSpanImpl(_newNode, _kind);
}
}
public void ClassifyEdit()
{
switch (_kind)
{
case EditKind.Delete:
ClassifyDelete(_oldNode);
return;
case EditKind.Update:
ClassifyUpdate(_oldNode, _newNode);
return;
case EditKind.Move:
ClassifyMove(_oldNode, _newNode);
return;
case EditKind.Insert:
ClassifyInsert(_newNode);
return;
case EditKind.Reorder:
ClassifyReorder(_oldNode, _newNode);
return;
default:
throw ExceptionUtilities.Unreachable;
}
}
#region Move and Reorder
private void ClassifyMove(SyntaxNode oldNode, SyntaxNode newNode)
{
// We could perhaps allow moving a type declaration to a different namespace syntax node
                // as long as it semantically represents the same namespace as that of the original type declaration.
ReportError(RudeEditKind.Move);
}
private void ClassifyReorder(SyntaxNode oldNode, SyntaxNode newNode)
{
switch (newNode.Kind())
{
case SyntaxKind.GlobalStatement:
// TODO:
ReportError(RudeEditKind.Move);
return;
case SyntaxKind.ExternAliasDirective:
case SyntaxKind.UsingDirective:
case SyntaxKind.NamespaceDeclaration:
case SyntaxKind.ClassDeclaration:
case SyntaxKind.StructDeclaration:
case SyntaxKind.InterfaceDeclaration:
case SyntaxKind.EnumDeclaration:
case SyntaxKind.DelegateDeclaration:
case SyntaxKind.VariableDeclaration:
case SyntaxKind.MethodDeclaration:
case SyntaxKind.ConversionOperatorDeclaration:
case SyntaxKind.OperatorDeclaration:
case SyntaxKind.ConstructorDeclaration:
case SyntaxKind.DestructorDeclaration:
case SyntaxKind.IndexerDeclaration:
case SyntaxKind.EventDeclaration:
case SyntaxKind.GetAccessorDeclaration:
case SyntaxKind.SetAccessorDeclaration:
case SyntaxKind.AddAccessorDeclaration:
case SyntaxKind.RemoveAccessorDeclaration:
case SyntaxKind.TypeParameterConstraintClause:
case SyntaxKind.AttributeList:
case SyntaxKind.Attribute:
// We'll ignore these edits. A general policy is to ignore edits that are only discoverable via reflection.
return;
case SyntaxKind.PropertyDeclaration:
case SyntaxKind.FieldDeclaration:
case SyntaxKind.EventFieldDeclaration:
case SyntaxKind.VariableDeclarator:
                        // Maybe we could allow changing the order of field declarations unless the containing type layout is sequential.
ReportError(RudeEditKind.Move);
return;
case SyntaxKind.EnumMemberDeclaration:
// To allow this change we would need to check that values of all fields of the enum
// are preserved, or make sure we can update all method bodies that accessed those that changed.
ReportError(RudeEditKind.Move);
return;
case SyntaxKind.TypeParameter:
case SyntaxKind.Parameter:
ReportError(RudeEditKind.Move);
return;
default:
throw ExceptionUtilities.Unreachable;
}
}
#endregion
#region Insert
private void ClassifyInsert(SyntaxNode node)
{
switch (node.Kind())
{
case SyntaxKind.GlobalStatement:
// TODO:
ReportError(RudeEditKind.Insert);
return;
case SyntaxKind.ExternAliasDirective:
case SyntaxKind.UsingDirective:
case SyntaxKind.NamespaceDeclaration:
case SyntaxKind.DestructorDeclaration:
ReportError(RudeEditKind.Insert);
return;
case SyntaxKind.ClassDeclaration:
case SyntaxKind.StructDeclaration:
ClassifyTypeWithPossibleExternMembersInsert((TypeDeclarationSyntax)node);
return;
case SyntaxKind.InterfaceDeclaration:
case SyntaxKind.EnumDeclaration:
case SyntaxKind.DelegateDeclaration:
return;
case SyntaxKind.PropertyDeclaration:
case SyntaxKind.IndexerDeclaration:
case SyntaxKind.EventDeclaration:
ClassifyModifiedMemberInsert(((BasePropertyDeclarationSyntax)node).Modifiers);
return;
case SyntaxKind.ConversionOperatorDeclaration:
case SyntaxKind.OperatorDeclaration:
ReportError(RudeEditKind.InsertOperator);
return;
case SyntaxKind.MethodDeclaration:
ClassifyMethodInsert((MethodDeclarationSyntax)node);
return;
case SyntaxKind.ConstructorDeclaration:
                        // Allow adding a parameterless constructor.
// Semantic analysis will determine if it's an actual addition or
// just an update of an existing implicit constructor.
if (SyntaxUtilities.IsParameterlessConstructor(node))
{
return;
}
ClassifyModifiedMemberInsert(((BaseMethodDeclarationSyntax)node).Modifiers);
return;
case SyntaxKind.GetAccessorDeclaration:
case SyntaxKind.SetAccessorDeclaration:
case SyntaxKind.AddAccessorDeclaration:
case SyntaxKind.RemoveAccessorDeclaration:
ClassifyAccessorInsert((AccessorDeclarationSyntax)node);
return;
case SyntaxKind.AccessorList:
// an error will be reported for each accessor
return;
case SyntaxKind.FieldDeclaration:
case SyntaxKind.EventFieldDeclaration:
// allowed: private fields in classes
ClassifyFieldInsert((BaseFieldDeclarationSyntax)node);
return;
case SyntaxKind.VariableDeclarator:
// allowed: private fields in classes
ClassifyFieldInsert((VariableDeclaratorSyntax)node);
return;
case SyntaxKind.VariableDeclaration:
// allowed: private fields in classes
ClassifyFieldInsert((VariableDeclarationSyntax)node);
return;
case SyntaxKind.EnumMemberDeclaration:
case SyntaxKind.TypeParameter:
case SyntaxKind.TypeParameterConstraintClause:
case SyntaxKind.TypeParameterList:
case SyntaxKind.Parameter:
case SyntaxKind.Attribute:
case SyntaxKind.AttributeList:
ReportError(RudeEditKind.Insert);
return;
default:
throw ExceptionUtilities.UnexpectedValue(node.Kind());
}
}
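            // Reports a rude edit and returns false if the inserted member is declared extern, virtual, abstract or override.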
private bool ClassifyModifiedMemberInsert(SyntaxTokenList modifiers)
{
if (modifiers.Any(SyntaxKind.ExternKeyword))
{
ReportError(RudeEditKind.InsertExtern);
return false;
}
if (modifiers.Any(SyntaxKind.VirtualKeyword) || modifiers.Any(SyntaxKind.AbstractKeyword) || modifiers.Any(SyntaxKind.OverrideKeyword))
{
ReportError(RudeEditKind.InsertVirtual);
return false;
}
return true;
}
private void ClassifyTypeWithPossibleExternMembersInsert(TypeDeclarationSyntax type)
{
// extern members are not allowed, even in a new type
foreach (var member in type.Members)
{
var modifiers = default(SyntaxTokenList);
switch (member.Kind())
{
case SyntaxKind.PropertyDeclaration:
case SyntaxKind.IndexerDeclaration:
case SyntaxKind.EventDeclaration:
modifiers = ((BasePropertyDeclarationSyntax)member).Modifiers;
break;
case SyntaxKind.ConversionOperatorDeclaration:
case SyntaxKind.OperatorDeclaration:
case SyntaxKind.MethodDeclaration:
case SyntaxKind.ConstructorDeclaration:
modifiers = ((BaseMethodDeclarationSyntax)member).Modifiers;
break;
}
if (modifiers.Any(SyntaxKind.ExternKeyword))
{
ReportError(RudeEditKind.InsertExtern, member, member);
}
}
}
private void ClassifyMethodInsert(MethodDeclarationSyntax method)
{
ClassifyModifiedMemberInsert(method.Modifiers);
if (method.Arity > 0)
{
ReportError(RudeEditKind.InsertGenericMethod);
}
}
private void ClassifyAccessorInsert(AccessorDeclarationSyntax accessor)
{
var baseProperty = (BasePropertyDeclarationSyntax)accessor.Parent.Parent;
ClassifyModifiedMemberInsert(baseProperty.Modifiers);
}
private void ClassifyFieldInsert(BaseFieldDeclarationSyntax field)
{
ClassifyModifiedMemberInsert(field.Modifiers);
}
private void ClassifyFieldInsert(VariableDeclaratorSyntax fieldVariable)
{
ClassifyFieldInsert((VariableDeclarationSyntax)fieldVariable.Parent);
}
private void ClassifyFieldInsert(VariableDeclarationSyntax fieldVariable)
{
ClassifyFieldInsert((BaseFieldDeclarationSyntax)fieldVariable.Parent);
}
#endregion
#region Delete
private void ClassifyDelete(SyntaxNode oldNode)
{
switch (oldNode.Kind())
{
case SyntaxKind.GlobalStatement:
// TODO:
ReportError(RudeEditKind.Delete);
return;
case SyntaxKind.ExternAliasDirective:
case SyntaxKind.UsingDirective:
case SyntaxKind.NamespaceDeclaration:
case SyntaxKind.ClassDeclaration:
case SyntaxKind.StructDeclaration:
case SyntaxKind.InterfaceDeclaration:
case SyntaxKind.EnumDeclaration:
case SyntaxKind.DelegateDeclaration:
case SyntaxKind.MethodDeclaration:
case SyntaxKind.ConversionOperatorDeclaration:
case SyntaxKind.OperatorDeclaration:
case SyntaxKind.DestructorDeclaration:
case SyntaxKind.PropertyDeclaration:
case SyntaxKind.IndexerDeclaration:
case SyntaxKind.EventDeclaration:
case SyntaxKind.FieldDeclaration:
case SyntaxKind.EventFieldDeclaration:
case SyntaxKind.VariableDeclarator:
case SyntaxKind.VariableDeclaration:
// To allow removal of declarations we would need to update method bodies that
// were previously binding to them but now are binding to another symbol that was previously hidden.
ReportError(RudeEditKind.Delete);
return;
case SyntaxKind.ConstructorDeclaration:
// Allow deletion of a parameterless constructor.
// Semantic analysis reports an error if the parameterless ctor isn't replaced by a default ctor.
if (!SyntaxUtilities.IsParameterlessConstructor(oldNode))
{
ReportError(RudeEditKind.Delete);
}
return;
case SyntaxKind.GetAccessorDeclaration:
case SyntaxKind.SetAccessorDeclaration:
case SyntaxKind.AddAccessorDeclaration:
case SyntaxKind.RemoveAccessorDeclaration:
// An accessor can be removed. Accessors are not hiding other symbols.
// If the new compilation still uses the removed accessor a semantic error will be reported.
// For simplicity though we disallow deletion of accessors for now.
// The compiler would need to remember that the accessor has been deleted,
// so that its addition back is interpreted as an update.
// Additional issues might involve changing accessibility of the accessor.
ReportError(RudeEditKind.Delete);
return;
case SyntaxKind.AccessorList:
Debug.Assert(
oldNode.Parent.IsKind(SyntaxKind.PropertyDeclaration) ||
oldNode.Parent.IsKind(SyntaxKind.IndexerDeclaration));
var accessorList = (AccessorListSyntax)oldNode;
var setter = accessorList.Accessors.FirstOrDefault(a => a.IsKind(SyntaxKind.SetAccessorDeclaration));
if (setter != null)
{
ReportError(RudeEditKind.Delete, accessorList.Parent, setter);
}
return;
case SyntaxKind.AttributeList:
case SyntaxKind.Attribute:
// To allow removal of attributes we would need to check if the removed attribute
                        // is a pseudo-custom attribute that the CLR allows us to change, or if it is a compiler well-known attribute
// that affects the generated IL.
ReportError(RudeEditKind.Delete);
return;
case SyntaxKind.EnumMemberDeclaration:
                        // We could allow removing an enum member if it didn't affect the values of other enum members.
// If the updated compilation binds without errors it means that the enum value wasn't used.
ReportError(RudeEditKind.Delete);
return;
case SyntaxKind.TypeParameter:
case SyntaxKind.TypeParameterList:
case SyntaxKind.Parameter:
case SyntaxKind.ParameterList:
case SyntaxKind.TypeParameterConstraintClause:
ReportError(RudeEditKind.Delete);
return;
default:
throw ExceptionUtilities.UnexpectedValue(oldNode.Kind());
}
}
#endregion
#region Update
private void ClassifyUpdate(SyntaxNode oldNode, SyntaxNode newNode)
{
switch (newNode.Kind())
{
case SyntaxKind.GlobalStatement:
ReportError(RudeEditKind.Update);
return;
case SyntaxKind.ExternAliasDirective:
ReportError(RudeEditKind.Update);
return;
case SyntaxKind.UsingDirective:
// Dev12 distinguishes using alias from using namespace and reports different errors for removing alias.
                        // None of these changes are allowed anyway, so let's keep it simple.
ReportError(RudeEditKind.Update);
return;
case SyntaxKind.NamespaceDeclaration:
ClassifyUpdate((NamespaceDeclarationSyntax)oldNode, (NamespaceDeclarationSyntax)newNode);
return;
case SyntaxKind.ClassDeclaration:
case SyntaxKind.StructDeclaration:
case SyntaxKind.InterfaceDeclaration:
ClassifyUpdate((TypeDeclarationSyntax)oldNode, (TypeDeclarationSyntax)newNode);
return;
case SyntaxKind.EnumDeclaration:
ClassifyUpdate((EnumDeclarationSyntax)oldNode, (EnumDeclarationSyntax)newNode);
return;
case SyntaxKind.DelegateDeclaration:
ClassifyUpdate((DelegateDeclarationSyntax)oldNode, (DelegateDeclarationSyntax)newNode);
return;
case SyntaxKind.FieldDeclaration:
ClassifyUpdate((BaseFieldDeclarationSyntax)oldNode, (BaseFieldDeclarationSyntax)newNode);
return;
case SyntaxKind.EventFieldDeclaration:
ClassifyUpdate((BaseFieldDeclarationSyntax)oldNode, (BaseFieldDeclarationSyntax)newNode);
return;
case SyntaxKind.VariableDeclaration:
ClassifyUpdate((VariableDeclarationSyntax)oldNode, (VariableDeclarationSyntax)newNode);
return;
case SyntaxKind.VariableDeclarator:
ClassifyUpdate((VariableDeclaratorSyntax)oldNode, (VariableDeclaratorSyntax)newNode);
return;
case SyntaxKind.MethodDeclaration:
ClassifyUpdate((MethodDeclarationSyntax)oldNode, (MethodDeclarationSyntax)newNode);
return;
case SyntaxKind.ConversionOperatorDeclaration:
ClassifyUpdate((ConversionOperatorDeclarationSyntax)oldNode, (ConversionOperatorDeclarationSyntax)newNode);
return;
case SyntaxKind.OperatorDeclaration:
ClassifyUpdate((OperatorDeclarationSyntax)oldNode, (OperatorDeclarationSyntax)newNode);
return;
case SyntaxKind.ConstructorDeclaration:
ClassifyUpdate((ConstructorDeclarationSyntax)oldNode, (ConstructorDeclarationSyntax)newNode);
return;
case SyntaxKind.DestructorDeclaration:
ClassifyUpdate((DestructorDeclarationSyntax)oldNode, (DestructorDeclarationSyntax)newNode);
return;
case SyntaxKind.PropertyDeclaration:
ClassifyUpdate((PropertyDeclarationSyntax)oldNode, (PropertyDeclarationSyntax)newNode);
return;
case SyntaxKind.IndexerDeclaration:
ClassifyUpdate((IndexerDeclarationSyntax)oldNode, (IndexerDeclarationSyntax)newNode);
return;
case SyntaxKind.EventDeclaration:
return;
case SyntaxKind.EnumMemberDeclaration:
ClassifyUpdate((EnumMemberDeclarationSyntax)oldNode, (EnumMemberDeclarationSyntax)newNode);
return;
case SyntaxKind.GetAccessorDeclaration:
case SyntaxKind.SetAccessorDeclaration:
case SyntaxKind.AddAccessorDeclaration:
case SyntaxKind.RemoveAccessorDeclaration:
ClassifyUpdate((AccessorDeclarationSyntax)oldNode, (AccessorDeclarationSyntax)newNode);
return;
case SyntaxKind.TypeParameterConstraintClause:
ClassifyUpdate((TypeParameterConstraintClauseSyntax)oldNode, (TypeParameterConstraintClauseSyntax)newNode);
return;
case SyntaxKind.TypeParameter:
ClassifyUpdate((TypeParameterSyntax)oldNode, (TypeParameterSyntax)newNode);
return;
case SyntaxKind.Parameter:
ClassifyUpdate((ParameterSyntax)oldNode, (ParameterSyntax)newNode);
return;
case SyntaxKind.AttributeList:
ClassifyUpdate((AttributeListSyntax)oldNode, (AttributeListSyntax)newNode);
return;
case SyntaxKind.Attribute:
// Dev12 reports "Rename" if the attribute type name is changed.
                        // But such an update does not actually rename the attribute; it changes which attribute is applied.
ReportError(RudeEditKind.Update);
return;
case SyntaxKind.TypeParameterList:
case SyntaxKind.ParameterList:
case SyntaxKind.BracketedParameterList:
case SyntaxKind.AccessorList:
return;
default:
throw ExceptionUtilities.Unreachable;
}
}
private void ClassifyUpdate(NamespaceDeclarationSyntax oldNode, NamespaceDeclarationSyntax newNode)
{
Debug.Assert(!SyntaxFactory.AreEquivalent(oldNode.Name, newNode.Name));
ReportError(RudeEditKind.Renamed);
}
private void ClassifyUpdate(TypeDeclarationSyntax oldNode, TypeDeclarationSyntax newNode)
{
if (oldNode.Kind() != newNode.Kind())
{
ReportError(RudeEditKind.TypeKindUpdate);
return;
}
if (!SyntaxFactory.AreEquivalent(oldNode.Modifiers, newNode.Modifiers))
{
ReportError(RudeEditKind.ModifiersUpdate);
return;
}
if (!SyntaxFactory.AreEquivalent(oldNode.Identifier, newNode.Identifier))
{
ReportError(RudeEditKind.Renamed);
return;
}
Debug.Assert(!SyntaxFactory.AreEquivalent(oldNode.BaseList, newNode.BaseList));
ReportError(RudeEditKind.BaseTypeOrInterfaceUpdate);
}
private void ClassifyUpdate(EnumDeclarationSyntax oldNode, EnumDeclarationSyntax newNode)
{
if (!SyntaxFactory.AreEquivalent(oldNode.Identifier, newNode.Identifier))
{
ReportError(RudeEditKind.Renamed);
return;
}
if (!SyntaxFactory.AreEquivalent(oldNode.Modifiers, newNode.Modifiers))
{
ReportError(RudeEditKind.ModifiersUpdate);
return;
}
if (!SyntaxFactory.AreEquivalent(oldNode.BaseList, newNode.BaseList))
{
ReportError(RudeEditKind.EnumUnderlyingTypeUpdate);
return;
}
// The list of members has been updated (separators added).
// We report a Rude Edit for each updated member.
}
private void ClassifyUpdate(DelegateDeclarationSyntax oldNode, DelegateDeclarationSyntax newNode)
{
if (!SyntaxFactory.AreEquivalent(oldNode.Modifiers, newNode.Modifiers))
{
ReportError(RudeEditKind.ModifiersUpdate);
return;
}
if (!SyntaxFactory.AreEquivalent(oldNode.ReturnType, newNode.ReturnType))
{
ReportError(RudeEditKind.TypeUpdate);
return;
}
Debug.Assert(!SyntaxFactory.AreEquivalent(oldNode.Identifier, newNode.Identifier));
ReportError(RudeEditKind.Renamed);
}
private void ClassifyUpdate(BaseFieldDeclarationSyntax oldNode, BaseFieldDeclarationSyntax newNode)
{
if (oldNode.Kind() != newNode.Kind())
{
ReportError(RudeEditKind.FieldKindUpdate);
return;
}
Debug.Assert(!SyntaxFactory.AreEquivalent(oldNode.Modifiers, newNode.Modifiers));
ReportError(RudeEditKind.ModifiersUpdate);
return;
}
private void ClassifyUpdate(VariableDeclarationSyntax oldNode, VariableDeclarationSyntax newNode)
{
if (!SyntaxFactory.AreEquivalent(oldNode.Type, newNode.Type))
{
ReportError(RudeEditKind.TypeUpdate);
return;
}
// separators may be added/removed:
}
private void ClassifyUpdate(VariableDeclaratorSyntax oldNode, VariableDeclaratorSyntax newNode)
{
if (!SyntaxFactory.AreEquivalent(oldNode.Identifier, newNode.Identifier))
{
ReportError(RudeEditKind.Renamed);
return;
}
            // If the argument lists are mismatched the field must have a mismatched "fixed" modifier,
// which is reported by the field declaration.
if ((oldNode.ArgumentList == null) == (newNode.ArgumentList == null))
{
if (!SyntaxFactory.AreEquivalent(oldNode.ArgumentList, newNode.ArgumentList))
{
ReportError(RudeEditKind.FixedSizeFieldUpdate);
return;
}
}
var typeDeclaration = (TypeDeclarationSyntax)oldNode.Parent.Parent.Parent;
if (typeDeclaration.Arity > 0)
{
ReportError(RudeEditKind.GenericTypeInitializerUpdate);
return;
}
ClassifyDeclarationBodyRudeUpdates(newNode);
}
private void ClassifyUpdate(MethodDeclarationSyntax oldNode, MethodDeclarationSyntax newNode)
{
if (!SyntaxFactory.AreEquivalent(oldNode.Identifier, newNode.Identifier))
{
ReportError(RudeEditKind.Renamed);
return;
}
if (!ClassifyMethodModifierUpdate(oldNode.Modifiers, newNode.Modifiers))
{
ReportError(RudeEditKind.ModifiersUpdate);
return;
}
if (!SyntaxFactory.AreEquivalent(oldNode.ReturnType, newNode.ReturnType))
{
ReportError(RudeEditKind.TypeUpdate);
return;
}
if (!SyntaxFactory.AreEquivalent(oldNode.ExplicitInterfaceSpecifier, newNode.ExplicitInterfaceSpecifier))
{
ReportError(RudeEditKind.Renamed);
return;
}
ClassifyMethodBodyRudeUpdate(
(SyntaxNode)oldNode.Body ?? oldNode.ExpressionBody?.Expression,
(SyntaxNode)newNode.Body ?? newNode.ExpressionBody?.Expression,
containingMethodOpt: newNode,
containingType: (TypeDeclarationSyntax)newNode.Parent);
}
private bool ClassifyMethodModifierUpdate(SyntaxTokenList oldModifiers, SyntaxTokenList newModifiers)
{
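            // The 'async' modifier is stripped from both lists before comparing, so adding or removing 'async' alone is not reported as a modifier update here.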
var oldAsyncIndex = oldModifiers.IndexOf(SyntaxKind.AsyncKeyword);
var newAsyncIndex = newModifiers.IndexOf(SyntaxKind.AsyncKeyword);
if (oldAsyncIndex >= 0)
{
oldModifiers = oldModifiers.RemoveAt(oldAsyncIndex);
}
if (newAsyncIndex >= 0)
{
newModifiers = newModifiers.RemoveAt(newAsyncIndex);
}
return SyntaxFactory.AreEquivalent(oldModifiers, newModifiers);
}
private void ClassifyUpdate(ConversionOperatorDeclarationSyntax oldNode, ConversionOperatorDeclarationSyntax newNode)
{
if (!SyntaxFactory.AreEquivalent(oldNode.Modifiers, newNode.Modifiers))
{
ReportError(RudeEditKind.ModifiersUpdate);
return;
}
if (!SyntaxFactory.AreEquivalent(oldNode.ImplicitOrExplicitKeyword, newNode.ImplicitOrExplicitKeyword))
{
ReportError(RudeEditKind.Renamed);
return;
}
if (!SyntaxFactory.AreEquivalent(oldNode.Type, newNode.Type))
{
ReportError(RudeEditKind.TypeUpdate);
return;
}
ClassifyMethodBodyRudeUpdate(
(SyntaxNode)oldNode.Body ?? oldNode.ExpressionBody?.Expression,
(SyntaxNode)newNode.Body ?? newNode.ExpressionBody?.Expression,
containingMethodOpt: null,
containingType: (TypeDeclarationSyntax)newNode.Parent);
}
private void ClassifyUpdate(OperatorDeclarationSyntax oldNode, OperatorDeclarationSyntax newNode)
{
if (!SyntaxFactory.AreEquivalent(oldNode.Modifiers, newNode.Modifiers))
{
ReportError(RudeEditKind.ModifiersUpdate);
return;
}
if (!SyntaxFactory.AreEquivalent(oldNode.OperatorToken, newNode.OperatorToken))
{
ReportError(RudeEditKind.Renamed);
return;
}
if (!SyntaxFactory.AreEquivalent(oldNode.ReturnType, newNode.ReturnType))
{
ReportError(RudeEditKind.TypeUpdate);
return;
}
ClassifyMethodBodyRudeUpdate(
(SyntaxNode)oldNode.Body ?? oldNode.ExpressionBody?.Expression,
(SyntaxNode)newNode.Body ?? newNode.ExpressionBody?.Expression,
containingMethodOpt: null,
containingType: (TypeDeclarationSyntax)newNode.Parent);
}
private void ClassifyUpdate(AccessorDeclarationSyntax oldNode, AccessorDeclarationSyntax newNode)
{
if (!SyntaxFactory.AreEquivalent(oldNode.Modifiers, newNode.Modifiers))
{
ReportError(RudeEditKind.ModifiersUpdate);
return;
}
if (oldNode.Kind() != newNode.Kind())
{
ReportError(RudeEditKind.AccessorKindUpdate);
return;
}
Debug.Assert(newNode.Parent is AccessorListSyntax);
Debug.Assert(newNode.Parent.Parent is BasePropertyDeclarationSyntax);
ClassifyMethodBodyRudeUpdate(
oldNode.Body,
newNode.Body,
containingMethodOpt: null,
containingType: (TypeDeclarationSyntax)newNode.Parent.Parent.Parent);
}
private void ClassifyUpdate(EnumMemberDeclarationSyntax oldNode, EnumMemberDeclarationSyntax newNode)
{
if (!SyntaxFactory.AreEquivalent(oldNode.Identifier, newNode.Identifier))
{
ReportError(RudeEditKind.Renamed);
return;
}
Debug.Assert(!SyntaxFactory.AreEquivalent(oldNode.EqualsValue, newNode.EqualsValue));
ReportError(RudeEditKind.InitializerUpdate);
}
private void ClassifyUpdate(ConstructorDeclarationSyntax oldNode, ConstructorDeclarationSyntax newNode)
{
if (!SyntaxFactory.AreEquivalent(oldNode.Modifiers, newNode.Modifiers))
{
ReportError(RudeEditKind.ModifiersUpdate);
return;
}
ClassifyMethodBodyRudeUpdate(
oldNode.Body,
newNode.Body,
containingMethodOpt: null,
containingType: (TypeDeclarationSyntax)newNode.Parent);
}
private void ClassifyUpdate(DestructorDeclarationSyntax oldNode, DestructorDeclarationSyntax newNode)
{
ClassifyMethodBodyRudeUpdate(
oldNode.Body,
newNode.Body,
containingMethodOpt: null,
containingType: (TypeDeclarationSyntax)newNode.Parent);
}
private void ClassifyUpdate(PropertyDeclarationSyntax oldNode, PropertyDeclarationSyntax newNode)
{
if (!SyntaxFactory.AreEquivalent(oldNode.Modifiers, newNode.Modifiers))
{
ReportError(RudeEditKind.ModifiersUpdate);
return;
}
if (!SyntaxFactory.AreEquivalent(oldNode.Type, newNode.Type))
{
ReportError(RudeEditKind.TypeUpdate);
return;
}
if (!SyntaxFactory.AreEquivalent(oldNode.Identifier, newNode.Identifier))
{
ReportError(RudeEditKind.Renamed);
return;
}
if (!SyntaxFactory.AreEquivalent(oldNode.ExplicitInterfaceSpecifier, newNode.ExplicitInterfaceSpecifier))
{
ReportError(RudeEditKind.Renamed);
return;
}
var containingType = (TypeDeclarationSyntax)newNode.Parent;
// TODO: We currently don't support switching from auto-props to properties with accessors and vice versa.
// If we do we should also allow it for expression bodies.
if (!SyntaxFactory.AreEquivalent(oldNode.ExpressionBody, newNode.ExpressionBody))
{
var oldBody = SyntaxUtilities.TryGetEffectiveGetterBody(oldNode.ExpressionBody, oldNode.AccessorList);
var newBody = SyntaxUtilities.TryGetEffectiveGetterBody(newNode.ExpressionBody, newNode.AccessorList);
ClassifyMethodBodyRudeUpdate(
oldBody,
newBody,
containingMethodOpt: null,
containingType: containingType);
return;
}
Debug.Assert(!SyntaxFactory.AreEquivalent(oldNode.Initializer, newNode.Initializer));
if (containingType.Arity > 0)
{
ReportError(RudeEditKind.GenericTypeInitializerUpdate);
return;
}
if (newNode.Initializer != null)
{
ClassifyDeclarationBodyRudeUpdates(newNode.Initializer);
}
}
private void ClassifyUpdate(IndexerDeclarationSyntax oldNode, IndexerDeclarationSyntax newNode)
{
if (!SyntaxFactory.AreEquivalent(oldNode.Modifiers, newNode.Modifiers))
{
ReportError(RudeEditKind.ModifiersUpdate);
return;
}
if (!SyntaxFactory.AreEquivalent(oldNode.Type, newNode.Type))
{
ReportError(RudeEditKind.TypeUpdate);
return;
}
if (!SyntaxFactory.AreEquivalent(oldNode.ExplicitInterfaceSpecifier, newNode.ExplicitInterfaceSpecifier))
{
ReportError(RudeEditKind.Renamed);
return;
}
Debug.Assert(!SyntaxFactory.AreEquivalent(oldNode.ExpressionBody, newNode.ExpressionBody));
var oldBody = SyntaxUtilities.TryGetEffectiveGetterBody(oldNode.ExpressionBody, oldNode.AccessorList);
var newBody = SyntaxUtilities.TryGetEffectiveGetterBody(newNode.ExpressionBody, newNode.AccessorList);
ClassifyMethodBodyRudeUpdate(
oldBody,
newBody,
containingMethodOpt: null,
containingType: (TypeDeclarationSyntax)newNode.Parent);
}
private void ClassifyUpdate(TypeParameterSyntax oldNode, TypeParameterSyntax newNode)
{
if (!SyntaxFactory.AreEquivalent(oldNode.Identifier, newNode.Identifier))
{
ReportError(RudeEditKind.Renamed);
return;
}
Debug.Assert(!SyntaxFactory.AreEquivalent(oldNode.VarianceKeyword, newNode.VarianceKeyword));
ReportError(RudeEditKind.VarianceUpdate);
}
private void ClassifyUpdate(TypeParameterConstraintClauseSyntax oldNode, TypeParameterConstraintClauseSyntax newNode)
{
if (!SyntaxFactory.AreEquivalent(oldNode.Name, newNode.Name))
{
ReportError(RudeEditKind.Renamed);
return;
}
Debug.Assert(!SyntaxFactory.AreEquivalent(oldNode.Constraints, newNode.Constraints));
ReportError(RudeEditKind.TypeUpdate);
}
private void ClassifyUpdate(ParameterSyntax oldNode, ParameterSyntax newNode)
{
if (!SyntaxFactory.AreEquivalent(oldNode.Identifier, newNode.Identifier))
{
ReportError(RudeEditKind.Renamed);
return;
}
if (!SyntaxFactory.AreEquivalent(oldNode.Modifiers, newNode.Modifiers))
{
ReportError(RudeEditKind.ModifiersUpdate);
return;
}
if (!SyntaxFactory.AreEquivalent(oldNode.Type, newNode.Type))
{
ReportError(RudeEditKind.TypeUpdate);
return;
}
Debug.Assert(!SyntaxFactory.AreEquivalent(oldNode.Default, newNode.Default));
ReportError(RudeEditKind.InitializerUpdate);
}
private void ClassifyUpdate(AttributeListSyntax oldNode, AttributeListSyntax newNode)
{
if (!SyntaxFactory.AreEquivalent(oldNode.Target, newNode.Target))
{
ReportError(RudeEditKind.Update);
return;
}
// changes in attribute separators are not interesting:
}
private void ClassifyMethodBodyRudeUpdate(
SyntaxNode oldBody,
SyntaxNode newBody,
MethodDeclarationSyntax containingMethodOpt,
TypeDeclarationSyntax containingType)
{
Debug.Assert(oldBody is BlockSyntax || oldBody is ExpressionSyntax || oldBody == null);
Debug.Assert(newBody is BlockSyntax || newBody is ExpressionSyntax || newBody == null);
if ((oldBody == null) != (newBody == null))
{
if (oldBody == null)
{
ReportError(RudeEditKind.MethodBodyAdd);
return;
}
else
{
ReportError(RudeEditKind.MethodBodyDelete);
return;
}
}
ClassifyMemberBodyRudeUpdate(containingMethodOpt, containingType, isTriviaUpdate: false);
if (newBody != null)
{
ClassifyDeclarationBodyRudeUpdates(newBody);
}
}
public void ClassifyMemberBodyRudeUpdate(
MethodDeclarationSyntax containingMethodOpt,
TypeDeclarationSyntax containingTypeOpt,
bool isTriviaUpdate)
{
if (SyntaxUtilities.Any(containingMethodOpt?.TypeParameterList))
{
ReportError(isTriviaUpdate ? RudeEditKind.GenericMethodTriviaUpdate : RudeEditKind.GenericMethodUpdate);
return;
}
if (SyntaxUtilities.Any(containingTypeOpt?.TypeParameterList))
{
ReportError(isTriviaUpdate ? RudeEditKind.GenericTypeTriviaUpdate : RudeEditKind.GenericTypeUpdate);
return;
}
}
public void ClassifyDeclarationBodyRudeUpdates(SyntaxNode newDeclarationOrBody)
{
foreach (var node in newDeclarationOrBody.DescendantNodesAndSelf(ChildrenCompiledInBody))
{
switch (node.Kind())
{
case SyntaxKind.StackAllocArrayCreationExpression:
ReportError(RudeEditKind.StackAllocUpdate, node, _newNode);
return;
case SyntaxKind.ParenthesizedLambdaExpression:
case SyntaxKind.SimpleLambdaExpression:
// TODO (tomat): allow
ReportError(RudeEditKind.RUDE_EDIT_LAMBDA_EXPRESSION, node, _newNode);
return;
case SyntaxKind.AnonymousMethodExpression:
// TODO (tomat): allow
ReportError(RudeEditKind.RUDE_EDIT_ANON_METHOD, node, _newNode);
return;
case SyntaxKind.QueryExpression:
// TODO (tomat): allow
ReportError(RudeEditKind.RUDE_EDIT_QUERY_EXPRESSION, node, _newNode);
return;
case SyntaxKind.AnonymousObjectCreationExpression:
// TODO (tomat): allow
ReportError(RudeEditKind.RUDE_EDIT_ANONYMOUS_TYPE, node, _newNode);
return;
}
}
}
private static bool ChildrenCompiledInBody(SyntaxNode node)
{
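            // Don't descend into lambda bodies; lambda expressions themselves are reported as rude edits above, so their contents need not be traversed.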
return node.Kind() != SyntaxKind.ParenthesizedLambdaExpression
&& node.Kind() != SyntaxKind.SimpleLambdaExpression;
}
#endregion
}
internal override void ReportSyntacticRudeEdits(
List<RudeEditDiagnostic> diagnostics,
Match<SyntaxNode> match,
Edit<SyntaxNode> edit,
Dictionary<SyntaxNode, EditKind> editMap)
{
if (HasParentEdit(editMap, edit))
{
return;
}
var classifier = new EditClassifier(this, diagnostics, edit.OldNode, edit.NewNode, edit.Kind, match);
classifier.ClassifyEdit();
}
internal override void ReportMemberUpdateRudeEdits(List<RudeEditDiagnostic> diagnostics, SyntaxNode newMember, TextSpan? span)
{
var classifier = new EditClassifier(this, diagnostics, null, newMember, EditKind.Update, span: span);
classifier.ClassifyMemberBodyRudeUpdate(
newMember as MethodDeclarationSyntax,
newMember.FirstAncestorOrSelf<TypeDeclarationSyntax>(),
isTriviaUpdate: true);
classifier.ClassifyDeclarationBodyRudeUpdates(newMember);
}
#endregion
#region Semantic Rude Edits
internal override void ReportInsertedMemberSymbolRudeEdits(List<RudeEditDiagnostic> diagnostics, ISymbol newSymbol)
{
// We rejected all exported methods during syntax analysis, so no additional work is needed here.
}
#endregion
#region Exception Handling Rude Edits
protected override List<SyntaxNode> GetExceptionHandlingAncestors(SyntaxNode node, bool isLeaf)
{
var result = new List<SyntaxNode>();
while (node != null)
{
var kind = node.Kind();
switch (kind)
{
case SyntaxKind.TryStatement:
if (!isLeaf)
{
result.Add(node);
}
break;
case SyntaxKind.CatchClause:
case SyntaxKind.FinallyClause:
result.Add(node);
// skip try:
Debug.Assert(node.Parent.Kind() == SyntaxKind.TryStatement);
node = node.Parent;
break;
// stop at type declaration:
case SyntaxKind.ClassDeclaration:
case SyntaxKind.StructDeclaration:
return result;
}
// stop at lambda:
if (SyntaxUtilities.IsLambda(kind))
{
return result;
}
node = node.Parent;
}
return result;
}
internal override void ReportEnclosingExceptionHandlingRudeEdits(
List<RudeEditDiagnostic> diagnostics,
IEnumerable<Edit<SyntaxNode>> exceptionHandlingEdits,
SyntaxNode oldStatement,
SyntaxNode newStatement)
{
foreach (var edit in exceptionHandlingEdits)
{
// try/catch/finally have distinct labels so only the nodes of the same kind may match:
Debug.Assert(edit.Kind != EditKind.Update || edit.OldNode.RawKind == edit.NewNode.RawKind);
if (edit.Kind != EditKind.Update || !AreExceptionClausesEquivalent(edit.OldNode, edit.NewNode))
{
AddRudeDiagnostic(diagnostics, edit.OldNode, edit.NewNode, newStatement);
}
}
}
private static bool AreExceptionClausesEquivalent(SyntaxNode oldNode, SyntaxNode newNode)
{
switch (oldNode.Kind())
{
case SyntaxKind.TryStatement:
var oldTryStatement = (TryStatementSyntax)oldNode;
var newTryStatement = (TryStatementSyntax)newNode;
return SyntaxFactory.AreEquivalent(oldTryStatement.Finally, newTryStatement.Finally)
&& SyntaxFactory.AreEquivalent(oldTryStatement.Catches, newTryStatement.Catches);
case SyntaxKind.CatchClause:
case SyntaxKind.FinallyClause:
return SyntaxFactory.AreEquivalent(oldNode, newNode);
default:
throw ExceptionUtilities.UnexpectedValue(oldNode.Kind());
}
}
/// <summary>
/// An active statement (leaf or not) inside a "catch" makes the catch block read-only.
/// An active statement (leaf or not) inside a "finally" makes the whole try/catch/finally block read-only.
/// An active statement (non leaf) inside a "try" makes the catch/finally block read-only.
/// </summary>
protected override TextSpan GetExceptionHandlingRegion(SyntaxNode node, out bool coversAllChildren)
{
TryStatementSyntax tryStatement;
switch (node.Kind())
{
case SyntaxKind.TryStatement:
tryStatement = (TryStatementSyntax)node;
coversAllChildren = false;
if (tryStatement.Catches.Count == 0)
{
Debug.Assert(tryStatement.Finally != null);
return tryStatement.Finally.Span;
}
return TextSpan.FromBounds(
tryStatement.Catches.First().SpanStart,
(tryStatement.Finally != null) ?
tryStatement.Finally.Span.End :
tryStatement.Catches.Last().Span.End);
case SyntaxKind.CatchClause:
coversAllChildren = true;
return node.Span;
case SyntaxKind.FinallyClause:
coversAllChildren = true;
tryStatement = (TryStatementSyntax)node.Parent;
return tryStatement.Span;
default:
throw ExceptionUtilities.UnexpectedValue(node.Kind());
}
}
#endregion
#region State Machines
internal override bool IsStateMachineMethod(SyntaxNode declaration)
{
return SyntaxUtilities.IsAsyncMethodOrLambda(declaration) ||
SyntaxUtilities.IsIteratorMethod(declaration);
}
protected override ImmutableArray<SyntaxNode> GetStateMachineSuspensionPoints(SyntaxNode body)
{
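            // Async methods and lambdas suspend at await expressions; iterator methods suspend at yield statements.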
if (SyntaxUtilities.IsAsyncMethodOrLambda(body.Parent))
{
return SyntaxUtilities.GetAwaitExpressions(body);
}
else
{
return SyntaxUtilities.GetYieldStatements(body);
}
}
internal override void ReportStateMachineSuspensionPointRudeEdits(List<RudeEditDiagnostic> diagnostics, SyntaxNode oldNode, SyntaxNode newNode)
{
// TODO: changes around suspension points (foreach, lock, using, etc.)
if (oldNode.RawKind != newNode.RawKind)
{
Debug.Assert(oldNode is YieldStatementSyntax && newNode is YieldStatementSyntax);
// changing yield return to yield break
diagnostics.Add(new RudeEditDiagnostic(
RudeEditKind.Update,
newNode.Span,
newNode,
new[] { GetStatementDisplayName(newNode, EditKind.Update) }));
}
else if (newNode.IsKind(SyntaxKind.AwaitExpression))
{
var oldContainingStatementPart = FindContainingStatementPart(oldNode);
var newContainingStatementPart = FindContainingStatementPart(newNode);
                // If the old statement has spilled state and the new one doesn't, the edit is ok. We'll just not use the spilled state.
if (!SyntaxFactory.AreEquivalent(oldContainingStatementPart, newContainingStatementPart) &&
!HasNoSpilledState(newNode, newContainingStatementPart))
{
diagnostics.Add(new RudeEditDiagnostic(RudeEditKind.AwaitStatementUpdate, newContainingStatementPart.Span));
}
}
}
private static SyntaxNode FindContainingStatementPart(SyntaxNode node)
{
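            // Walk up until we reach a full statement, the directly containing part of a for/foreach/if/while/do/switch/lock/using statement (or an arrow expression clause), or a lambda body.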
while (true)
{
var statement = node as StatementSyntax;
if (statement != null)
{
return statement;
}
switch (node.Parent.Kind())
{
case SyntaxKind.ForStatement:
case SyntaxKind.ForEachStatement:
case SyntaxKind.IfStatement:
case SyntaxKind.WhileStatement:
case SyntaxKind.DoStatement:
case SyntaxKind.SwitchStatement:
case SyntaxKind.LockStatement:
case SyntaxKind.UsingStatement:
case SyntaxKind.ArrowExpressionClause:
return node;
}
if (SyntaxFacts.IsLambdaBody(node))
{
return node;
}
node = node.Parent;
}
}
private static bool HasNoSpilledState(SyntaxNode awaitExpression, SyntaxNode containingStatementPart)
{
Debug.Assert(awaitExpression.IsKind(SyntaxKind.AwaitExpression));
// There is nothing within the statement part surrounding the await expression.
if (containingStatementPart == awaitExpression)
{
return true;
}
switch (containingStatementPart.Kind())
{
case SyntaxKind.ExpressionStatement:
case SyntaxKind.ReturnStatement:
var expression = GetExpressionFromStatementPart(containingStatementPart);
// await expr;
// return await expr;
if (expression == awaitExpression)
{
return true;
}
// identifier = await expr;
// return identifier = await expr;
return IsSimpleAwaitAssignment(expression, awaitExpression);
case SyntaxKind.VariableDeclaration:
// var idf = await expr in using, for, etc.
// EqualsValueClause -> VariableDeclarator -> VariableDeclaration
return awaitExpression.Parent.Parent.Parent == containingStatementPart;
case SyntaxKind.LocalDeclarationStatement:
// var idf = await expr;
// EqualsValueClause -> VariableDeclarator -> VariableDeclaration -> LocalDeclarationStatement
return awaitExpression.Parent.Parent.Parent.Parent == containingStatementPart;
}
return IsSimpleAwaitAssignment(containingStatementPart, awaitExpression);
}
private static ExpressionSyntax GetExpressionFromStatementPart(SyntaxNode statement)
{
switch (statement.Kind())
{
case SyntaxKind.ExpressionStatement:
return ((ExpressionStatementSyntax)statement).Expression;
case SyntaxKind.ReturnStatement:
return ((ReturnStatementSyntax)statement).Expression;
default:
throw ExceptionUtilities.Unreachable;
}
}
private static bool IsSimpleAwaitAssignment(SyntaxNode node, SyntaxNode awaitExpression)
{
if (node.IsKind(SyntaxKind.SimpleAssignmentExpression))
{
var assignment = (AssignmentExpressionSyntax)node;
return assignment.Left.IsKind(SyntaxKind.IdentifierName) && assignment.Right == awaitExpression;
}
return false;
}
#endregion
#region Rude Edits around Active Statement
internal override void ReportOtherRudeEditsAroundActiveStatement(
List<RudeEditDiagnostic> diagnostics,
Match<SyntaxNode> match,
SyntaxNode oldActiveStatement,
SyntaxNode newActiveStatement,
bool isLeaf)
{
ReportRudeEditsForAncestorsDeclaringInterStatementTemps(diagnostics, match, oldActiveStatement, newActiveStatement, isLeaf);
ReportRudeEditsForCheckedStatements(diagnostics, oldActiveStatement, newActiveStatement, isLeaf);
}
private void ReportRudeEditsForCheckedStatements(
List<RudeEditDiagnostic> diagnostics,
SyntaxNode oldActiveStatement,
SyntaxNode newActiveStatement,
bool isLeaf)
{
// checked context can be changed around leaf active statement:
if (isLeaf)
{
return;
}
// Changing checked context around an internal active statement may change the instructions
// executed after method calls in the active statement but before the next sequence point.
// Since the debugger remaps the IP at the first sequence point following a call instruction
// allowing overflow context to be changed may lead to execution of code with old semantics.
var oldCheckedStatement = TryGetCheckedStatementAncestor(oldActiveStatement);
var newCheckedStatement = TryGetCheckedStatementAncestor(newActiveStatement);
bool isRude;
if (oldCheckedStatement == null || newCheckedStatement == null)
{
isRude = oldCheckedStatement != newCheckedStatement;
}
else
{
isRude = oldCheckedStatement.Kind() != newCheckedStatement.Kind();
}
if (isRude)
{
AddRudeDiagnostic(diagnostics, oldCheckedStatement, newCheckedStatement, newActiveStatement);
}
}
private static CheckedStatementSyntax TryGetCheckedStatementAncestor(SyntaxNode node)
{
            // Ignoring lambda boundaries since the checked context flows through.
while (node != null)
{
switch (node.Kind())
{
case SyntaxKind.CheckedStatement:
case SyntaxKind.UncheckedStatement:
return (CheckedStatementSyntax)node;
}
node = node.Parent;
}
return null;
}
private void ReportRudeEditsForAncestorsDeclaringInterStatementTemps(
List<RudeEditDiagnostic> diagnostics,
Match<SyntaxNode> match,
SyntaxNode oldActiveStatement,
SyntaxNode newActiveStatement,
bool isLeaf)
{
// Rude Edits for fixed/using/lock/foreach statements that are added/updated around an active statement.
// Although such changes are technically possible, they might lead to confusion since
// the temporary variables these statements generate won't be properly initialized.
//
// We use a simple algorithm to match each new node with its old counterpart.
// If all nodes match this algorithm is linear, otherwise it's quadratic.
//
// Unlike exception regions matching where we use LCS, we allow reordering of the statements.
ReportUnmatchedStatements<LockStatementSyntax>(diagnostics, match, (int)SyntaxKind.LockStatement, oldActiveStatement, newActiveStatement,
areEquivalent: AreEquivalentActiveStatements,
areSimilar: null);
ReportUnmatchedStatements<FixedStatementSyntax>(diagnostics, match, (int)SyntaxKind.FixedStatement, oldActiveStatement, newActiveStatement,
areEquivalent: AreEquivalentActiveStatements,
areSimilar: (n1, n2) => DeclareSameIdentifiers(n1.Declaration.Variables, n2.Declaration.Variables));
ReportUnmatchedStatements<UsingStatementSyntax>(diagnostics, match, (int)SyntaxKind.UsingStatement, oldActiveStatement, newActiveStatement,
areEquivalent: AreEquivalentActiveStatements,
areSimilar: (using1, using2) =>
{
return using1.Declaration != null && using2.Declaration != null &&
DeclareSameIdentifiers(using1.Declaration.Variables, using2.Declaration.Variables);
});
ReportUnmatchedStatements<ForEachStatementSyntax>(diagnostics, match, (int)SyntaxKind.ForEachStatement, oldActiveStatement, newActiveStatement,
areEquivalent: AreEquivalentActiveStatements,
areSimilar: (n1, n2) => SyntaxFactory.AreEquivalent(n1.Identifier, n2.Identifier));
}
private static bool DeclareSameIdentifiers(SeparatedSyntaxList<VariableDeclaratorSyntax> oldVariables, SeparatedSyntaxList<VariableDeclaratorSyntax> newVariables)
{
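            // The declarations are considered similar only if they declare the same identifiers in the same order.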
if (oldVariables.Count != newVariables.Count)
{
return false;
}
for (int i = 0; i < oldVariables.Count; i++)
{
if (!SyntaxFactory.AreEquivalent(oldVariables[i].Identifier, newVariables[i].Identifier))
{
return false;
}
}
return true;
}
#endregion
}
}
| apache-2.0 |
KAMP-Research/KAMP | bundles/Toometa/toometa.qualities/src/qualities/Reliability.java | 310 | /**
*/
package qualities;
/**
* <!-- begin-user-doc -->
* A representation of the model object '<em><b>Reliability</b></em>'.
* <!-- end-user-doc -->
*
*
* @see qualities.QualitiesPackage#getReliability()
* @model
* @generated
*/
public interface Reliability extends QualityType {
} // Reliability
| apache-2.0 |
ImJasonH/tros | benchmark_test.go | 1616 | package tros
import (
"math/rand"
"sort"
"strings"
"testing"
)
const (
alphabet = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
sliceLen = 1000
)
/// Regular field comparison
type el struct {
A string
}
func randSlice() []el {
s := make([]el, sliceLen)
for i := 0; i < len(s); i++ {
s[i] = el{strings.Repeat(
string(alphabet[rand.Intn(len(alphabet))]),
rand.Intn(10))}
}
return s
}
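// BenchmarkTros measures tros' reflection-based Sort on field A over randomly generated slices.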
func BenchmarkTros(b *testing.B) {
rand.Seed(1)
for i := 0; i < b.N; i++ {
Sort(randSlice(), "A")
}
}
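// BenchmarkSort measures the standard library sort.Sort with a hand-written sort.Interface over the same data, as a baseline.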
func BenchmarkSort(b *testing.B) {
rand.Seed(1)
for i := 0; i < b.N; i++ {
sort.Sort(sort.Interface(iface(randSlice())))
}
}
type iface []el
func (s iface) Len() int { return len(s) }
func (s iface) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
func (s iface) Less(i, j int) bool { return s[i].A < s[j].A }
/// Lesser field comparison
type el2 struct {
A lenLesserString
}
func randLesserSlice() []el2 {
s := make([]el2, sliceLen)
for i := 0; i < len(s); i++ {
s[i] = el2{lenLesserString(strings.Repeat(
string(alphabet[rand.Intn(len(alphabet))]),
rand.Intn(10)))}
}
return s
}
func BenchmarkTrosLesser(b *testing.B) {
rand.Seed(1)
for i := 0; i < b.N; i++ {
Sort(randLesserSlice(), "A")
}
}
func BenchmarkSortLesser(b *testing.B) {
rand.Seed(1)
for i := 0; i < b.N; i++ {
sort.Sort(sort.Interface(iface2(randLesserSlice())))
}
}
type iface2 []el2
func (s iface2) Len() int { return len(s) }
func (s iface2) Swap(i, j int) { s[i], s[j] = s[j], s[i] }
func (s iface2) Less(i, j int) bool { return s[i].A.Less(s[j].A) }
| apache-2.0 |
google/framework-for-osdu | compatibility-layer/common/src/main/java/com/osdu/exception/OsduNotFoundException.java | 1004 | /*
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.osdu.exception;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.ResponseStatus;
@ResponseStatus(HttpStatus.NOT_FOUND)
public class OsduNotFoundException extends OsduException {
public OsduNotFoundException(String message) {
super(message);
}
public OsduNotFoundException(String message, Throwable cause) {
super(message, cause);
}
} | apache-2.0 |
nisrulz/screenshott | screenshott/src/main/java/github/nisrulz/screenshott/ScreenShott.java | 4354 | /*
* Copyright (C) 2016 Nishant Srivastava
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package github.nisrulz.screenshott;
import android.content.Context;
import android.graphics.Bitmap;
import android.media.MediaScannerConnection;
import android.os.Environment;
import android.view.TextureView;
import android.view.View;
import java.io.File;
import java.io.FileOutputStream;
import java.text.SimpleDateFormat;
import java.util.Date;
import static android.view.View.MeasureSpec;
/**
* The type ScreenShott class.
*/
public class ScreenShott {
private static final ScreenShott ourInstance = new ScreenShott();
private ScreenShott() {
}
/**
* Gets instance.
*
* @return the instance
*/
public static ScreenShott getInstance() {
return ourInstance;
}
/**
* Take screen shot of root view.
*
* @param v
* the v
* @return the bitmap
*/
public Bitmap takeScreenShotOfRootView(View v) {
v = v.getRootView();
return takeScreenShotOfView(v);
}
/**
   * Take screen shot of the View, together with the space given to it by its layout constraints
*
* @param v
* the v
* @return the bitmap
*/
public Bitmap takeScreenShotOfView(View v) {
v.setDrawingCacheEnabled(true);
v.buildDrawingCache(true);
// creates immutable clone
Bitmap b = Bitmap.createBitmap(v.getDrawingCache());
v.setDrawingCacheEnabled(false); // clear drawing cache
return b;
}
/**
* Take screen shot of texture view as bitmap.
*
* @param v
* the TextureView
* @return the bitmap
*/
public Bitmap takeScreenShotOfTextureView(TextureView v) {
return v.getBitmap();
}
/**
* Take screen shot of just the View without any constraints
*
* @param v
* the v
* @return the bitmap
*/
public Bitmap takeScreenShotOfJustView(View v) {
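    // Measure and lay out the view with UNSPECIFIED specs so the capture covers the view's full desired size.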
v.measure(MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED),
MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED));
v.layout(0, 0, v.getMeasuredWidth(), v.getMeasuredHeight());
return takeScreenShotOfView(v);
}
/**
* Save screenshot to pictures folder.
*
* @param context
* the context
* @param image
* the image
* @param filename
* the filename
* @return the bitmap file object
* @throws Exception
* the exception
*/
public File saveScreenshotToPicturesFolder(Context context, Bitmap image, String filename)
throws Exception {
File bitmapFile = getOutputMediaFile(filename);
if (bitmapFile == null) {
throw new NullPointerException("Error creating media file, check storage permissions!");
}
FileOutputStream fos = new FileOutputStream(bitmapFile);
    image.compress(Bitmap.CompressFormat.JPEG, 90, fos);
fos.close();
// Initiate media scanning to make the image available in gallery apps
MediaScannerConnection.scanFile(context, new String[] { bitmapFile.getPath() },
new String[] { "image/jpeg" }, null);
return bitmapFile;
}
private File getOutputMediaFile(String filename) {
// To be safe, you should check that the SDCard is mounted
// using Environment.getExternalStorageState() before doing this.
File mediaStorageDirectory = new File(
Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES)
+ File.separator);
// Create the storage directory if it does not exist
if (!mediaStorageDirectory.exists()) {
if (!mediaStorageDirectory.mkdirs()) {
return null;
}
}
// Create a media file name
String timeStamp = new SimpleDateFormat("ddMMyyyy_HHmmss").format(new Date());
File mediaFile;
String mImageName = filename + timeStamp + ".jpg";
mediaFile = new File(mediaStorageDirectory.getPath() + File.separator + mImageName);
return mediaFile;
}
}
| apache-2.0 |
twitter/algebird | algebird-util/src/test/scala/com/twitter/algebird/util/PromiseLinkMonoidProperties.scala | 1116 | /*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.algebird.util
import com.twitter.algebird._
import org.scalacheck.Prop._
class PromiseLinkMonoidProperties extends CheckProperties {
property("associative") {
def makeTunnel(seed: Int) = PromiseLink.toPromiseLink(seed)
def collapseFinalValues(finalTunnel: PromiseLink[Int], tunnels: Seq[PromiseLink[Int]], toFeed: Int) = {
finalTunnel.completeWithStartingValue(toFeed)
finalTunnel.promise +: tunnels.map(_.promise)
}
TunnelMonoidProperties.testTunnelMonoid(identity, makeTunnel, collapseFinalValues)
}
}
| apache-2.0 |
darranl/directory-server | xdbm-partition/src/test/java/org/apache/directory/server/xdbm/PartitionTest.java | 15646 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.server.xdbm;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File;
import java.util.Iterator;
import org.apache.directory.api.ldap.model.constants.SchemaConstants;
import org.apache.directory.api.ldap.model.csn.CsnFactory;
import org.apache.directory.api.ldap.model.entry.Attribute;
import org.apache.directory.api.ldap.model.entry.DefaultAttribute;
import org.apache.directory.api.ldap.model.entry.DefaultEntry;
import org.apache.directory.api.ldap.model.entry.DefaultModification;
import org.apache.directory.api.ldap.model.entry.Entry;
import org.apache.directory.api.ldap.model.entry.Modification;
import org.apache.directory.api.ldap.model.entry.ModificationOperation;
import org.apache.directory.api.ldap.model.name.Dn;
import org.apache.directory.api.ldap.model.name.Rdn;
import org.apache.directory.api.ldap.model.schema.AttributeType;
import org.apache.directory.api.ldap.model.schema.SchemaManager;
import org.apache.directory.api.ldap.schemaextractor.SchemaLdifExtractor;
import org.apache.directory.api.ldap.schemaextractor.impl.DefaultSchemaLdifExtractor;
import org.apache.directory.api.ldap.schemaloader.LdifSchemaLoader;
import org.apache.directory.api.ldap.schemamanager.impl.DefaultSchemaManager;
import org.apache.directory.api.util.exception.Exceptions;
import org.apache.directory.server.core.api.CacheService;
import org.apache.directory.server.core.api.DnFactory;
import org.apache.directory.server.core.partition.impl.avl.AvlPartition;
import org.apache.directory.server.xdbm.impl.avl.AvlIndex;
import org.apache.directory.server.xdbm.impl.avl.AvlPartitionTest;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Tests the {@link AvlPartition} implementation.
*
* @author <a href="mailto:[email protected]">Apache Directory Project</a>
*/
public class PartitionTest
{
private static final Logger LOG = LoggerFactory.getLogger( PartitionTest.class );
private static AvlPartition partition;
private static SchemaManager schemaManager = null;
private static DnFactory dnFactory;
/** The OU AttributType instance */
private static AttributeType OU_AT;
/** The UID AttributType instance */
private static AttributeType UID_AT;
/** The CN AttributType instance */
private static AttributeType CN_AT;
private static CacheService cacheService;
@BeforeClass
public static void setup() throws Exception
{
String workingDirectory = System.getProperty( "workingDirectory" );
if ( workingDirectory == null )
{
String path = AvlPartitionTest.class.getResource( "" ).getPath();
int targetPos = path.indexOf( "target" );
workingDirectory = path.substring( 0, targetPos + 6 );
}
File schemaRepository = new File( workingDirectory, "schema" );
SchemaLdifExtractor extractor = new DefaultSchemaLdifExtractor( new File( workingDirectory ) );
extractor.extractOrCopy( true );
LdifSchemaLoader loader = new LdifSchemaLoader( schemaRepository );
cacheService = new CacheService();
cacheService.initialize( null );
schemaManager = new DefaultSchemaManager( loader );
boolean loaded = schemaManager.loadAllEnabled();
if ( !loaded )
{
fail( "Schema load failed : " + Exceptions.printErrors( schemaManager.getErrors() ) );
}
OU_AT = schemaManager.getAttributeType( SchemaConstants.OU_AT );
UID_AT = schemaManager.getAttributeType( SchemaConstants.UID_AT );
CN_AT = schemaManager.getAttributeType( SchemaConstants.CN_AT );
}
@Before
public void createStore() throws Exception
{
// initialize the partition
partition = new AvlPartition( schemaManager, dnFactory );
partition.setId( "example" );
partition.setSyncOnWrite( false );
partition.addIndex( new AvlIndex<String>( SchemaConstants.OU_AT_OID ) );
partition.addIndex( new AvlIndex<String>( SchemaConstants.UID_AT_OID ) );
partition.addIndex( new AvlIndex<String>( SchemaConstants.CN_AT_OID ) );
partition.setSuffixDn( new Dn( schemaManager, "o=Good Times Co." ) );
partition.setCacheService( cacheService );
partition.initialize();
StoreUtils.loadExampleData( partition, schemaManager );
LOG.debug( "Created new partition" );
}
@After
public void destroyStore() throws Exception
{
partition.destroy();
}
@Test
public void testExampleDataIndices() throws Exception
{
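        // The expected counts correspond to the example entries loaded by StoreUtils.loadExampleData().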
assertEquals( 11, partition.getRdnIndex().count() );
assertEquals( 3, partition.getAliasIndex().count() );
assertEquals( 3, partition.getOneAliasIndex().count() );
assertEquals( 3, partition.getSubAliasIndex().count() );
assertEquals( 15, partition.getPresenceIndex().count() );
assertEquals( 17, partition.getObjectClassIndex().count() );
assertEquals( 11, partition.getEntryCsnIndex().count() );
Iterator<String> userIndices = partition.getUserIndices();
int count = 0;
while ( userIndices.hasNext() )
{
userIndices.next();
count++;
}
assertEquals( 3, count );
assertEquals( 9, partition.getUserIndex( OU_AT ).count() );
assertEquals( 0, partition.getUserIndex( UID_AT ).count() );
assertEquals( 6, partition.getUserIndex( CN_AT ).count() );
}
/**
* Adding an objectClass value should also add it to the objectClass index.
*/
@Test
public void testModifyAddObjectClass() throws Exception
{
Dn dn = new Dn( schemaManager, "cn=JOhnny WAlkeR,ou=Sales,o=Good Times Co." );
Attribute attrib = new DefaultAttribute( SchemaConstants.OBJECT_CLASS_AT, schemaManager
.lookupAttributeTypeRegistry( SchemaConstants.OBJECT_CLASS_AT ) );
String attribVal = "uidObject";
attrib.add( attribVal );
Modification add = new DefaultModification( ModificationOperation.ADD_ATTRIBUTE, attrib );
String entryId = partition.getEntryId( dn );
Entry lookedup = partition.fetch( entryId );
// before modification: no "uidObject" tuple in objectClass index
assertFalse( partition.getObjectClassIndex().forward( "uidObject", entryId ) );
assertFalse( lookedup.get( "objectClass" ).contains( "uidObject" ) );
lookedup = partition.modify( dn, add );
// after modification: expect "uidObject" tuple in objectClass index
assertTrue( partition.getObjectClassIndex().forward( "uidObject", entryId ) );
assertTrue( lookedup.get( "objectClass" ).contains( "uidObject" ) );
}
/**
* Removing a value of an indexed attribute should also remove it from the index.
*/
@Test
public void testModifyRemoveIndexedAttribute() throws Exception
{
Dn dn = new Dn( schemaManager, "cn=JOhnny WAlkeR,ou=Sales,o=Good Times Co." );
Attribute attrib = new DefaultAttribute( SchemaConstants.OU_AT, OU_AT );
String attribVal = "sales";
attrib.add( attribVal );
Modification add = new DefaultModification( ModificationOperation.REMOVE_ATTRIBUTE, attrib );
String entryId = partition.getEntryId( dn );
Entry lookedup = partition.fetch( entryId );
// before modification: expect "sales" tuple in ou index
Index<String, String> ouIndex = ( Index<String, String> ) partition.getUserIndex( OU_AT );
assertTrue( ouIndex.forward( "sales", entryId ) );
assertTrue( lookedup.get( "ou" ).contains( "sales" ) );
lookedup = partition.modify( dn, add );
// after modification: no "sales" tuple in ou index
assertFalse( ouIndex.forward( "sales", entryId ) );
assertNull( lookedup.get( "ou" ) );
}
/**
* Removing all values of an indexed attribute should not leave any tuples in the index,
* nor in the presence index.
*/
@Test
public void testModifyRemoveAllIndexedAttribute() throws Exception
{
Dn dn = new Dn( schemaManager, "cn=JOhnny WAlkeR,ou=Sales,o=Good Times Co." );
Attribute attrib = new DefaultAttribute( SchemaConstants.OU_AT, OU_AT );
Modification add = new DefaultModification( ModificationOperation.REMOVE_ATTRIBUTE, attrib );
String entryId = partition.getEntryId( dn );
Entry lookedup = partition.fetch( entryId );
// before modification: expect "sales" tuple in ou index
Index<String, String> ouIndex = ( Index<String, String> ) partition.getUserIndex( OU_AT );
assertTrue( partition.getPresenceIndex().forward( SchemaConstants.OU_AT_OID, entryId ) );
assertTrue( ouIndex.forward( "sales", entryId ) );
assertTrue( lookedup.get( "ou" ).contains( "sales" ) );
lookedup = partition.modify( dn, add );
// after modification: no "sales" tuple in ou index
assertFalse( partition.getPresenceIndex().forward( SchemaConstants.OU_AT_OID, entryId ) );
assertFalse( ouIndex.reverse( entryId ) );
assertFalse( ouIndex.forward( "sales", entryId ) );
assertNull( lookedup.get( "ou" ) );
}
/**
* Removing an objectClass value should also remove it from the objectClass index.
*/
@Test
public void testModifyRemoveObjectClass() throws Exception
{
Dn dn = new Dn( schemaManager, "cn=JOhnny WAlkeR,ou=Sales,o=Good Times Co." );
Attribute attrib = new DefaultAttribute( SchemaConstants.OBJECT_CLASS_AT, schemaManager
.lookupAttributeTypeRegistry( SchemaConstants.OBJECT_CLASS_AT ) );
String attribVal = "person";
attrib.add( attribVal );
Modification add = new DefaultModification( ModificationOperation.REMOVE_ATTRIBUTE, attrib );
String entryId = partition.getEntryId( dn );
Entry lookedup = partition.fetch( entryId );
// before modification: expect "person" tuple in objectClass index
assertTrue( partition.getObjectClassIndex().forward( "person", entryId ) );
assertTrue( lookedup.get( "objectClass" ).contains( "person" ) );
lookedup = partition.modify( dn, add );
// after modification: no "person" tuple in objectClass index
assertFalse( partition.getObjectClassIndex().forward( "person", entryId ) );
assertFalse( lookedup.get( "objectClass" ).contains( "person" ) );
}
/**
* Removing all values of the objectClass attribute should not leave any tuples in index.
*/
@Test
public void testModifyRemoveAllObjectClass() throws Exception
{
Dn dn = new Dn( schemaManager, "cn=JOhnny WAlkeR,ou=Sales,o=Good Times Co." );
Attribute attrib = new DefaultAttribute( "ObjectClass", schemaManager
.lookupAttributeTypeRegistry( "ObjectClass" ) );
Modification add = new DefaultModification( ModificationOperation.REMOVE_ATTRIBUTE, attrib );
String entryId = partition.getEntryId( dn );
Entry lookedup = partition.fetch( entryId );
// before modification: expect "person" tuple in objectClass index
assertTrue( partition.getObjectClassIndex().forward( "person", entryId ) );
assertTrue( lookedup.get( "objectClass" ).contains( "person" ) );
lookedup = partition.modify( dn, add );
// after modification: no tuple in objectClass index
assertFalse( partition.getObjectClassIndex().forward( "person", entryId ) );
assertNull( lookedup.get( "objectClass" ) );
}
@Test
public void testCheckCsnIndexUpdate() throws Exception
{
Dn dn = new Dn( schemaManager, "cn=JOhnny WAlkeR,ou=Sales,o=Good Times Co." );
AttributeType csnAt = schemaManager.lookupAttributeTypeRegistry( SchemaConstants.ENTRY_CSN_AT );
Attribute attrib = new DefaultAttribute( csnAt );
CsnFactory csnF = new CsnFactory( 0 );
String csn = csnF.newInstance().toString();
attrib.add( csn );
Modification add = new DefaultModification( ModificationOperation.REPLACE_ATTRIBUTE, attrib );
String entryId = partition.getEntryId( dn );
Entry lookedup = partition.fetch( entryId );
assertNotSame( csn, lookedup.get( csnAt ).getString() );
lookedup = partition.modify( dn, add );
String updateCsn = lookedup.get( csnAt ).getString();
assertEquals( csn, updateCsn );
csn = csnF.newInstance().toString();
Entry modEntry = new DefaultEntry( schemaManager );
modEntry.add( csnAt, csn );
assertNotSame( csn, updateCsn );
lookedup = partition
.modify( dn, new DefaultModification( ModificationOperation.REPLACE_ATTRIBUTE, csnAt, csn ) );
assertEquals( csn, lookedup.get( csnAt ).getString() );
}
@Test
public void testEntryParentIdPresence() throws Exception
{
Dn dn = new Dn( schemaManager, "cn=user,ou=Sales,o=Good Times Co." );
Entry entry = new DefaultEntry( schemaManager, dn,
"objectClass: top",
"objectClass: person",
"cn: user",
"sn: user sn" );
// add
StoreUtils.injectEntryInStore( partition, entry, 12 );
verifyParentId( dn );
// move
Dn newSuperior = new Dn( schemaManager, "o=Good Times Co." );
Dn newDn = new Dn( schemaManager, "cn=user,o=Good Times Co." );
partition.move( dn, newSuperior, newDn, null );
entry = verifyParentId( newDn );
// move and rename
Dn newParentDn = new Dn( schemaManager, "ou=Sales,o=Good Times Co." );
Dn oldDn = newDn;
Rdn newRdn = new Rdn( schemaManager, "cn=userMovedAndRenamed" );
partition.moveAndRename( oldDn, newParentDn, newRdn, entry, false );
verifyParentId( newParentDn.add( newRdn ) );
}
private Entry verifyParentId( Dn dn ) throws Exception
{
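        // Fetch the entry and check that the entryParentId operational attribute has been maintained.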
String entryId = partition.getEntryId( dn );
Entry entry = partition.fetch( entryId );
String parentId = partition.getParentId( entryId );
Attribute parentIdAt = entry.get( SchemaConstants.ENTRY_PARENT_ID_AT );
assertNotNull( parentIdAt );
//assertEquals( parentId.toString(), parentIdAt.getString() );
return entry;
}
}
| apache-2.0 |
xiaguangme/struts2-src-study | src/com/opensymphony/xwork2/validator/FieldValidator.java | 1271 | /*
* Copyright 2002-2007,2009 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.opensymphony.xwork2.validator;
/**
* The FieldValidator interface defines the methods to be implemented by FieldValidators.
* Which are used by the XWork validation framework to validate Action properties before
* executing the Action.
*/
public interface FieldValidator extends Validator {
/**
* Sets the field name to validate with this FieldValidator
*
* @param fieldName the field name
*/
void setFieldName(String fieldName);
/**
* Gets the field name to be validated
*
* @return the field name
*/
String getFieldName();
}
| apache-2.0 |
altavant/Fusion | Android/Source/Graphics/Size.cs | 4100 | /*
© 2017 Altavant Technologies Inc.
Web: http://www.altavant.com
*/
namespace Altavant.Fusion.Graphics
{
using System;
using Fusion.Utils;
public class Size : IDimension
{
private static readonly Size _default;
static Size()
{
_default = new Size();
}
public static Size Default
{
get
{
return _default;
}
}
private float _width;
private float _height;
public Size()
{
}
public Size(float width, float height)
{
_width = width;
_height = height;
}
public Size(System.Drawing.Size size)
{
_width = size.Width;
_height = size.Height;
}
public Size(System.Drawing.SizeF size)
{
_width = size.Width;
_height = size.Height;
}
public float Width
{
get
{
return _width;
}
set
{
_width = value;
}
}
public float Height
{
get
{
return _height;
}
set
{
_height = value;
}
}
public bool IsEmpty
{
get
{
if ((_width == 0F) && (_height == 0F))
return true;
return false;
}
}
public void SetValues(float width, float height)
{
_width = width;
_height = height;
}
public void Round()
{
_width = Utils.Round(_width);
_height = Utils.Round(_height);
}
public void Round(float factor)
{
_width = Utils.Round(_width, factor);
_height = Utils.Round(_height, factor);
}
public void Reset()
{
_width = 0F;
_height = 0F;
}
public Size Clone()
{
return new Size(_width, _height);
}
public Size ToHeight(float delta)
{
return new Size(_width, _height + delta);
}
public Size ToWidth(float delta)
{
return new Size(_width + delta, _height);
}
public Size ToDelta(float width, float height)
{
return new Size(_width + width, _height + height);
}
public override int GetHashCode()
{
return _width.GetHashCode() ^ _height.GetHashCode();
}
public override bool Equals(object obj)
{
if (obj == null)
return false;
if (obj == this)
return true;
if (obj is Size)
{
Size sz = (Size)obj;
if ((_width == sz.Width) && (_height == sz.Height))
return true;
}
return false;
}
public override string ToString()
{
CharBuffer cb = new CharBuffer();
cb.Add(Converter.ToString(_width, 3));
cb.Add(", ");
cb.Add(Converter.ToString(_height, 3));
return cb.ToString();
}
public static Size Parse(string s)
{
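      // Expects a string of the form "width, height", e.g. as produced by ToString().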
if (string.IsNullOrEmpty(s))
throw new ArgumentException(nameof(s));
string[] vals = Utils.Split(s);
float width = Convert.ToSingle(vals[0], Converter.NumberFormatInfo);
float height = Convert.ToSingle(vals[1], Converter.NumberFormatInfo);
return new Size(width, height);
}
public Android.Graphics.PointF ToNative()
{
return new Android.Graphics.PointF(_width, _height);
}
public void ToNative(Android.Graphics.PointF sz)
{
sz.Set(_width, _height);
}
public static Size operator +(Size sz1, Size sz2)
{
return new Size(sz1.Width + sz2.Width, sz1.Height + sz2.Height);
}
public static Size operator -(Size sz1, Size sz2)
{
return new Size(sz1.Width - sz2.Width, sz1.Height - sz2.Height);
}
public static Size operator +(Size sz, float value)
{
return new Size(sz.Width + value, sz.Height + value);
}
public static Size operator +(float value, Size sz)
{
return sz + value;
}
public static Size operator -(Size sz, float value)
{
return new Size(sz.Width - value, sz.Height - value);
}
public static Size operator *(Size sz, float value)
{
return new Size(sz.Width * value, sz.Height * value);
}
public static Size operator *(float value, Size sz)
{
return sz * value;
}
public static Size operator /(Size sz, float value)
{
return new Size(sz.Width / value, sz.Height / value);
}
}
} | apache-2.0 |
Talvish/Tales | product/common/src/com/talvish/tales/serialization/json/translators/JsonElementToLongTranslator.java | 2062 | //***************************************************************************
//* Copyright 2016 Joseph Molnar
//*
//* Licensed under the Apache License, Version 2.0 (the "License");
//* you may not use this file except in compliance with the License.
//* You may obtain a copy of the License at
//*
//* http://www.apache.org/licenses/LICENSE-2.0
//*
//* Unless required by applicable law or agreed to in writing, software
//* distributed under the License is distributed on an "AS IS" BASIS,
//* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//* See the License for the specific language governing permissions and
//* limitations under the License.
//***************************************************************************
package com.talvish.tales.serialization.json.translators;
import com.google.gson.JsonElement;
import com.google.gson.JsonNull;
import com.talvish.tales.parts.translators.NullTranslatorBase;
import com.talvish.tales.parts.translators.TranslationException;
import com.talvish.tales.parts.translators.Translator;
/**
* A translator that takes a JsonElement and translates it into the desired type.
* @author jmolnar
*
*/
public class JsonElementToLongTranslator extends NullTranslatorBase implements Translator {
/**
* Constructor taking the value to use if a null.
* @param theNullValue the null value to return if the value translating is null
*/
public JsonElementToLongTranslator( Object theNullValue ) {
super( theNullValue );
}
/**
* Translates json element into the desired value type.
*/
@Override
public Object translate(Object anObject) {
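		// A JSON null (or Java null) maps to the configured null value; any other element must be convertible to a long.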
Object returnValue;
if( anObject == null || anObject.equals( JsonNull.INSTANCE )) {
returnValue = this.nullValue;
} else {
try {
returnValue = ( ( JsonElement )anObject ).getAsLong();
} catch( ClassCastException | IllegalStateException | UnsupportedOperationException e ) {
throw new TranslationException( e );
}
}
return returnValue;
}
} | apache-2.0 |