prompt
large_stringlengths
70
991k
completion
large_stringlengths
0
1.02k
<|file_name|>rpcrawtransaction.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2010 Satoshi Nakamoto // Copyright (c) 2009-2012 The Bitcoin developers // Distributed under the MIT/X11 software license, see the accompanying // file COPYING or http://www.opensource.org/licenses/mit-license.php. #include <boost/assign/list_of.hpp> #include "base58.h" #include "bitcoinrpc.h" #include "txdb.h" #include "init.h" #include "main.h" #include "net.h" #include "wallet.h" using namespace std; using namespace boost; using namespace boost::assign; using namespace json_spirit; void ScriptPubKeyToJSON(const CScript& scriptPubKey, Object& out, bool fIncludeHex) { txnouttype type; vector<CTxDestination> addresses; int nRequired; out.push_back(Pair("asm", scriptPubKey.ToString())); if (fIncludeHex) out.push_back(Pair("hex", HexStr(scriptPubKey.begin(), scriptPubKey.end()))); if (!ExtractDestinations(scriptPubKey, type, addresses, nRequired)) { out.push_back(Pair("type", GetTxnOutputType(TX_NONSTANDARD))); return; } out.push_back(Pair("reqSigs", nRequired)); out.push_back(Pair("type", GetTxnOutputType(type))); Array a; BOOST_FOREACH(const CTxDestination& addr, addresses) a.push_back(CBitcoinAddress(addr).ToString()); out.push_back(Pair("addresses", a)); } void TxToJSON(const CTransaction& tx, const uint256 hashBlock, Object& entry) { entry.push_back(Pair("txid", tx.GetHash().GetHex())); entry.push_back(Pair("version", tx.nVersion)); entry.push_back(Pair("time", (boost::int64_t)tx.nTime)); entry.push_back(Pair("locktime", (boost::int64_t)tx.nLockTime)); Array vin; BOOST_FOREACH(const CTxIn& txin, tx.vin) { Object in; if (tx.IsCoinBase()) in.push_back(Pair("coinbase", HexStr(txin.scriptSig.begin(), txin.scriptSig.end()))); else { in.push_back(Pair("txid", txin.prevout.hash.GetHex())); in.push_back(Pair("vout", (boost::int64_t)txin.prevout.n)); Object o; o.push_back(Pair("asm", txin.scriptSig.ToString())); o.push_back(Pair("hex", HexStr(txin.scriptSig.begin(), txin.scriptSig.end()))); 
in.push_back(Pair("scriptSig", o)); } in.push_back(Pair("sequence", (boost::int64_t)txin.nSequence)); vin.push_back(in); } entry.push_back(Pair("vin", vin)); Array vout; for (unsigned int i = 0; i < tx.vout.size(); i++) { const CTxOut& txout = tx.vout[i]; Object out; out.push_back(Pair("value", ValueFromAmount(txout.nValue))); out.push_back(Pair("n", (boost::int64_t)i)); Object o; ScriptPubKeyToJSON(txout.scriptPubKey, o, false); out.push_back(Pair("scriptPubKey", o)); vout.push_back(out); } entry.push_back(Pair("vout", vout)); if (hashBlock != 0) { entry.push_back(Pair("blockhash", hashBlock.GetHex())); map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.find(hashBlock); if (mi != mapBlockIndex.end() && (*mi).second) { CBlockIndex* pindex = (*mi).second; if (pindex->IsInMainChain()) { entry.push_back(Pair("confirmations", 1 + nBestHeight - pindex->nHeight)); entry.push_back(Pair("time", (boost::int64_t)pindex->nTime)); entry.push_back(Pair("blocktime", (boost::int64_t)pindex->nTime)); } else entry.push_back(Pair("confirmations", 0)); } } } Value getrawtransaction(const Array& params, bool fHelp) { if (fHelp || params.size() < 1 || params.size() > 2) throw runtime_error( "getrawtransaction <txid> [verbose=0]\n" "If verbose=0, returns a string that is\n" "serialized, hex-encoded data for <txid>.\n" "If verbose is non-zero, returns an Object\n" "with information about <txid>."); uint256 hash; hash.SetHex(params[0].get_str()); bool fVerbose = false; if (params.size() > 1) fVerbose = (params[1].get_int() != 0); CTransaction tx; uint256 hashBlock = 0; if (!GetTransaction(hash, tx, hashBlock)) throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "No information available about transaction"); CDataStream ssTx(SER_NETWORK, PROTOCOL_VERSION); ssTx << tx; string strHex = HexStr(ssTx.begin(), ssTx.end()); if (!fVerbose) return strHex; Object result; result.push_back(Pair("hex", strHex)); TxToJSON(tx, hashBlock, result); return result; } Value listunspent(const Array& params, 
bool fHelp) { if (fHelp || params.size() > 3) throw runtime_error( "listunspent [minconf=1] [maxconf=9999999] [\"address\",...]\n" "Returns array of unspent transaction outputs\n" "with between minconf and maxconf (inclusive) confirmations.\n" "Optionally filtered to only include txouts paid to specified addresses.\n" "Results are an array of Objects, each of which has:\n" "{txid, vout, scriptPubKey, amount, confirmations}"); RPCTypeCheck(params, list_of(int_type)(int_type)(array_type)); int nMinDepth = 1; if (params.size() > 0) nMinDepth = params[0].get_int(); int nMaxDepth = 9999999; if (params.size() > 1) nMaxDepth = params[1].get_int(); set<CBitcoinAddress> setAddress; if (params.size() > 2) { Array inputs = params[2].get_array(); BOOST_FOREACH(Value& input, inputs) { CBitcoinAddress address(input.get_str()); if (!address.IsValid()) throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, string("Invalid Abundance address: ")+input.get_str()); if (setAddress.count(address)) throw JSONRPCError(RPC_INVALID_PARAMETER, string("Invalid parameter, duplicated address: ")+input.get_str()); setAddress.insert(address); } } Array results; vector<COutput> vecOutputs; pwalletMain->AvailableCoins(vecOutputs, false); BOOST_FOREACH(const COutput& out, vecOutputs) { if (out.nDepth < nMinDepth || out.nDepth > nMaxDepth) continue; if(setAddress.size()) { CTxDestination address; if(!ExtractDestination(out.tx->vout[out.i].scriptPubKey, address)) continue; if (!setAddress.count(address)) continue; } int64_t nValue = out.tx->vout[out.i].nValue; const CScript& pk = out.tx->vout[out.i].scriptPubKey; Object entry; entry.push_back(Pair("txid", out.tx->GetHash().GetHex())); entry.push_back(Pair("vout", out.i)); CTxDestination address; if (ExtractDestination(out.tx->vout[out.i].scriptPubKey, address)) { entry.push_back(Pair("address", CBitcoinAddress(address).ToString())); if (pwalletMain->mapAddressBook.count(address)) entry.push_back(Pair("account", pwalletMain->mapAddressBook[address])); } 
entry.push_back(Pair("scriptPubKey", HexStr(pk.begin(), pk.end()))); entry.push_back(Pair("amount",ValueFromAmount(nValue))); entry.push_back(Pair("confirmations",out.nDepth)); results.push_back(entry); } return results; } Value createrawtransaction(const Array& params, bool fHelp) { if (fHelp || params.size() != 2) throw runtime_error( "createrawtransaction [{\"txid\":txid,\"vout\":n},...] {address:amount,...}\n" "Create a transaction spending given inputs\n" "(array of objects containing transaction id and output number),\n" "sending to given address(es).\n" "Returns hex-encoded raw transaction.\n" "Note that the transaction's inputs are not signed, and\n" "it is not stored in the wallet or transmitted to the network."); RPCTypeCheck(params, list_of(array_type)(obj_type)); Array inputs = params[0].get_array(); Object sendTo = params[1].get_obj(); CTransaction rawTx; BOOST_FOREACH(Value& input, inputs) { const Object& o = input.get_obj(); const Value& txid_v = find_value(o, "txid"); if (txid_v.type() != str_type) throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameter, missing txid key"); string txid = txid_v.get_str(); if (!IsHex(txid)) throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameter, expected hex txid"); const Value& vout_v = find_value(o, "vout"); if (vout_v.type() != int_type) throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameter, missing vout key"); int nOutput = vout_v.get_int(); if (nOutput < 0) throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameter, vout must be positive"); CTxIn in(COutPoint(uint256(txid), nOutput)); rawTx.vin.push_back(in); } set<CBitcoinAddress> setAddress; BOOST_FOREACH(const Pair& s, sendTo) { CBitcoinAddress address(s.name_); if (!address.IsValid()) throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, string("Invalid Abundance address: ")+s.name_); if (setAddress.count(address)) throw JSONRPCError(RPC_INVALID_PARAMETER, string("Invalid parameter, duplicated address: ")+s.name_); setAddress.insert(address); 
CScript scriptPubKey; scriptPubKey.SetDestination(address.Get()); int64_t nAmount = AmountFromValue(s.value_); CTxOut out(nAmount, scriptPubKey); rawTx.vout.push_back(out); } CDataStream ss(SER_NETWORK, PROTOCOL_VERSION); ss << rawTx; return HexStr(ss.begin(), ss.end()); } Value decoderawtransaction(const Array& params, bool fHelp) { if (fHelp || params.size() != 1) throw runtime_error( "decoderawtransaction <hex string>\n" "Return a JSON object representing the serialized, hex-encoded transaction."); RPCTypeCheck(params, list_of(str_type)); vector<unsigned char> txData(ParseHex(params[0].get_str())); CDataStream ssData(txData, SER_NETWORK, PROTOCOL_VERSION); CTransaction tx; try { ssData >> tx; } catch (std::exception &e) { throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "TX decode failed"); } Object result; TxToJSON(tx, 0, result); return result; } Value decodescript(const Array& params, bool fHelp) { if (fHelp || params.size() != 1) throw runtime_error( "decodescript <hex string>\n" "Decode a hex-encoded script."); RPCTypeCheck(params, list_of(str_type)); Object r; CScript script; if (params[0].get_str().size() > 0){ vector<unsigned char> scriptData(ParseHexV(params[0], "argument")); script = CScript(scriptData.begin(), scriptData.end()); } else { // Empty scripts are valid } ScriptPubKeyToJSON(script, r, false); r.push_back(Pair("p2sh", CBitcoinAddress(script.GetID()).ToString())); return r; } Value signrawtransaction(const Array& params, bool fHelp) { if (fHelp || params.size() < 1 || params.size() > 4) throw runtime_error( "signrawtransaction <hex string> [{\"txid\":txid,\"vout\":n,\"scriptPubKey\":hex},...] [<privatekey1>,...] 
[sighashtype=\"ALL\"]\n" "Sign inputs for raw transaction (serialized, hex-encoded).\n" "Second optional argument (may be null) is an array of previous transaction outputs that\n" "this transaction depends on but may not yet be in the blockchain.\n" "Third optional argument (may be null) is an array of base58-encoded private\n" "keys that, if given, will be the only keys used to sign the transaction.\n" "Fourth optional argument is a string that is one of six values; ALL, NONE, SINGLE or\n" "ALL|ANYONECANPAY, NONE|ANYONECANPAY, SINGLE|ANYONECANPAY.\n" "Returns json object with keys:\n" " hex : raw transaction with signature(s) (hex-encoded string)\n" " complete : 1 if transaction has a complete set of signature (0 if not)" + HelpRequiringPassphrase()); RPCTypeCheck(params, list_of(str_type)(array_type)(array_type)(str_type), true); vector<unsigned char> txData(ParseHex(params[0].get_str())); CDataStream ssData(txData, SER_NETWORK, PROTOCOL_VERSION); vector<CTransaction> txVariants; while (!ssData.empty()) { try { CTransaction tx; ssData >> tx; txVariants.push_back(tx); } catch (std::exception &e) { throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "TX decode failed"); } } if (txVariants.empty()) throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "Missing transaction"); // mergedTx will end up with all the signatures; it // starts as a clone of the rawtx: CTransaction mergedTx(txVariants[0]); bool fComplete = true; // Fetch previous transactions (inputs): map<COutPoint, CScript> mapPrevOut; for (unsigned int i = 0; i < mergedTx.vin.size(); i++) { CTransaction tempTx; MapPrevTx mapPrevTx; CTxDB txdb("r"); map<uint256, CTxIndex> unused; bool fInvalid; // FetchInputs aborts on failure, so we go one at a time. 
tempTx.vin.push_back(mergedTx.vin[i]); tempTx.FetchInputs(txdb, unused, false, false, mapPrevTx, fInvalid); // Copy results into mapPrevOut: BOOST_FOREACH(const CTxIn& txin, tempTx.vin) { const uint256& prevHash = txin.prevout.hash; if (mapPrevTx.count(prevHash) && mapPrevTx[prevHash].second.vout.size()>txin.prevout.n) mapPrevOut[txin.prevout] = mapPrevTx[prevHash].second.vout[txin.prevout.n].scriptPubKey; } } // Add previous txouts given in the RPC call: if (params.size() > 1 && params[1].type() != null_type) { Array prevTxs = params[1].get_array(); BOOST_FOREACH(Value& p, prevTxs) { if (p.type() != obj_type) throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "expected object with {\"txid'\",\"vout\",\"scriptPubKey\"}"); Object prevOut = p.get_obj(); RPCTypeCheck(prevOut, map_list_of("txid", str_type)("vout", int_type)("scriptPubKey", str_type)); string txidHex = find_value(prevOut, "txid").get_str(); if (!IsHex(txidHex)) throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "txid must be hexadecimal"); uint256 txid; txid.SetHex(txidHex); int nOut = find_value(prevOut, "vout").get_int(); if (nOut < 0) throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "vout must be positive"); string pkHex = find_value(prevOut, "scriptPubKey").get_str(); if (!IsHex(pkHex)) throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "scriptPubKey must be hexadecimal"); vector<unsigned char> pkData(ParseHex(pkHex)); CScript scriptPubKey(pkData.begin(), pkData.end()); COutPoint outpoint(txid, nOut); if (mapPrevOut.count(outpoint)) { // Complain if scriptPubKey doesn't match if (mapPrevOut[outpoint] != scriptPubKey) { string err("Previous output scriptPubKey mismatch:\n"); err = err + mapPrevOut[outpoint].ToString() + "\nvs:\n"+ scriptPubKey.ToString(); throw JSONRPCError(RPC_DESERIALIZATION_ERROR, err); } } else mapPrevOut[outpoint] = scriptPubKey; } } bool fGivenKeys = false; CBasicKeyStore tempKeystore; if (params.size() > 2 && params[2].type() != null_type) { fGivenKeys = true; Array keys = 
params[2].get_array(); BOOST_FOREACH(Value k, keys) { CBitcoinSecret vchSecret; bool fGood = vchSecret.SetString(k.get_str()); if (!fGood) throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY,"Invalid private key"); CKey key; bool fCompressed; CSecret secret = vchSecret.GetSecret(fCompressed); key.SetSecret(secret, fCompressed); tempKeystore.AddKey(key); } } else EnsureWalletIsUnlocked(); const CKeyStore& keystore = (fGivenKeys ? tempKeystore : *pwalletMain); int nHashType = SIGHASH_ALL; if (params.size() > 3 && params[3].type() != null_type) { static map<string, int> mapSigHashValues = boost::assign::map_list_of (string("ALL"), int(SIGHASH_ALL)) (string("ALL|ANYONECANPAY"), int(SIGHASH_ALL|SIGHASH_ANYONECANPAY)) (string("NONE"), int(SIGHASH_NONE)) (string("NONE|ANYONECANPAY"), int(SIGHASH_NONE|SIGHASH_ANYONECANPAY)) (string("SINGLE"), int(SIGHASH_SINGLE)) (string("SINGLE|ANYONECANPAY"), int(SIGHASH_SINGLE|SIGHASH_ANYONECANPAY)) ; string strHashType = params[3].get_str(); if (mapSigHashValues.count(strHashType)) nHashType = mapSigHashValues[strHashType]; else throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid sighash param"); } bool fHashSingle = ((nHashType & ~SIGHASH_ANYONECANPAY) == SIGHASH_SINGLE); // Sign what we can: for (unsigned int i = 0; i < mergedTx.vin.size(); i++) { CTxIn& txin = mergedTx.vin[i]; if (mapPrevOut.count(txin.prevout) == 0) { fComplete = false; continue;<|fim▁hole|> txin.scriptSig.clear(); // Only sign SIGHASH_SINGLE if there's a corresponding output: if (!fHashSingle || (i < mergedTx.vout.size())) SignSignature(keystore, prevPubKey, mergedTx, i, nHashType); // ... 
and merge in other signatures: BOOST_FOREACH(const CTransaction& txv, txVariants) { txin.scriptSig = CombineSignatures(prevPubKey, mergedTx, i, txin.scriptSig, txv.vin[i].scriptSig); } if (!VerifyScript(txin.scriptSig, prevPubKey, mergedTx, i, 0)) fComplete = false; } Object result; CDataStream ssTx(SER_NETWORK, PROTOCOL_VERSION); ssTx << mergedTx; result.push_back(Pair("hex", HexStr(ssTx.begin(), ssTx.end()))); result.push_back(Pair("complete", fComplete)); return result; } Value sendrawtransaction(const Array& params, bool fHelp) { if (fHelp || params.size() < 1 || params.size() > 1) throw runtime_error( "sendrawtransaction <hex string>\n" "Submits raw transaction (serialized, hex-encoded) to local node and network."); RPCTypeCheck(params, list_of(str_type)); // parse hex string from parameter vector<unsigned char> txData(ParseHex(params[0].get_str())); CDataStream ssData(txData, SER_NETWORK, PROTOCOL_VERSION); CTransaction tx; // deserialize binary data stream try { ssData >> tx; } catch (std::exception &e) { throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "TX decode failed"); } uint256 hashTx = tx.GetHash(); // See if the transaction is already in a block // or in the memory pool: CTransaction existingTx; uint256 hashBlock = 0; if (GetTransaction(hashTx, existingTx, hashBlock)) { if (hashBlock != 0) throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, string("transaction already in block ")+hashBlock.GetHex()); // Not in block, but already in the memory pool; will drop // through to re-relay it. } else { // push to local node CTxDB txdb("r"); if (!tx.AcceptToMemoryPool(txdb)) throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "TX rejected"); SyncWithWallets(tx, NULL, true); } RelayTransaction(tx, hashTx); return hashTx.GetHex(); }<|fim▁end|>
} const CScript& prevPubKey = mapPrevOut[txin.prevout];
<|file_name|>zz_generated_ssh_auth_controller.go<|end_file_name|><|fim▁begin|>package v3 import ( "context" "time" "github.com/rancher/norman/controller" "github.com/rancher/norman/objectclient" "github.com/rancher/norman/resource" "github.com/rancher/rancher/pkg/apis/project.cattle.io/v3" "k8s.io/apimachinery/pkg/api/errors" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/labels" "k8s.io/apimachinery/pkg/runtime" "k8s.io/apimachinery/pkg/runtime/schema" "k8s.io/apimachinery/pkg/types" "k8s.io/apimachinery/pkg/watch" "k8s.io/client-go/tools/cache" ) var ( SSHAuthGroupVersionKind = schema.GroupVersionKind{ Version: Version, Group: GroupName, Kind: "SSHAuth", } SSHAuthResource = metav1.APIResource{ Name: "sshauths", SingularName: "sshauth", Namespaced: true, Kind: SSHAuthGroupVersionKind.Kind, } SSHAuthGroupVersionResource = schema.GroupVersionResource{ Group: GroupName, Version: Version, Resource: "sshauths", } ) func init() { resource.Put(SSHAuthGroupVersionResource) } // Deprecated use v3.SSHAuth instead type SSHAuth = v3.SSHAuth func NewSSHAuth(namespace, name string, obj v3.SSHAuth) *v3.SSHAuth { obj.APIVersion, obj.Kind = SSHAuthGroupVersionKind.ToAPIVersionAndKind() obj.Name = name obj.Namespace = namespace return &obj } type SSHAuthHandlerFunc func(key string, obj *v3.SSHAuth) (runtime.Object, error) type SSHAuthChangeHandlerFunc func(obj *v3.SSHAuth) (runtime.Object, error) type SSHAuthLister interface { List(namespace string, selector labels.Selector) (ret []*v3.SSHAuth, err error) Get(namespace, name string) (*v3.SSHAuth, error) } type SSHAuthController interface { Generic() controller.GenericController Informer() cache.SharedIndexInformer Lister() SSHAuthLister AddHandler(ctx context.Context, name string, handler SSHAuthHandlerFunc) AddFeatureHandler(ctx context.Context, enabled func() bool, name string, sync SSHAuthHandlerFunc) AddClusterScopedHandler(ctx context.Context, name, clusterName string, handler SSHAuthHandlerFunc) 
AddClusterScopedFeatureHandler(ctx context.Context, enabled func() bool, name, clusterName string, handler SSHAuthHandlerFunc) Enqueue(namespace, name string) EnqueueAfter(namespace, name string, after time.Duration) } type SSHAuthInterface interface { ObjectClient() *objectclient.ObjectClient Create(*v3.SSHAuth) (*v3.SSHAuth, error) GetNamespaced(namespace, name string, opts metav1.GetOptions) (*v3.SSHAuth, error) Get(name string, opts metav1.GetOptions) (*v3.SSHAuth, error) Update(*v3.SSHAuth) (*v3.SSHAuth, error) Delete(name string, options *metav1.DeleteOptions) error DeleteNamespaced(namespace, name string, options *metav1.DeleteOptions) error List(opts metav1.ListOptions) (*v3.SSHAuthList, error) ListNamespaced(namespace string, opts metav1.ListOptions) (*v3.SSHAuthList, error) Watch(opts metav1.ListOptions) (watch.Interface, error) DeleteCollection(deleteOpts *metav1.DeleteOptions, listOpts metav1.ListOptions) error Controller() SSHAuthController AddHandler(ctx context.Context, name string, sync SSHAuthHandlerFunc) AddFeatureHandler(ctx context.Context, enabled func() bool, name string, sync SSHAuthHandlerFunc) AddLifecycle(ctx context.Context, name string, lifecycle SSHAuthLifecycle) AddFeatureLifecycle(ctx context.Context, enabled func() bool, name string, lifecycle SSHAuthLifecycle) AddClusterScopedHandler(ctx context.Context, name, clusterName string, sync SSHAuthHandlerFunc) AddClusterScopedFeatureHandler(ctx context.Context, enabled func() bool, name, clusterName string, sync SSHAuthHandlerFunc) AddClusterScopedLifecycle(ctx context.Context, name, clusterName string, lifecycle SSHAuthLifecycle) AddClusterScopedFeatureLifecycle(ctx context.Context, enabled func() bool, name, clusterName string, lifecycle SSHAuthLifecycle) } type sshAuthLister struct { ns string controller *sshAuthController } func (l *sshAuthLister) List(namespace string, selector labels.Selector) (ret []*v3.SSHAuth, err error) { if namespace == "" { namespace = l.ns } err = 
cache.ListAllByNamespace(l.controller.Informer().GetIndexer(), namespace, selector, func(obj interface{}) { ret = append(ret, obj.(*v3.SSHAuth)) }) return } func (l *sshAuthLister) Get(namespace, name string) (*v3.SSHAuth, error) { var key string if namespace != "" { key = namespace + "/" + name } else { key = name } obj, exists, err := l.controller.Informer().GetIndexer().GetByKey(key) if err != nil { return nil, err } if !exists { return nil, errors.NewNotFound(schema.GroupResource{ Group: SSHAuthGroupVersionKind.Group, Resource: SSHAuthGroupVersionResource.Resource, }, key) } return obj.(*v3.SSHAuth), nil } type sshAuthController struct { ns string controller.GenericController } func (c *sshAuthController) Generic() controller.GenericController { return c.GenericController } func (c *sshAuthController) Lister() SSHAuthLister { return &sshAuthLister{ ns: c.ns, controller: c, } } func (c *sshAuthController) AddHandler(ctx context.Context, name string, handler SSHAuthHandlerFunc) { c.GenericController.AddHandler(ctx, name, func(key string, obj interface{}) (interface{}, error) { if obj == nil { return handler(key, nil) } else if v, ok := obj.(*v3.SSHAuth); ok { return handler(key, v) } else { return nil, nil } }) } func (c *sshAuthController) AddFeatureHandler(ctx context.Context, enabled func() bool, name string, handler SSHAuthHandlerFunc) { c.GenericController.AddHandler(ctx, name, func(key string, obj interface{}) (interface{}, error) { if !enabled() { return nil, nil } else if obj == nil { return handler(key, nil) } else if v, ok := obj.(*v3.SSHAuth); ok { return handler(key, v) } else { return nil, nil } }) } func (c *sshAuthController) AddClusterScopedHandler(ctx context.Context, name, cluster string, handler SSHAuthHandlerFunc) { c.GenericController.AddHandler(ctx, name, func(key string, obj interface{}) (interface{}, error) { if obj == nil { return handler(key, nil) } else if v, ok := obj.(*v3.SSHAuth); ok && controller.ObjectInCluster(cluster, obj) { 
return handler(key, v) } else { return nil, nil } }) } func (c *sshAuthController) AddClusterScopedFeatureHandler(ctx context.Context, enabled func() bool, name, cluster string, handler SSHAuthHandlerFunc) { c.GenericController.AddHandler(ctx, name, func(key string, obj interface{}) (interface{}, error) { if !enabled() { return nil, nil } else if obj == nil { return handler(key, nil) } else if v, ok := obj.(*v3.SSHAuth); ok && controller.ObjectInCluster(cluster, obj) { return handler(key, v) } else { return nil, nil } }) } type sshAuthFactory struct { } func (c sshAuthFactory) Object() runtime.Object { return &v3.SSHAuth{} } func (c sshAuthFactory) List() runtime.Object { return &v3.SSHAuthList{} } func (s *sshAuthClient) Controller() SSHAuthController { genericController := controller.NewGenericController(s.ns, SSHAuthGroupVersionKind.Kind+"Controller", s.client.controllerFactory.ForResourceKind(SSHAuthGroupVersionResource, SSHAuthGroupVersionKind.Kind, true)) return &sshAuthController{ ns: s.ns, GenericController: genericController, } } type sshAuthClient struct { client *Client ns string objectClient *objectclient.ObjectClient controller SSHAuthController } func (s *sshAuthClient) ObjectClient() *objectclient.ObjectClient { return s.objectClient } func (s *sshAuthClient) Create(o *v3.SSHAuth) (*v3.SSHAuth, error) { obj, err := s.objectClient.Create(o) return obj.(*v3.SSHAuth), err } func (s *sshAuthClient) Get(name string, opts metav1.GetOptions) (*v3.SSHAuth, error) { obj, err := s.objectClient.Get(name, opts) return obj.(*v3.SSHAuth), err } func (s *sshAuthClient) GetNamespaced(namespace, name string, opts metav1.GetOptions) (*v3.SSHAuth, error) { obj, err := s.objectClient.GetNamespaced(namespace, name, opts) return obj.(*v3.SSHAuth), err } func (s *sshAuthClient) Update(o *v3.SSHAuth) (*v3.SSHAuth, error) { obj, err := s.objectClient.Update(o.Name, o) return obj.(*v3.SSHAuth), err } func (s *sshAuthClient) UpdateStatus(o *v3.SSHAuth) (*v3.SSHAuth, error) { 
obj, err := s.objectClient.UpdateStatus(o.Name, o) return obj.(*v3.SSHAuth), err } func (s *sshAuthClient) Delete(name string, options *metav1.DeleteOptions) error { return s.objectClient.Delete(name, options) } func (s *sshAuthClient) DeleteNamespaced(namespace, name string, options *metav1.DeleteOptions) error { return s.objectClient.DeleteNamespaced(namespace, name, options) } func (s *sshAuthClient) List(opts metav1.ListOptions) (*v3.SSHAuthList, error) { obj, err := s.objectClient.List(opts) return obj.(*v3.SSHAuthList), err } func (s *sshAuthClient) ListNamespaced(namespace string, opts metav1.ListOptions) (*v3.SSHAuthList, error) { obj, err := s.objectClient.ListNamespaced(namespace, opts) return obj.(*v3.SSHAuthList), err } func (s *sshAuthClient) Watch(opts metav1.ListOptions) (watch.Interface, error) { return s.objectClient.Watch(opts) } // Patch applies the patch and returns the patched deployment. func (s *sshAuthClient) Patch(o *v3.SSHAuth, patchType types.PatchType, data []byte, subresources ...string) (*v3.SSHAuth, error) { obj, err := s.objectClient.Patch(o.Name, o, patchType, data, subresources...) 
return obj.(*v3.SSHAuth), err } func (s *sshAuthClient) DeleteCollection(deleteOpts *metav1.DeleteOptions, listOpts metav1.ListOptions) error { return s.objectClient.DeleteCollection(deleteOpts, listOpts) } func (s *sshAuthClient) AddHandler(ctx context.Context, name string, sync SSHAuthHandlerFunc) { s.Controller().AddHandler(ctx, name, sync) } func (s *sshAuthClient) AddFeatureHandler(ctx context.Context, enabled func() bool, name string, sync SSHAuthHandlerFunc) { s.Controller().AddFeatureHandler(ctx, enabled, name, sync) } func (s *sshAuthClient) AddLifecycle(ctx context.Context, name string, lifecycle SSHAuthLifecycle) { sync := NewSSHAuthLifecycleAdapter(name, false, s, lifecycle) s.Controller().AddHandler(ctx, name, sync) } func (s *sshAuthClient) AddFeatureLifecycle(ctx context.Context, enabled func() bool, name string, lifecycle SSHAuthLifecycle) { sync := NewSSHAuthLifecycleAdapter(name, false, s, lifecycle) s.Controller().AddFeatureHandler(ctx, enabled, name, sync) } func (s *sshAuthClient) AddClusterScopedHandler(ctx context.Context, name, clusterName string, sync SSHAuthHandlerFunc) {<|fim▁hole|>} func (s *sshAuthClient) AddClusterScopedFeatureHandler(ctx context.Context, enabled func() bool, name, clusterName string, sync SSHAuthHandlerFunc) { s.Controller().AddClusterScopedFeatureHandler(ctx, enabled, name, clusterName, sync) } func (s *sshAuthClient) AddClusterScopedLifecycle(ctx context.Context, name, clusterName string, lifecycle SSHAuthLifecycle) { sync := NewSSHAuthLifecycleAdapter(name+"_"+clusterName, true, s, lifecycle) s.Controller().AddClusterScopedHandler(ctx, name, clusterName, sync) } func (s *sshAuthClient) AddClusterScopedFeatureLifecycle(ctx context.Context, enabled func() bool, name, clusterName string, lifecycle SSHAuthLifecycle) { sync := NewSSHAuthLifecycleAdapter(name+"_"+clusterName, true, s, lifecycle) s.Controller().AddClusterScopedFeatureHandler(ctx, enabled, name, clusterName, sync) }<|fim▁end|>
s.Controller().AddClusterScopedHandler(ctx, name, clusterName, sync)
<|file_name|>FileNameSource.py<|end_file_name|><|fim▁begin|># coding=utf-8 """Ingest workflow management tool FileNameSource Class """ __copyright__ = "Copyright (C) 2016 University of Maryland" __license__ = "GNU AFFERO GENERAL PUBLIC LICENSE, Version 3" import abc import os import sys import psycopg2<|fim▁hole|> def __init__(self): pass def __iter__(self): return self @abc.abstractmethod def next(self): pass def confirm_completion(self, path): return True class FileList(FileNameSource): def __init__(self, args, cfg): FileNameSource.__init__(self) src = args['<source_directory>'] self.fp = sys.stdin if src == '-' else open(src, 'rU') self.prefix = args['--prefix'] self.offset = len(self.prefix) def next(self): v = self.fp.next().strip() if not v.startswith(self.prefix): print v, ' not in ', self.prefix, 'ignoring ' return return decode_str(v[self.offset:]) class DirectoryWalk(FileNameSource): def __init__(self, args, cfg): FileNameSource.__init__(self) src = args['<source_directory>'] if src == '-': print ' Incompatible mode -- Cannot Walk stdin ' raise ValueError self.prefix = args['--prefix'] self.offset = len(self.prefix) self.walker = os.walk(src, topdown=True, followlinks=True) self.dirname = None self.files = None def next(self): while not self.dirname or not self.files: self.dirname, _, self.files = self.walker.next() return os.path.join(self.dirname[self.offset:], self.files.pop()) class DB: def __init__(self, args, cfg): defaults = (('user', 'drastic'), ('database', 'drastic'), ('password', 'drastic'), ('host', 'localhost')) credentials = dict(user=cfg.get('postgres', 'user'), database=cfg.get('postgres', 'database'), password=cfg.get('postgres', 'password'), host=cfg.get('postgres', 'host')) for k, v in defaults: if not credentials[k]: credentials[k] = v self.credentials = credentials self.cnx = psycopg2.connect(**credentials) self.cs1 = self.cnx.cursor() table = args.get('--dataset', 'resource') if not table: table = 'resource' self.tablename = table 
### Do JIT set up of other queries.... self.update_status = False self.db_initialized = False def summary(self): cmd = '''SELECT status,count(*) from "{0}" group by status order by status '''.format(self.tablename) try: self.cs1.execute(cmd) for v in self.cs1: print '{0:-10s}\t{1:,}'.format(*v) except Exception as e: print e def _setup_db(self, table): cs = self.cnx.cursor() # Create the status Enum try: cs.execute("CREATE TYPE resource_status AS ENUM ('READY','IN-PROGRESS','DONE','BROKEN','VERIFIED')") except: cs.connection.rollback() # cmds = [ '''CREATE TABLE IF NOT EXISTS "{0}" ( path TEXT PRIMARY KEY, status resource_status DEFAULT 'READY', started timestamp, fs_sync boolean)''', '''CREATE INDEX "IDX_{0}_01_status" ON "{0}" (status ) WHERE status <> 'DONE' ''', '''CREATE INDEX "IDX_{0}_01_fs_sync" ON "{0}" (fs_sync) WHERE fs_sync is not True'''] for cmd in cmds: try: cs.execute(cmd.format(table)) cs.connection.commit() except Exception as e: cs.connection.rollback() class DBPrepare(DB): """ Class to be used when preparing. """ def __init__(self, args, cfg): DB.__init__(self, args, cfg) self.prefix = (args['--prefix']) self.offset = len(self.prefix) self.cs = self.cnx.cursor('AB1', withhold=True) self._setup_db(self.tablename) cmd = '''PREPARE I1 ( text ) AS insert into "{0}" (path,status) SELECT $1,'READY'::resource_status WHERE NOT EXISTS (SELECT TRUE FROM "{0}" where path = $1)''' self.cs1.execute(cmd.format(self.tablename)) def prepare(self, path ): self.cs1.execute("EXECUTE I1(%s); commit", [path]) return True class DBQuery(FileNameSource, DB): """ Class to be used to get file names when injecting. 
""" def __init__(self, args, cfg): DB.__init__(self,args,cfg) FileNameSource.__init__(self) self.prefix = (args['--prefix']) self.offset = len(self.prefix) self.fetch_cs = self.cnx.cursor() cmd = '''PREPARE F1 (integer) AS SELECT path FROM "{0}" where status = 'READY' LIMIT $1 '''.format(self.tablename) self.fetch_cs.execute(cmd) self.fetch_cs.execute('EXECUTE F1 (1000)') # And prepare the update status cmd ucmd = '''PREPARE M1 (TEXT,resource_status) AS UPDATE "{0}" SET status='DONE' WHERE path = $1 and status <> $2 '''.format( self.tablename) self.cs1.execute(ucmd) # And retreive the values for the status self.cs1.execute('''SELECT unnest(enum_range(NULL::resource_status))''') self.status_values = set( ( k[0] for k in self.cs1.fetchall() )) return def confirm_completion(self, path, status = 'DONE'): if status not in self.status_values : if status == 'FAILED' : status = 'BROKEN' else : raise ValueError("bad value for enum -- {} : should be {}".format(status,self.status_values) ) #### try: self.cs1.execute('EXECUTE M1(%s,%s)', [path,status]) updates = self.cs1.rowcount self.cs1.connection.commit() return True except Exception as e: print 'failed to update status for ', path,'\n',e self.cs1.connection.rollback() return False def next(self): """ :return: next path from DB that is ready... This function will re-issue the Select when the current one is exhausted. This attempts to avoid two many locks on two many records. 
""" k = self.fetch_cs.fetchone() # if not k: self.fetch_cs.execute('EXECUTE F1 (1000)') k = self.fetch_cs.fetchone() # if k: return k[0].decode('utf-8') raise StopIteration def CreateFileNameSource(args, cfg): """ use the parameters to prepare an iterator that will deliver all the (suitably normalized) files to be injected :param args: command line args :param cfg: global, persistent parameters :return: iterator """ src = args['<source_directory>'] prefix = args['--prefix'] if not prefix: prefix = '/data' else: prefix = prefix.rstrip('/') if not src.startswith(prefix): print src, ' must be a subdirectory of the host data directory (--prefix=', prefix, ')' print 'If you did not specify it, please do so' sys.exit(1) ######### ## Set up a source that gets list of files from a file if args['--read'] : return FileList(args, cfg) if args['--walk']: return DirectoryWalk(args, cfg) if args['--postgres'] : return DBQuery(args, cfg) if args['--sqlite3'] : raise NotImplementedError def decode_str(s): """ :param s: string to be converted to unicode :return: unicode version """ if isinstance(s, unicode): return s try: return s.decode('utf8') except UnicodeDecodeError: try: return s.decode('iso8859-1') except UnicodeDecodeError: s_ignore = s.decode('utf8', 'ignore') return s_ignore<|fim▁end|>
class FileNameSource:
<|file_name|>my_mainwindow.py<|end_file_name|><|fim▁begin|>__author__ = 'PaleNeutron' import os from urllib.parse import urlparse, unquote import sys from PyQt5 import QtWidgets, QtCore, QtGui class MyMainWindow(QtWidgets.QMainWindow): file_loaded = QtCore.pyqtSignal(str) image_loaded = QtCore.pyqtSignal(QtGui.QImage) def __init__(self): super(MyMainWindow, self).__init__() self.windowList = [] self.text_path = '' self.epub_path = '' self.win_file_mime = "application/x-qt-windows-mime;value=\"FileNameW\"" self.text_uri_mime = "text/uri-list" self.create_content_browser() <|fim▁hole|> self.content_browser = QtWidgets.QTextBrowser() self.content_browser.setFontPointSize(12) self.content_browser.setGeometry(QtCore.QRect(300, 150, 600, 400)) self.windowList.append(self.content_browser) def dragEnterEvent(self, ev): ev.accept() def load_file(self, file_path): self.file_loaded.emit(file_path) # def image_loaded(self, file_path): # with open(file_path, "b") as f: # r = f.read() # with open("images/cover.jpg", "wb") as f: # f.write(r) # def epub_loaded(self, file_path): # self.epub_path = file_path # self.file_loaded.emit(False, ) def uri_to_path(self, uri): if sys.platform == "win32": path = unquote(urlparse(uri).path)[1:] elif sys.platform == "linux": path = unquote(urlparse(uri).path) else: path = None return path def dropEvent(self, ev): # formats = ev.mimeData().formats() # for i in formats: # print(i) # if ev.mimeData().hasFormat(self.win_file_mime): # ev.accept() # file_path = bytes(ev.mimeData().data(self.win_file_mime).data())[:-2].decode('utf16') # if file_path.endswith(".txt"): # self.text_loaded(file_path) # elif file_path.endswith(".jpg") or file_path.endswith(".jpeg") or file_path.endswith(".png"): # self.image_loaded(file_path) # elif file_path.endswith(".epub"): # self.epub_loaded(file_path) # print(file_path) if ev.mimeData().hasImage(): self.image_loaded.emit(ev.mimeData().imageData()) if ev.mimeData().hasFormat(self.text_uri_mime): uri = 
ev.mimeData().data(self.text_uri_mime).data().decode("utf8").strip() file_path = self.uri_to_path(uri) if uri.lower().endswith(".txt") or uri.lower().endswith(".epub"): self.load_file(file_path) elif uri.lower().endswith(".zip"): #打开一个zip文档,获取其中的txt import zipfile zf = zipfile.ZipFile(file_path) for filename in zf.namelist(): #如果文档中txt文件大于10kb则解压到当前文件夹 if filename.lower().endswith(".txt") and zf.getinfo(filename).file_size > 10 * 1024: zf.extract(filename) # 发送文件位置信号 self.load_file(os.curdir + os.sep + filename) break elif uri.lower().endswith(".rar"): import rarfile rf = rarfile.RarFile(file_path) for filename in rf.namelist(): # 如果文档中txt文件大于10kb则解压到当前文件夹 if filename.lower().endswith(".txt") and rf.getinfo(filename).file_size > 10 * 1024: rf.extract(filename) #发送文件位置信号 self.load_file(os.curdir + os.sep + filename) break else: ev.ignore()<|fim▁end|>
def create_content_browser(self):
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS,<|fim▁hole|>// See the License for the specific language governing permissions and // limitations under the License. pub mod jws;<|fim▁end|>
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
<|file_name|>intrusive_ptr.hpp<|end_file_name|><|fim▁begin|><|fim▁hole|>#ifndef BOOST_INTRUSIVE_PTR_HPP_INCLUDED #define BOOST_INTRUSIVE_PTR_HPP_INCLUDED // // intrusive_ptr.hpp // // Copyright (c) 2001, 2002 Peter Dimov // // Distributed under the Boost Software License, Version 1.0. // See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt // // See http://www.boost.org/libs/smart_ptr/ for documentation. // #include <boost/smart_ptr/intrusive_ptr.hpp> #endif // #ifndef BOOST_INTRUSIVE_PTR_HPP_INCLUDED<|fim▁end|>
<|file_name|>bladeRF_source.cc<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2010, Joshua Lackey * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. 
*/ #include <stdio.h> #include <stdlib.h> #include <unistd.h> #include <string.h> #include <pthread.h> #include <math.h> #include <complex> #include "bladeRF_source.h" extern int g_verbosity; bladeRF_source::bladeRF_source(float sample_rate, long int fpga_master_clock_freq) { m_fpga_master_clock_freq = fpga_master_clock_freq; m_desired_sample_rate = sample_rate; m_sample_rate = 0.0;<|fim▁hole|> m_cb = new circular_buffer(CB_LEN, sizeof(complex), 0); pthread_mutex_init(&m_u_mutex, 0); } bladeRF_source::bladeRF_source(unsigned int decimation, long int fpga_master_clock_freq) { m_fpga_master_clock_freq = fpga_master_clock_freq; m_sample_rate = 0.0; m_cb = new circular_buffer(CB_LEN, sizeof(complex), 0); pthread_mutex_init(&m_u_mutex, 0); m_decimation = decimation & ~1; if(m_decimation < 4) m_decimation = 4; if(m_decimation > 256) m_decimation = 256; } bladeRF_source::~bladeRF_source() { stop(); delete m_cb; pthread_mutex_destroy(&m_u_mutex); } void bladeRF_source::stop() { pthread_mutex_lock(&m_u_mutex); #if 0 if(m_db_rx) m_db_rx->set_enable(0); if(m_u_rx) m_u_rx->stop(); #endif pthread_mutex_unlock(&m_u_mutex); } void bladeRF_source::start() { pthread_mutex_lock(&m_u_mutex); if (bladerf_enable_module(bdev, BLADERF_MODULE_RX, 1)) { } pthread_mutex_unlock(&m_u_mutex); } void bladeRF_source::calculate_decimation() { float decimation_f; #if 0 decimation_f = (float)m_u_rx->fpga_master_clock_freq() / m_desired_sample_rate; m_decimation = (unsigned int)round(decimation_f) & ~1; if(m_decimation < 4) m_decimation = 4; if(m_decimation > 256) m_decimation = 256; #endif } float bladeRF_source::sample_rate() { return m_sample_rate; } int bladeRF_source::tune_dac(int dac) { printf("DAC: 0x%.4x\n", dac); return bladerf_dac_write(bdev, dac); } int bladeRF_source::save_dac(int dac) { int rv; bool ok; bladerf_fpga_size fpga_size; struct bladerf_image *image = NULL; uint32_t page, count; bladerf_get_fpga_size(bdev, &fpga_size); image = bladerf_alloc_cal_image(bdev, fpga_size, dac); if 
(!image) { return 1; } rv = bladerf_erase_flash_bytes(bdev, BLADERF_FLASH_ADDR_CAL, BLADERF_FLASH_BYTE_LEN_CAL); if (rv != 0) { return 1; } rv = bladerf_write_flash_bytes(bdev, image->data, image->address, image->length); return 0; } int bladeRF_source::tune(double freq) { int r; pthread_mutex_lock(&m_u_mutex); r = bladerf_set_frequency(bdev, BLADERF_MODULE_RX, freq); pthread_mutex_unlock(&m_u_mutex); return !r; } bool bladeRF_source::set_antenna(int antenna) { return true; //return m_db_rx->select_rx_antenna(antenna); } bool bladeRF_source::set_gain(float gain) { float min = 0.5, max = 2.0; if((gain < 0.0) || (1.0 < gain)) return false; return !bladerf_set_rxvga2(bdev, 3); return !bladerf_set_rxvga2(bdev, min + gain * (max - min)); } /* * open() should be called before multiple threads access bladeRF_source. */ int bladeRF_source::open(unsigned int subdev) { int do_set_decim = 0; if(!bdev) { int status; if (bladerf_open(&bdev, NULL)) { printf("Couldn't open bladeRF\n"); exit(1); } #define DEFAULT_STREAM_XFERS 64 #define DEFAULT_STREAM_BUFFERS 5600 #define DEFAULT_STREAM_SAMPLES 2048 #define DEFAULT_STREAM_TIMEOUT 4000 status = bladerf_sync_config(bdev, static_cast<bladerf_channel_layout>(BLADERF_CHANNEL_RX(0)), BLADERF_FORMAT_SC16_Q11, DEFAULT_STREAM_BUFFERS, DEFAULT_STREAM_SAMPLES, DEFAULT_STREAM_XFERS, DEFAULT_STREAM_TIMEOUT ); if(!m_decimation) { do_set_decim = 1; m_decimation = 4; } // if(do_set_decim) { // calculate_decimation(); // } //m_u_rx->set_decim_rate(m_decimation); // m_sample_rate = (double)m_u_rx->fpga_master_clock_freq() / m_decimation; unsigned int bw; bladerf_set_bandwidth(bdev, BLADERF_MODULE_RX, 1500000u, &bw); printf("Actual filter bandwidth = %d kHz\n", bw/1000); int gain; bladerf_set_rxvga1(bdev, 20); bladerf_get_rxvga1(bdev, &gain); printf("rxvga1 = %d dB\n", gain); bladerf_set_rxvga2(bdev, 30); bladerf_set_lna_gain(bdev, BLADERF_LNA_GAIN_MAX); bladerf_get_rxvga2(bdev, &gain); bladerf_dac_write(bdev, 0xa1ea); printf("rxvga2 = %d dB\n", 
gain); struct bladerf_rational_rate rate, actual; rate.integer = (4 * 13e6) / 48; rate.num = (4 * 13e6) - rate.integer * 48; rate.den = 48; m_sample_rate = (double)4.0 * 13.0e6 / 48.0; if (bladerf_set_rational_sample_rate(bdev, BLADERF_MODULE_RX, &rate, &actual)) { printf("Error setting RX sampling rate\n"); return -1; } if(g_verbosity > 1) { fprintf(stderr, "Decimation : %u\n", m_decimation); fprintf(stderr, "Sample rate: %f\n", m_sample_rate); } } set_gain(0.45); return 0; } #define USB_PACKET_SIZE 512 int bladeRF_source::fill(unsigned int num_samples, unsigned int *overrun_i) { bool overrun; unsigned char ubuf[USB_PACKET_SIZE]; short *s = (short *)ubuf; unsigned int i, j, space, overruns = 0; complex *c; while((m_cb->data_available() < num_samples) && (m_cb->space_available() > 0)) { // read one usb packet from the bladeRF pthread_mutex_lock(&m_u_mutex); bladerf_sync_rx(bdev, ubuf, 512 / 4, NULL, 0); overrun = false; pthread_mutex_unlock(&m_u_mutex); if(overrun) overruns++; // write complex<short> input to complex<float> output c = (complex *)m_cb->poke(&space); // set space to number of complex items to copy if(space > (USB_PACKET_SIZE >> 2)) space = USB_PACKET_SIZE >> 2; // write data for(i = 0, j = 0; i < space; i += 1, j += 2) c[i] = complex(s[j], s[j + 1]); // update cb m_cb->wrote(i); } // if the cb is full, we left behind data from the usb packet if(m_cb->space_available() == 0) { fprintf(stderr, "warning: local overrun\n"); overruns++; } if(overrun_i) *overrun_i = overruns; return 0; } int bladeRF_source::read(complex *buf, unsigned int num_samples, unsigned int *samples_read) { unsigned int n; if(fill(num_samples, 0)) return -1; n = m_cb->read(buf, num_samples); if(samples_read) *samples_read = n; return 0; } /* * Don't hold a lock on this and use the bladeRF at the same time. 
*/ circular_buffer *bladeRF_source::get_buffer() { return m_cb; } int bladeRF_source::flush(unsigned int flush_count) { m_cb->flush(); fill(flush_count * USB_PACKET_SIZE, 0); m_cb->flush(); return 0; }<|fim▁end|>
m_decimation = 0;
<|file_name|>conditionals.rs<|end_file_name|><|fim▁begin|>use shell::{status::*, Shell}; macro_rules! string_function { ($method:tt) => { pub(crate) fn $method(args: &[&str], _: &mut Shell) -> i32 { match args.len() { 0...2 => { eprintln!("ion: {}: two arguments must be supplied", args[0]); return BAD_ARG; } 3 => if args[1].$method(&args[2]) { SUCCESS } else { FAILURE }, _ => { for arg in args[2..].iter() { if args[1].$method(arg) { return SUCCESS; } } FAILURE } } }<|fim▁hole|>string_function!(ends_with); string_function!(contains);<|fim▁end|>
}; } string_function!(starts_with);
<|file_name|>test_pp_psc_delta_stdp.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # test_pp_psc_delta_stdp.py # # This file is part of NEST. # # Copyright (C) 2004 The NEST Initiative # # NEST is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 2 of the License, or # (at your option) any later version. # # NEST is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with NEST. If not, see <http://www.gnu.org/licenses/>. # # Moritz Deger, [email protected], Aug 14, 2015 # # # Python script to reproduce failure of pp_psc_delta to show spike timing # dependent plasticity (STDP), as opposed to iaf_psc_delta. # The problem is probably related to the setting of 'archiver_length' # (printed at the end of the script) import nest import nest.raster_plot import numpy as np import pylab Dt = 1. nsteps = 100 w_0 = 100. nest.ResetKernel() nrn_pre = nest.Create('parrot_neuron') nrn_post1 = nest.Create('iaf_psc_delta') nrn_post2 = nest.Create('pp_psc_delta') nest.Connect(nrn_pre, nrn_post1 + nrn_post2, syn_spec={'model': 'stdp_synapse', 'weight': w_0}) conn1 = nest.GetConnections(nrn_pre, nrn_post1) conn2 = nest.GetConnections(nrn_pre, nrn_post2) sg_pre = nest.Create('spike_generator') nest.SetStatus(sg_pre, {'spike_times': np.arange(Dt, nsteps * Dt, 10. * Dt)}) nest.Connect(sg_pre, nrn_pre) mm = nest.Create('multimeter') nest.SetStatus(mm, {'record_from': ['V_m']}) nest.Connect(mm, nrn_post1 + nrn_post2) sd = nest.Create('spike_detector') nest.Connect(nrn_pre + nrn_post1 + nrn_post2, sd) t = [] w1 = [] w2 = [] t.append(0.) 
w1.append(nest.GetStatus(conn1, keys=['weight'])[0][0]) w2.append(nest.GetStatus(conn2, keys=['weight'])[0][0]) for i in xrange(nsteps):<|fim▁hole|> w2.append(nest.GetStatus(conn2, keys=['weight'])[0][0]) pylab.figure(1) pylab.plot(t, w1, 'g', label='iaf_psc_delta, ' + str(nrn_post1[0])) pylab.plot(t, w2, 'r', label='pp_psc_delta, ' + str(nrn_post2[0])) pylab.xlabel('time [ms]') pylab.ylabel('weight [mV]') pylab.legend(loc='best') ylims = pylab.ylim() pylab.ylim(ylims[0] - 5, ylims[1] + 5) # pylab.savefig('test_pp_psc_delta_stdp_fig1.png') nest.raster_plot.from_device(sd) ylims = pylab.ylim() pylab.ylim(ylims[0] - .5, ylims[1] + .5) pylab.show() # pylab.savefig('test_pp_psc_delta_stdp_fig2.png') print 'Archiver lengths shall be equal:' for nrn in [nrn_post1, nrn_post2]: print nest.GetStatus(nrn, keys=['model', 'archiver_length'])[0]<|fim▁end|>
nest.Simulate(Dt) t.append(i * Dt) w1.append(nest.GetStatus(conn1, keys=['weight'])[0][0])
<|file_name|>LPhoneSecurityProfile.java<|end_file_name|><|fim▁begin|>package com.cisco.axl.api._8; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlType; /** * <p>Java class for LPhoneSecurityProfile complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType name="LPhoneSecurityProfile"> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence minOccurs="0"> * &lt;element name="phoneType" type="{http://www.cisco.com/AXL/API/8.0}XModel" minOccurs="0"/> * &lt;element name="protocol" type="{http://www.cisco.com/AXL/API/8.0}XDeviceProtocol" minOccurs="0"/> * &lt;element name="name" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/> * &lt;element name="description" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/> * &lt;element name="deviceSecurityMode" type="{http://www.cisco.com/AXL/API/8.0}XDeviceSecurityMode" minOccurs="0"/> * &lt;element name="authenticationMode" type="{http://www.cisco.com/AXL/API/8.0}XAuthenticationMode" minOccurs="0"/> * &lt;element name="keySize" type="{http://www.cisco.com/AXL/API/8.0}XKeySize" minOccurs="0"/> * &lt;element name="tftpEncryptedConfig" type="{http://www.cisco.com/AXL/API/8.0}boolean" minOccurs="0"/> * &lt;element name="nonceValidityTime" type="{http://www.cisco.com/AXL/API/8.0}XInteger" minOccurs="0"/> * &lt;element name="transportType" type="{http://www.cisco.com/AXL/API/8.0}XTransport" minOccurs="0"/> * &lt;element name="sipPhonePort" type="{http://www.cisco.com/AXL/API/8.0}XInteger" minOccurs="0"/> * &lt;element name="enableDigestAuthentication" type="{http://www.cisco.com/AXL/API/8.0}boolean" minOccurs="0"/> * &lt;element name="excludeDigestCredentials" type="{http://www.cisco.com/AXL/API/8.0}boolean" minOccurs="0"/> * &lt;/sequence> * 
&lt;attribute name="uuid" type="{http://www.cisco.com/AXL/API/8.0}XUUID" /> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "LPhoneSecurityProfile", propOrder = { "phoneType", "protocol", "name", "description", "deviceSecurityMode", "authenticationMode", "keySize", "tftpEncryptedConfig", "nonceValidityTime", "transportType", "sipPhonePort", "enableDigestAuthentication", "excludeDigestCredentials" }) public class LPhoneSecurityProfile { protected String phoneType; protected String protocol; protected String name; protected String description; protected String deviceSecurityMode; protected String authenticationMode; protected String keySize; protected String tftpEncryptedConfig; protected String nonceValidityTime; protected String transportType; protected String sipPhonePort; protected String enableDigestAuthentication; protected String excludeDigestCredentials; @XmlAttribute protected String uuid; /** * Gets the value of the phoneType property. * * @return * possible object is * {@link String } * */ public String getPhoneType() { return phoneType; } /** * Sets the value of the phoneType property. * * @param value * allowed object is * {@link String } * */ public void setPhoneType(String value) { this.phoneType = value; } /** * Gets the value of the protocol property. * * @return * possible object is<|fim▁hole|> return protocol; } /** * Sets the value of the protocol property. * * @param value * allowed object is * {@link String } * */ public void setProtocol(String value) { this.protocol = value; } /** * Gets the value of the name property. * * @return * possible object is * {@link String } * */ public String getName() { return name; } /** * Sets the value of the name property. * * @param value * allowed object is * {@link String } * */ public void setName(String value) { this.name = value; } /** * Gets the value of the description property. 
* * @return * possible object is * {@link String } * */ public String getDescription() { return description; } /** * Sets the value of the description property. * * @param value * allowed object is * {@link String } * */ public void setDescription(String value) { this.description = value; } /** * Gets the value of the deviceSecurityMode property. * * @return * possible object is * {@link String } * */ public String getDeviceSecurityMode() { return deviceSecurityMode; } /** * Sets the value of the deviceSecurityMode property. * * @param value * allowed object is * {@link String } * */ public void setDeviceSecurityMode(String value) { this.deviceSecurityMode = value; } /** * Gets the value of the authenticationMode property. * * @return * possible object is * {@link String } * */ public String getAuthenticationMode() { return authenticationMode; } /** * Sets the value of the authenticationMode property. * * @param value * allowed object is * {@link String } * */ public void setAuthenticationMode(String value) { this.authenticationMode = value; } /** * Gets the value of the keySize property. * * @return * possible object is * {@link String } * */ public String getKeySize() { return keySize; } /** * Sets the value of the keySize property. * * @param value * allowed object is * {@link String } * */ public void setKeySize(String value) { this.keySize = value; } /** * Gets the value of the tftpEncryptedConfig property. * * @return * possible object is * {@link String } * */ public String getTftpEncryptedConfig() { return tftpEncryptedConfig; } /** * Sets the value of the tftpEncryptedConfig property. * * @param value * allowed object is * {@link String } * */ public void setTftpEncryptedConfig(String value) { this.tftpEncryptedConfig = value; } /** * Gets the value of the nonceValidityTime property. 
* * @return * possible object is * {@link String } * */ public String getNonceValidityTime() { return nonceValidityTime; } /** * Sets the value of the nonceValidityTime property. * * @param value * allowed object is * {@link String } * */ public void setNonceValidityTime(String value) { this.nonceValidityTime = value; } /** * Gets the value of the transportType property. * * @return * possible object is * {@link String } * */ public String getTransportType() { return transportType; } /** * Sets the value of the transportType property. * * @param value * allowed object is * {@link String } * */ public void setTransportType(String value) { this.transportType = value; } /** * Gets the value of the sipPhonePort property. * * @return * possible object is * {@link String } * */ public String getSipPhonePort() { return sipPhonePort; } /** * Sets the value of the sipPhonePort property. * * @param value * allowed object is * {@link String } * */ public void setSipPhonePort(String value) { this.sipPhonePort = value; } /** * Gets the value of the enableDigestAuthentication property. * * @return * possible object is * {@link String } * */ public String getEnableDigestAuthentication() { return enableDigestAuthentication; } /** * Sets the value of the enableDigestAuthentication property. * * @param value * allowed object is * {@link String } * */ public void setEnableDigestAuthentication(String value) { this.enableDigestAuthentication = value; } /** * Gets the value of the excludeDigestCredentials property. * * @return * possible object is * {@link String } * */ public String getExcludeDigestCredentials() { return excludeDigestCredentials; } /** * Sets the value of the excludeDigestCredentials property. * * @param value * allowed object is * {@link String } * */ public void setExcludeDigestCredentials(String value) { this.excludeDigestCredentials = value; } /** * Gets the value of the uuid property. 
* * @return * possible object is * {@link String } * */ public String getUuid() { return uuid; } /** * Sets the value of the uuid property. * * @param value * allowed object is * {@link String } * */ public void setUuid(String value) { this.uuid = value; } }<|fim▁end|>
* {@link String } * */ public String getProtocol() {
<|file_name|>create.rs<|end_file_name|><|fim▁begin|>// Copyright (c) The Diem Core Contributors // SPDX-License-Identifier: Apache-2.0 use crate::release_flow::{ hash_for_modules, load_artifact, save_release_artifact, verify::verify_payload_change, ReleaseArtifact, }; use anyhow::{bail, Result}; use diem_types::{ access_path::AccessPath, chain_id::ChainId, transaction::{ChangeSet, WriteSetPayload}, write_set::{WriteOp, WriteSetMut}, }; use diem_validator_interface::{DiemValidatorInterface, JsonRpcDebuggerInterface}; use std::collections::{BTreeMap, BTreeSet}; use vm::CompiledModule; pub fn create_release( // ChainID to distinguish the diem network. e.g: PREMAINNET<|fim▁hole|> // Blockchain height version: u64, // Set the flag to true in the first release. This will manually create the first release artifact on disk. first_release: bool, release_modules: &[(Vec<u8>, CompiledModule)], ) -> Result<WriteSetPayload> { let release_artifact = ReleaseArtifact { chain_id, version, stdlib_hash: hash_for_modules( release_modules .iter() .map(|(bytes, module)| (module.self_id(), bytes)), )?, }; if first_release { if load_artifact(&chain_id).is_ok() { bail!("Previous release existed"); } save_release_artifact(release_artifact.clone())?; } let artifact = load_artifact(&chain_id)?; if artifact.chain_id != chain_id { bail!("Artifact mismatch with on disk file"); } if artifact.version > version { bail!( "Artifact version is ahead of the argument: old: {:?}, new: {:?}", artifact.version, version ); } let remote = Box::new(JsonRpcDebuggerInterface::new(url.as_str())?); let payload = create_release_from_artifact(&release_artifact, url.as_str(), release_modules)?; verify_payload_change( remote, Some(version), &payload, release_modules.iter().map(|(_bytes, m)| m), )?; save_release_artifact(release_artifact)?; Ok(payload) } pub(crate) fn create_release_from_artifact( artifact: &ReleaseArtifact, remote_url: &str, release_modules: &[(Vec<u8>, CompiledModule)], ) -> Result<WriteSetPayload> 
{ let remote = JsonRpcDebuggerInterface::new(remote_url)?; let remote_modules = remote.get_diem_framework_modules_by_version(artifact.version)?; create_release_writeset(&remote_modules, release_modules) } pub(crate) fn create_release_writeset( remote_frameworks: &[CompiledModule], local_frameworks: &[(Vec<u8>, CompiledModule)], ) -> Result<WriteSetPayload> { let remote_framework_map = remote_frameworks .iter() .map(|m| (m.self_id(), m)) .collect::<BTreeMap<_, _>>(); let remote_ids = remote_framework_map.keys().collect::<BTreeSet<_>>(); let local_framework_map = local_frameworks .iter() .map(|(bytes, module)| (module.self_id(), (bytes, module))) .collect::<BTreeMap<_, _>>(); let local_ids = local_framework_map.keys().collect::<BTreeSet<_>>(); let mut framework_changes = BTreeMap::new(); // 1. Insert new modules to be published. for module_id in local_ids.difference(&remote_ids) { let module = *local_framework_map .get(*module_id) .expect("ModuleID not found in local stdlib"); framework_changes.insert(*module_id, Some(module)); } // 2. Remove modules that are already deleted locally. for module_id in remote_ids.difference(&local_ids) { framework_changes.insert(*module_id, None); } // 3. Check the diff between on chain modules and local modules, update when local bytes is different. 
for module_id in local_ids.intersection(&remote_ids) { let (local_bytes, local_module) = *local_framework_map .get(*module_id) .expect("ModuleID not found in local stdlib"); let remote_module = remote_framework_map .get(*module_id) .expect("ModuleID not found in local stdlib"); if &local_module != remote_module { framework_changes.insert(*module_id, Some((local_bytes, local_module))); } } let mut write_patch = WriteSetMut::new(vec![]); for (id, module_opt) in framework_changes.into_iter() { let path = AccessPath::code_access_path(id.clone()); match module_opt { Some((bytes, _)) => { write_patch.push((path, WriteOp::Value((*bytes).clone()))); } None => write_patch.push((path, WriteOp::Deletion)), } } Ok(WriteSetPayload::Direct(ChangeSet::new( write_patch.freeze()?, vec![], ))) }<|fim▁end|>
chain_id: ChainId, // Public JSON-rpc endpoint URL. // TODO: Get rid of this URL argument once we have a stable mapping from ChainId to its url. url: String,
<|file_name|>wamp_client.py<|end_file_name|><|fim▁begin|>import os import sys import asyncio from pathlib import Path import pendulum sys.path.append(str(Path(__file__).absolute().parent.parent.parent)) os.environ.setdefault("DJANGO_SETTINGS_MODULE", "project.settings") from django.core.wsgi import get_wsgi_application # noqa application = get_wsgi_application() from autobahn.asyncio.wamp import ApplicationSession, ApplicationRunner # noqa import devpy.develop as log # Boost the log max file size to 1Gb log.handlers[0].maxBytes *= 1000 from vessels.models import VesselActivity # noqa from vessels.crawler.ftp_client import crawl_csv, save_csv # noqa from vessels.crawler.nh_client import process_xml, crawl_xml # noqa class WampClient(ApplicationSession): async def onJoin(self, details): loop = asyncio.get_event_loop() async def update_activity_status(id, status): """ Update ship status for the given activity """ def _(id, value): log.info(f'Update activity "{id}" status to "{status}"') activity = VesselActivity.objects.get(id=id) activity.status = status or None activity.save() return activity.to_dict( timezone="Europe/Paris", include_vessel=True ) activity = await loop.run_in_executor(None, _, id, status) activity['timestamp'] = pendulum.utcnow().timestamp() log.info(f'Update status info for activity {activity!r}') self.publish('smit.activity.update', activity) return activity self.register(update_activity_status, 'smit.activity.update.status') async def update_vessel_helico(id, helico): """ Update helicopter approval for the vessel of this activity """ def _(id, value): activity = VesselActivity.objects.get(id=id) vessel = activity.vessel log.info(f'Update vessel "{vessel.id}" helico to "{helico}"') vessel.helico = helico or None vessel.save() return activity.to_dict( timezone="Europe/Paris", include_vessel=True ) activity = await loop.run_in_executor(None, _, id, helico) activity['timestamp'] = pendulum.utcnow().timestamp() log.info(f'Update helico info for 
activity {activity!r}') self.publish('smit.activity.update', activity) return activity self.register(update_vessel_helico, 'smit.vessel.update.helico') async def update_vessel_helico_obs(id, obs): """ Update helicopter obs for the vessel of this activity """ def _(id, value): activity = VesselActivity.objects.get(id=id) vessel = activity.vessel log.info(f'Update vessel "{vessel.id}" helico to "{obs}"') vessel.helico_observation = obs or None vessel.save() return activity.to_dict( timezone="Europe/Paris", include_vessel=True ) activity = await loop.run_in_executor(None, _, id, obs) activity['timestamp'] = pendulum.utcnow().timestamp() log.info(f'Update helico obs for activity {activity!r}') self.publish('smit.activity.update', activity) return activity self.register(update_vessel_helico_obs, 'smit.vessel.update.helico_obs') async def publish_csv_update(stream): activities = await save_csv(stream) self.publish('smit.sirene.csv.update', activities) coro = crawl_csv( host="localhost", login="user",<|fim▁hole|> pwd="password", port=2121, path="fixture.csv", csv_callback=publish_csv_update, tick=3 ) asyncio.ensure_future(coro) async def publish_xml_update(stream): distances = await process_xml(stream) self.publish('smit.nh.xml.update', distances) asyncio.ensure_future(crawl_xml(xml_callback=publish_xml_update)) if __name__ == '__main__': runner = ApplicationRunner("ws://127.0.0.1:3333/ws", "realm1") runner.run(WampClient)<|fim▁end|>
<|file_name|>c_parser.py<|end_file_name|><|fim▁begin|>#------------------------------------------------------------------------------ # pycparser: c_parser.py # # CParser class: Parser and AST builder for the C language # # Copyright (C) 2008-2013, Eli Bendersky # License: BSD #------------------------------------------------------------------------------ import re from .ply import yacc from . import c_ast from .c_lexer import CLexer from .plyparser import PLYParser, Coord, ParseError from .ast_transforms import fix_switch_cases class CParser(PLYParser): def __init__( self, lex_optimize=True, lextab='cffi._pycparser.lextab', yacc_optimize=True, yacctab='cffi._pycparser.yacctab', yacc_debug=False): """ Create a new CParser. Some arguments for controlling the debug/optimization level of the parser are provided. The defaults are tuned for release/performance mode. The simple rules for using them are: *) When tweaking CParser/CLexer, set these to False *) When releasing a stable parser, set to True lex_optimize: Set to False when you're modifying the lexer. Otherwise, changes in the lexer won't be used, if some lextab.py file exists. When releasing with a stable lexer, set to True to save the re-generation of the lexer table on each run. lextab: Points to the lex table that's used for optimized mode. Only if you're modifying the lexer and want some tests to avoid re-generating the table, make this point to a local lex table file (that's been earlier generated with lex_optimize=True) yacc_optimize: Set to False when you're modifying the parser. Otherwise, changes in the parser won't be used, if some parsetab.py file exists. When releasing with a stable parser, set to True to save the re-generation of the parser table on each run. yacctab: Points to the yacc table that's used for optimized mode. 
Only if you're modifying the parser, make this point to a local yacc table file yacc_debug: Generate a parser.out file that explains how yacc built the parsing table from the grammar. """ self.clex = CLexer( error_func=self._lex_error_func, on_lbrace_func=self._lex_on_lbrace_func, on_rbrace_func=self._lex_on_rbrace_func, type_lookup_func=self._lex_type_lookup_func) self.clex.build( optimize=lex_optimize, lextab=lextab) self.tokens = self.clex.tokens rules_with_opt = [ 'abstract_declarator', 'assignment_expression', 'declaration_list', 'declaration_specifiers', 'designation', 'expression', 'identifier_list', 'init_declarator_list', 'parameter_type_list', 'specifier_qualifier_list', 'block_item_list', 'type_qualifier_list', 'struct_declarator_list' ] for rule in rules_with_opt: self._create_opt_rule(rule) self.cparser = yacc.yacc( module=self, start='translation_unit_or_empty', debug=yacc_debug, optimize=yacc_optimize, tabmodule=yacctab) # Stack of scopes for keeping track of symbols. _scope_stack[-1] is # the current (topmost) scope. Each scope is a dictionary that # specifies whether a name is a type. If _scope_stack[n][name] is # True, 'name' is currently a type in the scope. If it's False, # 'name' is used in the scope but not as a type (for instance, if we # saw: int name; # If 'name' is not a key in _scope_stack[n] then 'name' was not defined # in this scope at all. self._scope_stack = [dict()] # Keeps track of the last token given to yacc (the lookahead token) self._last_yielded_token = None def parse(self, text, filename='', debuglevel=0): """ Parses C code and returns an AST. 
text: A string containing the C source code filename: Name of the file being parsed (for meaningful error messages) debuglevel: Debug level to yacc """ self.clex.filename = filename self.clex.reset_lineno() self._scope_stack = [dict()] self._last_yielded_token = None return self.cparser.parse( input=text, lexer=self.clex, debug=debuglevel) ######################-- PRIVATE --###################### def _push_scope(self): self._scope_stack.append(dict()) def _pop_scope(self): assert len(self._scope_stack) > 1 self._scope_stack.pop() def _add_typedef_name(self, name, coord): """ Add a new typedef name (ie a TYPEID) to the current scope """ if not self._scope_stack[-1].get(name, True): self._parse_error( "Typedef %r previously declared as non-typedef " "in this scope" % name, coord) self._scope_stack[-1][name] = True def _add_identifier(self, name, coord): """ Add a new object, function, or enum member name (ie an ID) to the current scope """ if self._scope_stack[-1].get(name, False): self._parse_error( "Non-typedef %r previously declared as typedef " "in this scope" % name, coord) self._scope_stack[-1][name] = False def _is_type_in_scope(self, name): """ Is *name* a typedef-name in the current scope? """ for scope in reversed(self._scope_stack): # If name is an identifier in this scope it shadows typedefs in # higher scopes. in_scope = scope.get(name) if in_scope is not None: return in_scope return False def _lex_error_func(self, msg, line, column): self._parse_error(msg, self._coord(line, column)) def _lex_on_lbrace_func(self): self._push_scope() def _lex_on_rbrace_func(self): self._pop_scope() def _lex_type_lookup_func(self, name): """ Looks up types that were previously defined with typedef. Passed to the lexer for recognizing identifiers that are types. """ is_type = self._is_type_in_scope(name) return is_type def _get_yacc_lookahead_token(self): """ We need access to yacc's lookahead token in certain cases. 
This is the last token yacc requested from the lexer, so we ask the lexer. """ return self.clex.last_token # To understand what's going on here, read sections A.8.5 and # A.8.6 of K&R2 very carefully. # # A C type consists of a basic type declaration, with a list # of modifiers. For example: # # int *c[5]; # # The basic declaration here is 'int c', and the pointer and # the array are the modifiers. # # Basic declarations are represented by TypeDecl (from module # c_ast) and the modifiers are FuncDecl, PtrDecl and # ArrayDecl. # # The standard states that whenever a new modifier is parsed, # it should be added to the end of the list of modifiers. For # example: # # K&R2 A.8.6.2: Array Declarators # # In a declaration T D where D has the form # D1 [constant-expression-opt] # and the type of the identifier in the declaration T D1 is # "type-modifier T", the type of the # identifier of D is "type-modifier array of T" # # This is what this method does. The declarator it receives # can be a list of declarators ending with TypeDecl. It # tacks the modifier to the end of this list, just before # the TypeDecl. # # Additionally, the modifier may be a list itself. This is # useful for pointers, that can come as a chain from the rule # p_pointer. In this case, the whole modifier list is spliced # into the new location. # def _type_modify_decl(self, decl, modifier): """ Tacks a type modifier on a declarator, and returns the modified declarator. Note: the declarator and modifier may be modified """ #~ print '****' #~ decl.show(offset=3) #~ modifier.show(offset=3) #~ print '****' modifier_head = modifier modifier_tail = modifier # The modifier may be a nested list. Reach its tail. # while modifier_tail.type: modifier_tail = modifier_tail.type # If the decl is a basic type, just tack the modifier onto # it # if isinstance(decl, c_ast.TypeDecl): modifier_tail.type = decl return modifier else: # Otherwise, the decl is a list of modifiers. 
Reach # its tail and splice the modifier onto the tail, # pointing to the underlying basic type. # decl_tail = decl while not isinstance(decl_tail.type, c_ast.TypeDecl): decl_tail = decl_tail.type modifier_tail.type = decl_tail.type decl_tail.type = modifier_head return decl # Due to the order in which declarators are constructed, # they have to be fixed in order to look like a normal AST. # # When a declaration arrives from syntax construction, it has # these problems: # * The innermost TypeDecl has no type (because the basic # type is only known at the uppermost declaration level) # * The declaration has no variable name, since that is saved # in the innermost TypeDecl # * The typename of the declaration is a list of type # specifiers, and not a node. Here, basic identifier types # should be separated from more complex types like enums # and structs. # # This method fixes these problems. # def _fix_decl_name_type(self, decl, typename): """ Fixes a declaration. Modifies decl. """ # Reach the underlying basic type # type = decl while not isinstance(type, c_ast.TypeDecl): type = type.type decl.name = type.declname type.quals = decl.quals # The typename is a list of types. If any type in this # list isn't an IdentifierType, it must be the only # type in the list (it's illegal to declare "int enum ..") # If all the types are basic, they're collected in the # IdentifierType holder. # for tn in typename: if not isinstance(tn, c_ast.IdentifierType): if len(typename) > 1: self._parse_error( "Invalid multiple types specified", tn.coord) else: type.type = tn return decl if not typename: # Functions default to returning int # if not isinstance(decl.type, c_ast.FuncDecl): self._parse_error( "Missing type in declaration", decl.coord) type.type = c_ast.IdentifierType( ['int'], coord=decl.coord) else: # At this point, we know that typename is a list of IdentifierType # nodes. Concatenate all the names into a single list. 
# type.type = c_ast.IdentifierType( [name for id in typename for name in id.names], coord=typename[0].coord) return decl def _add_declaration_specifier(self, declspec, newspec, kind): """ Declaration specifiers are represented by a dictionary with the entries: * qual: a list of type qualifiers * storage: a list of storage type qualifiers * type: a list of type specifiers * function: a list of function specifiers This method is given a declaration specifier, and a new specifier of a given kind. Returns the declaration specifier, with the new specifier incorporated. """ spec = declspec or dict(qual=[], storage=[], type=[], function=[]) spec[kind].insert(0, newspec) return spec def _build_declarations(self, spec, decls, typedef_namespace=False): """ Builds a list of declarations all sharing the given specifiers. If typedef_namespace is true, each declared name is added to the "typedef namespace", which also includes objects, functions, and enum constants. """ is_typedef = 'typedef' in spec['storage'] declarations = [] # Bit-fields are allowed to be unnamed. # if decls[0].get('bitsize') is not None: pass # When redeclaring typedef names as identifiers in inner scopes, a # problem can occur where the identifier gets grouped into # spec['type'], leaving decl as None. This can only occur for the # first declarator. # elif decls[0]['decl'] is None: if len(spec['type']) < 2 or len(spec['type'][-1].names) != 1 or \ not self._is_type_in_scope(spec['type'][-1].names[0]): coord = '?' for t in spec['type']: if hasattr(t, 'coord'): coord = t.coord break self._parse_error('Invalid declaration', coord) # Make this look as if it came from "direct_declarator:ID" decls[0]['decl'] = c_ast.TypeDecl( declname=spec['type'][-1].names[0], type=None, quals=None, coord=spec['type'][-1].coord) # Remove the "new" type's name from the end of spec['type'] del spec['type'][-1] # A similar problem can occur where the declaration ends up looking # like an abstract declarator. 
Give it a name if this is the case. # elif not isinstance(decls[0]['decl'], (c_ast.Struct, c_ast.Union, c_ast.IdentifierType)): decls_0_tail = decls[0]['decl'] while not isinstance(decls_0_tail, c_ast.TypeDecl): decls_0_tail = decls_0_tail.type if decls_0_tail.declname is None: decls_0_tail.declname = spec['type'][-1].names[0] del spec['type'][-1] for decl in decls: assert decl['decl'] is not None if is_typedef: declaration = c_ast.Typedef( name=None, quals=spec['qual'], storage=spec['storage'], type=decl['decl'], coord=decl['decl'].coord) else: declaration = c_ast.Decl( name=None, quals=spec['qual'], storage=spec['storage'], funcspec=spec['function'], type=decl['decl'], init=decl.get('init'), bitsize=decl.get('bitsize'), coord=decl['decl'].coord) if isinstance(declaration.type, (c_ast.Struct, c_ast.Union, c_ast.IdentifierType)): fixed_decl = declaration else: fixed_decl = self._fix_decl_name_type(declaration, spec['type']) # Add the type name defined by typedef to a # symbol table (for usage in the lexer) # if typedef_namespace: if is_typedef: self._add_typedef_name(fixed_decl.name, fixed_decl.coord) else: self._add_identifier(fixed_decl.name, fixed_decl.coord) declarations.append(fixed_decl) return declarations def _build_function_definition(self, spec, decl, param_decls, body): """ Builds a function definition. """ assert 'typedef' not in spec['storage'] declaration = self._build_declarations( spec=spec, decls=[dict(decl=decl, init=None)], typedef_namespace=True)[0] return c_ast.FuncDef( decl=declaration, param_decls=param_decls, body=body, coord=decl.coord) def _select_struct_union_class(self, token): """ Given a token (either STRUCT or UNION), selects the appropriate AST class. 
""" if token == 'struct': return c_ast.Struct else: return c_ast.Union ## ## Precedence and associativity of operators ## precedence = ( ('left', 'LOR'), ('left', 'LAND'), ('left', 'OR'), ('left', 'XOR'), ('left', 'AND'), ('left', 'EQ', 'NE'), ('left', 'GT', 'GE', 'LT', 'LE'), ('left', 'RSHIFT', 'LSHIFT'), ('left', 'PLUS', 'MINUS'), ('left', 'TIMES', 'DIVIDE', 'MOD') ) ## ## Grammar productions ## Implementation of the BNF defined in K&R2 A.13 ## # Wrapper around a translation unit, to allow for empty input. # Not strictly part of the C99 Grammar, but useful in practice. # def p_translation_unit_or_empty(self, p): """ translation_unit_or_empty : translation_unit | empty """ if p[1] is None: p[0] = c_ast.FileAST([]) else: p[0] = c_ast.FileAST(p[1]) def p_translation_unit_1(self, p): """ translation_unit : external_declaration """ # Note: external_declaration is already a list # p[0] = p[1] def p_translation_unit_2(self, p): """ translation_unit : translation_unit external_declaration """ if p[2] is not None: p[1].extend(p[2]) p[0] = p[1] # Declarations always come as lists (because they can be # several in one line), so we wrap the function definition # into a list as well, to make the return value of # external_declaration homogenous. # def p_external_declaration_1(self, p): """ external_declaration : function_definition """ p[0] = [p[1]] def p_external_declaration_2(self, p): """ external_declaration : declaration """ p[0] = p[1] def p_external_declaration_3(self, p): """ external_declaration : pp_directive """ p[0] = p[1] def p_external_declaration_4(self, p): """ external_declaration : SEMI """ p[0] = None def p_pp_directive(self, p): """ pp_directive : PPHASH """ self._parse_error('Directives not supported yet', self._coord(p.lineno(1))) # In function definitions, the declarator can be followed by # a declaration list, for old "K&R style" function definitios. 
# def p_function_definition_1(self, p): """ function_definition : declarator declaration_list_opt compound_statement """ # no declaration specifiers - 'int' becomes the default type spec = dict( qual=[], storage=[], type=[c_ast.IdentifierType(['int'], coord=self._coord(p.lineno(1)))], function=[]) p[0] = self._build_function_definition( spec=spec, decl=p[1], param_decls=p[2], body=p[3]) def p_function_definition_2(self, p): """ function_definition : declaration_specifiers declarator declaration_list_opt compound_statement """ spec = p[1] p[0] = self._build_function_definition( spec=spec, decl=p[2], param_decls=p[3], body=p[4]) def p_statement(self, p): """ statement : labeled_statement | expression_statement | compound_statement | selection_statement | iteration_statement | jump_statement """ p[0] = p[1] # In C, declarations can come several in a line: # int x, *px, romulo = 5; # # However, for the AST, we will split them to separate Decl # nodes. # # This rule splits its declarations and always returns a list # of Decl nodes, even if it's one element long. # def p_decl_body(self, p): """ decl_body : declaration_specifiers init_declarator_list_opt """ spec = p[1] # p[2] (init_declarator_list_opt) is either a list or None # if p[2] is None: # By the standard, you must have at least one declarator unless # declaring a structure tag, a union tag, or the members of an # enumeration. # ty = spec['type'] s_u_or_e = (c_ast.Struct, c_ast.Union, c_ast.Enum) if len(ty) == 1 and isinstance(ty[0], s_u_or_e): decls = [c_ast.Decl( name=None, quals=spec['qual'], storage=spec['storage'], funcspec=spec['function'], type=ty[0], init=None, bitsize=None, coord=ty[0].coord)] # However, this case can also occur on redeclared identifiers in # an inner scope. The trouble is that the redeclared type's name # gets grouped into declaration_specifiers; _build_declarations # compensates for this. 
# else: decls = self._build_declarations( spec=spec, decls=[dict(decl=None, init=None)], typedef_namespace=True) else: decls = self._build_declarations( spec=spec, decls=p[2], typedef_namespace=True) p[0] = decls # The declaration has been split to a decl_body sub-rule and # SEMI, because having them in a single rule created a problem # for defining typedefs. # # If a typedef line was directly followed by a line using the # type defined with the typedef, the type would not be # recognized. This is because to reduce the declaration rule, # the parser's lookahead asked for the token after SEMI, which # was the type from the next line, and the lexer had no chance # to see the updated type symbol table. # # Splitting solves this problem, because after seeing SEMI, # the parser reduces decl_body, which actually adds the new # type into the table to be seen by the lexer before the next # line is reached. def p_declaration(self, p): """ declaration : decl_body SEMI """ p[0] = p[1] # Since each declaration is a list of declarations, this # rule will combine all the declarations and return a single # list # def p_declaration_list(self, p): """ declaration_list : declaration | declaration_list declaration """ p[0] = p[1] if len(p) == 2 else p[1] + p[2] def p_declaration_specifiers_1(self, p): """ declaration_specifiers : type_qualifier declaration_specifiers_opt """ p[0] = self._add_declaration_specifier(p[2], p[1], 'qual') def p_declaration_specifiers_2(self, p): """ declaration_specifiers : type_specifier declaration_specifiers_opt """ p[0] = self._add_declaration_specifier(p[2], p[1], 'type') def p_declaration_specifiers_3(self, p): """ declaration_specifiers : storage_class_specifier declaration_specifiers_opt """ p[0] = self._add_declaration_specifier(p[2], p[1], 'storage') def p_declaration_specifiers_4(self, p): """ declaration_specifiers : function_specifier declaration_specifiers_opt """ p[0] = self._add_declaration_specifier(p[2], p[1], 'function') def 
p_storage_class_specifier(self, p): """ storage_class_specifier : AUTO | REGISTER | STATIC | EXTERN | TYPEDEF """ p[0] = p[1] def p_function_specifier(self, p): """ function_specifier : INLINE """ p[0] = p[1] def p_type_specifier_1(self, p): """ type_specifier : VOID | _BOOL | CHAR | SHORT | INT | LONG | FLOAT | DOUBLE | _COMPLEX | SIGNED | UNSIGNED """ p[0] = c_ast.IdentifierType([p[1]], coord=self._coord(p.lineno(1))) def p_type_specifier_2(self, p): """ type_specifier : typedef_name | enum_specifier | struct_or_union_specifier """ p[0] = p[1] def p_type_qualifier(self, p): """ type_qualifier : CONST | RESTRICT | VOLATILE """ p[0] = p[1] def p_init_declarator_list_1(self, p): """ init_declarator_list : init_declarator | init_declarator_list COMMA init_declarator """ p[0] = p[1] + [p[3]] if len(p) == 4 else [p[1]] # If the code is declaring a variable that was declared a typedef in an # outer scope, yacc will think the name is part of declaration_specifiers, # not init_declarator, and will then get confused by EQUALS. Pass None # up in place of declarator, and handle this at a higher level. # def p_init_declarator_list_2(self, p): """ init_declarator_list : EQUALS initializer """ p[0] = [dict(decl=None, init=p[2])] # Similarly, if the code contains duplicate typedefs of, for example, # array types, the array portion will appear as an abstract declarator. 
# def p_init_declarator_list_3(self, p): """ init_declarator_list : abstract_declarator """ p[0] = [dict(decl=p[1], init=None)] # Returns a {decl=<declarator> : init=<initializer>} dictionary # If there's no initializer, uses None # def p_init_declarator(self, p): """ init_declarator : declarator | declarator EQUALS initializer """ p[0] = dict(decl=p[1], init=(p[3] if len(p) > 2 else None)) def p_specifier_qualifier_list_1(self, p): """ specifier_qualifier_list : type_qualifier specifier_qualifier_list_opt """ p[0] = self._add_declaration_specifier(p[2], p[1], 'qual') def p_specifier_qualifier_list_2(self, p): """ specifier_qualifier_list : type_specifier specifier_qualifier_list_opt """ p[0] = self._add_declaration_specifier(p[2], p[1], 'type') # TYPEID is allowed here (and in other struct/enum related tag names), because # struct/enum tags reside in their own namespace and can be named the same as types # def p_struct_or_union_specifier_1(self, p): """ struct_or_union_specifier : struct_or_union ID | struct_or_union TYPEID """ klass = self._select_struct_union_class(p[1]) p[0] = klass( name=p[2], decls=None, coord=self._coord(p.lineno(2))) def p_struct_or_union_specifier_2(self, p): """ struct_or_union_specifier : struct_or_union brace_open struct_declaration_list brace_close """ klass = self._select_struct_union_class(p[1]) p[0] = klass( name=None, decls=p[3], coord=self._coord(p.lineno(2))) def p_struct_or_union_specifier_3(self, p): """ struct_or_union_specifier : struct_or_union ID brace_open struct_declaration_list brace_close | struct_or_union TYPEID brace_open struct_declaration_list brace_close """ klass = self._select_struct_union_class(p[1]) p[0] = klass( name=p[2], decls=p[4], coord=self._coord(p.lineno(2))) def p_struct_or_union(self, p): """ struct_or_union : STRUCT | UNION """ p[0] = p[1] # Combine all declarations into a single list # def p_struct_declaration_list(self, p): """ struct_declaration_list : struct_declaration | struct_declaration_list 
struct_declaration """ p[0] = p[1] if len(p) == 2 else p[1] + p[2] def p_struct_declaration_1(self, p): """ struct_declaration : specifier_qualifier_list struct_declarator_list_opt SEMI """ spec = p[1] assert 'typedef' not in spec['storage'] if p[2] is not None: decls = self._build_declarations( spec=spec, decls=p[2]) elif len(spec['type']) == 1: # Anonymous struct/union, gcc extension, C1x feature. # Although the standard only allows structs/unions here, I see no # reason to disallow other types since some compilers have typedefs # here, and pycparser isn't about rejecting all invalid code. # node = spec['type'][0] if isinstance(node, c_ast.Node): decl_type = node else: decl_type = c_ast.IdentifierType(node) decls = self._build_declarations( spec=spec, decls=[dict(decl=decl_type)]) else: # Structure/union members can have the same names as typedefs. # The trouble is that the member's name gets grouped into # specifier_qualifier_list; _build_declarations compensates. # decls = self._build_declarations( spec=spec, decls=[dict(decl=None, init=None)]) p[0] = decls def p_struct_declaration_2(self, p): """ struct_declaration : specifier_qualifier_list abstract_declarator SEMI """ # "Abstract declarator?!", you ask? Structure members can have the # same names as typedefs. 
The trouble is that the member's name gets # grouped into specifier_qualifier_list, leaving any remainder to # appear as an abstract declarator, as in: # typedef int Foo; # struct { Foo Foo[3]; }; # p[0] = self._build_declarations( spec=p[1], decls=[dict(decl=p[2], init=None)]) def p_struct_declarator_list(self, p): """ struct_declarator_list : struct_declarator | struct_declarator_list COMMA struct_declarator """ p[0] = p[1] + [p[3]] if len(p) == 4 else [p[1]] # struct_declarator passes up a dict with the keys: decl (for # the underlying declarator) and bitsize (for the bitsize) # def p_struct_declarator_1(self, p): """ struct_declarator : declarator """ p[0] = {'decl': p[1], 'bitsize': None} def p_struct_declarator_2(self, p): """ struct_declarator : declarator COLON constant_expression | COLON constant_expression """ if len(p) > 3: p[0] = {'decl': p[1], 'bitsize': p[3]} else: p[0] = {'decl': c_ast.TypeDecl(None, None, None), 'bitsize': p[2]} def p_enum_specifier_1(self, p): """ enum_specifier : ENUM ID | ENUM TYPEID """ p[0] = c_ast.Enum(p[2], None, self._coord(p.lineno(1))) def p_enum_specifier_2(self, p): """ enum_specifier : ENUM brace_open enumerator_list brace_close """ p[0] = c_ast.Enum(None, p[3], self._coord(p.lineno(1))) def p_enum_specifier_3(self, p): """ enum_specifier : ENUM ID brace_open enumerator_list brace_close | ENUM TYPEID brace_open enumerator_list brace_close """ p[0] = c_ast.Enum(p[2], p[4], self._coord(p.lineno(1))) def p_enumerator_list(self, p): """ enumerator_list : enumerator | enumerator_list COMMA | enumerator_list COMMA enumerator """ if len(p) == 2: p[0] = c_ast.EnumeratorList([p[1]], p[1].coord) elif len(p) == 3: p[0] = p[1] else: p[1].enumerators.append(p[3]) p[0] = p[1] def p_enumerator(self, p): """ enumerator : ID | ID EQUALS constant_expression """ if len(p) == 2: enumerator = c_ast.Enumerator( p[1], None, self._coord(p.lineno(1))) else: enumerator = c_ast.Enumerator( p[1], p[3], self._coord(p.lineno(1))) 
self._add_identifier(enumerator.name, enumerator.coord) p[0] = enumerator def p_declarator_1(self, p): """ declarator : direct_declarator """ p[0] = p[1] def p_declarator_2(self, p): """ declarator : pointer direct_declarator """ p[0] = self._type_modify_decl(p[2], p[1]) # Since it's impossible for a type to be specified after a pointer, assume # it's intended to be the name for this declaration. _add_identifier will # raise an error if this TYPEID can't be redeclared. # def p_declarator_3(self, p): """ declarator : pointer TYPEID """ decl = c_ast.TypeDecl( declname=p[2], type=None, quals=None, coord=self._coord(p.lineno(2))) p[0] = self._type_modify_decl(decl, p[1]) def p_direct_declarator_1(self, p): """ direct_declarator : ID """ p[0] = c_ast.TypeDecl( declname=p[1], type=None, quals=None, coord=self._coord(p.lineno(1))) def p_direct_declarator_2(self, p): """ direct_declarator : LPAREN declarator RPAREN """ p[0] = p[2] def p_direct_declarator_3(self, p): """ direct_declarator : direct_declarator LBRACKET assignment_expression_opt RBRACKET """ arr = c_ast.ArrayDecl( type=None, dim=p[3], coord=p[1].coord) p[0] = self._type_modify_decl(decl=p[1], modifier=arr) # Special for VLAs # def p_direct_declarator_4(self, p): """ direct_declarator : direct_declarator LBRACKET TIMES RBRACKET """ arr = c_ast.ArrayDecl( type=None, dim=c_ast.ID(p[3], self._coord(p.lineno(3))), coord=p[1].coord) p[0] = self._type_modify_decl(decl=p[1], modifier=arr) def p_direct_declarator_5(self, p): """ direct_declarator : direct_declarator LPAREN parameter_type_list RPAREN | direct_declarator LPAREN identifier_list_opt RPAREN """ func = c_ast.FuncDecl( args=p[3], type=None, coord=p[1].coord) # To see why _get_yacc_lookahead_token is needed, consider: # typedef char TT; # void foo(int TT) { TT = 10; } # Outside the function, TT is a typedef, but inside (starting and # ending with the braces) it's a parameter. The trouble begins with # yacc's lookahead token. 
We don't know if we're declaring or # defining a function until we see LBRACE, but if we wait for yacc to # trigger a rule on that token, then TT will have already been read # and incorrectly interpreted as TYPEID. We need to add the # parameters to the scope the moment the lexer sees LBRACE. # if self._get_yacc_lookahead_token().type == "LBRACE": if func.args is not None: for param in func.args.params: if isinstance(param, c_ast.EllipsisParam): break self._add_identifier(param.name, param.coord) p[0] = self._type_modify_decl(decl=p[1], modifier=func) def p_pointer(self, p): """ pointer : TIMES type_qualifier_list_opt | TIMES type_qualifier_list_opt pointer """ coord = self._coord(p.lineno(1)) p[0] = c_ast.PtrDecl( quals=p[2] or [], type=p[3] if len(p) > 3 else None, coord=coord) def p_type_qualifier_list(self, p): """ type_qualifier_list : type_qualifier | type_qualifier_list type_qualifier """ p[0] = [p[1]] if len(p) == 2 else p[1] + [p[2]] def p_parameter_type_list(self, p): """ parameter_type_list : parameter_list | parameter_list COMMA ELLIPSIS """ if len(p) > 2: p[1].params.append(c_ast.EllipsisParam(self._coord(p.lineno(3)))) p[0] = p[1] def p_parameter_list(self, p): """ parameter_list : parameter_declaration | parameter_list COMMA parameter_declaration """ if len(p) == 2: # single parameter p[0] = c_ast.ParamList([p[1]], p[1].coord) else: p[1].params.append(p[3]) p[0] = p[1] def p_parameter_declaration_1(self, p): """ parameter_declaration : declaration_specifiers declarator """ spec = p[1] if not spec['type']: spec['type'] = [c_ast.IdentifierType(['int'], coord=self._coord(p.lineno(1)))] p[0] = self._build_declarations( spec=spec, decls=[dict(decl=p[2])])[0] def p_parameter_declaration_2(self, p): """ parameter_declaration : declaration_specifiers abstract_declarator_opt """ spec = p[1] if not spec['type']: spec['type'] = [c_ast.IdentifierType(['int'], coord=self._coord(p.lineno(1)))] # Parameters can have the same names as typedefs. 
The trouble is that # the parameter's name gets grouped into declaration_specifiers, making # it look like an old-style declaration; compensate. # if len(spec['type']) > 1 and len(spec['type'][-1].names) == 1 and \ self._is_type_in_scope(spec['type'][-1].names[0]): decl = self._build_declarations( spec=spec, decls=[dict(decl=p[2], init=None)])[0] # This truly is an old-style parameter declaration # else: decl = c_ast.Typename( quals=spec['qual'], type=p[2] or c_ast.TypeDecl(None, None, None), coord=self._coord(p.lineno(2))) typename = spec['type'] decl = self._fix_decl_name_type(decl, typename) p[0] = decl def p_identifier_list(self, p): """ identifier_list : identifier | identifier_list COMMA identifier """ if len(p) == 2: # single parameter p[0] = c_ast.ParamList([p[1]], p[1].coord) else: p[1].params.append(p[3]) p[0] = p[1] def p_initializer_1(self, p): """ initializer : assignment_expression """ p[0] = p[1] def p_initializer_2(self, p): """ initializer : brace_open initializer_list brace_close | brace_open initializer_list COMMA brace_close """ p[0] = p[2] def p_initializer_list(self, p): """ initializer_list : designation_opt initializer | initializer_list COMMA designation_opt initializer """ if len(p) == 3: # single initializer init = p[2] if p[1] is None else c_ast.NamedInitializer(p[1], p[2]) p[0] = c_ast.InitList([init], p[2].coord) else: init = p[4] if p[3] is None else c_ast.NamedInitializer(p[3], p[4]) p[1].exprs.append(init) p[0] = p[1] def p_designation(self, p): """ designation : designator_list EQUALS """ p[0] = p[1] # Designators are represented as a list of nodes, in the order in which # they're written in the code. 
# def p_designator_list(self, p): """ designator_list : designator | designator_list designator """ p[0] = [p[1]] if len(p) == 2 else p[1] + [p[2]] def p_designator(self, p): """ designator : LBRACKET constant_expression RBRACKET | PERIOD identifier """ p[0] = p[2] def p_type_name(self, p): """ type_name : specifier_qualifier_list abstract_declarator_opt """ #~ print '==========' #~ print p[1] #~ print p[2] #~ print p[2].children() #~ print '==========' typename = c_ast.Typename( quals=p[1]['qual'], type=p[2] or c_ast.TypeDecl(None, None, None), coord=self._coord(p.lineno(2))) p[0] = self._fix_decl_name_type(typename, p[1]['type']) def p_abstract_declarator_1(self, p): """ abstract_declarator : pointer """ dummytype = c_ast.TypeDecl(None, None, None) p[0] = self._type_modify_decl( decl=dummytype, modifier=p[1]) def p_abstract_declarator_2(self, p): """ abstract_declarator : pointer direct_abstract_declarator """ p[0] = self._type_modify_decl(p[2], p[1]) def p_abstract_declarator_3(self, p): """ abstract_declarator : direct_abstract_declarator """ p[0] = p[1] # Creating and using direct_abstract_declarator_opt here # instead of listing both direct_abstract_declarator and the # lack of it in the beginning of _1 and _2 caused two # shift/reduce errors. 
# def p_direct_abstract_declarator_1(self, p): """ direct_abstract_declarator : LPAREN abstract_declarator RPAREN """ p[0] = p[2] def p_direct_abstract_declarator_2(self, p): """ direct_abstract_declarator : direct_abstract_declarator LBRACKET assignment_expression_opt RBRACKET """ arr = c_ast.ArrayDecl( type=None, dim=p[3], coord=p[1].coord) p[0] = self._type_modify_decl(decl=p[1], modifier=arr) def p_direct_abstract_declarator_3(self, p): """ direct_abstract_declarator : LBRACKET assignment_expression_opt RBRACKET """ p[0] = c_ast.ArrayDecl( type=c_ast.TypeDecl(None, None, None), dim=p[2], coord=self._coord(p.lineno(1))) def p_direct_abstract_declarator_4(self, p): """ direct_abstract_declarator : direct_abstract_declarator LBRACKET TIMES RBRACKET """ arr = c_ast.ArrayDecl( type=None, dim=c_ast.ID(p[3], self._coord(p.lineno(3))), coord=p[1].coord) p[0] = self._type_modify_decl(decl=p[1], modifier=arr) def p_direct_abstract_declarator_5(self, p): """ direct_abstract_declarator : LBRACKET TIMES RBRACKET """ p[0] = c_ast.ArrayDecl( type=c_ast.TypeDecl(None, None, None), dim=c_ast.ID(p[3], self._coord(p.lineno(3))), coord=self._coord(p.lineno(1))) def p_direct_abstract_declarator_6(self, p): """ direct_abstract_declarator : direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN """ func = c_ast.FuncDecl( args=p[3], type=None, coord=p[1].coord) p[0] = self._type_modify_decl(decl=p[1], modifier=func) def p_direct_abstract_declarator_7(self, p): """ direct_abstract_declarator : LPAREN parameter_type_list_opt RPAREN """ p[0] = c_ast.FuncDecl( args=p[2], type=c_ast.TypeDecl(None, None, None), coord=self._coord(p.lineno(1))) # declaration is a list, statement isn't. 
To make it consistent, block_item # will always be a list # def p_block_item(self, p): """ block_item : declaration | statement """ p[0] = p[1] if isinstance(p[1], list) else [p[1]] # Since we made block_item a list, this just combines lists # def p_block_item_list(self, p): """ block_item_list : block_item | block_item_list block_item<|fim▁hole|> def p_compound_statement_1(self, p): """ compound_statement : brace_open block_item_list_opt brace_close """ p[0] = c_ast.Compound( block_items=p[2], coord=self._coord(p.lineno(1))) def p_labeled_statement_1(self, p): """ labeled_statement : ID COLON statement """ p[0] = c_ast.Label(p[1], p[3], self._coord(p.lineno(1))) def p_labeled_statement_2(self, p): """ labeled_statement : CASE constant_expression COLON statement """ p[0] = c_ast.Case(p[2], [p[4]], self._coord(p.lineno(1))) def p_labeled_statement_3(self, p): """ labeled_statement : DEFAULT COLON statement """ p[0] = c_ast.Default([p[3]], self._coord(p.lineno(1))) def p_selection_statement_1(self, p): """ selection_statement : IF LPAREN expression RPAREN statement """ p[0] = c_ast.If(p[3], p[5], None, self._coord(p.lineno(1))) def p_selection_statement_2(self, p): """ selection_statement : IF LPAREN expression RPAREN statement ELSE statement """ p[0] = c_ast.If(p[3], p[5], p[7], self._coord(p.lineno(1))) def p_selection_statement_3(self, p): """ selection_statement : SWITCH LPAREN expression RPAREN statement """ p[0] = fix_switch_cases( c_ast.Switch(p[3], p[5], self._coord(p.lineno(1)))) def p_iteration_statement_1(self, p): """ iteration_statement : WHILE LPAREN expression RPAREN statement """ p[0] = c_ast.While(p[3], p[5], self._coord(p.lineno(1))) def p_iteration_statement_2(self, p): """ iteration_statement : DO statement WHILE LPAREN expression RPAREN SEMI """ p[0] = c_ast.DoWhile(p[5], p[2], self._coord(p.lineno(1))) def p_iteration_statement_3(self, p): """ iteration_statement : FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN 
statement """ p[0] = c_ast.For(p[3], p[5], p[7], p[9], self._coord(p.lineno(1))) def p_iteration_statement_4(self, p): """ iteration_statement : FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN statement """ p[0] = c_ast.For(c_ast.DeclList(p[3]), p[4], p[6], p[8], self._coord(p.lineno(1))) def p_jump_statement_1(self, p): """ jump_statement : GOTO ID SEMI """ p[0] = c_ast.Goto(p[2], self._coord(p.lineno(1))) def p_jump_statement_2(self, p): """ jump_statement : BREAK SEMI """ p[0] = c_ast.Break(self._coord(p.lineno(1))) def p_jump_statement_3(self, p): """ jump_statement : CONTINUE SEMI """ p[0] = c_ast.Continue(self._coord(p.lineno(1))) def p_jump_statement_4(self, p): """ jump_statement : RETURN expression SEMI | RETURN SEMI """ p[0] = c_ast.Return(p[2] if len(p) == 4 else None, self._coord(p.lineno(1))) def p_expression_statement(self, p): """ expression_statement : expression_opt SEMI """ if p[1] is None: p[0] = c_ast.EmptyStatement(self._coord(p.lineno(1))) else: p[0] = p[1] def p_expression(self, p): """ expression : assignment_expression | expression COMMA assignment_expression """ if len(p) == 2: p[0] = p[1] else: if not isinstance(p[1], c_ast.ExprList): p[1] = c_ast.ExprList([p[1]], p[1].coord) p[1].exprs.append(p[3]) p[0] = p[1] def p_typedef_name(self, p): """ typedef_name : TYPEID """ p[0] = c_ast.IdentifierType([p[1]], coord=self._coord(p.lineno(1))) def p_assignment_expression(self, p): """ assignment_expression : conditional_expression | unary_expression assignment_operator assignment_expression """ if len(p) == 2: p[0] = p[1] else: p[0] = c_ast.Assignment(p[2], p[1], p[3], p[1].coord) # K&R2 defines these as many separate rules, to encode # precedence and associativity. Why work hard ? I'll just use # the built in precedence/associativity specification feature # of PLY. 
(see precedence declaration above) # def p_assignment_operator(self, p): """ assignment_operator : EQUALS | XOREQUAL | TIMESEQUAL | DIVEQUAL | MODEQUAL | PLUSEQUAL | MINUSEQUAL | LSHIFTEQUAL | RSHIFTEQUAL | ANDEQUAL | OREQUAL """ p[0] = p[1] def p_constant_expression(self, p): """ constant_expression : conditional_expression """ p[0] = p[1] def p_conditional_expression(self, p): """ conditional_expression : binary_expression | binary_expression CONDOP expression COLON conditional_expression """ if len(p) == 2: p[0] = p[1] else: p[0] = c_ast.TernaryOp(p[1], p[3], p[5], p[1].coord) def p_binary_expression(self, p): """ binary_expression : cast_expression | binary_expression TIMES binary_expression | binary_expression DIVIDE binary_expression | binary_expression MOD binary_expression | binary_expression PLUS binary_expression | binary_expression MINUS binary_expression | binary_expression RSHIFT binary_expression | binary_expression LSHIFT binary_expression | binary_expression LT binary_expression | binary_expression LE binary_expression | binary_expression GE binary_expression | binary_expression GT binary_expression | binary_expression EQ binary_expression | binary_expression NE binary_expression | binary_expression AND binary_expression | binary_expression OR binary_expression | binary_expression XOR binary_expression | binary_expression LAND binary_expression | binary_expression LOR binary_expression """ if len(p) == 2: p[0] = p[1] else: p[0] = c_ast.BinaryOp(p[2], p[1], p[3], p[1].coord) def p_cast_expression_1(self, p): """ cast_expression : unary_expression """ p[0] = p[1] def p_cast_expression_2(self, p): """ cast_expression : LPAREN type_name RPAREN cast_expression """ p[0] = c_ast.Cast(p[2], p[4], self._coord(p.lineno(1))) def p_unary_expression_1(self, p): """ unary_expression : postfix_expression """ p[0] = p[1] def p_unary_expression_2(self, p): """ unary_expression : PLUSPLUS unary_expression | MINUSMINUS unary_expression | unary_operator cast_expression 
""" p[0] = c_ast.UnaryOp(p[1], p[2], p[2].coord) def p_unary_expression_3(self, p): """ unary_expression : SIZEOF unary_expression | SIZEOF LPAREN type_name RPAREN """ p[0] = c_ast.UnaryOp( p[1], p[2] if len(p) == 3 else p[3], self._coord(p.lineno(1))) def p_unary_operator(self, p): """ unary_operator : AND | TIMES | PLUS | MINUS | NOT | LNOT """ p[0] = p[1] def p_postfix_expression_1(self, p): """ postfix_expression : primary_expression """ p[0] = p[1] def p_postfix_expression_2(self, p): """ postfix_expression : postfix_expression LBRACKET expression RBRACKET """ p[0] = c_ast.ArrayRef(p[1], p[3], p[1].coord) def p_postfix_expression_3(self, p): """ postfix_expression : postfix_expression LPAREN argument_expression_list RPAREN | postfix_expression LPAREN RPAREN """ p[0] = c_ast.FuncCall(p[1], p[3] if len(p) == 5 else None, p[1].coord) def p_postfix_expression_4(self, p): """ postfix_expression : postfix_expression PERIOD ID | postfix_expression PERIOD TYPEID | postfix_expression ARROW ID | postfix_expression ARROW TYPEID """ field = c_ast.ID(p[3], self._coord(p.lineno(3))) p[0] = c_ast.StructRef(p[1], p[2], field, p[1].coord) def p_postfix_expression_5(self, p): """ postfix_expression : postfix_expression PLUSPLUS | postfix_expression MINUSMINUS """ p[0] = c_ast.UnaryOp('p' + p[2], p[1], p[1].coord) def p_postfix_expression_6(self, p): """ postfix_expression : LPAREN type_name RPAREN brace_open initializer_list brace_close | LPAREN type_name RPAREN brace_open initializer_list COMMA brace_close """ p[0] = c_ast.CompoundLiteral(p[2], p[5]) def p_primary_expression_1(self, p): """ primary_expression : identifier """ p[0] = p[1] def p_primary_expression_2(self, p): """ primary_expression : constant """ p[0] = p[1] def p_primary_expression_3(self, p): """ primary_expression : unified_string_literal | unified_wstring_literal """ p[0] = p[1] def p_primary_expression_4(self, p): """ primary_expression : LPAREN expression RPAREN """ p[0] = p[2] def 
p_argument_expression_list(self, p): """ argument_expression_list : assignment_expression | argument_expression_list COMMA assignment_expression """ if len(p) == 2: # single expr p[0] = c_ast.ExprList([p[1]], p[1].coord) else: p[1].exprs.append(p[3]) p[0] = p[1] def p_identifier(self, p): """ identifier : ID """ p[0] = c_ast.ID(p[1], self._coord(p.lineno(1))) def p_constant_1(self, p): """ constant : INT_CONST_DEC | INT_CONST_OCT | INT_CONST_HEX """ p[0] = c_ast.Constant( 'int', p[1], self._coord(p.lineno(1))) def p_constant_2(self, p): """ constant : FLOAT_CONST | HEX_FLOAT_CONST """ p[0] = c_ast.Constant( 'float', p[1], self._coord(p.lineno(1))) def p_constant_3(self, p): """ constant : CHAR_CONST | WCHAR_CONST """ p[0] = c_ast.Constant( 'char', p[1], self._coord(p.lineno(1))) # The "unified" string and wstring literal rules are for supporting # concatenation of adjacent string literals. # I.e. "hello " "world" is seen by the C compiler as a single string literal # with the value "hello world" # def p_unified_string_literal(self, p): """ unified_string_literal : STRING_LITERAL | unified_string_literal STRING_LITERAL """ if len(p) == 2: # single literal p[0] = c_ast.Constant( 'string', p[1], self._coord(p.lineno(1))) else: p[1].value = p[1].value[:-1] + p[2][1:] p[0] = p[1] def p_unified_wstring_literal(self, p): """ unified_wstring_literal : WSTRING_LITERAL | unified_wstring_literal WSTRING_LITERAL """ if len(p) == 2: # single literal p[0] = c_ast.Constant( 'string', p[1], self._coord(p.lineno(1))) else: p[1].value = p[1].value.rstrip[:-1] + p[2][1:] p[0] = p[1] def p_brace_open(self, p): """ brace_open : LBRACE """ p[0] = p[1] def p_brace_close(self, p): """ brace_close : RBRACE """ p[0] = p[1] def p_empty(self, p): 'empty : ' p[0] = None def p_error(self, p): # If error recovery is added here in the future, make sure # _get_yacc_lookahead_token still works! 
# if p: self._parse_error( 'before: %s' % p.value, self._coord(lineno=p.lineno, column=self.clex.find_tok_column(p))) else: self._parse_error('At end of input', '') #------------------------------------------------------------------------------ if __name__ == "__main__": import pprint import time, sys #t1 = time.time() #parser = CParser(lex_optimize=True, yacc_debug=True, yacc_optimize=False) #sys.write(time.time() - t1) #buf = ''' #int (*k)(int); #''' ## set debuglevel to 2 for debugging #t = parser.parse(buf, 'x.c', debuglevel=0) #t.show(showcoord=True)<|fim▁end|>
""" # Empty block items (plain ';') produce [None], so ignore them p[0] = p[1] if (len(p) == 2 or p[2] == [None]) else p[1] + p[2]
<|file_name|>opengl3graphics.hpp<|end_file_name|><|fim▁begin|>/* _______ __ __ __ ______ __ __ _______ __ __ * / _____/\ / /\ / /\ / /\ / ____/\ / /\ / /\ / ___ /\ / |\/ /\ * / /\____\// / // / // / // /\___\// /_// / // /\_/ / // , |/ / / * / / /__ / / // / // / // / / / ___ / // ___ / // /| ' / / * / /_// /\ / /_// / // / // /_/_ / / // / // /\_/ / // / | / / * /______/ //______/ //_/ //_____/\ /_/ //_/ //_/ //_/ //_/ /|_/ / * \______\/ \______\/ \_\/ \_____\/ \_\/ \_\/ \_\/ \_\/ \_\/ \_\/ * * Copyright (c) 2004 - 2008 Olof Naessén and Per Larsson * * * Per Larsson a.k.a finalman * Olof Naessén a.k.a jansem/yakslem * * Visit: http://guichan.sourceforge.net * * License: (BSD) * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the * distribution. * 3. Neither the name of Guichan nor the names of its contributors may * be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED<|fim▁hole|> * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ #ifndef GCN_OPENGL3GRAPHICS_HPP #define GCN_OPENGL3GRAPHICS_HPP #if defined (_WIN32) #define WIN32_LEAN_AND_MEAN #include <windows.h> #endif #if defined (__amigaos4__) #include <mgl/gl.h> #elif defined (__APPLE__) #include <OpenGL/gl.h> #else #include <GL/gl.h> #endif #include <glm/glm.hpp> #include <glm/gtc/matrix_transform.hpp> #include <glm/gtc/type_ptr.hpp> #include "guichan/color.hpp" #include "guichan/graphics.hpp" #include "guichan/platform.hpp" namespace gcn { /** * OpenGL3 implementation of the Graphics. */ class GCN_EXTENSION_DECLSPEC OpenGL3Graphics: public Graphics { public: // Needed so that drawImage(gcn::Image *, int, int) is visible. using Graphics::drawImage; /** * Constructor. */ OpenGL3Graphics(); /** * Constructor. * * @param width the width of the logical drawing surface. Should be the * same as the screen resolution. * * @param height the height ot the logical drawing surface. Should be * the same as the screen resolution. */ OpenGL3Graphics(int width, int height); /** * Destructor. */ virtual ~OpenGL3Graphics(); /** * Sets the target plane on where to draw. * * @param width the width of the logical drawing surface. Should be the * same as the screen resolution. * @param height the height ot the logical drawing surface. Should be * the same as the screen resolution. */ virtual void setTargetPlane(int width, int height); /** * Gets the target plane width. * * @return The target plane width. */ virtual int getTargetPlaneWidth() const; /** * Gets the target plane height. 
* * @return The target plane height. */ virtual int getTargetPlaneHeight() const; // Inherited from Graphics virtual void _beginDraw(); virtual void _endDraw(); virtual bool pushClipArea(Rectangle area); virtual void popClipArea(); virtual void drawImage(const Image* image, int srcX, int srcY, int dstX, int dstY, int width, int height); virtual void drawPoint(int x, int y); virtual void drawLine(int x1, int y1, int x2, int y2); virtual void drawRectangle(const Rectangle& rectangle); virtual void fillRectangle(const Rectangle& rectangle); virtual void setColor(const Color& color); virtual const Color& getColor() const; protected: int mWidth, mHeight; bool mAlpha; Color mColor; GLuint mVBO; GLuint mImageShader; GLuint mLineShader; glm::mat4 mProjection; mutable bool mInitialize; private: GLuint createShaderProgram(const std::string& vs, const std::string& fs); }; } #endif // end GCN_OPENGL3GRAPHICS_HPP<|fim▁end|>
* TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
<|file_name|>servicemanagement_v1_generated_service_manager_create_service_sync.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at<|fim▁hole|># # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # Generated code. DO NOT EDIT! # # Snippet for CreateService # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: # python3 -m pip install google-cloud-service-management # [START servicemanagement_v1_generated_ServiceManager_CreateService_sync] from google.cloud import servicemanagement_v1 def sample_create_service(): # Create a client client = servicemanagement_v1.ServiceManagerClient() # Initialize request argument(s) request = servicemanagement_v1.CreateServiceRequest( ) # Make the request operation = client.create_service(request=request) print("Waiting for operation to complete...") response = operation.result() # Handle the response print(response) # [END servicemanagement_v1_generated_ServiceManager_CreateService_sync]<|fim▁end|>
<|file_name|>container.go<|end_file_name|><|fim▁begin|>package container import ( "bytes" "encoding/json" "fmt" "io" "net" "os" "path/filepath" "runtime" "strconv" "strings" "sync" "syscall" "time" "github.com/containerd/containerd/cio" containertypes "github.com/docker/docker/api/types/container" mounttypes "github.com/docker/docker/api/types/mount" networktypes "github.com/docker/docker/api/types/network" swarmtypes "github.com/docker/docker/api/types/swarm" "github.com/docker/docker/container/stream" "github.com/docker/docker/daemon/exec" "github.com/docker/docker/daemon/logger" "github.com/docker/docker/daemon/logger/jsonfilelog" "github.com/docker/docker/daemon/network" "github.com/docker/docker/image" "github.com/docker/docker/layer" "github.com/docker/docker/opts" "github.com/docker/docker/pkg/containerfs" "github.com/docker/docker/pkg/idtools" "github.com/docker/docker/pkg/ioutils" "github.com/docker/docker/pkg/signal" "github.com/docker/docker/pkg/symlink" "github.com/docker/docker/pkg/system" "github.com/docker/docker/restartmanager" "github.com/docker/docker/runconfig" "github.com/docker/docker/volume" "github.com/docker/go-connections/nat" "github.com/docker/go-units" "github.com/docker/libnetwork" "github.com/docker/libnetwork/netlabel" "github.com/docker/libnetwork/options" "github.com/docker/libnetwork/types" agentexec "github.com/docker/swarmkit/agent/exec" "github.com/pkg/errors" "github.com/sirupsen/logrus" "golang.org/x/net/context" ) const configFileName = "config.v2.json" var ( errInvalidEndpoint = errors.New("invalid endpoint while building port map info") errInvalidNetwork = errors.New("invalid network settings while building port map info") ) // ExitStatus provides exit reasons for a container. type ExitStatus struct { // The exit code with which the container exited. ExitCode int // Whether the container encountered an OOM. 
OOMKilled bool // Time at which the container died ExitedAt time.Time } // Container holds the structure defining a container object. type Container struct { StreamConfig *stream.Config // embed for Container to support states directly. *State `json:"State"` // Needed for Engine API version <= 1.11 Root string `json:"-"` // Path to the "home" of the container, including metadata. BaseFS containerfs.ContainerFS `json:"-"` // interface containing graphdriver mount RWLayer layer.RWLayer `json:"-"` ID string Created time.Time Managed bool Path string Args []string Config *containertypes.Config ImageID image.ID `json:"Image"` NetworkSettings *network.Settings LogPath string Name string Driver string OS string // MountLabel contains the options for the 'mount' command MountLabel string ProcessLabel string RestartCount int HasBeenStartedBefore bool HasBeenManuallyStopped bool // used for unless-stopped restart policy MountPoints map[string]*volume.MountPoint HostConfig *containertypes.HostConfig `json:"-"` // do not serialize the host config in the json, otherwise we'll make the container unportable ExecCommands *exec.Store `json:"-"` DependencyStore agentexec.DependencyGetter `json:"-"` SecretReferences []*swarmtypes.SecretReference ConfigReferences []*swarmtypes.ConfigReference // logDriver for closing LogDriver logger.Logger `json:"-"` LogCopier *logger.Copier `json:"-"` restartManager restartmanager.RestartManager attachContext *attachContext // Fields here are specific to Unix platforms AppArmorProfile string HostnamePath string HostsPath string ShmPath string ResolvConfPath string SeccompProfile string NoNewPrivileges bool // Fields here are specific to Windows NetworkSharedContainerID string `json:"-"` SharedEndpointList []string `json:"-"` } // NewBaseContainer creates a new container with its // basic configuration. 
func NewBaseContainer(id, root string) *Container { return &Container{ ID: id, State: NewState(), ExecCommands: exec.NewStore(), Root: root, MountPoints: make(map[string]*volume.MountPoint), StreamConfig: stream.NewConfig(), attachContext: &attachContext{}, } } // FromDisk loads the container configuration stored in the host. func (container *Container) FromDisk() error { pth, err := container.ConfigPath() if err != nil { return err } jsonSource, err := os.Open(pth) if err != nil { return err } defer jsonSource.Close() dec := json.NewDecoder(jsonSource) // Load container settings if err := dec.Decode(container); err != nil { return err } // Ensure the operating system is set if blank. Assume it is the OS of the // host OS if not, to ensure containers created before multiple-OS // support are migrated if container.OS == "" { container.OS = runtime.GOOS } return container.readHostConfig() } // toDisk saves the container configuration on disk and returns a deep copy. func (container *Container) toDisk() (*Container, error) { var ( buf bytes.Buffer deepCopy Container ) pth, err := container.ConfigPath() if err != nil { return nil, err } // Save container settings f, err := ioutils.NewAtomicFileWriter(pth, 0600) if err != nil { return nil, err } defer f.Close() w := io.MultiWriter(&buf, f) if err := json.NewEncoder(w).Encode(container); err != nil { return nil, err } if err := json.NewDecoder(&buf).Decode(&deepCopy); err != nil { return nil, err } deepCopy.HostConfig, err = container.WriteHostConfig() if err != nil { return nil, err } return &deepCopy, nil } // CheckpointTo makes the Container's current state visible to queries, and persists state. // Callers must hold a Container lock. func (container *Container) CheckpointTo(store ViewDB) error { deepCopy, err := container.toDisk() if err != nil { return err } return store.Save(deepCopy) } // readHostConfig reads the host configuration from disk for the container. 
func (container *Container) readHostConfig() error { container.HostConfig = &containertypes.HostConfig{} // If the hostconfig file does not exist, do not read it. // (We still have to initialize container.HostConfig, // but that's OK, since we just did that above.) pth, err := container.HostConfigPath() if err != nil { return err } f, err := os.Open(pth) if err != nil { if os.IsNotExist(err) { return nil } return err } defer f.Close() if err := json.NewDecoder(f).Decode(&container.HostConfig); err != nil { return err } container.InitDNSHostConfig() return nil } // WriteHostConfig saves the host configuration on disk for the container, // and returns a deep copy of the saved object. Callers must hold a Container lock. func (container *Container) WriteHostConfig() (*containertypes.HostConfig, error) { var ( buf bytes.Buffer deepCopy containertypes.HostConfig ) pth, err := container.HostConfigPath() if err != nil { return nil, err } f, err := ioutils.NewAtomicFileWriter(pth, 0644) if err != nil { return nil, err } defer f.Close() w := io.MultiWriter(&buf, f) if err := json.NewEncoder(w).Encode(&container.HostConfig); err != nil { return nil, err } if err := json.NewDecoder(&buf).Decode(&deepCopy); err != nil { return nil, err } return &deepCopy, nil } // SetupWorkingDirectory sets up the container's working directory as set in container.Config.WorkingDir func (container *Container) SetupWorkingDirectory(rootIDs idtools.IDPair) error { // TODO @jhowardmsft, @gupta-ak LCOW Support. This will need revisiting. // We will need to do remote filesystem operations here. 
if container.OS != runtime.GOOS { return nil } if container.Config.WorkingDir == "" { return nil } container.Config.WorkingDir = filepath.Clean(container.Config.WorkingDir) pth, err := container.GetResourcePath(container.Config.WorkingDir) if err != nil { return err } if err := idtools.MkdirAllAndChownNew(pth, 0755, rootIDs); err != nil { pthInfo, err2 := os.Stat(pth) if err2 == nil && pthInfo != nil && !pthInfo.IsDir() { return errors.Errorf("Cannot mkdir: %s is not a directory", container.Config.WorkingDir) } return err } return nil } // GetResourcePath evaluates `path` in the scope of the container's BaseFS, with proper path // sanitisation. Symlinks are all scoped to the BaseFS of the container, as // though the container's BaseFS was `/`. // // The BaseFS of a container is the host-facing path which is bind-mounted as // `/` inside the container. This method is essentially used to access a // particular path inside the container as though you were a process in that // container. // // NOTE: The returned path is *only* safely scoped inside the container's BaseFS // if no component of the returned path changes (such as a component // symlinking to a different path) between using this method and using the // path. See symlink.FollowSymlinkInScope for more details. func (container *Container) GetResourcePath(path string) (string, error) { // IMPORTANT - These are paths on the OS where the daemon is running, hence // any filepath operations must be done in an OS agnostic way. r, e := container.BaseFS.ResolveScopedPath(path, false) // Log this here on the daemon side as there's otherwise no indication apart // from the error being propagated all the way back to the client. This makes // debugging significantly easier and clearly indicates the error comes from the daemon. 
if e != nil { logrus.Errorf("Failed to ResolveScopedPath BaseFS %s path %s %s\n", container.BaseFS.Path(), path, e) } return r, e } // GetRootResourcePath evaluates `path` in the scope of the container's root, with proper path // sanitisation. Symlinks are all scoped to the root of the container, as // though the container's root was `/`. // // The root of a container is the host-facing configuration metadata directory. // Only use this method to safely access the container's `container.json` or // other metadata files. If in doubt, use container.GetResourcePath. // // NOTE: The returned path is *only* safely scoped inside the container's root // if no component of the returned path changes (such as a component // symlinking to a different path) between using this method and using the // path. See symlink.FollowSymlinkInScope for more details. func (container *Container) GetRootResourcePath(path string) (string, error) { // IMPORTANT - These are paths on the OS where the daemon is running, hence // any filepath operations must be done in an OS agnostic way. cleanPath := filepath.Join(string(os.PathSeparator), path) return symlink.FollowSymlinkInScope(filepath.Join(container.Root, cleanPath), container.Root) } // ExitOnNext signals to the monitor that it should not restart the container // after we send the kill signal. 
func (container *Container) ExitOnNext() { container.RestartManager().Cancel() } // HostConfigPath returns the path to the container's JSON hostconfig func (container *Container) HostConfigPath() (string, error) { return container.GetRootResourcePath("hostconfig.json") } // ConfigPath returns the path to the container's JSON config func (container *Container) ConfigPath() (string, error) { return container.GetRootResourcePath(configFileName) } // CheckpointDir returns the directory checkpoints are stored in func (container *Container) CheckpointDir() string { return filepath.Join(container.Root, "checkpoints") } // StartLogger starts a new logger driver for the container. func (container *Container) StartLogger() (logger.Logger, error) { cfg := container.HostConfig.LogConfig initDriver, err := logger.GetLogDriver(cfg.Type) if err != nil { return nil, errors.Wrap(err, "failed to get logging factory") } info := logger.Info{ Config: cfg.Config, ContainerID: container.ID, ContainerName: container.Name, ContainerEntrypoint: container.Path, ContainerArgs: container.Args, ContainerImageID: container.ImageID.String(), ContainerImageName: container.Config.Image, ContainerCreated: container.Created, ContainerEnv: container.Config.Env, ContainerLabels: container.Config.Labels, DaemonName: "docker", } // Set logging file for "json-logger" if cfg.Type == jsonfilelog.Name { info.LogPath, err = container.GetRootResourcePath(fmt.Sprintf("%s-json.log", container.ID)) if err != nil { return nil, err } } l, err := initDriver(info) if err != nil { return nil, err } if containertypes.LogMode(cfg.Config["mode"]) == containertypes.LogModeNonBlock { bufferSize := int64(-1) if s, exists := cfg.Config["max-buffer-size"]; exists { bufferSize, err = units.RAMInBytes(s) if err != nil { return nil, err } } l = logger.NewRingLogger(l, info, bufferSize) } return l, nil } // GetProcessLabel returns the process label for the container. 
func (container *Container) GetProcessLabel() string { // even if we have a process label return "" if we are running // in privileged mode if container.HostConfig.Privileged { return "" } return container.ProcessLabel } // GetMountLabel returns the mounting label for the container. // This label is empty if the container is privileged. func (container *Container) GetMountLabel() string { return container.MountLabel } // GetExecIDs returns the list of exec commands running on the container. func (container *Container) GetExecIDs() []string { return container.ExecCommands.List() } // ShouldRestart decides whether the daemon should restart the container or not. // This is based on the container's restart policy. func (container *Container) ShouldRestart() bool { shouldRestart, _, _ := container.RestartManager().ShouldRestart(uint32(container.ExitCode()), container.HasBeenManuallyStopped, container.FinishedAt.Sub(container.StartedAt)) return shouldRestart } // AddMountPointWithVolume adds a new mount point configured with a volume to the container. 
func (container *Container) AddMountPointWithVolume(destination string, vol volume.Volume, rw bool) { operatingSystem := container.OS if operatingSystem == "" { operatingSystem = runtime.GOOS } volumeParser := volume.NewParser(operatingSystem) container.MountPoints[destination] = &volume.MountPoint{ Type: mounttypes.TypeVolume, Name: vol.Name(), Driver: vol.DriverName(), Destination: destination, RW: rw, Volume: vol, CopyData: volumeParser.DefaultCopyMode(), } } // UnmountVolumes unmounts all volumes func (container *Container) UnmountVolumes(volumeEventLog func(name, action string, attributes map[string]string)) error { var errors []string for _, volumeMount := range container.MountPoints { if volumeMount.Volume == nil { continue } if err := volumeMount.Cleanup(); err != nil { errors = append(errors, err.Error()) continue } attributes := map[string]string{ "driver": volumeMount.Volume.DriverName(), "container": container.ID, } volumeEventLog(volumeMount.Volume.Name(), "unmount", attributes) } if len(errors) > 0 { return fmt.Errorf("error while unmounting volumes for container %s: %s", container.ID, strings.Join(errors, "; ")) } return nil } // IsDestinationMounted checks whether a path is mounted on the container or not. func (container *Container) IsDestinationMounted(destination string) bool { return container.MountPoints[destination] != nil } // StopSignal returns the signal used to stop the container. func (container *Container) StopSignal() int { var stopSignal syscall.Signal if container.Config.StopSignal != "" { stopSignal, _ = signal.ParseSignal(container.Config.StopSignal) } if int(stopSignal) == 0 { stopSignal, _ = signal.ParseSignal(signal.DefaultStopSignal) } return int(stopSignal) } // StopTimeout returns the timeout (in seconds) used to stop the container. 
func (container *Container) StopTimeout() int { if container.Config.StopTimeout != nil { return *container.Config.StopTimeout } return DefaultStopTimeout } // InitDNSHostConfig ensures that the dns fields are never nil. // New containers don't ever have those fields nil, // but pre created containers can still have those nil values. // The non-recommended host configuration in the start api can // make these fields nil again, this corrects that issue until // we remove that behavior for good. // See https://github.com/docker/docker/pull/17779 // for a more detailed explanation on why we don't want that. func (container *Container) InitDNSHostConfig() { container.Lock() defer container.Unlock() if container.HostConfig.DNS == nil { container.HostConfig.DNS = make([]string, 0) } if container.HostConfig.DNSSearch == nil { container.HostConfig.DNSSearch = make([]string, 0) } if container.HostConfig.DNSOptions == nil { container.HostConfig.DNSOptions = make([]string, 0) } } // GetEndpointInNetwork returns the container's endpoint to the provided network. 
func (container *Container) GetEndpointInNetwork(n libnetwork.Network) (libnetwork.Endpoint, error) { endpointName := strings.TrimPrefix(container.Name, "/") return n.EndpointByName(endpointName) } func (container *Container) buildPortMapInfo(ep libnetwork.Endpoint) error { if ep == nil { return errInvalidEndpoint } networkSettings := container.NetworkSettings if networkSettings == nil { return errInvalidNetwork } if len(networkSettings.Ports) == 0 { pm, err := getEndpointPortMapInfo(ep) if err != nil { return err } networkSettings.Ports = pm } return nil } func getEndpointPortMapInfo(ep libnetwork.Endpoint) (nat.PortMap, error) { pm := nat.PortMap{} driverInfo, err := ep.DriverInfo() if err != nil { return pm, err } if driverInfo == nil { // It is not an error for epInfo to be nil return pm, nil } if expData, ok := driverInfo[netlabel.ExposedPorts]; ok { if exposedPorts, ok := expData.([]types.TransportPort); ok { for _, tp := range exposedPorts { natPort, err := nat.NewPort(tp.Proto.String(), strconv.Itoa(int(tp.Port))) if err != nil { return pm, fmt.Errorf("Error parsing Port value(%v):%v", tp.Port, err) } pm[natPort] = nil } } } mapData, ok := driverInfo[netlabel.PortMap] if !ok { return pm, nil } if portMapping, ok := mapData.([]types.PortBinding); ok { for _, pp := range portMapping { natPort, err := nat.NewPort(pp.Proto.String(), strconv.Itoa(int(pp.Port))) if err != nil { return pm, err } natBndg := nat.PortBinding{HostIP: pp.HostIP.String(), HostPort: strconv.Itoa(int(pp.HostPort))} pm[natPort] = append(pm[natPort], natBndg) } } return pm, nil } // GetSandboxPortMapInfo retrieves the current port-mapping programmed for the given sandbox func GetSandboxPortMapInfo(sb libnetwork.Sandbox) nat.PortMap { pm := nat.PortMap{} if sb == nil { return pm } for _, ep := range sb.Endpoints() { pm, _ = getEndpointPortMapInfo(ep) if len(pm) > 0 { break } } return pm } // BuildEndpointInfo sets endpoint-related fields on container.NetworkSettings based on the provided 
network and endpoint. func (container *Container) BuildEndpointInfo(n libnetwork.Network, ep libnetwork.Endpoint) error { if ep == nil { return errInvalidEndpoint } networkSettings := container.NetworkSettings if networkSettings == nil { return errInvalidNetwork } epInfo := ep.Info() if epInfo == nil { // It is not an error to get an empty endpoint info return nil } if _, ok := networkSettings.Networks[n.Name()]; !ok { networkSettings.Networks[n.Name()] = &network.EndpointSettings{ EndpointSettings: &networktypes.EndpointSettings{}, } } networkSettings.Networks[n.Name()].NetworkID = n.ID() networkSettings.Networks[n.Name()].EndpointID = ep.ID() iface := epInfo.Iface() if iface == nil { return nil } if iface.MacAddress() != nil { networkSettings.Networks[n.Name()].MacAddress = iface.MacAddress().String() } if iface.Address() != nil { ones, _ := iface.Address().Mask.Size() networkSettings.Networks[n.Name()].IPAddress = iface.Address().IP.String() networkSettings.Networks[n.Name()].IPPrefixLen = ones } if iface.AddressIPv6() != nil && iface.AddressIPv6().IP.To16() != nil { onesv6, _ := iface.AddressIPv6().Mask.Size() networkSettings.Networks[n.Name()].GlobalIPv6Address = iface.AddressIPv6().IP.String() networkSettings.Networks[n.Name()].GlobalIPv6PrefixLen = onesv6 } return nil } type named interface { Name() string } // UpdateJoinInfo updates network settings when container joins network n with endpoint ep. 
func (container *Container) UpdateJoinInfo(n named, ep libnetwork.Endpoint) error { if err := container.buildPortMapInfo(ep); err != nil { return err } epInfo := ep.Info() if epInfo == nil { // It is not an error to get an empty endpoint info return nil } if epInfo.Gateway() != nil { container.NetworkSettings.Networks[n.Name()].Gateway = epInfo.Gateway().String() } if epInfo.GatewayIPv6().To16() != nil { container.NetworkSettings.Networks[n.Name()].IPv6Gateway = epInfo.GatewayIPv6().String() } return nil } // UpdateSandboxNetworkSettings updates the sandbox ID and Key. func (container *Container) UpdateSandboxNetworkSettings(sb libnetwork.Sandbox) error { container.NetworkSettings.SandboxID = sb.ID() container.NetworkSettings.SandboxKey = sb.Key() return nil } // BuildJoinOptions builds endpoint Join options from a given network. func (container *Container) BuildJoinOptions(n named) ([]libnetwork.EndpointOption, error) { var joinOptions []libnetwork.EndpointOption if epConfig, ok := container.NetworkSettings.Networks[n.Name()]; ok { for _, str := range epConfig.Links { name, alias, err := opts.ParseLink(str) if err != nil { return nil, err } joinOptions = append(joinOptions, libnetwork.CreateOptionAlias(name, alias)) } for k, v := range epConfig.DriverOpts { joinOptions = append(joinOptions, libnetwork.EndpointOptionGeneric(options.Generic{k: v})) } } return joinOptions, nil } // BuildCreateEndpointOptions builds endpoint options from a given network. 
func (container *Container) BuildCreateEndpointOptions(n libnetwork.Network, epConfig *networktypes.EndpointSettings, sb libnetwork.Sandbox, daemonDNS []string) ([]libnetwork.EndpointOption, error) { var ( bindings = make(nat.PortMap) pbList []types.PortBinding exposeList []types.TransportPort createOptions []libnetwork.EndpointOption ) defaultNetName := runconfig.DefaultDaemonNetworkMode().NetworkName() if (!container.EnableServiceDiscoveryOnDefaultNetwork() && n.Name() == defaultNetName) || container.NetworkSettings.IsAnonymousEndpoint { createOptions = append(createOptions, libnetwork.CreateOptionAnonymous()) } if epConfig != nil { ipam := epConfig.IPAMConfig if ipam != nil { var ( ipList []net.IP ip, ip6, linkip net.IP ) for _, ips := range ipam.LinkLocalIPs { if linkip = net.ParseIP(ips); linkip == nil && ips != "" { return nil, errors.Errorf("Invalid link-local IP address: %s", ipam.LinkLocalIPs)<|fim▁hole|> } if ip = net.ParseIP(ipam.IPv4Address); ip == nil && ipam.IPv4Address != "" { return nil, errors.Errorf("Invalid IPv4 address: %s)", ipam.IPv4Address) } if ip6 = net.ParseIP(ipam.IPv6Address); ip6 == nil && ipam.IPv6Address != "" { return nil, errors.Errorf("Invalid IPv6 address: %s)", ipam.IPv6Address) } createOptions = append(createOptions, libnetwork.CreateOptionIpam(ip, ip6, ipList, nil)) } for _, alias := range epConfig.Aliases { createOptions = append(createOptions, libnetwork.CreateOptionMyAlias(alias)) } for k, v := range epConfig.DriverOpts { createOptions = append(createOptions, libnetwork.EndpointOptionGeneric(options.Generic{k: v})) } } if container.NetworkSettings.Service != nil { svcCfg := container.NetworkSettings.Service var vip string if svcCfg.VirtualAddresses[n.ID()] != nil { vip = svcCfg.VirtualAddresses[n.ID()].IPv4 } var portConfigs []*libnetwork.PortConfig for _, portConfig := range svcCfg.ExposedPorts { portConfigs = append(portConfigs, &libnetwork.PortConfig{ Name: portConfig.Name, Protocol: 
libnetwork.PortConfig_Protocol(portConfig.Protocol), TargetPort: portConfig.TargetPort, PublishedPort: portConfig.PublishedPort, }) } createOptions = append(createOptions, libnetwork.CreateOptionService(svcCfg.Name, svcCfg.ID, net.ParseIP(vip), portConfigs, svcCfg.Aliases[n.ID()])) } if !containertypes.NetworkMode(n.Name()).IsUserDefined() { createOptions = append(createOptions, libnetwork.CreateOptionDisableResolution()) } // configs that are applicable only for the endpoint in the network // to which container was connected to on docker run. // Ideally all these network-specific endpoint configurations must be moved under // container.NetworkSettings.Networks[n.Name()] if n.Name() == container.HostConfig.NetworkMode.NetworkName() || (n.Name() == defaultNetName && container.HostConfig.NetworkMode.IsDefault()) { if container.Config.MacAddress != "" { mac, err := net.ParseMAC(container.Config.MacAddress) if err != nil { return nil, err } genericOption := options.Generic{ netlabel.MacAddress: mac, } createOptions = append(createOptions, libnetwork.EndpointOptionGeneric(genericOption)) } } // Port-mapping rules belong to the container & applicable only to non-internal networks portmaps := GetSandboxPortMapInfo(sb) if n.Info().Internal() || len(portmaps) > 0 { return createOptions, nil } if container.HostConfig.PortBindings != nil { for p, b := range container.HostConfig.PortBindings { bindings[p] = []nat.PortBinding{} for _, bb := range b { bindings[p] = append(bindings[p], nat.PortBinding{ HostIP: bb.HostIP, HostPort: bb.HostPort, }) } } } portSpecs := container.Config.ExposedPorts ports := make([]nat.Port, len(portSpecs)) var i int for p := range portSpecs { ports[i] = p i++ } nat.SortPortMap(ports, bindings) for _, port := range ports { expose := types.TransportPort{} expose.Proto = types.ParseProtocol(port.Proto()) expose.Port = uint16(port.Int()) exposeList = append(exposeList, expose) pb := types.PortBinding{Port: expose.Port, Proto: expose.Proto} binding := 
bindings[port] for i := 0; i < len(binding); i++ { pbCopy := pb.GetCopy() newP, err := nat.NewPort(nat.SplitProtoPort(binding[i].HostPort)) var portStart, portEnd int if err == nil { portStart, portEnd, err = newP.Range() } if err != nil { return nil, errors.Wrapf(err, "Error parsing HostPort value (%s)", binding[i].HostPort) } pbCopy.HostPort = uint16(portStart) pbCopy.HostPortEnd = uint16(portEnd) pbCopy.HostIP = net.ParseIP(binding[i].HostIP) pbList = append(pbList, pbCopy) } if container.HostConfig.PublishAllPorts && len(binding) == 0 { pbList = append(pbList, pb) } } var dns []string if len(container.HostConfig.DNS) > 0 { dns = container.HostConfig.DNS } else if len(daemonDNS) > 0 { dns = daemonDNS } if len(dns) > 0 { createOptions = append(createOptions, libnetwork.CreateOptionDNS(dns)) } createOptions = append(createOptions, libnetwork.CreateOptionPortMapping(pbList), libnetwork.CreateOptionExposedPorts(exposeList)) return createOptions, nil } // UpdateMonitor updates monitor configure for running container func (container *Container) UpdateMonitor(restartPolicy containertypes.RestartPolicy) { type policySetter interface { SetPolicy(containertypes.RestartPolicy) } if rm, ok := container.RestartManager().(policySetter); ok { rm.SetPolicy(restartPolicy) } } // FullHostname returns hostname and optional domain appended to it. func (container *Container) FullHostname() string { fullHostname := container.Config.Hostname if container.Config.Domainname != "" { fullHostname = fmt.Sprintf("%s.%s", fullHostname, container.Config.Domainname) } return fullHostname } // RestartManager returns the current restartmanager instance connected to container. 
func (container *Container) RestartManager() restartmanager.RestartManager { if container.restartManager == nil { container.restartManager = restartmanager.New(container.HostConfig.RestartPolicy, container.RestartCount) } return container.restartManager } // ResetRestartManager initializes new restartmanager based on container config func (container *Container) ResetRestartManager(resetCount bool) { if container.restartManager != nil { container.restartManager.Cancel() } if resetCount { container.RestartCount = 0 } container.restartManager = nil } type attachContext struct { ctx context.Context cancel context.CancelFunc mu sync.Mutex } // InitAttachContext initializes or returns existing context for attach calls to // track container liveness. func (container *Container) InitAttachContext() context.Context { container.attachContext.mu.Lock() defer container.attachContext.mu.Unlock() if container.attachContext.ctx == nil { container.attachContext.ctx, container.attachContext.cancel = context.WithCancel(context.Background()) } return container.attachContext.ctx } // CancelAttachContext cancels attach context. All attach calls should detach // after this call. 
func (container *Container) CancelAttachContext() { container.attachContext.mu.Lock() if container.attachContext.ctx != nil { container.attachContext.cancel() container.attachContext.ctx = nil } container.attachContext.mu.Unlock() } func (container *Container) startLogging() error { if container.HostConfig.LogConfig.Type == "none" { return nil // do not start logging routines } l, err := container.StartLogger() if err != nil { return fmt.Errorf("failed to initialize logging driver: %v", err) } copier := logger.NewCopier(map[string]io.Reader{"stdout": container.StdoutPipe(), "stderr": container.StderrPipe()}, l) container.LogCopier = copier copier.Run() container.LogDriver = l // set LogPath field only for json-file logdriver if jl, ok := l.(*jsonfilelog.JSONFileLogger); ok { container.LogPath = jl.LogPath() } return nil } // StdinPipe gets the stdin stream of the container func (container *Container) StdinPipe() io.WriteCloser { return container.StreamConfig.StdinPipe() } // StdoutPipe gets the stdout stream of the container func (container *Container) StdoutPipe() io.ReadCloser { return container.StreamConfig.StdoutPipe() } // StderrPipe gets the stderr stream of the container func (container *Container) StderrPipe() io.ReadCloser { return container.StreamConfig.StderrPipe() } // CloseStreams closes the container's stdio streams func (container *Container) CloseStreams() error { return container.StreamConfig.CloseStreams() } // InitializeStdio is called by libcontainerd to connect the stdio. 
func (container *Container) InitializeStdio(iop *cio.DirectIO) (cio.IO, error) { if err := container.startLogging(); err != nil { container.Reset(false) return nil, err } container.StreamConfig.CopyToPipe(iop) if container.StreamConfig.Stdin() == nil && !container.Config.Tty { if iop.Stdin != nil { if err := iop.Stdin.Close(); err != nil { logrus.Warnf("error closing stdin: %+v", err) } } } return &rio{IO: iop, sc: container.StreamConfig}, nil } // MountsResourcePath returns the path where mounts are stored for the given mount func (container *Container) MountsResourcePath(mount string) (string, error) { return container.GetRootResourcePath(filepath.Join("mounts", mount)) } // SecretMountPath returns the path of the secret mount for the container func (container *Container) SecretMountPath() (string, error) { return container.MountsResourcePath("secrets") } // SecretFilePath returns the path to the location of a secret on the host. func (container *Container) SecretFilePath(secretRef swarmtypes.SecretReference) (string, error) { secrets, err := container.SecretMountPath() if err != nil { return "", err } return filepath.Join(secrets, secretRef.SecretID), nil } func getSecretTargetPath(r *swarmtypes.SecretReference) string { if filepath.IsAbs(r.File.Name) { return r.File.Name } return filepath.Join(containerSecretMountPath, r.File.Name) } // ConfigsDirPath returns the path to the directory where configs are stored on // disk. func (container *Container) ConfigsDirPath() (string, error) { return container.GetRootResourcePath("configs") } // ConfigFilePath returns the path to the on-disk location of a config. func (container *Container) ConfigFilePath(configRef swarmtypes.ConfigReference) (string, error) { configs, err := container.ConfigsDirPath() if err != nil { return "", err } return filepath.Join(configs, configRef.ConfigID), nil } // CreateDaemonEnvironment creates a new environment variable slice for this container. 
func (container *Container) CreateDaemonEnvironment(tty bool, linkedEnv []string) []string { // Setup environment os := container.OS if os == "" { os = runtime.GOOS } env := []string{} if runtime.GOOS != "windows" || (runtime.GOOS == "windows" && os == "linux") { env = []string{ "PATH=" + system.DefaultPathEnv(os), "HOSTNAME=" + container.Config.Hostname, } if tty { env = append(env, "TERM=xterm") } env = append(env, linkedEnv...) } // because the env on the container can override certain default values // we need to replace the 'env' keys where they match and append anything // else. env = ReplaceOrAppendEnvValues(env, container.Config.Env) return env } type rio struct { cio.IO sc *stream.Config } func (i *rio) Close() error { i.IO.Close() return i.sc.CloseStreams() } func (i *rio) Wait() { i.sc.Wait() i.IO.Wait() }<|fim▁end|>
} ipList = append(ipList, linkip)
<|file_name|>early-vtbl-resolution.rs<|end_file_name|><|fim▁begin|>// run-pass #![allow(non_camel_case_types)] #![allow(dead_code)] // pretty-expanded FIXME #23616 <|fim▁hole|>trait thing<A> { fn foo(&self) -> Option<A>; } impl<A> thing<A> for isize { fn foo(&self) -> Option<A> { None } } fn foo_func<A, B: thing<A>>(x: B) -> Option<A> { x.foo() } struct A { a: isize } pub fn main() { let _x: Option<f64> = foo_func(0); }<|fim▁end|>
<|file_name|>PartitioningOperators.java<|end_file_name|><|fim▁begin|>/* * PartitioningOperators.java Feb 3 2014, 03:44 * * Copyright 2014 Drunken Dev. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.drunkendev.lambdas; import com.drunkendev.lambdas.domain.DomainService; import com.drunkendev.lambdas.domain.Order; import com.drunkendev.lambdas.helper.IndexHolder; import com.drunkendev.lambdas.helper.MutableBoolean; import java.util.ArrayList; import java.util.Arrays; /** * * @author Brett Ryan */ public class PartitioningOperators { private final DomainService ds; /** * Creates a new {@code PartitioningOperators} instance. 
*/ public PartitioningOperators() { this.ds = new DomainService(); } public static void main(String[] args) { PartitioningOperators po = new PartitioningOperators(); po.lambda20(); po.lambda21(); po.lambda22(); po.lambda23(); po.lambda24(); po.lambda25(); po.lambda26(); po.lambda27(); } public void lambda20() { System.out.println("\nFirst 3 numbers:"); int[] numbers = {5, 4, 1, 3, 9, 8, 6, 7, 2, 0}; Arrays.stream(numbers) .limit(3) .forEach(System.out::println); } public void lambda21() {<|fim▁hole|> ds.getCustomerList().stream() .filter(c -> "WA".equalsIgnoreCase(c.getRegion())) .flatMap(c -> c.getOrders().stream() .map(n -> new CustOrder(c.getCustomerId(), n)) ).limit(3) .forEach(System.out::println); } public void lambda22() { System.out.println("\nAll but first 4 numbers:"); int[] numbers = {5, 4, 1, 3, 9, 8, 6, 7, 2, 0}; Arrays.stream(numbers) .skip(4) .forEach(System.out::println); } public void lambda23() { System.out.println("\nAll but first 2 orders in WA:"); ds.getCustomerList().stream() .filter(c -> "WA".equalsIgnoreCase(c.getRegion())) .flatMap(c -> c.getOrders().stream() .map(n -> new CustOrder(c.getCustomerId(), n)) ).skip(2) .forEach(System.out::println); } /** * Unfortunately this method will not short circuit and will continue to * iterate until the end of the stream. I need to figure out a better way to * handle this. 
*/ public void lambda24() { System.out.println("\nFirst numbers less than 6:"); int[] numbers = {5, 4, 1, 3, 9, 8, 6, 7, 2, 0}; MutableBoolean mb = new MutableBoolean(true); Arrays.stream(numbers) .collect(ArrayList<Integer>::new, (output, v) -> { if (mb.isTrue()) { if (v < 6) { output.add(v); } else { mb.flip(); } } }, (c1, c2) -> c1.addAll(c2)) .forEach(System.out::println); } public void lambda25() { System.out.println("\nFirst numbers not less than their position:"); int[] numbers = {5, 4, 1, 3, 9, 8, 6, 7, 2, 0}; IndexHolder i = new IndexHolder(); MutableBoolean mb = new MutableBoolean(true); Arrays.stream(numbers) .collect(ArrayList<Integer>::new, (output, v) -> { if (mb.isTrue()) { if (v > i.postIncrement()) { output.add(v); } else { mb.flip(); } } }, (c1, c2) -> c1.addAll(c2)) .forEach(System.out::println); } public void lambda26() { System.out.println("\nAll elements starting from first element divisible by 3:"); int[] numbers = {5, 4, 1, 3, 9, 8, 6, 7, 2, 0}; MutableBoolean mb = new MutableBoolean(false); Arrays.stream(numbers) .collect(ArrayList<Integer>::new, (output, v) -> { if (mb.isTrue()) { output.add(v); } else if (v % 3 == 0) { output.add(v); mb.flip(); } }, (c1, c2) -> c1.addAll(c2)) .forEach(System.out::println); } public void lambda27() { System.out.println("\nAll elements starting from first element less than its position:"); int[] numbers = {5, 4, 1, 3, 9, 8, 6, 7, 2, 0}; IndexHolder i = new IndexHolder(); MutableBoolean mb = new MutableBoolean(false); Arrays.stream(numbers) .collect(ArrayList<Integer>::new, (output, v) -> { if (mb.isTrue()) { output.add(v); } else if (v < i.postIncrement()) { output.add(v); mb.flip(); } }, (c1, c2) -> c1.addAll(c2) ) .forEach(System.out::println); } private static class CustOrder { private final String customerId; private final Order order; public CustOrder(String customerId, Order order) { this.customerId = customerId; this.order = order; } public String getCustomerId() { return customerId; } public Order 
getOrder() { return order; } @Override public String toString() { return String.format("CustOrder[customerId=%s,orderId=%d,orderDate=%s]", customerId, order.getOrderId(), order.getOrderDate()); } } }<|fim▁end|>
System.out.println("\nFirst 3 orders in WA:");
<|file_name|>inline.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ #![deny(unsafe_code)] use app_units::Au; use block::AbsoluteAssignBSizesTraversal; use context::LayoutContext; use display_list_builder::{FragmentDisplayListBuilding, InlineFlowDisplayListBuilding}; use euclid::{Point2D, Rect, Size2D}; use floats::{FloatKind, Floats, PlacementInfo}; use flow::{EarlyAbsolutePositionInfo, MutableFlowUtils, OpaqueFlow}; use flow::{self, BaseFlow, Flow, FlowClass, ForceNonfloatedFlag, IS_ABSOLUTELY_POSITIONED}; use flow_ref; use fragment::{CoordinateSystem, Fragment, FragmentBorderBoxIterator, SpecificFragmentInfo}; use gfx::display_list::OpaqueNode; use gfx::font::FontMetrics; use gfx::font_context::FontContext; use incremental::{BUBBLE_ISIZES, REFLOW, REFLOW_OUT_OF_FLOW, REPAINT, RESOLVE_GENERATED_CONTENT}; use layout_debug; use model::IntrinsicISizesContribution; use std::cmp::max; use std::collections::VecDeque; use std::sync::Arc; use std::{fmt, isize, mem}; use style::computed_values::{display, overflow_x, position, text_align, text_justify}; use style::computed_values::{text_overflow, vertical_align, white_space}; use style::properties::ComputedValues; use style::values::computed::LengthOrPercentage; use text; use unicode_bidi; use util; use util::logical_geometry::{LogicalRect, LogicalSize, WritingMode}; use util::print_tree::PrintTree; use util::range::{Range, RangeIndex}; use wrapper::PseudoElementType; // From gfxFontConstants.h in Firefox static FONT_SUBSCRIPT_OFFSET_RATIO: f32 = 0.20; static FONT_SUPERSCRIPT_OFFSET_RATIO: f32 = 0.34; /// `Line`s are represented as offsets into the child list, rather than /// as an object that "owns" fragments. 
Choosing a different set of line /// breaks requires a new list of offsets, and possibly some splitting and /// merging of TextFragments. /// /// A similar list will keep track of the mapping between CSS fragments and /// the corresponding fragments in the inline flow. /// /// After line breaks are determined, render fragments in the inline flow may /// overlap visually. For example, in the case of nested inline CSS fragments, /// outer inlines must be at least as large as the inner inlines, for /// purposes of drawing noninherited things like backgrounds, borders, /// outlines. /// /// N.B. roc has an alternative design where the list instead consists of /// things like "start outer fragment, text, start inner fragment, text, end inner /// fragment, text, end outer fragment, text". This seems a little complicated to /// serve as the starting point, but the current design doesn't make it /// hard to try out that alternative. /// /// Line fragments also contain some metadata used during line breaking. The /// green zone is the area that the line can expand to before it collides /// with a float or a horizontal wall of the containing block. The block-start /// inline-start corner of the green zone is the same as that of the line, but /// the green zone can be taller and wider than the line itself. #[derive(RustcEncodable, Debug, Clone)] pub struct Line { /// A range of line indices that describe line breaks. /// /// For example, consider the following HTML and rendered element with /// linebreaks: /// /// ~~~html /// <span>I <span>like truffles, <img></span> yes I do.</span> /// ~~~ /// /// ~~~text /// +------------+ /// | I like | /// | truffles, | /// | +----+ | /// | | | | /// | +----+ yes | /// | I do. | /// +------------+ /// ~~~ /// /// The ranges that describe these lines would be: /// /// | [0, 2) | [2, 3) | [3, 5) | [5, 6) | /// |----------|-------------|-------------|----------| /// | 'I like' | 'truffles,' | '<img> yes' | 'I do.' 
| pub range: Range<FragmentIndex>, /// The bidirectional embedding level runs for this line, in visual order. /// /// Can be set to `None` if the line is 100% left-to-right. pub visual_runs: Option<Vec<(Range<FragmentIndex>, u8)>>, /// The bounds are the exact position and extents of the line with respect /// to the parent box. /// /// For example, for the HTML below... /// /// ~~~html /// <div><span>I <span>like truffles, <img></span></div> /// ~~~ /// /// ...the bounds would be: /// /// ~~~text /// +-----------------------------------------------------------+ /// | ^ | /// | | | /// | origin.y | /// | | | /// | v | /// |< - origin.x ->+ - - - - - - - - +---------+---- | /// | | | | ^ | /// | | | <img> | size.block | /// | I like truffles, | | v | /// | + - - - - - - - - +---------+---- | /// | | | | /// | |<------ size.inline ------>| | /// | | /// | | /// +-----------------------------------------------------------+ /// ~~~ pub bounds: LogicalRect<Au>, /// The green zone is the greatest extent from which a line can extend to /// before it collides with a float. /// /// ~~~text /// +-----------------------+ /// |::::::::::::::::: | /// |:::::::::::::::::FFFFFF| /// |============:::::FFFFFF| /// |:::::::::::::::::FFFFFF| /// |:::::::::::::::::FFFFFF| /// |::::::::::::::::: | /// | FFFFFFFFF | /// | FFFFFFFFF | /// | FFFFFFFFF | /// | | /// +-----------------------+ /// /// === line /// ::: green zone /// FFF float /// ~~~ pub green_zone: LogicalSize<Au>, /// The inline metrics for this line. pub inline_metrics: InlineMetrics, } impl Line { fn new(writing_mode: WritingMode, minimum_block_size_above_baseline: Au, minimum_depth_below_baseline: Au) -> Line { Line { range: Range::empty(), visual_runs: None, bounds: LogicalRect::zero(writing_mode), green_zone: LogicalSize::zero(writing_mode), inline_metrics: InlineMetrics::new(minimum_block_size_above_baseline, minimum_depth_below_baseline, minimum_block_size_above_baseline), } } } int_range_index! 
{ #[derive(RustcEncodable)] #[doc = "The index of a fragment in a flattened vector of DOM elements."] struct FragmentIndex(isize) } /// Arranges fragments into lines, splitting them up as necessary. struct LineBreaker { /// The floats we need to flow around. floats: Floats, /// The resulting fragment list for the flow, consisting of possibly-broken fragments. new_fragments: Vec<Fragment>, /// The next fragment or fragments that we need to work on. work_list: VecDeque<Fragment>, /// The line we're currently working on. pending_line: Line, /// The lines we've already committed. lines: Vec<Line>, /// The index of the last known good line breaking opportunity. The opportunity will either /// be inside this fragment (if it is splittable) or immediately prior to it. last_known_line_breaking_opportunity: Option<FragmentIndex>, /// The current position in the block direction. cur_b: Au, /// The computed value of the indentation for the first line (`text-indent`, CSS 2.1 § 16.1). first_line_indentation: Au, /// The minimum block-size above the baseline for each line, as specified by the line height /// and font style. minimum_block_size_above_baseline: Au, /// The minimum depth below the baseline for each line, as specified by the line height and /// font style. minimum_depth_below_baseline: Au, } impl LineBreaker { /// Creates a new `LineBreaker` with a set of floats and the indentation of the first line. 
fn new(float_context: Floats, first_line_indentation: Au, minimum_block_size_above_baseline: Au, minimum_depth_below_baseline: Au) -> LineBreaker { LineBreaker { new_fragments: Vec::new(), work_list: VecDeque::new(), pending_line: Line::new(float_context.writing_mode, minimum_block_size_above_baseline, minimum_depth_below_baseline), floats: float_context, lines: Vec::new(), cur_b: Au(0), last_known_line_breaking_opportunity: None, first_line_indentation: first_line_indentation, minimum_block_size_above_baseline: minimum_block_size_above_baseline, minimum_depth_below_baseline: minimum_depth_below_baseline, } } /// Resets the `LineBreaker` to the initial state it had after a call to `new`. fn reset_scanner(&mut self) { self.lines = Vec::new(); self.new_fragments = Vec::new(); self.cur_b = Au(0); self.reset_line(); } /// Reinitializes the pending line to blank data. fn reset_line(&mut self) -> Line { self.last_known_line_breaking_opportunity = None; mem::replace(&mut self.pending_line, Line::new(self.floats.writing_mode, self.minimum_block_size_above_baseline, self.minimum_depth_below_baseline)) } /// Reflows fragments for the given inline flow. fn scan_for_lines(&mut self, flow: &mut InlineFlow, layout_context: &LayoutContext) { self.reset_scanner(); // Create our fragment iterator. debug!("LineBreaker: scanning for lines, {} fragments", flow.fragments.len()); let mut old_fragments = mem::replace(&mut flow.fragments, InlineFragments::new()); let old_fragment_iter = old_fragments.fragments.into_iter(); // TODO(pcwalton): This would likely be better as a list of dirty line indices. That way we // could resynchronize if we discover during reflow that all subsequent fragments must have // the same position as they had in the previous reflow. I don't know how common this case // really is in practice, but it's probably worth handling. self.lines = Vec::new(); // Do the reflow. 
self.reflow_fragments(old_fragment_iter, flow, layout_context); // Perform unicode bidirectional layout. let para_level = flow.base.writing_mode.to_bidi_level(); // The text within a fragment is at a single bidi embedding level (because we split // fragments on level run boundaries during flow construction), so we can build a level // array with just one entry per fragment. let levels: Vec<u8> = self.new_fragments.iter().map(|fragment| match fragment.specific { SpecificFragmentInfo::ScannedText(ref info) => info.run.bidi_level, _ => para_level }).collect(); let mut lines = mem::replace(&mut self.lines, Vec::new()); // If everything is LTR, don't bother with reordering. let has_rtl = levels.iter().cloned().any(unicode_bidi::is_rtl); if has_rtl { // Compute and store the visual ordering of the fragments within the line. for line in &mut lines { let range = line.range.begin().to_usize()..line.range.end().to_usize(); let runs = unicode_bidi::visual_runs(range, &levels); line.visual_runs = Some(runs.iter().map(|run| { let start = FragmentIndex(run.start as isize); let len = FragmentIndex(run.len() as isize); (Range::new(start, len), levels[run.start]) }).collect()); } } // Place the fragments back into the flow. old_fragments.fragments = mem::replace(&mut self.new_fragments, vec![]); flow.fragments = old_fragments; flow.lines = lines; } /// Reflows the given fragments, which have been plucked out of the inline flow. fn reflow_fragments<'a, I>(&mut self, mut old_fragment_iter: I, flow: &'a InlineFlow, layout_context: &LayoutContext) where I: Iterator<Item=Fragment> { loop { // Acquire the next fragment to lay out from the work list or fragment list, as // appropriate. let fragment = match self.next_unbroken_fragment(&mut old_fragment_iter) { None => break, Some(fragment) => fragment, }; // Try to append the fragment. 
self.reflow_fragment(fragment, flow, layout_context); } if !self.pending_line_is_empty() { debug!("LineBreaker: partially full line {} at end of scanning; committing it", self.lines.len()); self.flush_current_line() } } /// Acquires a new fragment to lay out from the work list or fragment list as appropriate. /// Note that you probably don't want to call this method directly in order to be incremental- /// reflow-safe; try `next_unbroken_fragment` instead. fn next_fragment<I>(&mut self, old_fragment_iter: &mut I) -> Option<Fragment> where I: Iterator<Item=Fragment> { self.work_list.pop_front().or_else(|| old_fragment_iter.next()) } /// Acquires a new fragment to lay out from the work list or fragment list, merging it with any /// subsequent fragments as appropriate. In effect, what this method does is to return the next /// fragment to lay out, undoing line break operations that any previous reflows may have /// performed. You probably want to be using this method instead of `next_fragment`. fn next_unbroken_fragment<I>(&mut self, old_fragment_iter: &mut I) -> Option<Fragment> where I: Iterator<Item=Fragment> { let mut result = match self.next_fragment(old_fragment_iter) { None => return None, Some(fragment) => fragment, }; loop { let candidate = match self.next_fragment(old_fragment_iter) { None => return Some(result), Some(fragment) => fragment, }; let need_to_merge = match (&mut result.specific, &candidate.specific) { (&mut SpecificFragmentInfo::ScannedText(ref mut result_info), &SpecificFragmentInfo::ScannedText(ref candidate_info)) => { util::arc_ptr_eq(&result_info.run, &candidate_info.run) && inline_contexts_are_equal(&result.inline_context, &candidate.inline_context) } _ => false, }; if need_to_merge { result.merge_with(candidate); continue } self.work_list.push_front(candidate); return Some(result) } } /// Commits a line to the list. 
    fn flush_current_line(&mut self) {
        debug!("LineBreaker: flushing line {}: {:?}", self.lines.len(), self.pending_line)
fn new_inline_metrics_for_line(&self, new_fragment: &Fragment, layout_context: &LayoutContext) -> InlineMetrics { self.pending_line.inline_metrics.max(&new_fragment.inline_metrics(layout_context)) } fn new_block_size_for_line(&self, new_fragment: &Fragment, layout_context: &LayoutContext) -> Au { max(self.pending_line.bounds.size.block, self.new_inline_metrics_for_line(new_fragment, layout_context).block_size()) } /// Computes the position of a line that has only the provided fragment. Returns the bounding /// rect of the line's green zone (whose origin coincides with the line's origin) and the /// actual inline-size of the first fragment after splitting. fn initial_line_placement(&self, flow: &InlineFlow, first_fragment: &Fragment, ceiling: Au) -> (LogicalRect<Au>, Au) { debug!("LineBreaker: trying to place first fragment of line {}; fragment size: {:?}, \ splittable: {}", self.lines.len(), first_fragment.border_box.size, first_fragment.can_split()); // Initially, pretend a splittable fragment has zero inline-size. We will move it later if // it has nonzero inline-size and that causes problems. let placement_inline_size = if first_fragment.can_split() { Au(0) } else { first_fragment.margin_box_inline_size() + self.indentation_for_pending_fragment() }; // Try to place the fragment between floats. let line_bounds = self.floats.place_between_floats(&PlacementInfo { size: LogicalSize::new(self.floats.writing_mode, placement_inline_size, first_fragment.border_box.size.block), ceiling: ceiling, max_inline_size: flow.base.position.size.inline, kind: FloatKind::Left, }); // Simple case: if the fragment fits, then we can stop here. if line_bounds.size.inline > first_fragment.margin_box_inline_size() { debug!("LineBreaker: fragment fits on line {}", self.lines.len()); return (line_bounds, first_fragment.margin_box_inline_size()); } // If not, but we can't split the fragment, then we'll place the line here and it will // overflow. 
if !first_fragment.can_split() { debug!("LineBreaker: line doesn't fit, but is unsplittable"); } (line_bounds, first_fragment.margin_box_inline_size()) } /// Performs float collision avoidance. This is called when adding a fragment is going to /// increase the block-size, and because of that we will collide with some floats. /// /// We have two options here: /// 1) Move the entire line so that it doesn't collide any more. /// 2) Break the line and put the new fragment on the next line. /// /// The problem with option 1 is that we might move the line and then wind up breaking anyway, /// which violates the standard. But option 2 is going to look weird sometimes. /// /// So we'll try to move the line whenever we can, but break if we have to. /// /// Returns false if and only if we should break the line. fn avoid_floats(&mut self, flow: &InlineFlow, in_fragment: Fragment, new_block_size: Au) -> bool { debug!("LineBreaker: entering float collision avoider!"); // First predict where the next line is going to be. let (next_line, first_fragment_inline_size) = self.initial_line_placement(flow, &in_fragment, self.pending_line.bounds.start.b); let next_green_zone = next_line.size; let new_inline_size = self.pending_line.bounds.size.inline + first_fragment_inline_size; // Now, see if everything can fit at the new location. if next_green_zone.inline >= new_inline_size && next_green_zone.block >= new_block_size { debug!("LineBreaker: case=adding fragment collides vertically with floats: moving \ line"); self.pending_line.bounds.start = next_line.start; self.pending_line.green_zone = next_green_zone; debug_assert!(!self.pending_line_is_empty(), "Non-terminating line breaking"); self.work_list.push_front(in_fragment); return true } debug!("LineBreaker: case=adding fragment collides vertically with floats: breaking line"); self.work_list.push_front(in_fragment); false } /// Tries to append the given fragment to the line, splitting it if necessary. 
Commits the /// current line if needed. fn reflow_fragment(&mut self, mut fragment: Fragment, flow: &InlineFlow, layout_context: &LayoutContext) { // Determine initial placement for the fragment if we need to. // // Also, determine whether we can legally break the line before, or inside, this fragment. let fragment_is_line_break_opportunity = if self.pending_line_is_empty() { fragment.strip_leading_whitespace_if_necessary(); let (line_bounds, _) = self.initial_line_placement(flow, &fragment, self.cur_b); self.pending_line.bounds.start = line_bounds.start; self.pending_line.green_zone = line_bounds.size; false } else { fragment.white_space().allow_wrap() }; debug!("LineBreaker: trying to append to line {} (fragment size: {:?}, green zone: {:?}): \ {:?}", self.lines.len(), fragment.border_box.size, self.pending_line.green_zone, fragment); // NB: At this point, if `green_zone.inline < self.pending_line.bounds.size.inline` or // `green_zone.block < self.pending_line.bounds.size.block`, then we committed a line that // overlaps with floats. let green_zone = self.pending_line.green_zone; let new_block_size = self.new_block_size_for_line(&fragment, layout_context); if new_block_size > green_zone.block { // Uh-oh. Float collision imminent. Enter the float collision avoider! if !self.avoid_floats(flow, fragment, new_block_size) { self.flush_current_line(); } return } // Record the last known good line break opportunity if this is one. if fragment_is_line_break_opportunity { self.last_known_line_breaking_opportunity = Some(self.pending_line.range.end()) } // If we must flush the line after finishing this fragment due to `white-space: pre`, // detect that. let line_flush_mode = if fragment.white_space().preserve_newlines() { if fragment.requires_line_break_afterward_if_wrapping_on_newlines() { LineFlushMode::Flush } else { LineFlushMode::No } } else { LineFlushMode::No }; // If we're not going to overflow the green zone vertically, we might still do so // horizontally. 
We'll try to place the whole fragment on this line and break somewhere if // it doesn't fit. let indentation = self.indentation_for_pending_fragment(); let new_inline_size = self.pending_line.bounds.size.inline + fragment.margin_box_inline_size() + indentation; if new_inline_size <= green_zone.inline { debug!("LineBreaker: fragment fits without splitting"); self.push_fragment_to_line(layout_context, fragment, line_flush_mode); return } // If the wrapping mode prevents us from splitting, then back up and split at the last // known good split point. if !fragment.white_space().allow_wrap() { debug!("LineBreaker: fragment can't split; falling back to last known good split point"); if !self.split_line_at_last_known_good_position() { // No line breaking opportunity exists at all for this line. Overflow. self.push_fragment_to_line(layout_context, fragment, line_flush_mode); } else { self.work_list.push_front(fragment); } return; } // Split it up! let available_inline_size = green_zone.inline - self.pending_line.bounds.size.inline - indentation; let inline_start_fragment; let inline_end_fragment; let split_result = match fragment.calculate_split_position(available_inline_size, self.pending_line_is_empty()) { None => { // We failed to split. Defer to the next line if we're allowed to; otherwise, // rewind to the last line breaking opportunity. if fragment_is_line_break_opportunity { debug!("LineBreaker: fragment was unsplittable; deferring to next line"); self.work_list.push_front(fragment); self.flush_current_line(); } else if self.split_line_at_last_known_good_position() { // We split the line at a known-good prior position. Restart with the current // fragment. self.work_list.push_front(fragment) } else { // We failed to split and there is no known-good place on this line to split. // Overflow. 
self.push_fragment_to_line(layout_context, fragment, LineFlushMode::No) } return } Some(split_result) => split_result, }; inline_start_fragment = split_result.inline_start.as_ref().map(|x| { fragment.transform_with_split_info(x, split_result.text_run.clone()) }); inline_end_fragment = split_result.inline_end.as_ref().map(|x| { fragment.transform_with_split_info(x, split_result.text_run.clone()) }); // Push the first fragment onto the line we're working on and start off the next line with // the second fragment. If there's no second fragment, the next line will start off empty. match (inline_start_fragment, inline_end_fragment) { (Some(inline_start_fragment), Some(inline_end_fragment)) => { self.push_fragment_to_line(layout_context, inline_start_fragment, LineFlushMode::Flush); self.work_list.push_front(inline_end_fragment) }, (Some(fragment), None) => { self.push_fragment_to_line(layout_context, fragment, line_flush_mode); } (None, Some(fragment)) => { // Yes, this can happen! self.flush_current_line(); self.work_list.push_front(fragment) } (None, None) => {} } } /// Pushes a fragment to the current line unconditionally, possibly truncating it and placing /// an ellipsis based on the value of `text-overflow`. If `flush_line` is `Flush`, then flushes /// the line afterward; fn push_fragment_to_line(&mut self, layout_context: &LayoutContext, fragment: Fragment, line_flush_mode: LineFlushMode) { let indentation = self.indentation_for_pending_fragment(); if self.pending_line_is_empty() { debug_assert!(self.new_fragments.len() <= (isize::MAX as usize)); self.pending_line.range.reset(FragmentIndex(self.new_fragments.len() as isize), FragmentIndex(0)); } // Determine if an ellipsis will be necessary to account for `text-overflow`. 
let mut need_ellipsis = false; let available_inline_size = self.pending_line.green_zone.inline - self.pending_line.bounds.size.inline - indentation; match (fragment.style().get_inheritedtext().text_overflow, fragment.style().get_box().overflow_x) { (text_overflow::T::clip, _) | (_, overflow_x::T::visible) => {} (text_overflow::T::ellipsis, _) => { need_ellipsis = fragment.margin_box_inline_size() > available_inline_size; } } if !need_ellipsis { self.push_fragment_to_line_ignoring_text_overflow(fragment, layout_context); } else { let ellipsis = fragment.transform_into_ellipsis(layout_context); if let Some(truncation_info) = fragment.truncate_to_inline_size(available_inline_size - ellipsis.margin_box_inline_size()) { let fragment = fragment.transform_with_split_info(&truncation_info.split, truncation_info.text_run); self.push_fragment_to_line_ignoring_text_overflow(fragment, layout_context); } self.push_fragment_to_line_ignoring_text_overflow(ellipsis, layout_context); } if line_flush_mode == LineFlushMode::Flush { self.flush_current_line() } } /// Pushes a fragment to the current line unconditionally, without placing an ellipsis in the /// case of `text-overflow: ellipsis`. 
fn push_fragment_to_line_ignoring_text_overflow(&mut self, fragment: Fragment, layout_context: &LayoutContext) { let indentation = self.indentation_for_pending_fragment(); self.pending_line.range.extend_by(FragmentIndex(1)); if !fragment.is_inline_absolute() { self.pending_line.bounds.size.inline = self.pending_line.bounds.size.inline + fragment.margin_box_inline_size() + indentation; self.pending_line.inline_metrics = self.new_inline_metrics_for_line(&fragment, layout_context); self.pending_line.bounds.size.block = self.new_block_size_for_line(&fragment, layout_context); } self.new_fragments.push(fragment); } fn split_line_at_last_known_good_position(&mut self) -> bool { let last_known_line_breaking_opportunity = match self.last_known_line_breaking_opportunity { None => return false, Some(last_known_line_breaking_opportunity) => last_known_line_breaking_opportunity, }; for fragment_index in (last_known_line_breaking_opportunity.get().. self.pending_line.range.end().get()).rev() { debug_assert!(fragment_index == (self.new_fragments.len() as isize) - 1); self.work_list.push_front(self.new_fragments.pop().unwrap()); } // FIXME(pcwalton): This should actually attempt to split the last fragment if // possible to do so, to handle cases like: // // (available width) // +-------------+ // The alphabet // (<em>abcdefghijklmnopqrstuvwxyz</em>) // // Here, the last known-good split point is inside the fragment containing // "The alphabet (", which has already been committed by the time we get to this // point. Unfortunately, the existing splitting API (`calculate_split_position`) // has no concept of "split right before the last non-whitespace position". We'll // need to add that feature to the API to handle this case correctly. self.pending_line.range.extend_to(last_known_line_breaking_opportunity); self.flush_current_line(); true } /// Returns the indentation that needs to be applied before the fragment we're reflowing. 
fn indentation_for_pending_fragment(&self) -> Au { if self.pending_line_is_empty() && self.lines.is_empty() { self.first_line_indentation } else { Au(0) } } /// Returns true if the pending line is empty and false otherwise. fn pending_line_is_empty(&self) -> bool { self.pending_line.range.length() == FragmentIndex(0) } } /// Represents a list of inline fragments, including element ranges. #[derive(RustcEncodable, Clone)] pub struct InlineFragments { /// The fragments themselves. pub fragments: Vec<Fragment>, } impl InlineFragments { /// Creates an empty set of inline fragments. pub fn new() -> InlineFragments { InlineFragments { fragments: vec![], } } /// Returns the number of inline fragments. pub fn len(&self) -> usize { self.fragments.len() } /// Returns true if this list contains no fragments and false if it contains at least one /// fragment. pub fn is_empty(&self) -> bool { self.fragments.is_empty() } /// A convenience function to return the fragment at a given index. pub fn get(&self, index: usize) -> &Fragment { &self.fragments[index] } /// A convenience function to return a mutable reference to the fragment at a given index. pub fn get_mut(&mut self, index: usize) -> &mut Fragment { &mut self.fragments[index] } } /// Flows for inline layout. #[derive(RustcEncodable)] pub struct InlineFlow { /// Data common to all flows. pub base: BaseFlow, /// A vector of all inline fragments. Several fragments may correspond to one node/element. pub fragments: InlineFragments, /// A vector of ranges into fragments that represents line positions. These ranges are disjoint /// and are the result of inline layout. This also includes some metadata used for positioning /// lines. pub lines: Vec<Line>, /// The minimum block-size above the baseline for each line, as specified by the line height /// and font style. pub minimum_block_size_above_baseline: Au, /// The minimum depth below the baseline for each line, as specified by the line height and /// font style. 
pub minimum_depth_below_baseline: Au, /// The amount of indentation to use on the first line. This is determined by our block parent /// (because percentages are relative to the containing block, and we aren't in a position to /// compute things relative to our parent's containing block). pub first_line_indentation: Au, } impl InlineFlow { pub fn from_fragments(fragments: InlineFragments, writing_mode: WritingMode) -> InlineFlow { let mut flow = InlineFlow { base: BaseFlow::new(None, writing_mode, ForceNonfloatedFlag::ForceNonfloated), fragments: fragments, lines: Vec::new(), minimum_block_size_above_baseline: Au(0),<|fim▁hole|> for fragment in &flow.fragments.fragments { if fragment.is_generated_content() { flow.base.restyle_damage.insert(RESOLVE_GENERATED_CONTENT) } } flow } /// Returns the distance from the baseline for the logical block-start inline-start corner of /// this fragment, taking into account the value of the CSS `vertical-align` property. /// Negative values mean "toward the logical block-start" and positive values mean "toward the /// logical block-end". /// /// The extra boolean is set if and only if `largest_block_size_for_top_fragments` and/or /// `largest_block_size_for_bottom_fragments` were updated. That is, if the box has a `top` or /// `bottom` value for `vertical-align`, true is returned. fn distance_from_baseline(fragment: &Fragment, ascent: Au, parent_text_block_start: Au, parent_text_block_end: Au, block_size_above_baseline: &mut Au, depth_below_baseline: &mut Au, largest_block_size_for_top_fragments: &mut Au, largest_block_size_for_bottom_fragments: &mut Au, layout_context: &LayoutContext) -> (Au, bool) { let (mut offset_from_baseline, mut largest_size_updated) = (Au(0), false); for style in fragment.inline_styles() { // Ignore `vertical-align` values for table cells. 
let box_style = style.get_box(); match box_style.display { display::T::inline | display::T::block | display::T::inline_block => {} _ => continue, } match box_style.vertical_align { vertical_align::T::baseline => {} vertical_align::T::middle => { // TODO: x-height value should be used from font info. // TODO: Doing nothing here passes our current reftests but doesn't work in // all situations. Add vertical align reftests and fix this. }, vertical_align::T::sub => { let sub_offset = (parent_text_block_start + parent_text_block_end) .scale_by(FONT_SUBSCRIPT_OFFSET_RATIO); offset_from_baseline = offset_from_baseline + sub_offset }, vertical_align::T::super_ => { let super_offset = (parent_text_block_start + parent_text_block_end) .scale_by(FONT_SUPERSCRIPT_OFFSET_RATIO); offset_from_baseline = offset_from_baseline - super_offset }, vertical_align::T::text_top => { let fragment_block_size = *block_size_above_baseline + *depth_below_baseline; let prev_depth_below_baseline = *depth_below_baseline; *block_size_above_baseline = parent_text_block_start; *depth_below_baseline = fragment_block_size - *block_size_above_baseline; offset_from_baseline = offset_from_baseline + *depth_below_baseline - prev_depth_below_baseline }, vertical_align::T::text_bottom => { let fragment_block_size = *block_size_above_baseline + *depth_below_baseline; let prev_depth_below_baseline = *depth_below_baseline; *depth_below_baseline = parent_text_block_end; *block_size_above_baseline = fragment_block_size - *depth_below_baseline; offset_from_baseline = offset_from_baseline + *depth_below_baseline - prev_depth_below_baseline }, vertical_align::T::top => { if !largest_size_updated { largest_size_updated = true; *largest_block_size_for_top_fragments = max(*largest_block_size_for_top_fragments, *block_size_above_baseline + *depth_below_baseline); offset_from_baseline = offset_from_baseline + *block_size_above_baseline } }, vertical_align::T::bottom => { if !largest_size_updated { largest_size_updated 
= true; *largest_block_size_for_bottom_fragments = max(*largest_block_size_for_bottom_fragments, *block_size_above_baseline + *depth_below_baseline); offset_from_baseline = offset_from_baseline - *depth_below_baseline } }, vertical_align::T::LengthOrPercentage(LengthOrPercentage::Length(length)) => { offset_from_baseline = offset_from_baseline - length } vertical_align::T::LengthOrPercentage(LengthOrPercentage::Percentage(p)) => { let line_height = fragment.calculate_line_height(layout_context); let percent_offset = line_height.scale_by(p); offset_from_baseline = offset_from_baseline - percent_offset } vertical_align::T::LengthOrPercentage(LengthOrPercentage::Calc(calc)) => { let line_height = fragment.calculate_line_height(layout_context); let percent_offset = line_height.scale_by(calc.percentage()); offset_from_baseline = offset_from_baseline - percent_offset - calc.length() } } } (offset_from_baseline - ascent, largest_size_updated) } /// Sets fragment positions in the inline direction based on alignment for one line. This /// performs text justification if mandated by the style. fn set_inline_fragment_positions(fragments: &mut InlineFragments, line: &Line, line_align: text_align::T, indentation: Au, is_last_line: bool) { // Figure out how much inline-size we have. let slack_inline_size = max(Au(0), line.green_zone.inline - line.bounds.size.inline); // Compute the value we're going to use for `text-justify`. if fragments.fragments.is_empty() { return } let text_justify = fragments.fragments[0].style().get_inheritedtext().text_justify; // Translate `left` and `right` to logical directions. 
let is_ltr = fragments.fragments[0].style().writing_mode.is_bidi_ltr(); let line_align = match (line_align, is_ltr) { (text_align::T::left, true) | (text_align::T::servo_left, true) | (text_align::T::right, false) | (text_align::T::servo_right, false) => text_align::T::start, (text_align::T::left, false) | (text_align::T::servo_left, false) | (text_align::T::right, true) | (text_align::T::servo_right, true) => text_align::T::end, _ => line_align }; // Set the fragment inline positions based on that alignment, and justify the text if // necessary. let mut inline_start_position_for_fragment = line.bounds.start.i + indentation; match line_align { text_align::T::justify if !is_last_line && text_justify != text_justify::T::none => { InlineFlow::justify_inline_fragments(fragments, line, slack_inline_size) } text_align::T::justify | text_align::T::start => {} text_align::T::center | text_align::T::servo_center => { inline_start_position_for_fragment = inline_start_position_for_fragment + slack_inline_size.scale_by(0.5) } text_align::T::end => { inline_start_position_for_fragment = inline_start_position_for_fragment + slack_inline_size } text_align::T::left | text_align::T::servo_left | text_align::T::right | text_align::T::servo_right => unreachable!() } // Lay out the fragments in visual order. let run_count = match line.visual_runs { Some(ref runs) => runs.len(), None => 1 }; for run_idx in 0..run_count { let (range, level) = match line.visual_runs { Some(ref runs) if is_ltr => runs[run_idx], Some(ref runs) => runs[run_count - run_idx - 1], // reverse order for RTL runs None => (line.range, 0) }; // If the bidi embedding direction is opposite the layout direction, lay out this // run in reverse order. 
let reverse = unicode_bidi::is_ltr(level) != is_ltr; let fragment_indices = if reverse { (range.end().get() - 1..range.begin().get() - 1).step_by(-1) } else { (range.begin().get()..range.end().get()).step_by(1) }; for fragment_index in fragment_indices { let fragment = fragments.get_mut(fragment_index as usize); inline_start_position_for_fragment = inline_start_position_for_fragment + fragment.margin.inline_start; let border_start = if fragment.style.writing_mode.is_bidi_ltr() == is_ltr { inline_start_position_for_fragment } else { line.green_zone.inline - inline_start_position_for_fragment - fragment.margin.inline_end - fragment.border_box.size.inline }; fragment.border_box = LogicalRect::new(fragment.style.writing_mode, border_start, fragment.border_box.start.b, fragment.border_box.size.inline, fragment.border_box.size.block); fragment.update_late_computed_inline_position_if_necessary(); if !fragment.is_inline_absolute() { inline_start_position_for_fragment = inline_start_position_for_fragment + fragment.border_box.size.inline + fragment.margin.inline_end; } } } } /// Justifies the given set of inline fragments, distributing the `slack_inline_size` among all /// of them according to the value of `text-justify`. fn justify_inline_fragments(fragments: &mut InlineFragments, line: &Line, slack_inline_size: Au) { // Fast path. if slack_inline_size == Au(0) { return } // First, calculate the number of expansion opportunities (spaces, normally). 
let mut expansion_opportunities = 0i32; for fragment_index in line.range.each_index() { let fragment = fragments.get(fragment_index.to_usize()); let scanned_text_fragment_info = match fragment.specific { SpecificFragmentInfo::ScannedText(ref info) if !info.range.is_empty() => info, _ => continue }; let fragment_range = scanned_text_fragment_info.range; for slice in scanned_text_fragment_info.run.character_slices_in_range(&fragment_range) { expansion_opportunities += slice.glyphs.space_count_in_range(&slice.range) as i32 } } // Then distribute all the space across the expansion opportunities. let space_per_expansion_opportunity = slack_inline_size.to_f64_px() / (expansion_opportunities as f64); for fragment_index in line.range.each_index() { let fragment = fragments.get_mut(fragment_index.to_usize()); let mut scanned_text_fragment_info = match fragment.specific { SpecificFragmentInfo::ScannedText(ref mut info) if !info.range.is_empty() => info, _ => continue }; let fragment_range = scanned_text_fragment_info.range; // FIXME(pcwalton): This is an awful lot of uniqueness making. I don't see any easy way // to get rid of it without regressing the performance of the non-justified case, // though. let run = Arc::make_mut(&mut scanned_text_fragment_info.run); { let glyph_runs = Arc::make_mut(&mut run.glyphs); for mut glyph_run in &mut *glyph_runs { let mut range = glyph_run.range.intersect(&fragment_range); if range.is_empty() { continue } range.shift_by(-glyph_run.range.begin()); let glyph_store = Arc::make_mut(&mut glyph_run.glyph_store); glyph_store.distribute_extra_space_in_range(&range, space_per_expansion_opportunity); } } // Recompute the fragment's border box size. 
let new_inline_size = run.advance_for_range(&fragment_range); let new_size = LogicalSize::new(fragment.style.writing_mode, new_inline_size, fragment.border_box.size.block); fragment.border_box = LogicalRect::from_point_size(fragment.style.writing_mode, fragment.border_box.start, new_size); } } /// Sets final fragment positions in the block direction for one line. Assumes that the /// fragment positions were initially set to the distance from the baseline first. fn set_block_fragment_positions(fragments: &mut InlineFragments, line: &Line, line_distance_from_flow_block_start: Au, baseline_distance_from_block_start: Au, largest_depth_below_baseline: Au) { for fragment_index in line.range.each_index() { // If any of the inline styles say `top` or `bottom`, adjust the vertical align // appropriately. // // FIXME(#5624, pcwalton): This passes our current reftests but isn't the right thing // to do. let fragment = fragments.get_mut(fragment_index.to_usize()); let mut vertical_align = vertical_align::T::baseline; for style in fragment.inline_styles() { match (style.get_box().display, style.get_box().vertical_align) { (display::T::inline, vertical_align::T::top) | (display::T::block, vertical_align::T::top) | (display::T::inline_block, vertical_align::T::top) => { vertical_align = vertical_align::T::top; break } (display::T::inline, vertical_align::T::bottom) | (display::T::block, vertical_align::T::bottom) | (display::T::inline_block, vertical_align::T::bottom) => { vertical_align = vertical_align::T::bottom; break } _ => {} } } match vertical_align { vertical_align::T::top => { fragment.border_box.start.b = fragment.border_box.start.b + line_distance_from_flow_block_start } vertical_align::T::bottom => { fragment.border_box.start.b = fragment.border_box.start.b + line_distance_from_flow_block_start + baseline_distance_from_block_start + largest_depth_below_baseline; } _ => { fragment.border_box.start.b = fragment.border_box.start.b + line_distance_from_flow_block_start + 
baseline_distance_from_block_start } } fragment.update_late_computed_block_position_if_necessary(); } } /// Computes the minimum ascent and descent for each line. This is done during flow /// construction. /// /// `style` is the style of the block. pub fn compute_minimum_ascent_and_descent(&self, font_context: &mut FontContext, style: &ComputedValues) -> (Au, Au) { // As a special case, if this flow contains only hypothetical fragments, then the entire // flow is hypothetical and takes up no space. See CSS 2.1 § 10.3.7. if self.fragments.fragments.iter().all(|fragment| fragment.is_hypothetical()) { return (Au(0), Au(0)) } let font_style = style.get_font_arc(); let font_metrics = text::font_metrics_for_style(font_context, font_style); let line_height = text::line_height_from_style(style, &font_metrics); let inline_metrics = InlineMetrics::from_font_metrics(&font_metrics, line_height); let mut block_size_above_baseline = inline_metrics.block_size_above_baseline; let mut depth_below_baseline = inline_metrics.depth_below_baseline; // According to CSS 2.1 § 10.8, `line-height` of any inline element specifies the minimal // height of line boxes within the element. 
for frag in &self.fragments.fragments { match frag.inline_context { Some(ref inline_context) => { for node in &inline_context.nodes { let font_style = node.style.get_font_arc(); let font_metrics = text::font_metrics_for_style(font_context, font_style); let line_height = text::line_height_from_style(&*node.style, &font_metrics); let inline_metrics = InlineMetrics::from_font_metrics(&font_metrics, line_height); block_size_above_baseline = max(block_size_above_baseline, inline_metrics.block_size_above_baseline); depth_below_baseline = max(depth_below_baseline, inline_metrics.depth_below_baseline); } } None => {} } } (block_size_above_baseline, depth_below_baseline) } fn update_restyle_damage(&mut self) { let mut damage = self.base.restyle_damage; for frag in &self.fragments.fragments { damage.insert(frag.restyle_damage()); } self.base.restyle_damage = damage; } fn containing_block_range_for_flow_surrounding_fragment_at_index(&self, fragment_index: FragmentIndex) -> Range<FragmentIndex> { let mut start_index = fragment_index; while start_index > FragmentIndex(0) && self.fragments .fragments[(start_index - FragmentIndex(1)).get() as usize] .is_positioned() { start_index = start_index - FragmentIndex(1) } let mut end_index = fragment_index + FragmentIndex(1); while end_index < FragmentIndex(self.fragments.fragments.len() as isize) && self.fragments.fragments[end_index.get() as usize].is_positioned() { end_index = end_index + FragmentIndex(1) } Range::new(start_index, end_index - start_index) } fn containing_block_range_for_flow(&self, opaque_flow: OpaqueFlow) -> Range<FragmentIndex> { match self.fragments.fragments.iter().position(|fragment| { match fragment.specific { SpecificFragmentInfo::InlineAbsolute(ref inline_absolute) => { OpaqueFlow::from_flow(&*inline_absolute.flow_ref) == opaque_flow } SpecificFragmentInfo::InlineAbsoluteHypothetical( ref inline_absolute_hypothetical) => { OpaqueFlow::from_flow(&*inline_absolute_hypothetical.flow_ref) == opaque_flow } _ => 
false, } }) { Some(index) => { let index = FragmentIndex(index as isize); self.containing_block_range_for_flow_surrounding_fragment_at_index(index) } None => { // FIXME(pcwalton): This is quite wrong. We should only return the range // surrounding the inline fragments that constitute the containing block. But this // suffices to get Google looking right. Range::new(FragmentIndex(0), FragmentIndex(self.fragments.fragments.len() as isize)) } } } } impl Flow for InlineFlow { fn class(&self) -> FlowClass { FlowClass::Inline } fn as_inline(&self) -> &InlineFlow { self } fn as_mut_inline(&mut self) -> &mut InlineFlow { self } fn bubble_inline_sizes(&mut self) { self.update_restyle_damage(); let _scope = layout_debug_scope!("inline::bubble_inline_sizes {:x}", self.base.debug_id()); let writing_mode = self.base.writing_mode; for kid in self.base.child_iter() { flow::mut_base(kid).floats = Floats::new(writing_mode); } let mut intrinsic_sizes_for_flow = IntrinsicISizesContribution::new(); let mut intrinsic_sizes_for_inline_run = IntrinsicISizesContribution::new(); let mut intrinsic_sizes_for_nonbroken_run = IntrinsicISizesContribution::new(); for fragment in &mut self.fragments.fragments { let intrinsic_sizes_for_fragment = fragment.compute_intrinsic_inline_sizes().finish(); match fragment.style.get_inheritedtext().white_space { white_space::T::nowrap => { intrinsic_sizes_for_nonbroken_run.union_nonbreaking_inline( &intrinsic_sizes_for_fragment) } white_space::T::pre => { intrinsic_sizes_for_nonbroken_run.union_nonbreaking_inline( &intrinsic_sizes_for_fragment); // Flush the intrinsic sizes we've been gathering up in order to handle the // line break, if necessary. 
if fragment.requires_line_break_afterward_if_wrapping_on_newlines() { intrinsic_sizes_for_inline_run.union_inline( &intrinsic_sizes_for_nonbroken_run.finish()); intrinsic_sizes_for_nonbroken_run = IntrinsicISizesContribution::new(); intrinsic_sizes_for_flow.union_block( &intrinsic_sizes_for_inline_run.finish()); intrinsic_sizes_for_inline_run = IntrinsicISizesContribution::new(); } } white_space::T::pre_wrap | white_space::T::pre_line => { // Flush the intrinsic sizes we were gathering up for the nonbroken run, if // necessary. intrinsic_sizes_for_inline_run.union_inline( &intrinsic_sizes_for_nonbroken_run.finish()); intrinsic_sizes_for_nonbroken_run = IntrinsicISizesContribution::new(); intrinsic_sizes_for_nonbroken_run.union_inline(&intrinsic_sizes_for_fragment); // Flush the intrinsic sizes we've been gathering up in order to handle the // line break, if necessary. if fragment.requires_line_break_afterward_if_wrapping_on_newlines() { intrinsic_sizes_for_inline_run.union_inline( &intrinsic_sizes_for_nonbroken_run.finish()); intrinsic_sizes_for_nonbroken_run = IntrinsicISizesContribution::new(); intrinsic_sizes_for_flow.union_block( &intrinsic_sizes_for_inline_run.finish()); intrinsic_sizes_for_inline_run = IntrinsicISizesContribution::new(); } } white_space::T::normal => { // Flush the intrinsic sizes we were gathering up for the nonbroken run, if // necessary. intrinsic_sizes_for_inline_run.union_inline( &intrinsic_sizes_for_nonbroken_run.finish()); intrinsic_sizes_for_nonbroken_run = IntrinsicISizesContribution::new(); intrinsic_sizes_for_nonbroken_run.union_inline(&intrinsic_sizes_for_fragment); } } fragment.restyle_damage.remove(BUBBLE_ISIZES); } // Flush any remaining nonbroken-run and inline-run intrinsic sizes. intrinsic_sizes_for_inline_run.union_inline(&intrinsic_sizes_for_nonbroken_run.finish()); intrinsic_sizes_for_flow.union_block(&intrinsic_sizes_for_inline_run.finish()); // Finish up the computation. 
self.base.intrinsic_inline_sizes = intrinsic_sizes_for_flow.finish() } /// Recursively (top-down) determines the actual inline-size of child contexts and fragments. /// When called on this context, the context has had its inline-size set by the parent context. fn assign_inline_sizes(&mut self, _: &LayoutContext) { let _scope = layout_debug_scope!("inline::assign_inline_sizes {:x}", self.base.debug_id()); // Initialize content fragment inline-sizes if they haven't been initialized already. // // TODO: Combine this with `LineBreaker`'s walk in the fragment list, or put this into // `Fragment`. debug!("InlineFlow::assign_inline_sizes: floats in: {:?}", self.base.floats); let inline_size = self.base.block_container_inline_size; let container_mode = self.base.block_container_writing_mode; self.base.position.size.inline = inline_size; { let this = &mut *self; for fragment in this.fragments.fragments.iter_mut() { let border_collapse = fragment.style.get_inheritedtable().border_collapse; fragment.compute_border_and_padding(inline_size, border_collapse); fragment.compute_block_direction_margins(inline_size); fragment.compute_inline_direction_margins(inline_size); fragment.assign_replaced_inline_size_if_necessary(inline_size); } } // If there are any inline-block kids, propagate explicit block and inline // sizes down to them. let block_container_explicit_block_size = self.base.block_container_explicit_block_size; for kid in self.base.child_iter() { let kid_base = flow::mut_base(kid); kid_base.block_container_inline_size = inline_size; kid_base.block_container_writing_mode = container_mode; kid_base.block_container_explicit_block_size = block_container_explicit_block_size; } } /// Calculate and set the block-size of this flow. See CSS 2.1 § 10.6.1. fn assign_block_size(&mut self, layout_context: &LayoutContext) { let _scope = layout_debug_scope!("inline::assign_block_size {:x}", self.base.debug_id()); // Divide the fragments into lines. 
// // TODO(pcwalton, #226): Get the CSS `line-height` property from the style of the // containing block to determine the minimum line block size. // // TODO(pcwalton, #226): Get the CSS `line-height` property from each non-replaced inline // element to determine its block-size for computing the line's own block-size. // // TODO(pcwalton): Cache the line scanner? debug!("assign_block_size_inline: floats in: {:?}", self.base.floats); // Assign the block-size and late-computed inline-sizes for the inline fragments. let containing_block_block_size = self.base.block_container_explicit_block_size; for fragment in &mut self.fragments.fragments { fragment.update_late_computed_replaced_inline_size_if_necessary(); fragment.assign_replaced_block_size_if_necessary(containing_block_block_size); } // Reset our state, so that we handle incremental reflow correctly. // // TODO(pcwalton): Do something smarter, like Gecko and WebKit? self.lines.clear(); // Determine how much indentation the first line wants. let mut indentation = if self.fragments.is_empty() { Au(0) } else { self.first_line_indentation }; // Perform line breaking. let mut scanner = LineBreaker::new(self.base.floats.clone(), indentation, self.minimum_block_size_above_baseline, self.minimum_depth_below_baseline); scanner.scan_for_lines(self, layout_context); // Now, go through each line and lay out the fragments inside. let mut line_distance_from_flow_block_start = Au(0); let line_count = self.lines.len(); for line_index in 0..line_count { let line = &mut self.lines[line_index]; // Lay out fragments in the inline direction, and justify them if necessary. InlineFlow::set_inline_fragment_positions(&mut self.fragments, line, self.base.flags.text_align(), indentation, line_index + 1 == line_count); // Set the block-start position of the current line. // `line_height_offset` is updated at the end of the previous loop. 
line.bounds.start.b = line_distance_from_flow_block_start; // Calculate the distance from the baseline to the block-start and block-end of the // line. let mut largest_block_size_above_baseline = self.minimum_block_size_above_baseline; let mut largest_depth_below_baseline = self.minimum_depth_below_baseline; // Calculate the largest block-size among fragments with 'top' and 'bottom' values // respectively. let (mut largest_block_size_for_top_fragments, mut largest_block_size_for_bottom_fragments) = (Au(0), Au(0)); for fragment_index in line.range.each_index() { let fragment = &mut self.fragments.fragments[fragment_index.to_usize()]; let InlineMetrics { mut block_size_above_baseline, mut depth_below_baseline, ascent } = fragment.inline_metrics(layout_context); // To calculate text-top and text-bottom value when `vertical-align` is involved, // we should find the top and bottom of the content area of the parent fragment. // "Content area" is defined in CSS 2.1 § 10.6.1. // // TODO: We should extract em-box info from the font size of the parent and // calculate the distances from the baseline to the block-start and the block-end // of the parent's content area. // We should calculate the distance from baseline to the top of parent's content // area. But for now we assume it's the font size. // // CSS 2.1 does not state which font to use. This version of the code uses // the parent's font. // Calculate the final block-size above the baseline for this fragment. // // The no-update flag decides whether `largest_block_size_for_top_fragments` and // `largest_block_size_for_bottom_fragments` are to be updated or not. This will be // set if and only if the fragment has `vertical-align` set to `top` or `bottom`. 
let (distance_from_baseline, no_update_flag) = InlineFlow::distance_from_baseline( fragment, ascent, self.minimum_block_size_above_baseline, self.minimum_depth_below_baseline, &mut block_size_above_baseline, &mut depth_below_baseline, &mut largest_block_size_for_top_fragments, &mut largest_block_size_for_bottom_fragments, layout_context); // Unless the current fragment has `vertical-align` set to `top` or `bottom`, // `largest_block_size_above_baseline` and `largest_depth_below_baseline` are // updated. if !no_update_flag { largest_block_size_above_baseline = max(block_size_above_baseline, largest_block_size_above_baseline); largest_depth_below_baseline = max(depth_below_baseline, largest_depth_below_baseline); } // Temporarily use `fragment.border_box.start.b` to mean "the distance from the // baseline". We will assign the real value later. fragment.border_box.start.b = distance_from_baseline } // Calculate the distance from the baseline to the top of the largest fragment with a // value for `bottom`. Then, if necessary, update `largest_block-size_above_baseline`. largest_block_size_above_baseline = max(largest_block_size_above_baseline, largest_block_size_for_bottom_fragments - largest_depth_below_baseline); // Calculate the distance from baseline to the bottom of the largest fragment with a // value for `top`. Then, if necessary, update `largest_depth_below_baseline`. largest_depth_below_baseline = max(largest_depth_below_baseline, largest_block_size_for_top_fragments - largest_block_size_above_baseline); // Now, the distance from the logical block-start of the line to the baseline can be // computed as `largest_block-size_above_baseline`. let baseline_distance_from_block_start = largest_block_size_above_baseline; // Compute the final positions in the block direction of each fragment. Recall that // `fragment.border_box.start.b` was set to the distance from the baseline above. 
InlineFlow::set_block_fragment_positions(&mut self.fragments, line, line_distance_from_flow_block_start, baseline_distance_from_block_start, largest_depth_below_baseline); // This is used to set the block-start position of the next line in the next loop. line.bounds.size.block = largest_block_size_above_baseline + largest_depth_below_baseline; line_distance_from_flow_block_start = line_distance_from_flow_block_start + line.bounds.size.block; // We're no longer on the first line, so set indentation to zero. indentation = Au(0) } // End of `lines.iter_mut()` loop. // Assign block sizes for any inline-block descendants. let thread_id = self.base.thread_id; for kid in self.base.child_iter() { if flow::base(kid).flags.contains(IS_ABSOLUTELY_POSITIONED) || flow::base(kid).flags.is_float() { continue } kid.assign_block_size_for_inorder_child_if_necessary(layout_context, thread_id); } if self.contains_positioned_fragments() { // Assign block-sizes for all flows in this absolute flow tree. // This is preorder because the block-size of an absolute flow may depend on // the block-size of its containing block, which may also be an absolute flow. 
(&mut *self as &mut Flow).traverse_preorder_absolute_flows( &mut AbsoluteAssignBSizesTraversal(layout_context)); } self.base.position.size.block = match self.lines.last() { Some(ref last_line) => last_line.bounds.start.b + last_line.bounds.size.block, None => Au(0), }; self.base.floats = scanner.floats.clone(); let writing_mode = self.base.floats.writing_mode; self.base.floats.translate(LogicalSize::new(writing_mode, Au(0), -self.base.position.size.block)); let containing_block_size = LogicalSize::new(writing_mode, Au(0), self.base.position.size.block); self.mutate_fragments(&mut |f: &mut Fragment| { match f.specific { SpecificFragmentInfo::InlineBlock(ref mut info) => { let block = flow_ref::deref_mut(&mut info.flow_ref); flow::mut_base(block).early_absolute_position_info = EarlyAbsolutePositionInfo { relative_containing_block_size: containing_block_size, relative_containing_block_mode: writing_mode, }; } SpecificFragmentInfo::InlineAbsolute(ref mut info) => { let block = flow_ref::deref_mut(&mut info.flow_ref); flow::mut_base(block).early_absolute_position_info = EarlyAbsolutePositionInfo { relative_containing_block_size: containing_block_size, relative_containing_block_mode: writing_mode, }; } _ => (), } }); self.base.restyle_damage.remove(REFLOW_OUT_OF_FLOW | REFLOW); for fragment in &mut self.fragments.fragments { fragment.restyle_damage.remove(REFLOW_OUT_OF_FLOW | REFLOW); } } fn compute_absolute_position(&mut self, _: &LayoutContext) { // First, gather up the positions of all the containing blocks (if any). 
// // FIXME(pcwalton): This will get the absolute containing blocks inside `...` wrong in the // case of something like: // // <span style="position: relative"> // Foo // <span style="display: inline-block">...</span> // </span> let mut containing_block_positions = Vec::new(); let container_size = Size2D::new(self.base.block_container_inline_size, Au(0)); for (fragment_index, fragment) in self.fragments.fragments.iter().enumerate() { match fragment.specific { SpecificFragmentInfo::InlineAbsolute(_) => { let containing_block_range = self.containing_block_range_for_flow_surrounding_fragment_at_index( FragmentIndex(fragment_index as isize)); let first_fragment_index = containing_block_range.begin().get() as usize; debug_assert!(first_fragment_index < self.fragments.fragments.len()); let first_fragment = &self.fragments.fragments[first_fragment_index]; let padding_box_origin = (first_fragment.border_box - first_fragment.style.logical_border_width()).start; containing_block_positions.push( padding_box_origin.to_physical(self.base.writing_mode, container_size)); } SpecificFragmentInfo::InlineBlock(_) if fragment.is_positioned() => { let containing_block_range = self.containing_block_range_for_flow_surrounding_fragment_at_index( FragmentIndex(fragment_index as isize)); let first_fragment_index = containing_block_range.begin().get() as usize; debug_assert!(first_fragment_index < self.fragments.fragments.len()); let first_fragment = &self.fragments.fragments[first_fragment_index]; let padding_box_origin = (first_fragment.border_box - first_fragment.style.logical_border_width()).start; containing_block_positions.push( padding_box_origin.to_physical(self.base.writing_mode, container_size)); } _ => {} } } // Then compute the positions of all of our fragments. 
let mut containing_block_positions = containing_block_positions.iter(); for fragment in &mut self.fragments.fragments { let stacking_relative_border_box = fragment.stacking_relative_border_box(&self.base.stacking_relative_position, &self.base .early_absolute_position_info .relative_containing_block_size, self.base .early_absolute_position_info .relative_containing_block_mode, CoordinateSystem::Parent); let stacking_relative_content_box = fragment.stacking_relative_content_box(&stacking_relative_border_box); let clip = fragment.clipping_region_for_children(&self.base.clip, &stacking_relative_border_box, false); let is_positioned = fragment.is_positioned(); match fragment.specific { SpecificFragmentInfo::InlineBlock(ref mut info) => { let flow = flow_ref::deref_mut(&mut info.flow_ref); flow::mut_base(flow).clip = clip; let block_flow = flow.as_mut_block(); block_flow.base.late_absolute_position_info = self.base.late_absolute_position_info; let stacking_relative_position = self.base.stacking_relative_position; if is_positioned { let padding_box_origin = containing_block_positions.next().unwrap(); block_flow.base .late_absolute_position_info .stacking_relative_position_of_absolute_containing_block = stacking_relative_position + *padding_box_origin; } block_flow.base.stacking_relative_position = stacking_relative_content_box.origin; block_flow.base.stacking_relative_position_of_display_port = self.base.stacking_relative_position_of_display_port; } SpecificFragmentInfo::InlineAbsoluteHypothetical(ref mut info) => { let flow = flow_ref::deref_mut(&mut info.flow_ref); flow::mut_base(flow).clip = clip; let block_flow = flow.as_mut_block(); block_flow.base.late_absolute_position_info = self.base.late_absolute_position_info; block_flow.base.stacking_relative_position = stacking_relative_border_box.origin; block_flow.base.stacking_relative_position_of_display_port = self.base.stacking_relative_position_of_display_port; } SpecificFragmentInfo::InlineAbsolute(ref mut info) => { 
let flow = flow_ref::deref_mut(&mut info.flow_ref); flow::mut_base(flow).clip = clip; let block_flow = flow.as_mut_block(); block_flow.base.late_absolute_position_info = self.base.late_absolute_position_info; let stacking_relative_position = self.base.stacking_relative_position; let padding_box_origin = containing_block_positions.next().unwrap(); block_flow.base .late_absolute_position_info .stacking_relative_position_of_absolute_containing_block = stacking_relative_position + *padding_box_origin; block_flow.base.stacking_relative_position = stacking_relative_border_box.origin; block_flow.base.stacking_relative_position_of_display_port = self.base.stacking_relative_position_of_display_port; } _ => {} } } } fn update_late_computed_inline_position_if_necessary(&mut self, _: Au) {} fn update_late_computed_block_position_if_necessary(&mut self, _: Au) {} fn build_display_list(&mut self, layout_context: &LayoutContext) { self.build_display_list_for_inline(layout_context); for fragment in &mut self.fragments.fragments { fragment.restyle_damage.remove(REPAINT); } } fn repair_style(&mut self, _: &Arc<ComputedValues>) {} fn compute_overflow(&self) -> Rect<Au> { let mut overflow = Rect::zero(); let flow_size = self.base.position.size.to_physical(self.base.writing_mode); let relative_containing_block_size = &self.base.early_absolute_position_info.relative_containing_block_size; for fragment in &self.fragments.fragments { overflow = overflow.union(&fragment.compute_overflow(&flow_size, &relative_containing_block_size)) } overflow } fn iterate_through_fragment_border_boxes(&self, iterator: &mut FragmentBorderBoxIterator, level: i32, stacking_context_position: &Point2D<Au>) { // FIXME(#2795): Get the real container size. 
for fragment in &self.fragments.fragments { if !iterator.should_process(fragment) { continue } let stacking_relative_position = &self.base.stacking_relative_position; let relative_containing_block_size = &self.base.early_absolute_position_info.relative_containing_block_size; let relative_containing_block_mode = self.base.early_absolute_position_info.relative_containing_block_mode; iterator.process(fragment, level, &fragment.stacking_relative_border_box(stacking_relative_position, relative_containing_block_size, relative_containing_block_mode, CoordinateSystem::Own) .translate(stacking_context_position)) } } fn mutate_fragments(&mut self, mutator: &mut FnMut(&mut Fragment)) { for fragment in &mut self.fragments.fragments { (*mutator)(fragment) } } fn contains_positioned_fragments(&self) -> bool { self.fragments.fragments.iter().any(|fragment| fragment.is_positioned()) } fn contains_relatively_positioned_fragments(&self) -> bool { self.fragments.fragments.iter().any(|fragment| { fragment.style.get_box().position == position::T::relative }) } fn generated_containing_block_size(&self, for_flow: OpaqueFlow) -> LogicalSize<Au> { let mut containing_block_size = LogicalSize::new(self.base.writing_mode, Au(0), Au(0)); for index in self.containing_block_range_for_flow(for_flow).each_index() { let fragment = &self.fragments.fragments[index.get() as usize]; if fragment.is_absolutely_positioned() { continue } containing_block_size.inline = containing_block_size.inline + fragment.border_box.size.inline; containing_block_size.block = max(containing_block_size.block, fragment.border_box.size.block); } containing_block_size } fn print_extra_flow_children(&self, print_tree: &mut PrintTree) { for fragment in &self.fragments.fragments { print_tree.add_item(format!("{:?}", fragment)); } } } impl fmt::Debug for InlineFlow { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{:?}({:x}) {:?}", self.class(), self.base.debug_id(), flow::base(self)) } } #[derive(Clone)] pub 
struct InlineFragmentNodeInfo { pub address: OpaqueNode, pub style: Arc<ComputedValues>, pub pseudo: PseudoElementType<()>, pub flags: InlineFragmentNodeFlags, } bitflags! { flags InlineFragmentNodeFlags: u8 { const FIRST_FRAGMENT_OF_ELEMENT = 0x01, const LAST_FRAGMENT_OF_ELEMENT = 0x02, } } impl fmt::Debug for InlineFragmentNodeInfo { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "{:?}", self.flags.bits()) } } #[derive(Clone)] pub struct InlineFragmentContext { pub nodes: Vec<InlineFragmentNodeInfo>, } impl InlineFragmentContext { pub fn new() -> InlineFragmentContext { InlineFragmentContext { nodes: vec!(), } } #[inline] pub fn contains_node(&self, node_address: OpaqueNode) -> bool { self.nodes.iter().position(|node| node.address == node_address).is_some() } fn ptr_eq(&self, other: &InlineFragmentContext) -> bool { if self.nodes.len() != other.nodes.len() { return false } for (this_node, other_node) in self.nodes.iter().zip(&other.nodes) { if !util::arc_ptr_eq(&this_node.style, &other_node.style) { return false } } true } } fn inline_contexts_are_equal(inline_context_a: &Option<InlineFragmentContext>, inline_context_b: &Option<InlineFragmentContext>) -> bool { match (inline_context_a, inline_context_b) { (&Some(ref inline_context_a), &Some(ref inline_context_b)) => { inline_context_a.ptr_eq(inline_context_b) } (&None, &None) => true, (&Some(_), &None) | (&None, &Some(_)) => false, } } /// Block-size above the baseline, depth below the baseline, and ascent for a fragment. See CSS 2.1 /// § 10.8.1. #[derive(Clone, Copy, Debug, RustcEncodable)] pub struct InlineMetrics { pub block_size_above_baseline: Au, pub depth_below_baseline: Au, pub ascent: Au, } impl InlineMetrics { /// Creates a new set of inline metrics. 
pub fn new(block_size_above_baseline: Au, depth_below_baseline: Au, ascent: Au) -> InlineMetrics { InlineMetrics { block_size_above_baseline: block_size_above_baseline, depth_below_baseline: depth_below_baseline, ascent: ascent, } } /// Calculates inline metrics from font metrics and line block-size per CSS 2.1 § 10.8.1. #[inline] pub fn from_font_metrics(font_metrics: &FontMetrics, line_height: Au) -> InlineMetrics { let leading = line_height - (font_metrics.ascent + font_metrics.descent); InlineMetrics { block_size_above_baseline: font_metrics.ascent + leading.scale_by(0.5), depth_below_baseline: font_metrics.descent + leading.scale_by(0.5), ascent: font_metrics.ascent, } } /// Calculates inline metrics from font metrics and line block-size per CSS 2.1 § 10.8.1. #[inline] pub fn from_block_height(font_metrics: &FontMetrics, block_height: Au, block_start_margin: Au, block_end_margin: Au) -> InlineMetrics { let leading = block_height + block_start_margin + block_end_margin - (font_metrics.ascent + font_metrics.descent); InlineMetrics { block_size_above_baseline: font_metrics.ascent + leading.scale_by(0.5), depth_below_baseline: font_metrics.descent + leading.scale_by(0.5), ascent: font_metrics.ascent + leading.scale_by(0.5) - block_start_margin, } } pub fn block_size(&self) -> Au { self.block_size_above_baseline + self.depth_below_baseline } pub fn max(&self, other: &InlineMetrics) -> InlineMetrics { InlineMetrics { block_size_above_baseline: max(self.block_size_above_baseline, other.block_size_above_baseline), depth_below_baseline: max(self.depth_below_baseline, other.depth_below_baseline), ascent: max(self.ascent, other.ascent), } } } #[derive(Copy, Clone, PartialEq)] enum LineFlushMode { No, Flush, }<|fim▁end|>
minimum_depth_below_baseline: Au(0), first_line_indentation: Au(0), };
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! Platform-specific extensions to `std` for Unix platforms. //! //! Provides access to platform-level information on Unix platforms, and //! exposes Unix-specific functions that would otherwise be inappropriate as //! part of the core `std` library. //! //! It exposes more ways to deal with platform-specific strings ([`OsStr`], //! [`OsString`]), allows to set permissions more granularly, extract low-level //! file descriptors from files and sockets, and has platform-specific helpers //! for spawning processes. //! //! # Examples //! //! ```no_run //! use std::fs::File; //! use std::os::unix::prelude::*; //! //! fn main() -> std::io::Result<()> { //! let f = File::create("foo.txt")?; //! let fd = f.as_raw_fd(); //! //! // use fd with native unix bindings //! //! Ok(()) //! } //! ``` //! //! [`OsStr`]: crate::ffi::OsStr //! [`OsString`]: crate::ffi::OsString #![stable(feature = "rust1", since = "1.0.0")] #![doc(cfg(unix))] // Use linux as the default platform when documenting on other platforms like Windows #[cfg(doc)] use crate::os::linux as platform; #[cfg(not(doc))] mod platform { #[cfg(target_os = "android")] pub use crate::os::android::*; #[cfg(target_os = "dragonfly")] pub use crate::os::dragonfly::*; #[cfg(target_os = "emscripten")] pub use crate::os::emscripten::*; #[cfg(target_os = "espidf")] pub use crate::os::espidf::*; #[cfg(target_os = "freebsd")] pub use crate::os::freebsd::*; #[cfg(target_os = "fuchsia")] pub use crate::os::fuchsia::*; #[cfg(target_os = "haiku")] pub use crate::os::haiku::*; #[cfg(target_os = "illumos")] pub use crate::os::illumos::*; #[cfg(target_os = "ios")] pub use crate::os::ios::*; #[cfg(any(target_os = "linux", target_os = "l4re"))] pub use crate::os::linux::*; #[cfg(target_os = "macos")] pub use crate::os::macos::*; #[cfg(target_os = "netbsd")] pub use crate::os::netbsd::*; #[cfg(target_os = "openbsd")] pub use crate::os::openbsd::*; #[cfg(target_os = "redox")] pub use 
crate::os::redox::*; #[cfg(target_os = "solaris")] pub use crate::os::solaris::*; #[cfg(target_os = "vxworks")] pub use crate::os::vxworks::*; } pub mod ffi; pub mod fs; pub mod io; pub mod net; pub mod process; pub mod raw; pub mod thread; #[unstable(feature = "peer_credentials_unix_socket", issue = "42839", reason = "unstable")] #[cfg(any( target_os = "android", target_os = "linux", target_os = "dragonfly", target_os = "freebsd", target_os = "ios",<|fim▁hole|> target_os = "macos", target_os = "netbsd", target_os = "openbsd" ))] pub mod ucred; /// A prelude for conveniently writing platform-specific code. /// /// Includes all extension traits, and some important type definitions. #[stable(feature = "rust1", since = "1.0.0")] pub mod prelude { #[doc(no_inline)] #[stable(feature = "rust1", since = "1.0.0")] pub use super::ffi::{OsStrExt, OsStringExt}; #[doc(no_inline)] #[stable(feature = "rust1", since = "1.0.0")] pub use super::fs::DirEntryExt; #[doc(no_inline)] #[stable(feature = "file_offset", since = "1.15.0")] pub use super::fs::FileExt; #[doc(no_inline)] #[stable(feature = "rust1", since = "1.0.0")] pub use super::fs::{FileTypeExt, MetadataExt, OpenOptionsExt, PermissionsExt}; #[doc(no_inline)] #[stable(feature = "rust1", since = "1.0.0")] pub use super::io::{AsFd, AsRawFd, BorrowedFd, FromRawFd, IntoRawFd, OwnedFd, RawFd}; #[doc(no_inline)] #[stable(feature = "rust1", since = "1.0.0")] pub use super::process::{CommandExt, ExitStatusExt}; #[doc(no_inline)] #[stable(feature = "rust1", since = "1.0.0")] pub use super::thread::JoinHandleExt; }<|fim▁end|>
<|file_name|>update_pnacl_tool_revisions.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # Copyright (c) 2013 The Native Client Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import argparse import collections import datetime import email.mime.text import getpass import os import re import smtplib import subprocess import sys import tempfile import urllib2 BUILD_DIR = os.path.dirname(__file__) NACL_DIR = os.path.dirname(BUILD_DIR) TOOLCHAIN_REV_DIR = os.path.join(NACL_DIR, 'toolchain_revisions') PKG_VER = os.path.join(BUILD_DIR, 'package_version', 'package_version.py') PKGS = ['pnacl_newlib', 'pnacl_translator'] REV_FILES = [os.path.join(TOOLCHAIN_REV_DIR, '%s.json' % package) for package in PKGS] def ParseArgs(args): parser = argparse.ArgumentParser( formatter_class=argparse.RawDescriptionHelpFormatter, description="""Update pnacl_newlib.json PNaCl version. LLVM and other projects are checked-in to the NaCl repository, but their head isn't necessarily the one that we currently use in PNaCl. The pnacl_newlib.json and pnacl_translator.json files point at subversion revisions to use for tools such as LLVM. Our build process then downloads pre-built tool tarballs from the toolchain build waterfall. git repository before running this script: ______________________ | | v | ...----A------B------C------D------ NaCl HEAD ^ ^ ^ ^ | | | |__ Latest pnacl_{newlib,translator}.json update. | | | | | |__ A newer LLVM change (LLVM repository HEAD). | | | |__ Oldest LLVM change since this PNaCl version. | |__ pnacl_{newlib,translator}.json points at an older LLVM change. git repository after running this script:<|fim▁hole|> _______________ | | v | ...----A------B------C------D------E------ NaCl HEAD Note that there could be any number of non-PNaCl changes between each of these changelists, and that the user can also decide to update the pointer to B instead of C. 
There is further complication when toolchain builds are merged. """) parser.add_argument('--email', metavar='ADDRESS', type=str, default=getpass.getuser()+'@chromium.org', help="Email address to send errors to.") parser.add_argument('--svn-id', metavar='SVN_ID', type=int, default=0, help="Update to a specific SVN ID instead of the most " "recent SVN ID with a PNaCl change. This value must " "be more recent than the one in the current " "pnacl_newlib.json. This option is useful when multiple " "changelists' toolchain builds were merged, or when " "too many PNaCl changes would be pulled in at the " "same time.") parser.add_argument('--dry-run', default=False, action='store_true', help="Print the changelist that would be sent, but " "don't actually send anything to review.") # TODO(jfb) The following options come from download_toolchain.py and # should be shared in some way. parser.add_argument('--filter_out_predicates', default=[], help="Toolchains to filter out.") return parser.parse_args() def ExecCommand(command): try: return subprocess.check_output(command, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as e: sys.stderr.write('\nRunning `%s` returned %i, got:\n%s\n' % (' '.join(e.cmd), e.returncode, e.output)) raise def GetCurrentRevision(): return [ExecCommand([sys.executable, PKG_VER, 'getrevision', '--revision-package', package]).strip() for package in PKGS] def SetCurrentRevision(revision_num): for package in PKGS: ExecCommand([sys.executable, PKG_VER] + # TODO(dschuff) pnacl_newlib shouldn't use cloud-bucket # once we switch fully to toolchain_build. (['--cloud-bucket', 'nativeclient-archive2/pnacl_buildsh'] if package == 'pnacl_newlib' else []) + ['setrevision', '--revision-package', package, '--revision', str(revision_num)]) def GitCurrentBranch(): return ExecCommand(['git', 'symbolic-ref', 'HEAD', '--short']).strip() def GitStatus(): """List of statuses, one per path, of paths in the current git branch. 
Ignores untracked paths.""" out = ExecCommand(['git', 'status', '--porcelain']).strip().split('\n') return [f.strip() for f in out if not re.match('^\?\? (.*)$', f.strip())] def SyncSources(): """Assumes a git-svn checkout of NaCl. See: www.chromium.org/nativeclient/how-tos/how-to-use-git-svn-with-native-client """ ExecCommand(['gclient', 'sync']) def GitCommitInfo(info='', obj=None, num=None, extra=[]): """Commit information, where info is one of the shorthands in git_formats. obj can be a path or a hash. num is the number of results to return. extra is a list of optional extra arguments.""" # Shorthands for git's pretty formats. # See PRETTY FORMATS format:<string> in `git help log`. git_formats = { '': '', 'hash': '%H', 'date': '%ci', 'author': '%aN', 'subject': '%s', 'body': '%b', } cmd = ['git', 'log', '--format=format:%s' % git_formats[info]] + extra if num: cmd += ['-n'+str(num)] if obj: cmd += [obj] return ExecCommand(cmd).strip() def GitCommitsSince(date): """List of commit hashes since a particular date, in reverse chronological order.""" return GitCommitInfo(info='hash', extra=['--since="%s"' % date]).split('\n') def GitFilesChanged(commit_hash): """List of files changed in a commit.""" return GitCommitInfo(obj=commit_hash, num=1, extra=['--name-only']).split('\n') def GitChangesPath(commit_hash, path): """Returns True if the commit changes a file under the given path.""" return any([ re.search('^' + path, f.strip()) for f in GitFilesChanged(commit_hash)]) def GitBranchExists(name): return len(ExecCommand(['git', 'branch', '--list', name]).strip()) != 0 def GitCheckout(branch, force=False): """Checkout an existing branch. 
force throws away local changes.""" ExecCommand(['git', 'checkout'] + (['--force'] if force else []) + [branch]) def GitCheckoutNewBranch(branch): """Create and checkout a new git branch.""" ExecCommand(['git', 'checkout', '-b', branch]) def GitDeleteBranch(branch, force=False): """Force-delete a branch.""" ExecCommand(['git', 'branch', '-D' if force else '-d', branch]) def GitAdd(file): ExecCommand(['git', 'add', file]) def GitCommit(message): with tempfile.NamedTemporaryFile() as tmp: tmp.write(message) tmp.flush() ExecCommand(['git', 'commit', '--file=%s' % tmp.name]) def UploadChanges(): """Upload changes, don't prompt.""" # TODO(jfb) Using the commit queue and avoiding git try + manual commit # would be much nicer. See '--use-commit-queue' return ExecCommand(['git', 'cl', 'upload', '--send-mail', '-f']) def GitTry(): return ExecCommand(['git', 'try']) def FindCommitWithGitSvnId(git_svn_id): while True: # This command needs to retry because git-svn partially rebuild its # revision map for every commit. Asking it a second time fixes the # issue. out = ExecCommand(['git', 'svn', 'find-rev', 'r' + git_svn_id]).strip() if not re.match('^Partial-rebuilding ', out): break return out def CommitMessageToCleanDict(commit_message): """Extract and clean commit message fields that follow the NaCl commit message convention. 
Don't repeat them as-is, to avoid confusing our infrastructure.""" res = {} fields = [ ['git svn id', ('\s*git-svn-id: ' 'svn://[^@]+@([0-9]+) [a-f0-9\-]+'), '<none>'], ['reviewers tbr', '\s*TBR=([^\n]+)', ''], ['reviewers', '\s*R=([^\n]+)', ''], ['review url', '\s*Review URL: *([^\n]+)', '<none>'], ['bug', '\s*BUG=([^\n]+)', '<none>'], ['test', '\s*TEST=([^\n]+)', '<none>'], ] for key, regex, none in fields: found = re.search(regex, commit_message) if found: commit_message = commit_message.replace(found.group(0), '') res[key] = found.group(1).strip() else: res[key] = none res['body'] = commit_message.strip() return res def SendEmail(user_email, out): if user_email: sys.stderr.write('\nSending email to %s.\n' % user_email) msg = email.mime.text.MIMEText(out) msg['Subject'] = '[PNaCl revision updater] failure!' msg['From'] = '[email protected]' msg['To'] = user_email s = smtplib.SMTP('localhost') s.sendmail(msg['From'], [msg['To']], msg.as_string()) s.quit() else: sys.stderr.write('\nNo email address specified.') def DryRun(out): sys.stdout.write("DRY RUN: " + out + "\n") def Done(out): sys.stdout.write(out) sys.exit(0) class CLInfo: """Changelist information: sorted dictionary of NaCl-standard fields.""" def __init__(self, desc): self._desc = desc self._vals = collections.OrderedDict([ ('git svn id', None), ('hash', None), ('author', None), ('date', None), ('subject', None), ('commits since', None), ('bug', None), ('test', None), ('review url', None), ('reviewers tbr', None), ('reviewers', None), ('body', None), ]) def __getitem__(self, key): return self._vals[key] def __setitem__(self, key, val): assert key in self._vals.keys() self._vals[key] = str(val) def __str__(self): """Changelist to string. A short description of the change, e.g.: r12345: ([email protected]) Subject of the change. If the change is itself pulling in other changes from sub-repositories then take its relevant description and append it to the string. 
These sub-directory updates are also script-generated and therefore have a predictable format. e.g.: r12345: ([email protected]) Subject of the change. | dead123: ([email protected]) Other change in another repository. | beef456: ([email protected]) Yet another cross-repository change. """ desc = (' r' + self._vals['git svn id'] + ': (' + self._vals['author'] + ') ' + self._vals['subject']) if GitChangesPath(self._vals['hash'], 'pnacl/COMPONENT_REVISIONS'): git_hash_abbrev = '[0-9a-fA-F]{7}' email = '[^@)]+@[^)]+\.[^)]+' desc = '\n'.join([desc] + [ ' | ' + line for line in self._vals['body'].split('\n') if re.match('^ *%s: \(%s\) .*$' % (git_hash_abbrev, email), line)]) return desc def FmtOut(tr_points_at, pnacl_changes, err=[], msg=[]): assert isinstance(err, list) assert isinstance(msg, list) old_svn_id = tr_points_at['git svn id'] new_svn_id = pnacl_changes[-1]['git svn id'] if pnacl_changes else '?' changes = '\n'.join([str(cl) for cl in pnacl_changes]) bugs = '\n'.join(list(set( ['BUG= ' + cl['bug'].strip() if cl['bug'] else '' for cl in pnacl_changes]) - set(['']))) reviewers = ', '.join(list(set( [r.strip() for r in (','.join([ cl['author'] + ',' + cl['reviewers tbr'] + ',' + cl['reviewers'] for cl in pnacl_changes])).split(',')]) - set(['']))) return (('*** ERROR ***\n' if err else '') + '\n\n'.join(err) + '\n\n'.join(msg) + ('\n\n' if err or msg else '') + ('Update revision for PNaCl r%s->r%s\n\n' 'Pull the following PNaCl changes into NaCl:\n%s\n\n' '%s\n' 'R= %s\n' 'TEST=git try\n' 'NOTRY=true\n' '(Please LGTM this change and tick the "commit" box)\n' % (old_svn_id, new_svn_id, changes, bugs, reviewers))) def Main(): args = ParseArgs(sys.argv[1:]) tr_points_at = CLInfo('revision update points at PNaCl version') pnacl_changes = [] msg = [] branch = GitCurrentBranch() assert branch == 'master', ('Must be on branch master, currently on %s' % branch) try: status = GitStatus() assert len(status) == 0, ("Repository isn't clean:\n %s" % '\n '.join(status)) 
SyncSources() # The current revision file points at a specific PNaCl LLVM # version. LLVM is checked-in to the NaCl repository, but its head # isn't necessarily the one that we currently use in PNaCl. (pnacl_revision, translator_revision) = GetCurrentRevision() tr_points_at['git svn id'] = pnacl_revision tr_points_at['hash'] = FindCommitWithGitSvnId(tr_points_at['git svn id']) tr_points_at['date'] = GitCommitInfo( info='date', obj=tr_points_at['hash'], num=1) tr_points_at['subject'] = GitCommitInfo( info='subject', obj=tr_points_at['hash'], num=1) recent_commits = GitCommitsSince(tr_points_at['date']) tr_points_at['commits since'] = len(recent_commits) assert len(recent_commits) > 1 if args.svn_id and args.svn_id <= int(tr_points_at['git svn id']): Done(FmtOut(tr_points_at, pnacl_changes, err=["Can't update to SVN ID r%s, the current " "PNaCl revision's SVN ID (r%s) is more recent." % (args.svn_id, tr_points_at['git svn id'])])) # Find the commits changing PNaCl files that follow the previous # PNaCl revision pointer. pnacl_pathes = ['pnacl/', 'toolchain_build/'] pnacl_hashes = list(set(reduce( lambda acc, lst: acc + lst, [[cl for cl in recent_commits[:-1] if GitChangesPath(cl, path)] for path in pnacl_pathes]))) for hash in pnacl_hashes: cl = CLInfo('PNaCl change ' + hash) cl['hash'] = hash for i in ['author', 'date', 'subject']: cl[i] = GitCommitInfo(info=i, obj=hash, num=1) for k,v in CommitMessageToCleanDict( GitCommitInfo(info='body', obj=hash, num=1)).iteritems(): cl[k] = v pnacl_changes.append(cl) # The PNaCl hashes weren't ordered chronologically, make sure the # changes are. pnacl_changes.sort(key=lambda x: int(x['git svn id'])) if args.svn_id: pnacl_changes = [cl for cl in pnacl_changes if int(cl['git svn id']) <= args.svn_id] if len(pnacl_changes) == 0: Done(FmtOut(tr_points_at, pnacl_changes, msg=['No PNaCl change since r%s.' 
% tr_points_at['git svn id']])) new_pnacl_revision = pnacl_changes[-1]['git svn id'] new_branch_name = ('pnacl-revision-update-to-%s' % new_pnacl_revision) if GitBranchExists(new_branch_name): # TODO(jfb) Figure out if git-try succeeded, checkout the branch # and dcommit. raise Exception("Branch %s already exists, the change hasn't " "landed yet.\nPlease check trybots and dcommit it " "manually." % new_branch_name) if args.dry_run: DryRun("Would check out branch: " + new_branch_name) else: GitCheckoutNewBranch(new_branch_name) if args.dry_run: DryRun("Would update PNaCl revision to: %s" % new_pnacl_revision) else: SetCurrentRevision(new_pnacl_revision) for f in REV_FILES: GitAdd(f) GitCommit(FmtOut(tr_points_at, pnacl_changes)) upload_res = UploadChanges() msg += ['Upload result:\n%s' % upload_res] try_res = GitTry() msg += ['Try result:\n%s' % try_res] GitCheckout('master', force=False) Done(FmtOut(tr_points_at, pnacl_changes, msg=msg)) except SystemExit as e: # Normal exit. raise except (BaseException, Exception) as e: # Leave the branch around, if any was created: it'll prevent next # runs of the cronjob from succeeding until the failure is fixed. out = FmtOut(tr_points_at, pnacl_changes, msg=msg, err=['Failed at %s: %s' % (datetime.datetime.now(), e)]) sys.stderr.write(out) if not args.dry_run: SendEmail(args.email, out) GitCheckout('master', force=True) raise if __name__ == '__main__': Main()<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from tehbot.plugins import * import tehbot.plugins as plugins import wolframalpha import prettytable class WolframAlphaPlugin(StandardPlugin): def __init__(self): StandardPlugin.__init__(self) self.parser.add_argument("query", nargs="+") def initialize(self, dbconn): StandardPlugin.initialize(self, dbconn) try: self.client = wolframalpha.Client(self.settings["wolframalpha_app_id"]) except: self.settings["enabled"] = False @staticmethod def remove_empty_columns(table, nr_cols): t = [[] for n in range(len(table))] for i in range(nr_cols): keep = False for line in table: if line[i]: keep = True break if keep: for j in range(len(table)): t[j].append(table[j][i]) return t @staticmethod def format_table(s): table = [[y.strip() for y in x.strip().split("|")] for x in s.splitlines()] nr_cols = max(map(len, table)) table = [[x[i] if i < len(x) else "" for i in range(nr_cols)] for x in table] table = WolframAlphaPlugin.remove_empty_columns(table, nr_cols) if len(table) < 2: s2 = " | ".join(table[0]) return s2 pt = prettytable.PrettyTable() pt.header = False for line in table: pt.add_row(line) s = pt.get_string() return s def execute(self, connection, event, extra, dbconn): try: pargs = self.parser.parse_args(extra["args"]) if self.parser.help_requested: return self.parser.format_help().strip() except Exception as e: return u"Error: %s" % str(e) txt = "\x0303[Wolfram|Alpha]\x03 " try: res = None misc = [] for p in self.client.query(" ".join(pargs.query)).pods: if p.id == "Input": inp = " | ".join(p.text.splitlines()) elif p.id == "Result" and p.text: res = self.format_table(p.text) elif p.title and p.text:<|fim▁hole|> txt += inp + "\n" if res: txt += res + "\n" elif misc: txt += "\n".join(misc) else: raise NameError except (NameError, AttributeError): txt += "No results." except Exception as e: txt = "Error: %s" % e return plugins.shorten(txt, 450) register_plugin(["wolframalpha", "wa"], WolframAlphaPlugin())<|fim▁end|>
misc.append("%s\n%s" % (p.title, self.format_table(p.text)))
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # test documentation build configuration file, created by # sphinx-quickstart on Sun Jun 26 00:00:43 2016. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # # import os # import sys # sys.path.insert(0, os.path.abspath('.')) import sys, os # To change default code-block format in Latex to footnotesize (8pt) # Tip from https://stackoverflow.com/questions/9899283/how-do-you-change-the-code-example-font-size-in-latex-pdf-output-with-sphinx/9955928 # Note: sizes are \footnotesize (8pt), \small (9pt), and \normalsize (10pt). #from sphinx.highlighting import PygmentsBridge #from pygments.formatters.latex import LatexFormatter # #class CustomLatexFormatter(LatexFormatter): # def __init__(self, **options): # super(CustomLatexFormatter, self).__init__(**options) # self.verboptions = r"formatcom=\footnotesize" # #PygmentsBridge.latex_formatter = CustomLatexFormatter # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. # # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = ['sphinx.ext.imgmath'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix(es) of source filenames. 
# You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] source_suffix = '.rst' # The encoding of source files. # # source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'ns-3 project' copyright = u'2006-2019' #author = u'test' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = u'ns-3-dev' # The full version, including alpha/beta/rc tags. release = u'ns-3-dev'<|fim▁hole|># for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: # # today = '' # # Else, today_fmt is used as the format for a strftime call. # # today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # These patterns also affect html_static_path and html_extra_path exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] # The reST default role (used for this markup: `text`) to use for all # documents. # # default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. # # add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). # # add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. # # show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. 
# modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. # keep_warnings = False # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # html_theme = 'ns3_html_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # # html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. html_theme_path = ['../..'] # The name for this set of Sphinx documents. # "<project> v<release> documentation" by default. # # html_title = 'est vtest' html_title = 'Manual' # A shorter title for the navigation bar. Default is the same as html_title. # # html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. # # html_logo = None # The name of an image file (relative to this directory) to use as a favicon of # the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # # html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. # # html_extra_path = [] # If not None, a 'Last updated on:' timestamp is inserted at every page # bottom, using the given strftime format. # The empty string is equivalent to '%b %d, %Y'. 
# html_last_updated_fmt = '%b %d, %Y %H:%M' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. # # html_use_smartypants = True # Custom sidebar templates, maps document names to template names. # # html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. # # html_additional_pages = {} # If false, no module index is generated. # # html_domain_indices = True # If false, no index is generated. # # html_use_index = True # If true, the index is split into individual pages for each letter. # # html_split_index = False # If true, links to the reST sources are added to the pages. # # html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. # # html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. # # html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. # # html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). # html_file_suffix = None # Language to be used for generating the HTML full-text search index. # Sphinx supports the following languages: # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh' # # html_search_language = 'en' # A dictionary with options for the search language support, empty by default. # 'ja' uses this config value. # 'zh' user can custom change `jieba` dictionary path. # # html_search_options = {'type': 'default'} # The name of a javascript file (relative to the configuration directory) that # implements a search results scorer. If empty, the default will be used. # # html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. 
htmlhelp_basename = 'ns-3doc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # # VerbatimBorderColor: make the box around code samples blend into the background # Tip from https://stackoverflow.com/questions/29403100/how-to-remove-the-box-around-the-code-block-in-restructuredtext-with-sphinx # # sphinxcode is the wrapper around \texttt that sphinx.sty provides. # Redefine it here as needed to change the inline literal font size # (double backquotes) to either \footnotesize (8pt) or \small (9pt) # # See above to change the font size of verbatim code blocks # # 'preamble': '', 'preamble': u'''\\usepackage{amssymb} \\definecolor{VerbatimBorderColor}{rgb}{1,1,1} \\renewcommand{\\sphinxcode}[1]{\\texttt{\\small{#1}}} ''' # Latex figure (float) alignment # # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ ('index', 'ns-3-manual.tex', u'ns-3 Manual', u'ns-3 project', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. # latex_logo = '../../ns3_html_theme/static/ns-3.png' # If true, show page references after internal links. # # latex_show_pagerefs = False # If true, show URL addresses after external links. # # latex_show_urls = False # Documents to append as an appendix to all manuals. # # latex_appendices = [] # If false, will not define \strong, \code, \titleref, \crossref ... but only # \sphinxstrong, ..., \sphinxtitleref, ... to help avoid clash with user added # packages. # # latex_keep_old_macro_names = True # If false, no module index is generated. 
# # latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'ns-3-manual', u'ns-3 Manual', [u'ns-3 project'], 1) ] # If true, show URL addresses after external links. # # man_show_urls = False # -- Options for texinfo output --------------------------------------- #texinfo_documents = [ # (master_doc, 'test', u'test Documentation', # author, 'test', 'One line description of project.', # 'Miscellaneous'), #] # Documents to append as an appendix to all manuals. # # texinfo_appendices = [] # If false, no module index is generated. # # texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. # # texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. # # texinfo_no_detailmenu = False<|fim▁end|>
# The language for content autogenerated by Sphinx. Refer to documentation
<|file_name|>syntax-extension-bytes-too-small-u8-literal.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. fn main() { let vec = bytes!(-1024u8); //~ ERROR non-literal in bytes<|fim▁hole|><|fim▁end|>
//~^ WARN `bytes!` is deprecated }
<|file_name|>http.go<|end_file_name|><|fim▁begin|>/* Copyright 2013 Google Inc. <|fim▁hole|> http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package groupcache import ( "fmt" "io/ioutil" "net/http" "net/url" "strings" "sync" "code.google.com/p/goprotobuf/proto" "github.com/ijibu/groupcache/consistenthash" pb "github.com/ijibu/groupcache/groupcachepb" ) // TODO: make this configurable? const defaultBasePath = "/_groupcache/" // TODO: make this configurable as well. const defaultReplicas = 3 // HTTPPool implements PeerPicker for a pool of HTTP peers. type HTTPPool struct { // Context optionally specifies a context for the server to use when it // receives a request. // If nil, the server uses a nil Context. Context func(*http.Request) Context // Transport optionally specifies an http.RoundTripper for the client // to use when it makes a request. // If nil, the client uses http.DefaultTransport. Transport func(Context) http.RoundTripper // base path including leading and trailing slash, e.g. "/_groupcache/" basePath string // this peer's base URL, e.g. "https://example.net:8000" self string mu sync.Mutex peers *consistenthash.Map } var httpPoolMade bool // NewHTTPPool initializes an HTTP pool of peers. // It registers itself as a PeerPicker and as an HTTP handler with the // http.DefaultServeMux. // The self argument be a valid base URL that points to the current server, // for example "http://example.net:8000". 
// 创建一个HttpPool, 只能被使用一次,主要是注册PeerPicker,以及初始化http服务 func NewHTTPPool(self string) *HTTPPool { if httpPoolMade { panic("groupcache: NewHTTPPool must be called only once") } httpPoolMade = true p := &HTTPPool{basePath: defaultBasePath, self: self, peers: consistenthash.New(defaultReplicas, nil)} RegisterPeerPicker(func() PeerPicker { return p }) http.Handle(defaultBasePath, p) return p } // Set updates the pool's list of peers. // Each peer(节点) value should be a valid base URL, // for example "http://example.net:8000". // 设置groupcache集群的节点列表 func (p *HTTPPool) Set(peers ...string) { p.mu.Lock() defer p.mu.Unlock() p.peers = consistenthash.New(defaultReplicas, nil) p.peers.Add(peers...) } // 根据key从远方获取value // 提供按key选取节点,按key作hash,但是这段代码在OS为32bit是存在bug,如果算出来的hashcode正好是-1 * 2^31时 // 会导致out of range,为啥会有这个bug看看代码你就会发现了,作者忘了-1 * 2^31 <= int32 <= 1 * 2^31 -1 func (p *HTTPPool) PickPeer(key string) (ProtoGetter, bool) { p.mu.Lock() defer p.mu.Unlock() if p.peers.IsEmpty() { return nil, false } //找到key存在那个真实的节点中 if peer := p.peers.Get(key); peer != p.self { // TODO: pre-build a slice of *httpGetter when Set() // is called to avoid these two allocations. // 返回服务器节点的信息 return &httpGetter{p.Transport, peer + p.basePath}, true } return nil, false } // http服务处理函数,主要是按http://example.com/groupname/key解析请求,调用group.Get,按协议返回请求 func (p *HTTPPool) ServeHTTP(w http.ResponseWriter, r *http.Request) { // Parse request. if !strings.HasPrefix(r.URL.Path, p.basePath) { panic("HTTPPool serving unexpected path: " + r.URL.Path) } parts := strings.SplitN(r.URL.Path[len(p.basePath):], "/", 2) if len(parts) != 2 { http.Error(w, "bad request", http.StatusBadRequest) return } groupName, err := url.QueryUnescape(parts[0]) if err != nil { http.Error(w, "decoding group: "+err.Error(), http.StatusBadRequest) return } key, err := url.QueryUnescape(parts[1]) if err != nil { http.Error(w, "decoding key: "+err.Error(), http.StatusBadRequest) return } // Fetch the value for this group/key. 
group := GetGroup(groupName) if group == nil { http.Error(w, "no such group: "+groupName, http.StatusNotFound) return } var ctx Context if p.Context != nil { ctx = p.Context(r) } var value []byte err = group.Get(ctx, key, AllocatingByteSliceSink(&value)) if err != nil { http.Error(w, err.Error(), http.StatusInternalServerError) return } // Write the value to the response body as a proto message. body, err := proto.Marshal(&pb.GetResponse{Value: value}) if err != nil { http.Error(w, err.Error(), http.StatusInternalServerError) return } w.Header().Set("Content-Type", "application/x-protobuf") w.Write(body) } // groupcache提供了一个节点互相访问访问的类 type httpGetter struct { transport func(Context) http.RoundTripper baseURL string } // 协议为GET http://example.com/groupname/key // response见groupcache.proto,含有2个可选项分别为[]byte和double // 实现默认使用go自带的net/http包直接发送请求 func (h *httpGetter) Get(context Context, in *pb.GetRequest, out *pb.GetResponse) error { u := fmt.Sprintf( "%v%v/%v", h.baseURL, url.QueryEscape(in.GetGroup()), url.QueryEscape(in.GetKey()), ) req, err := http.NewRequest("GET", u, nil) if err != nil { return err } tr := http.DefaultTransport if h.transport != nil { tr = h.transport(context) } res, err := tr.RoundTrip(req) if err != nil { return err } defer res.Body.Close() if res.StatusCode != http.StatusOK { return fmt.Errorf("server returned: %v", res.Status) } // TODO: avoid this garbage. b, err := ioutil.ReadAll(res.Body) if err != nil { return fmt.Errorf("reading response body: %v", err) } err = proto.Unmarshal(b, out) if err != nil { return fmt.Errorf("decoding response body: %v", err) } return nil }<|fim▁end|>
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
<|file_name|>Realtime.cpp<|end_file_name|><|fim▁begin|>/* The GTKWorkbook Project <http://gtkworkbook.sourceforge.net/> Copyright (C) 2008, 2009 John Bellone, Jr. <[email protected]> This library is free software; you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation; either version 2.1 of the License, or (at your option) any later version. This library is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PRACTICAL PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with the library; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor Boston, MA 02110-1301 USA */ #include <sstream> #include <gdk/gdkkeysyms.h> #include <libgtkworkbook/workbook.h> #include <proactor/Proactor.hpp> #include <proactor/Event.hpp> #include <network/Tcp.hpp> #include "Realtime.hpp" using namespace realtime; /* @description: This method creates a filename with the prefix supplied and uses the pid of the process as its suffix. @pre: The prefix (should be a file path, obviously). 
*/ static std::string AppendProcessId (const gchar * pre) { std::stringstream s; s << pre << getppid(); return s.str(); } static void StreamOpenDialogCallback (GtkWidget * w, gpointer data) { Realtime * rt = (Realtime *)data; OpenStreamDialog * dialog = rt->streamdialog(); if (dialog->widget == NULL) { dialog->rt = rt; dialog->widget = gtk_dialog_new_with_buttons ("Open stream ", GTK_WINDOW (rt->app()->gtkwindow()), (GtkDialogFlags) (GTK_DIALOG_MODAL|GTK_DIALOG_NO_SEPARATOR), GTK_STOCK_OK, GTK_RESPONSE_OK, GTK_STOCK_CANCEL, GTK_RESPONSE_CANCEL, NULL); GtkWidget * gtk_frame = gtk_frame_new ("Connection Options"); GtkWidget * hbox = gtk_hbox_new(FALSE, 0); GtkWidget * box = GTK_DIALOG (dialog->widget)->vbox; dialog->host_entry = gtk_entry_new(); dialog->port_entry = gtk_entry_new(); gtk_entry_set_max_length (GTK_ENTRY (dialog->host_entry), 15); gtk_entry_set_max_length (GTK_ENTRY (dialog->port_entry), 5); gtk_entry_set_width_chars (GTK_ENTRY (dialog->host_entry), 15); gtk_entry_set_width_chars (GTK_ENTRY (dialog->port_entry), 5); gtk_box_pack_start (GTK_BOX (hbox), dialog->host_entry, FALSE, FALSE, 0); gtk_box_pack_end (GTK_BOX (hbox), dialog->port_entry, FALSE, FALSE, 0); gtk_container_add (GTK_CONTAINER (gtk_frame), hbox); gtk_box_pack_start (GTK_BOX (box), gtk_frame, FALSE, FALSE, 0); g_signal_connect (G_OBJECT (dialog->widget), "delete-event", G_CALLBACK (gtk_widget_hide_on_delete), NULL); } gtk_widget_show_all ( dialog->widget ); if (gtk_dialog_run (GTK_DIALOG (dialog->widget)) == GTK_RESPONSE_OK) { const char * host_value = gtk_entry_get_text (GTK_ENTRY (dialog->host_entry)); const char * port_value = gtk_entry_get_text (GTK_ENTRY (dialog->port_entry)); Sheet * sheet = rt->workbook()->add_new_sheet (rt->workbook(), host_value, 100, 20); if (IS_NULLSTR (host_value) || IS_NULLSTR (port_value)) { g_warning ("One of requird values are empty"); } else if (sheet == NULL) { g_warning ("Cannot open connection to %s:%s because of failure to add sheet", host_value, 
port_value); } else if (rt->OpenTcpClient (sheet, host_value, atoi (port_value)) == false) { // STUB: Popup an alertbox about failing to connect? }<|fim▁hole|> } gtk_widget_hide_all ( dialog->widget ); } Realtime::Realtime (Application * appstate, Handle * platform) : Plugin (appstate, platform) { ConfigPair * logpath = appstate->config()->get_pair (appstate->config(), "realtime", "log", "path"); if (IS_NULL (logpath)) { g_critical ("Failed loading log->path from configuration file. Exiting application."); exit(1); } std::string logname = std::string (logpath->value).append("/"); logname.append (AppendProcessId("realtime.").append(".log")); if ((pktlog = fopen (logname.c_str(), "w")) == NULL) { g_critical ("Failed opening file '%s' for packet logging", logname.c_str()); } this->wb = workbook_open (appstate->gtkwindow(), "realtime"); this->packet_parser = NULL; this->tcp_server = NULL; } Realtime::~Realtime (void) { // Iterate through the list of active connections, and begin closing them. This should also // include deleting the pointers to all of the accepting threads. Eventually there should be // a boost::shared_ptr here so that we don't have to do the dirty work. 
ActiveThreads::iterator it = this->threads.begin(); while (it != this->threads.end()) { network::TcpSocket * socket = ((*it).first); concurrent::Thread * thread = ((*it).second); it = this->threads.erase (it); if (socket) delete socket; if (thread) { thread->stop(); delete thread; } } if (this->packet_parser) { this->packet_parser->stop(); delete this->packet_parser; } if (this->tcp_server) { this->tcp_server->stop(); delete this->tcp_server; } FCLOSE (this->pktlog); } bool Realtime::CreateNewServerConnection (network::TcpServerSocket * socket, AcceptThread * accept_thread) { this->threads.push_back ( ActiveThread (socket, accept_thread) ); if (this->tcp_server->addWorker (accept_thread) == false) { g_critical ("Failed starting accepting thread on socket %d", socket->getPort() ); return false; } return true; } bool Realtime::CreateNewClientConnection (network::TcpClientSocket * socket, CsvParser * csv, NetworkDispatcher * nd) { this->threads.push_back ( ActiveThread (socket, csv) ); this->threads.push_back ( ActiveThread (NULL, nd) ); // this is a hack ConnectionThread * reader = new ConnectionThread (socket); if (nd->addWorker (reader) == false) { g_critical ("Failed starting the client reader"); delete reader; return false; } this->threads.push_back ( ActiveThread (NULL, reader) ); // this is a hack return true; } bool Realtime::OpenTcpServer (int port) { // Has to be above the service ports. if (port < 1000) { g_warning ("Failed starting Tcp server: port (%d) must be above 1000", port); return false; } // The first time we attempt to create a port to receive input on we need to create a dispatcher, and // specify an event identifier so that we can communicate with it from workers. At this point the // Packet Parser is created as well. 
if (this->tcp_server == NULL) { int eventId = proactor::Event::uniqueEventId(); NetworkDispatcher * nd = new NetworkDispatcher (eventId); PacketParser * pp = new PacketParser (this->workbook(), this->pktlog, 0); if (nd->start() == false) { g_critical ("Failed starting network dispatcher for tcp server"); return false; } if (this->app()->proactor()->addWorker (eventId, pp) == false) { g_critical ("Failed starting packet parser for tcp server"); return false; } this->tcp_server = nd; this->packet_parser = pp; } network::TcpServerSocket * socket = new network::TcpServerSocket (port); if (socket->start(5) == false) { g_critical ("Failed starting network socket for tcp server on port %d", port); return false; } AcceptThread * accept_thread = new AcceptThread (socket->newAcceptor()); return this->CreateNewServerConnection (socket, accept_thread); } bool Realtime::OpenTcpClient (Sheet * sheet, const std::string & address, int port) { // Has to be above the service ports. if (port < 1000) { g_warning ("Failed starting Tcp client: port (%d) must be above 1000", port); return false; } // We need to create a network dispatcher for each one of these connections because of // the current limitation of the Proactor design. It really needs to be rewritten, but // that is a separate project in and of itself. For now a list of dispatchers must be // kept so that we do not lose track. int eventId = proactor::Event::uniqueEventId(); NetworkDispatcher * dispatcher = new NetworkDispatcher (eventId); if (dispatcher->start() == false) { g_critical ("Failed starting network dispatcher for %s:%d", address.c_str(), port); delete dispatcher; return false; } // Keeping this simple is the reason why we need multiple dispatchers. If I could come // up with a simple way to strap on the ability to have multiple sheets without the need // for an additioanl dispatcher/csv combo I would. It totally destroys the principle of // the proactor design. 
CsvParser * csv = new CsvParser (sheet, this->pktlog, 0); if (this->app()->proactor()->addWorker (eventId, csv) == false) { g_critical ("Failed starting csv parser and adding to proactor for %s:%d", address.c_str(), port); delete csv; delete dispatcher; return false; } network::TcpClientSocket * socket = new network::TcpClientSocket; if (socket->connect (address.c_str(), port) == false) { g_critical ("Failed making Tcp connection to %s:%d", address.c_str(), port); delete socket; delete csv; delete dispatcher; return false; } return this->CreateNewClientConnection (socket, csv, dispatcher); } void Realtime::Start(void) { Config * cfg = this->app()->config(); ConfigPair * servport = cfg->get_pair (cfg, "realtime", "tcp", "port"); int port = atoi (servport->value); if (this->OpenTcpServer (port) == true) { g_message ("Opened Tcp server on port %d", port); } } GtkWidget * Realtime::CreateMainMenu (void) { GtkWidget * rtmenu = gtk_menu_new(); GtkWidget * rtmenu_item = gtk_menu_item_new_with_label ("Realtime"); GtkWidget * rtmenu_open = gtk_menu_item_new_with_label ("Open Csv stream..."); gtk_menu_shell_append (GTK_MENU_SHELL (rtmenu), rtmenu_open); g_signal_connect (G_OBJECT (rtmenu_open), "activate", G_CALLBACK (StreamOpenDialogCallback), this); gtk_menu_item_set_submenu (GTK_MENU_ITEM (rtmenu_item), rtmenu); return rtmenu_item; } GtkWidget * Realtime::BuildLayout (void) { GtkWidget * gtk_menu = this->app()->gtkmenu(); GtkWidget * box = gtk_vbox_new (FALSE, 0); GtkWidget * realtime_menu = this->CreateMainMenu(); // Append to the existing menu structure from the application. gtk_menu_shell_prepend (GTK_MENU_SHELL (gtk_menu), realtime_menu); // Setup the workbook. wb->signals[SIG_WORKBOOK_CHANGED] = this->app()->signals[Application::SHEET_CHANGED]; wb->gtk_box = box; // Pack all of the objects into a vertical box, and then pack that box into the application. 
gtk_box_pack_start (GTK_BOX (box), wb->gtk_notebook, TRUE, TRUE, 0); gtk_box_pack_start (GTK_BOX (this->app()->gtkvbox()), box, TRUE, TRUE, 0); return box; }<|fim▁end|>
else { // STUB: Success. Do we want to do anything else here? g_message ("Client connection opened on %s:%s on sheet %s", host_value, port_value, sheet->name); }
<|file_name|>wav.rs<|end_file_name|><|fim▁begin|>//! Audio IO from WAV files. use byteorder::{self, ReadBytesExt, WriteBytesExt, LittleEndian}; use std::fs::File; use std::io::{self, Read, Seek, SeekFrom, Write}; use std::path::Path; use error::{Error, Result}; use types::{SAMPLE_RATE, AudioDevice, Time, Sample}; use utils::helpers::{i16_to_sample, sample_to_16}; /// Reads audio from a wav file. /// /// The reader will continue until it runs out of samples. When it does, the /// reader will return silence until it is reset to the beginning of the file. pub struct WavReader<R: Read> { num_channels: usize, num_samples: Time, samples_read: Time, reader: R } impl WavReader<File> { /// Returns a `WavReader` reading the provided file. pub fn open<P: AsRef<Path>>(filename: P) -> Result<Self> { let file = try!(File::open(filename)); WavReader::new(file) } } impl<R: Read> WavReader<R> { /// Retuns a `WavReader` reading anything implementing `Read`. pub fn new(mut reader: R) -> Result<Self> { let header = try!(WavHeader::read_from_file(&mut reader)); Ok(WavReader { num_channels: header.num_channels as usize, num_samples: (header.data_size / ((header.bit_depth/8) as u32) / (header.num_channels as u32)) as Time, samples_read: 0, reader: reader }) } /// Returns the number of audio samples in the wav file. pub fn get_num_samples(&self) -> Time { self.num_samples } /// Returns true if we have read the entire wav file. pub fn is_done(&self) -> bool { self.samples_read >= self.num_samples } } impl<R: Read+Seek> WavReader<R> { /// Resets the reader to begin reading from the start of the file. 
pub fn restart(&mut self) -> io::Result<u64> { self.samples_read = 0; self.reader.seek(SeekFrom::Start(44)) } } impl<R: Read> AudioDevice for WavReader<R> { fn num_inputs(&self) -> usize { 0 } fn num_outputs(&self) -> usize { self.num_channels } fn tick(&mut self, _: Time, _: &[Sample], outputs: &mut[Sample]) { for i in 0..self.num_channels { let s = if self.samples_read < self.num_samples { let n = self.reader.read_i16::<LittleEndian>() .expect("Failed to read next sample from wav."); i16_to_sample(n) } else { 0.0 }; outputs[i] = s; } self.samples_read += 1; } } /// Writes audio to a wav file. /// /// The writer initializes the data_size to be 0. This will not be overwritten /// with the proper size until `update_data_size` is called. /// /// While `WavReader` only requires its type be `Seek` to use the `restart` /// method, the `WavWriter` reqiures `Seek` for all types, because the final /// data size must be written to the header when the writer leaves scope. pub struct WavWriter<W: Write+Seek> { num_channels: usize, samples_written: usize, writer: W, } impl WavWriter<File> { /// Returns a `WavWriter` writing to the provided file. pub fn create<P: AsRef<Path>>(filename: P, num_channels: usize) -> Result<Self> { let file = try!(File::create(filename)); WavWriter::new(file, num_channels) } } impl<W: Write+Seek> WavWriter<W> { /// Returns a `WavWriter` writing to anything implementing `Write`. 
pub fn new(mut writer: W, num_channels: usize) -> Result<Self> { let header = WavHeader::new(num_channels as u16, SAMPLE_RATE as u32, 0u32); try!(header.write_to_file(&mut writer)); Ok(WavWriter { num_channels: num_channels, samples_written: 0, writer: writer, }) } } impl<W: Write+Seek> Drop for WavWriter<W> { fn drop(&mut self) { // Updates the wav header to have the correct amount of data written let data_size = self.samples_written * self.num_channels * 16/8; let file_size = 36+data_size; self.writer.seek(SeekFrom::Start(4)) .expect("Failed to seek wav file size."); self.writer.write_u32::<LittleEndian>(file_size as u32) .expect("Failed to write wav file size."); self.writer.seek(SeekFrom::Start(40)) .expect("Failed to seek wav data size."); self.writer.write_u32::<LittleEndian>(data_size as u32) .expect("Failed to write wav data size."); } } impl<W: Write+Seek> AudioDevice for WavWriter<W> { fn num_inputs(&self) -> usize { self.num_channels } fn num_outputs(&self) -> usize { 0 } fn tick(&mut self, _: Time, inputs: &[Sample], _: &mut[Sample]) { for s in inputs.iter() { self.writer.write_i16::<LittleEndian>(sample_to_16(*s)) .expect("Failed to write next sample to wav file."); } self.samples_written += 1; } } /// Constants for the strings used in a wav header static RIFF: u32 = 0x46464952; static WAVE: u32 = 0x45564157; static FMT_: u32 = 0x20746d66; static DATA: u32 = 0x61746164; /// A struct container for the wav header #[derive(Clone, Debug)] struct WavHeader { riff_hdr: u32, file_size: u32, wave_lbl: u32, fmt_hdr: u32, section_size: u32, format: u16, num_channels: u16, sample_rate: u32, byte_rate: u32, block_align: u16, bit_depth: u16, data_hdr: u32, data_size: u32, } impl WavHeader { /// Returns a new wav header with all values initalized for our supported /// audio formats fn new(num_channels: u16, sample_rate: u32, data_size: u32) -> Self { WavHeader { riff_hdr: RIFF, file_size: data_size+36, wave_lbl: WAVE, fmt_hdr: FMT_, section_size: 16, format: 1, 
num_channels: num_channels, sample_rate: sample_rate, byte_rate: sample_rate*(num_channels as u32)*16/8, block_align: num_channels*16/8, bit_depth: 16, data_hdr: DATA, data_size: data_size, } } /// Attempts to read a wav header from the provided file fn read_from_file<R: Read>(f: &mut R) -> Result<Self> { let riff_hdr = try!(f.read_u32::<LittleEndian>()); let file_size = try!(f.read_u32::<LittleEndian>()); let wave_lbl = try!(f.read_u32::<LittleEndian>()); let fmt_hdr = try!(f.read_u32::<LittleEndian>()); let section_size = try!(f.read_u32::<LittleEndian>()); let format = try!(f.read_u16::<LittleEndian>()); let num_channels = try!(f.read_u16::<LittleEndian>()); let sample_rate = try!(f.read_u32::<LittleEndian>()); let byte_rate = try!(f.read_u32::<LittleEndian>()); let block_align = try!(f.read_u16::<LittleEndian>()); let bit_depth = try!(f.read_u16::<LittleEndian>()); let data_hdr = try!(f.read_u32::<LittleEndian>()); let data_size = try!(f.read_u32::<LittleEndian>()); let header = WavHeader { riff_hdr: riff_hdr, file_size: file_size, wave_lbl: wave_lbl, fmt_hdr: fmt_hdr, section_size: section_size, format: format, num_channels: num_channels, sample_rate: sample_rate, byte_rate: byte_rate, block_align: block_align, bit_depth: bit_depth, data_hdr: data_hdr, data_size: data_size }; header.check() } /// Returns the header if the wav header has valid fields and uses the /// supported formats, otherwise return a descriptive error fn check(self) -> Result<Self> { // Check the headers are correct if self.riff_hdr != RIFF { return Err(Error::InvalidFile); } if self.wave_lbl != WAVE { return Err(Error::InvalidFile); } if self.fmt_hdr != FMT_ { return Err(Error::InvalidFile); } if self.data_hdr != DATA { return Err(Error::InvalidFile); } // Check sizes are correct if self.file_size != self.data_size + 36 { return Err(Error::InvalidFile); } if self.section_size != 16 { return Err(Error::InvalidFile); } if self.byte_rate != self.sample_rate*(self.num_channels as u32)* 
(self.bit_depth as u32)/8 { return Err(Error::InvalidFile); } if self.block_align != self.num_channels*self.bit_depth/8 { return Err(Error::InvalidFile); } // Check for formats we can read if self.format != 1 { return Err(Error::Unsupported("Only PCM is supported")); } if self.sample_rate != (SAMPLE_RATE as u32) { return Err(Error::Unsupported( "Sample rate conversion not supported")); } if self.bit_depth != 16 { return Err(Error::Unsupported("Only 16-bit supported")); } // If this header is valid, then return it instead Ok(self) } /// Attempts to write this wav header to the provided file fn write_to_file<W: Write>(&self, w: &mut W) -> byteorder::Result<()> { w.write_u32::<LittleEndian>(self.riff_hdr) .and_then(|()| w.write_u32::<LittleEndian>(self.file_size)) .and_then(|()| w.write_u32::<LittleEndian>(self.wave_lbl)) .and_then(|()| w.write_u32::<LittleEndian>(self.fmt_hdr)) .and_then(|()| w.write_u32::<LittleEndian>(self.section_size)) .and_then(|()| w.write_u16::<LittleEndian>(self.format)) .and_then(|()| w.write_u16::<LittleEndian>(self.num_channels)) .and_then(|()| w.write_u32::<LittleEndian>(self.sample_rate)) .and_then(|()| w.write_u32::<LittleEndian>(self.byte_rate)) .and_then(|()| w.write_u16::<LittleEndian>(self.block_align))<|fim▁hole|> .and_then(|()| w.write_u32::<LittleEndian>(self.data_hdr)) .and_then(|()| w.write_u32::<LittleEndian>(self.data_size)) } } #[cfg(test)] mod test { use std::io::Cursor; use types::AudioDevice; use super::{WavHeader, WavReader, WavWriter}; static WAV_HEADER: [u8; 48] = [0x52, 0x49, 0x46, 0x46, 0x28, 0x00, 0x00, 0x00, 0x57, 0x41, 0x56, 0x45, 0x66, 0x6D, 0x74, 0x20, 0x10, 0x00, 0x00, 0x00, 0x01, 0x00, 0x02, 0x00, 0x44, 0xAC, 0x00, 0x00, 0x10, 0xB1, 0x02, 0x00, 0x04, 0x00, 0x10, 0x00, 0x64, 0x61, 0x74, 0x61, 0x04, 0x00, 0x00, 0x00, 0x00, 0x80, 0xFF, 0x7F]; #[test] fn test_read_wav_header() { let mut cursor = Cursor::new(&WAV_HEADER[..]); let header = WavHeader::read_from_file(&mut cursor).unwrap(); assert_eq!(header.bit_depth, 
16); assert_eq!(header.data_size, 4); assert_eq!(header.num_channels, 2); } #[test] fn test_wav_reader() { let cursor = Cursor::new(&WAV_HEADER[..]); let mut reader = WavReader::new(cursor).unwrap(); assert_eq!(reader.num_inputs(), 0); assert_eq!(reader.num_outputs(), 2); assert_eq!(reader.get_num_samples(), 1); assert_eq!(reader.is_done(), false); let mut output = [0.0, 0.0]; reader.tick(0, &[], &mut output); assert_eq!(reader.is_done(), true); assert_eq!(output, [-1.0, 0.9999695]); reader.tick(1, &[], &mut output); assert_eq!(reader.is_done(), true); assert_eq!(output, [0.0, 0.0]); reader.restart().unwrap(); assert_eq!(reader.is_done(), false); reader.tick(0, &[], &mut output); assert_eq!(reader.is_done(), true); assert_eq!(output, [-1.0, 0.9999695]); } #[test] fn test_wav_writer() { let mut buffer = [0u8; 48]; { // Scope the cursor so its borrow on the buffer ends. // Scope the writer so it gets dropped and the file size written. let cursor = Cursor::new(&mut buffer[..]); let mut writer = WavWriter::new(cursor, 2).unwrap(); assert_eq!(writer.num_inputs(), 2); assert_eq!(writer.num_outputs(), 0); writer.tick(0, &[-1.0, 0.9999695], &mut[]); } assert_eq!(&buffer[..], &WAV_HEADER[..]); } }<|fim▁end|>
.and_then(|()| w.write_u16::<LittleEndian>(self.bit_depth))
<|file_name|>eztext.py<|end_file_name|><|fim▁begin|># input lib from pygame.locals import * import pygame, string class ConfigError(KeyError): pass class Config: """ A utility for configuration """ def __init__(self, options, *look_for): assertions = [] for key in look_for: if key[0] in options.keys(): exec('self.'+key[0]+' = options[\''+key[0]+'\']') else: exec('self.'+key[0]+' = '+key[1]) assertions.append(key[0]) for key in options.keys(): if key not in assertions: raise ConfigError(key+' not expected as option') class Input: """ A text input for pygame apps """ def __init__(self, **options): """ Options: x, y, font, color, restricted, maxlength, prompt """ self.options = Config(options, ['x', '0'], ['y', '0'], ['font', 'pygame.font.Font(None, 32)'], ['color', '(0,0,0)'], ['restricted', '\'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!"#$%&\\\'()*+,-./:;<=>?@[\]^_`{|}~\''], ['maxlength', '-1'], ['prompt', '\'\'']) self.x = self.options.x; self.y = self.options.y self.font = self.options.font self.color = self.options.color self.restricted = self.options.restricted self.maxlength = self.options.maxlength self.prompt = self.options.prompt; self.value = '' self.shifted = False def set_pos(self, x, y): """ Set the position to x, y """ self.x = x self.y = y def set_font(self, font): """ Set the font for the input """ self.font = font def draw(self, surface): """ Draw the text input to a surface """ text = self.font.render(self.prompt+self.value, 1, self.color) surface.blit(text, (self.x, self.y)) def getText(self): return self.value def hasTyped(self): if self.value =="": return False else: return True def update(self, events): """ Update the input based on passed events """ for event in events: if event.type == KEYUP: if event.key == K_LSHIFT or event.key == K_RSHIFT: self.shifted = False if event.type == KEYDOWN: if event.key == K_BACKSPACE: self.value = self.value[:-1] elif event.key == K_LSHIFT or event.key == K_RSHIFT: self.shifted = True elif 
event.key == K_SPACE: self.value += ' ' if not self.shifted: if event.key == K_a and 'a' in self.restricted: self.value += 'a' elif event.key == K_b and 'b' in self.restricted: self.value += 'b' elif event.key == K_c and 'c' in self.restricted: self.value += 'c' elif event.key == K_d and 'd' in self.restricted: self.value += 'd' elif event.key == K_e and 'e' in self.restricted: self.value += 'e' elif event.key == K_f and 'f' in self.restricted: self.value += 'f' elif event.key == K_g and 'g' in self.restricted: self.value += 'g' elif event.key == K_h and 'h' in self.restricted: self.value += 'h' elif event.key == K_i and 'i' in self.restricted: self.value += 'i' elif event.key == K_j and 'j' in self.restricted: self.value += 'j' elif event.key == K_k and 'k' in self.restricted: self.value += 'k' elif event.key == K_l and 'l' in self.restricted: self.value += 'l' elif event.key == K_m and 'm' in self.restricted: self.value += 'm' elif event.key == K_n and 'n' in self.restricted: self.value += 'n' elif event.key == K_o and 'o' in self.restricted: self.value += 'o' elif event.key == K_p and 'p' in self.restricted: self.value += 'p' elif event.key == K_q and 'q' in self.restricted: self.value += 'q' elif event.key == K_r and 'r' in self.restricted: self.value += 'r' elif event.key == K_s and 's' in self.restricted: self.value += 's' elif event.key == K_t and 't' in self.restricted: self.value += 't' elif event.key == K_u and 'u' in self.restricted: self.value += 'u' elif event.key == K_v and 'v' in self.restricted: self.value += 'v' elif event.key == K_w and 'w' in self.restricted: self.value += 'w' elif event.key == K_x and 'x' in self.restricted: self.value += 'x' elif event.key == K_y and 'y' in self.restricted: self.value += 'y' elif event.key == K_z and 'z' in self.restricted: self.value += 'z' elif event.key == K_0 and '0' in self.restricted: self.value += '0' elif event.key == K_1 and '1' in self.restricted: self.value += '1' elif event.key == K_2 and '2' in 
self.restricted: self.value += '2' elif event.key == K_3 and '3' in self.restricted: self.value += '3' elif event.key == K_4 and '4' in self.restricted: self.value += '4' elif event.key == K_5 and '5' in self.restricted: self.value += '5' elif event.key == K_6 and '6' in self.restricted: self.value += '6' elif event.key == K_7 and '7' in self.restricted: self.value += '7' elif event.key == K_8 and '8' in self.restricted: self.value += '8' elif event.key == K_9 and '9' in self.restricted: self.value += '9' elif event.key == K_BACKQUOTE and '`' in self.restricted: self.value += '`' elif event.key == K_MINUS and '-' in self.restricted: self.value += '-' elif event.key == K_EQUALS and '=' in self.restricted: self.value += '=' elif event.key == K_LEFTBRACKET and '[' in self.restricted: self.value += '[' elif event.key == K_RIGHTBRACKET and ']' in self.restricted: self.value += ']' elif event.key == K_BACKSLASH and '\\' in self.restricted: self.value += '\\' elif event.key == K_SEMICOLON and ';' in self.restricted: self.value += ';' elif event.key == K_QUOTE and '\'' in self.restricted: self.value += '\'' elif event.key == K_COMMA and ',' in self.restricted: self.value += ',' elif event.key == K_PERIOD and '.' in self.restricted: self.value += '.' 
elif event.key == K_SLASH and '/' in self.restricted: self.value += '/' elif self.shifted: if event.key == K_a and 'A' in self.restricted: self.value += 'A' <|fim▁hole|> elif event.key == K_c and 'C' in self.restricted: self.value += 'C' elif event.key == K_d and 'D' in self.restricted: self.value += 'D' elif event.key == K_e and 'E' in self.restricted: self.value += 'E' elif event.key == K_f and 'F' in self.restricted: self.value += 'F' elif event.key == K_g and 'G' in self.restricted: self.value += 'G' elif event.key == K_h and 'H' in self.restricted: self.value += 'H' elif event.key == K_i and 'I' in self.restricted: self.value += 'I' elif event.key == K_j and 'J' in self.restricted: self.value += 'J' elif event.key == K_k and 'K' in self.restricted: self.value += 'K' elif event.key == K_l and 'L' in self.restricted: self.value += 'L' elif event.key == K_m and 'M' in self.restricted: self.value += 'M' elif event.key == K_n and 'N' in self.restricted: self.value += 'N' elif event.key == K_o and 'O' in self.restricted: self.value += 'O' elif event.key == K_p and 'P' in self.restricted: self.value += 'P' elif event.key == K_q and 'Q' in self.restricted: self.value += 'Q' elif event.key == K_r and 'R' in self.restricted: self.value += 'R' elif event.key == K_s and 'S' in self.restricted: self.value += 'S' elif event.key == K_t and 'T' in self.restricted: self.value += 'T' elif event.key == K_u and 'U' in self.restricted: self.value += 'U' elif event.key == K_v and 'V' in self.restricted: self.value += 'V' elif event.key == K_w and 'W' in self.restricted: self.value += 'W' elif event.key == K_x and 'X' in self.restricted: self.value += 'X' elif event.key == K_y and 'Y' in self.restricted: self.value += 'Y' elif event.key == K_z and 'Z' in self.restricted: self.value += 'Z' elif event.key == K_0 and ')' in self.restricted: self.value += ')' elif event.key == K_1 and '!' in self.restricted: self.value += '!' 
elif event.key == K_2 and '@' in self.restricted: self.value += '@' elif event.key == K_3 and '#' in self.restricted: self.value += '#' elif event.key == K_4 and '$' in self.restricted: self.value += '$' elif event.key == K_5 and '%' in self.restricted: self.value += '%' elif event.key == K_6 and '^' in self.restricted: self.value += '^' elif event.key == K_7 and '&' in self.restricted: self.value += '&' elif event.key == K_8 and '*' in self.restricted: self.value += '*' elif event.key == K_9 and '(' in self.restricted: self.value += '(' elif event.key == K_BACKQUOTE and '~' in self.restricted: self.value += '~' elif event.key == K_MINUS and '_' in self.restricted: self.value += '_' elif event.key == K_EQUALS and '+' in self.restricted: self.value += '+' elif event.key == K_LEFTBRACKET and '{' in self.restricted: self.value += '{' elif event.key == K_RIGHTBRACKET and '}' in self.restricted: self.value += '}' elif event.key == K_BACKSLASH and '|' in self.restricted: self.value += '|' elif event.key == K_SEMICOLON and ':' in self.restricted: self.value += ':' elif event.key == K_QUOTE and '"' in self.restricted: self.value += '"' elif event.key == K_COMMA and '<' in self.restricted: self.value += '<' elif event.key == K_PERIOD and '>' in self.restricted: self.value += '>' elif event.key == K_SLASH and '?' in self.restricted: self.value += '?' if len(self.value) > self.maxlength and self.maxlength >= 0: self.value = self.value[:-1]<|fim▁end|>
elif event.key == K_b and 'B' in self.restricted: self.value += 'B'
<|file_name|>models.py<|end_file_name|><|fim▁begin|>import datetime from django.db import models, IntegrityError from django.contrib.auth.models import User from django.contrib.sites.models import Site from django.core.urlresolvers import reverse as django_reverse from django.utils.http import urlquote from django.conf import settings from tower import ugettext as _, ugettext_lazy as _lazy from mptt.models import MPTTModel from spark.urlresolvers import reverse, absolute_url from spark.helpers import urlparams from spark.models import City from sharing import utils as sharing_utils from sharing.messages import TWITTER_BADGE_MSG, FACEBOOK_BADGE_MSG from challenges.models import Challenge from challenges import utils class Profile(models.Model): user = models.OneToOneField(User, primary_key=True) # Game progress level = models.PositiveIntegerField(default=1) challenges = models.ManyToManyField(Challenge, through='CompletedChallenge') new_challenges = models.BooleanField(default=False) # Boost 1/2 boost1_completed = models.BooleanField(default=False) latitude = models.FloatField(blank=True, null=True) longitude = models.FloatField(blank=True, null=True) major_city = models.ForeignKey(City, blank=True, null=True) city_name = models.CharField(max_length=255, blank=True, null=True) country_code = models.CharField(max_length=2, blank=True, null=True) us_state = models.CharField(max_length=2, blank=True, null=True) # Boost 2/2 boost2_completed = models.BooleanField(default=False) no_parent = models.BooleanField(default=True) parent_username = models.CharField(max_length=30, blank=True, null=True) date_boost2_localtime = models.DateTimeField(blank=True, null=True) # Flags login_desktop = models.BooleanField(default=False) login_mobile = models.BooleanField(default=False) is_non_android = models.BooleanField(default=False) # Personal stats longest_chain = models.PositiveIntegerField(default=0) def __unicode__(self): return unicode(self.user) def get_absolute_url(self): 
return reverse('desktop.user', args=[self.user.username]) @property def generic_sharing_url(self): url = urlparams(django_reverse('desktop.user', args=[self.user.username])) return absolute_url(url) def _social_sharing_url(self, service): # django_reverse used instead of reverse because we don't want a locale preprended to sharing links. url = urlparams(django_reverse('desktop.user', args=[self.user.username]), f=service) return absolute_url(url) @property def twitter_sharing_url(self): return self._social_sharing_url('t') @property def facebook_sharing_url(self): return self._social_sharing_url('fb') @property<|fim▁hole|> @property def badges(self): """Returns a list of dicts used for badge list rendering. They represent all badges earned by the user in the Spark game. """ badges = [] completed_challenges = CompletedChallenge.objects.filter(profile=self, date_badge_earned__isnull=False) for cc in completed_challenges: badge_id = utils.get_challenge_id(cc.challenge.level, cc.challenge.number) badge_description = cc.challenge.badge_description badges.append({ 'id': badge_id, 'name': cc.challenge.badge_name, 'description': badge_description, 'date_earned': cc.date_badge_earned, 'new': cc.new_badge, 'twitter_msg': urlquote(unicode(TWITTER_BADGE_MSG % {'badge_name':cc.challenge.badge_name, 'short_url':''})), 'facebook_msg': urlquote(unicode(FACEBOOK_BADGE_MSG % {'badge_name':cc.challenge.badge_name})), 'facebook_img': absolute_url(settings.MEDIA_URL+'img/badges/fb/'+badge_id.replace('_','-')+'.png'), 'facebook_desc': urlquote(badge_description) }) return badges def has_badge(self, badge_id): """Returns whether this user has earned the given badge.""" if badge_id: return CompletedChallenge.objects.filter(profile=self, challenge__pk=badge_id, date_badge_earned__isnull=False).count() == 1 else: return False @property def total_badges_earned(self): """Returns the total number of badges earned by the user. Doesn't include hidden unlocked badges from an upper level. 
""" return CompletedChallenge.objects.filter(profile=self, date_badge_earned__isnull=False).count() def get_home_location(self, locale): """Returns a string containing the location determined by Google Location Services when Boost your Spark 1/2 was completed by the user. """ from geo.countries import countries if self.country_code and locale in countries: country = countries[locale][self.country_code.lower()] return '%s, %s' % (self.city_name, country) else: return '' @property def spark_started_with(self): if self.parent_username is not None: return self.parent_username return '' @property def most_recent_share(self): """Most recent share stat displayed on desktop dashboard/user pages.""" from stats.models import SharingHistory share = SharingHistory.objects.filter(parent=self)[:1] if share: return share[0].date_shared else: return None @property def shares_over_time(self): """Aggregate data of Spark shares since the start of the campaign. Used by the 'shares over time' diagram in the user dashboard. """ from stats.models import SharingHistory return SharingHistory.get_shares_over_time(self) @property def sparked_countries(self): """List of countries this user has shared their Spark with.""" from .utils import user_node countries = set() node = user_node(self.user) for child in node.get_children(): cc = child.user.profile.country_code if cc: countries.add(cc.lower()) return list(countries) @property def total_shares(self): """Total shares stat displayed on desktop dashboard/user pages.""" from stats.models import SharingHistory return SharingHistory.objects.filter(parent=self).count() @property def challenge_info(self): """Returns a list of dicts containing level/challenge completion information. Used to render both desktop and mobile collapsing challenge lists. 
""" return utils.get_profile_levels(self) @property def new_challenge_count(self): """Returns the number of newly available challenges in the user's current level.""" if self.new_challenges: challenge_count = utils.CHALLENGE_COUNT_PER_LVL[self.level-1] completed_challenge_count = len(CompletedChallenge.objects.filter(profile=self, challenge__level=self.level)) return challenge_count - completed_challenge_count else: return 0 @property def new_badge_count(self): """Returns the number of recently earned badges.""" return len([b for b in self.badges if b['new']]) @property def qr_code_download(self): """Returns the URL of a QR code which, when scanned, points to: https://[domain]/download?f=qr&user=[username] """ url = absolute_url(urlparams(django_reverse('sharing.download'), user=self.user.username)) return sharing_utils.url2qr(url) @property def continent_code(self): from geo.continents import countries_continents code = '' if self.country_code: code = countries_continents[self.country_code] return code @property def total_countries_sparked(self): """Returns the total number of countries where the user's children are located.""" return len(self.sparked_countries) @property def total_continents_sparked(self): """Returns the total number of continents where the user's children are located.""" from geo.continents import countries_continents from .utils import user_node continents = set() node = user_node(self.user) for child in node.get_children(): cc = child.user.profile.country_code if cc: continents.add(countries_continents[cc]) return len(continents) @property def children_profiles(self): """Returns a list of profiles of the user's children in the user tree.""" from .utils import user_node return [child.user.profile for child in user_node(self.user).get_children()] def clear_new_badges(self): """Clears notifications of recently earned badges.""" CompletedChallenge.objects.filter(profile=self, new_badge=True).update(new_badge=False) def clear_new_challenges(self): 
"""Clears notifications of new available challenges.""" self.new_challenges = False self.save() def complete_challenges(self, challenges): """Helper method to easily save the completion of given challenges for this user.""" from stats.models import GlobalStats error = False if challenges: for challenge in challenges: try: # If the completed challenge is from an upper level and not an easter egg, we keep the badge hidden. # This is done by setting the date_badge_earned to NULL. date = None if self.level < challenge.level and not challenge.easter_egg else datetime.datetime.now() CompletedChallenge.objects.create(profile=self, challenge=challenge, date_badge_earned=date, # Don't set new_badge to True if the badge is hidden. new_badge=date is not None) GlobalStats.increment_total_badges() except IntegrityError: # Challenge was already completed by another concurrent 'update_completed_challenges' task. # In this case, fail silently. pass def trigger_multisparker_badge(self): from challenges.tasks import update_completed_challenges if self.login_desktop and self.login_mobile: update_completed_challenges(self.user.id) def update_ancestors_longest_chain(self): """Updates 'longest chain' stat of all ancestors of this user when relevant. Used after Boost step 2 confirmation so that all users involved have their longest chain stat updated. """ from .utils import user_node ancestors = user_node(self.user).get_ancestors() chain_length = len(ancestors) for profile in (ancestor.user.profile for ancestor in ancestors): if profile.longest_chain < chain_length: profile.longest_chain = chain_length profile.save() chain_length -= 1 def add_city_shares_for_children(self): """Creates city shares in the CitySharingHistory for the global visualization. This is useful when a user already has children when he completes boost 1 (geolocation). As soon as it's completed, city shares are created for all geolocated children. 
""" from stats.models import CitySharingHistory for child in self.children_profiles: if child.boost1_completed: CitySharingHistory.add_share_from_profiles(self, child) # Retrieves or creates a Profile automatically whenever the profile property is accessed User.profile = property(lambda u: Profile.objects.get_or_create(user=u)[0]) class CompletedChallenge(models.Model): """Mapping table for challenge completion and badge awarding.""" challenge = models.ForeignKey(Challenge) profile = models.ForeignKey(Profile, db_index=True) date_completed = models.DateTimeField(auto_now_add=True) date_badge_earned = models.DateTimeField(blank=True, null=True) new_badge = models.BooleanField(default=False) class Meta: unique_together = ('challenge', 'profile') def __unicode__(self): return "%s <-> %s" % (self.profile, self.challenge) class UserNode(MPTTModel): """ Represents a user in the Spark sharing hierarchy. This model is mainly used for storing chains of shares as user trees. """ user = models.OneToOneField(User, related_name='node', db_index=True) parent = models.ForeignKey('self', default=None, blank=True, null=True, related_name='children') class Meta: db_table='users_tree' class MPTTMeta: pass def __unicode__(self): return unicode(self.user)<|fim▁end|>
def poster_sharing_url(self): return self._social_sharing_url('p')
<|file_name|>generator.py<|end_file_name|><|fim▁begin|># Copyright 2012 SINA Corporation # Copyright 2014 Cisco Systems, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # """Extracts OpenStack config option info from module(s).""" from __future__ import print_function import argparse import imp import os import re import socket import sys import textwrap from oslo.config import cfg import six import stevedore.named from climate.openstack.common import gettextutils from climate.openstack.common import importutils gettextutils.install('climate') STROPT = "StrOpt" BOOLOPT = "BoolOpt" INTOPT = "IntOpt" FLOATOPT = "FloatOpt" LISTOPT = "ListOpt" DICTOPT = "DictOpt" MULTISTROPT = "MultiStrOpt" OPT_TYPES = { STROPT: 'string value', BOOLOPT: 'boolean value', INTOPT: 'integer value', FLOATOPT: 'floating point value', LISTOPT: 'list value', DICTOPT: 'dict value', MULTISTROPT: 'multi valued', } OPTION_REGEX = re.compile(r"(%s)" % "|".join([STROPT, BOOLOPT, INTOPT, FLOATOPT, LISTOPT, DICTOPT, MULTISTROPT])) PY_EXT = ".py" BASEDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../../../")) WORDWRAP_WIDTH = 60 def generate(argv): parser = argparse.ArgumentParser( description='generate sample configuration file', ) parser.add_argument('-m', dest='modules', action='append') parser.add_argument('-l', dest='libraries', action='append') parser.add_argument('srcfiles', nargs='*') parsed_args = parser.parse_args(argv) mods_by_pkg = dict() for filepath in 
parsed_args.srcfiles: pkg_name = filepath.split(os.sep)[1] mod_str = '.'.join(['.'.join(filepath.split(os.sep)[:-1]), os.path.basename(filepath).split('.')[0]]) mods_by_pkg.setdefault(pkg_name, list()).append(mod_str) # NOTE(lzyeval): place top level modules before packages pkg_names = sorted(pkg for pkg in mods_by_pkg if pkg.endswith(PY_EXT)) ext_names = sorted(pkg for pkg in mods_by_pkg if pkg not in pkg_names) pkg_names.extend(ext_names) # opts_by_group is a mapping of group name to an options list # The options list is a list of (module, options) tuples opts_by_group = {'DEFAULT': []} if parsed_args.modules: for module_name in parsed_args.modules: module = _import_module(module_name) if module: for group, opts in _list_opts(module): opts_by_group.setdefault(group, []).append((module_name, opts)) # Look for entry points defined in libraries (or applications) for # option discovery, and include their return values in the output. # # Each entry point should be a function returning an iterable # of pairs with the group name (or None for the default group) # and the list of Opt instances for that group. 
if parsed_args.libraries: loader = stevedore.named.NamedExtensionManager( 'oslo.config.opts', names=list(set(parsed_args.libraries)), invoke_on_load=False, ) for ext in loader: for group, opts in ext.plugin(): opt_list = opts_by_group.setdefault(group or 'DEFAULT', []) opt_list.append((ext.name, opts)) for pkg_name in pkg_names: mods = mods_by_pkg.get(pkg_name) mods.sort() for mod_str in mods: if mod_str.endswith('.__init__'): mod_str = mod_str[:mod_str.rfind(".")] mod_obj = _import_module(mod_str) if not mod_obj: raise RuntimeError("Unable to import module %s" % mod_str) for group, opts in _list_opts(mod_obj): opts_by_group.setdefault(group, []).append((mod_str, opts)) print_group_opts('DEFAULT', opts_by_group.pop('DEFAULT', [])) for group in sorted(opts_by_group.keys()): print_group_opts(group, opts_by_group[group]) def _import_module(mod_str): try: if mod_str.startswith('bin.'): imp.load_source(mod_str[4:], os.path.join('bin', mod_str[4:])) return sys.modules[mod_str[4:]] else: return importutils.import_module(mod_str) except Exception as e: sys.stderr.write("Error importing module %s: %s\n" % (mod_str, str(e))) return None def _is_in_group(opt, group): "Check if opt is in group." for value in group._opts.values(): # NOTE(llu): Temporary workaround for bug #1262148, wait until # newly released oslo.config support '==' operator. if not(value['opt'] != opt): return True return False def _guess_groups(opt, mod_obj): # is it in the DEFAULT group? if _is_in_group(opt, cfg.CONF): return 'DEFAULT' # what other groups is it in? for value in cfg.CONF.values(): if isinstance(value, cfg.CONF.GroupAttr): if _is_in_group(opt, value._group): return value._group.name raise RuntimeError( "Unable to find group for option %s, " "maybe it's defined twice in the same group?" 
% opt.name ) def _list_opts(obj): def is_opt(o): return (isinstance(o, cfg.Opt) and not isinstance(o, cfg.SubCommandOpt)) opts = list() for attr_str in dir(obj): attr_obj = getattr(obj, attr_str) if is_opt(attr_obj): opts.append(attr_obj) elif (isinstance(attr_obj, list) and all(map(lambda x: is_opt(x), attr_obj))): opts.extend(attr_obj) ret = {} for opt in opts: ret.setdefault(_guess_groups(opt, obj), []).append(opt) return ret.items() def print_group_opts(group, opts_by_module): print("[%s]" % group) print('') for mod, opts in opts_by_module: print('#') print('# Options defined in %s' % mod) print('#') print('') for opt in opts: _print_opt(opt) print('') def _get_my_ip(): try: csock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) csock.connect(('8.8.8.8', 80)) (addr, port) = csock.getsockname() csock.close() return addr except socket.error: return None def _sanitize_default(name, value): """Set up a reasonably sensible default for pybasedir, my_ip and host.""" if value.startswith(sys.prefix): # NOTE(jd) Don't use os.path.join, because it is likely to think the # second part is an absolute pathname and therefore drop the first # part. 
value = os.path.normpath("/usr/" + value[len(sys.prefix):]) elif value.startswith(BASEDIR): return value.replace(BASEDIR, '/usr/lib/python/site-packages') elif BASEDIR in value: return value.replace(BASEDIR, '') elif value == _get_my_ip(): return '10.0.0.1' elif value in (socket.gethostname(), socket.getfqdn()) and 'host' in name: return 'climate' elif value.strip() != value: return '"%s"' % value return value def _print_opt(opt): opt_name, opt_default, opt_help = opt.dest, opt.default, opt.help if not opt_help: sys.stderr.write('WARNING: "%s" is missing help string.\n' % opt_name) opt_help = ""<|fim▁hole|> sys.stderr.write("%s\n" % str(err)) sys.exit(1) opt_help = u'%s (%s)' % (opt_help, OPT_TYPES[opt_type]) print('#', "\n# ".join(textwrap.wrap(opt_help, WORDWRAP_WIDTH))) if opt.deprecated_opts: for deprecated_opt in opt.deprecated_opts: if deprecated_opt.name: deprecated_group = (deprecated_opt.group if deprecated_opt.group else "DEFAULT") print('# Deprecated group/name - [%s]/%s' % (deprecated_group, deprecated_opt.name)) try: if opt_default is None: print('#%s=<None>' % opt_name) elif opt_type == STROPT: assert(isinstance(opt_default, six.string_types)) print('#%s=%s' % (opt_name, _sanitize_default(opt_name, opt_default))) elif opt_type == BOOLOPT: assert(isinstance(opt_default, bool)) print('#%s=%s' % (opt_name, str(opt_default).lower())) elif opt_type == INTOPT: assert(isinstance(opt_default, int) and not isinstance(opt_default, bool)) print('#%s=%s' % (opt_name, opt_default)) elif opt_type == FLOATOPT: assert(isinstance(opt_default, float)) print('#%s=%s' % (opt_name, opt_default)) elif opt_type == LISTOPT: assert(isinstance(opt_default, list)) print('#%s=%s' % (opt_name, ','.join(opt_default))) elif opt_type == DICTOPT: assert(isinstance(opt_default, dict)) opt_default_strlist = [str(key) + ':' + str(value) for (key, value) in opt_default.items()] print('#%s=%s' % (opt_name, ','.join(opt_default_strlist))) elif opt_type == MULTISTROPT: 
assert(isinstance(opt_default, list)) if not opt_default: opt_default = [''] for default in opt_default: print('#%s=%s' % (opt_name, default)) print('') except Exception: sys.stderr.write('Error in option "%s"\n' % opt_name) sys.exit(1) def main(): generate(sys.argv[1:]) if __name__ == '__main__': main()<|fim▁end|>
opt_type = None try: opt_type = OPTION_REGEX.search(str(type(opt))).group(0) except (ValueError, AttributeError) as err:
<|file_name|>SubscriptionLink.java<|end_file_name|><|fim▁begin|>/* * Copyright 2014 - 2016 Real Logic Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.aeron.driver; import io.aeron.driver.media.ReceiveChannelEndpoint; import io.aeron.driver.media.UdpChannel; import org.agrona.concurrent.status.ReadablePosition; import java.util.IdentityHashMap; import java.util.Map; /** * Subscription registration from a client used for liveness tracking */ public class SubscriptionLink implements DriverManagedResource { private final long registrationId; private final long clientLivenessTimeoutNs; private final int streamId; private final boolean isReliable; private final String channelUri; private final ReceiveChannelEndpoint channelEndpoint; private final AeronClient aeronClient; private final Map<PublicationImage, ReadablePosition> positionByImageMap = new IdentityHashMap<>(); private final IpcPublication ipcPublication; private final ReadablePosition ipcPublicationSubscriberPosition; private final UdpChannel spiedChannel; private NetworkPublication spiedPublication = null; private ReadablePosition spiedPosition = null; private boolean reachedEndOfLife = false; public SubscriptionLink( final long registrationId, final ReceiveChannelEndpoint channelEndpoint, final int streamId, final String channelUri, final AeronClient aeronClient, final long clientLivenessTimeoutNs, final boolean isReliable) { this.registrationId = registrationId; this.channelEndpoint = channelEndpoint; 
this.streamId = streamId; this.channelUri = channelUri; this.aeronClient = aeronClient; this.ipcPublication = null; this.ipcPublicationSubscriberPosition = null; this.spiedChannel = null; this.clientLivenessTimeoutNs = clientLivenessTimeoutNs; this.isReliable = isReliable; } public SubscriptionLink( final long registrationId, final int streamId, final String channelUri, final IpcPublication ipcPublication, final ReadablePosition subscriberPosition, final AeronClient aeronClient, final long clientLivenessTimeoutNs) { this.registrationId = registrationId; this.channelEndpoint = null; // will prevent matches between PublicationImages and IpcPublications this.streamId = streamId; this.channelUri = channelUri; this.aeronClient = aeronClient; this.ipcPublication = ipcPublication; ipcPublication.incRef(); this.ipcPublicationSubscriberPosition = subscriberPosition; this.spiedChannel = null; this.clientLivenessTimeoutNs = clientLivenessTimeoutNs; this.isReliable = true; } public SubscriptionLink( final long registrationId, final UdpChannel spiedChannel, final int streamId, final String channelUri, final AeronClient aeronClient, final long clientLivenessTimeoutNs) { this.registrationId = registrationId; this.channelEndpoint = null; this.streamId = streamId; this.channelUri = channelUri; this.aeronClient = aeronClient; this.ipcPublication = null; this.ipcPublicationSubscriberPosition = null; this.spiedChannel = spiedChannel; this.clientLivenessTimeoutNs = clientLivenessTimeoutNs; this.isReliable = true; } public long registrationId() { return registrationId; } public ReceiveChannelEndpoint channelEndpoint() { return channelEndpoint; } public int streamId() { return streamId; } public String channelUri() { return channelUri; } public boolean isReliable() { return isReliable; } public boolean matches(final ReceiveChannelEndpoint channelEndpoint, final int streamId) { return channelEndpoint == this.channelEndpoint && streamId == this.streamId; } public boolean matches(final 
NetworkPublication publication) { boolean result = false; if (null != spiedChannel) { result = streamId == publication.streamId() && publication.sendChannelEndpoint().udpChannel().canonicalForm().equals(spiedChannel.canonicalForm()); } return result; } public void addImage(final PublicationImage image, final ReadablePosition position) { positionByImageMap.put(image, position); } public void removeImage(final PublicationImage image) { positionByImageMap.remove(image); } public void addSpiedPublication(final NetworkPublication publication, final ReadablePosition position) { spiedPublication = publication; spiedPosition = position; } public void removeSpiedPublication() { spiedPublication = null; spiedPosition = null; }<|fim▁hole|> public void close() { positionByImageMap.forEach(PublicationImage::removeSubscriber); if (null != ipcPublication) { ipcPublication.removeSubscription(ipcPublicationSubscriberPosition); ipcPublication.decRef(); } else if (null != spiedPublication) { spiedPublication.removeSpyPosition(spiedPosition); } } public void onTimeEvent(final long time, final DriverConductor conductor) { if (time > (aeronClient.timeOfLastKeepalive() + clientLivenessTimeoutNs)) { reachedEndOfLife = true; conductor.cleanupSubscriptionLink(SubscriptionLink.this); } } public boolean hasReachedEndOfLife() { return reachedEndOfLife; } public void timeOfLastStateChange(final long time) { // not set this way } public long timeOfLastStateChange() { return aeronClient.timeOfLastKeepalive(); } public void delete() { close(); } }<|fim▁end|>
<|file_name|>data_set.py<|end_file_name|><|fim▁begin|># Copyright (C) University of Tennessee Health Science Center, Memphis, TN. # # This program is free software: you can redistribute it and/or modify it # under the terms of the GNU Affero General Public License # as published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # See the GNU Affero General Public License for more details. # # This program is available from Source Forge: at GeneNetwork Project # (sourceforge.net/projects/genenetwork/). # # Contact Drs. Robert W. Williams and Xiaodong Zhou (2010) # at [email protected] and [email protected] # # This module is used by GeneNetwork project (www.genenetwork.org) from __future__ import absolute_import, print_function, division import os import math import string import collections import codecs import json import gzip import cPickle as pickle import itertools from operator import itemgetter from redis import Redis Redis = Redis() from flask import Flask, g import reaper from base import webqtlConfig from base import species from dbFunction import webqtlDatabaseFunction from utility import webqtlUtil from utility.benchmark import Bench from utility import chunks from utility.tools import locate, locate_ignore_error from maintenance import get_group_samplelists from MySQLdb import escape_string as escape from pprint import pformat as pf # Used by create_database to instantiate objects # Each subclass will add to this DS_NAME_MAP = {} def create_dataset(dataset_name, dataset_type = None): if not dataset_type: dataset_type = Dataset_Getter(dataset_name) print("dataset_type is:", dataset_type) dataset_ob = DS_NAME_MAP[dataset_type] dataset_class = globals()[dataset_ob] return dataset_class(dataset_name) class 
Dataset_Types(object): def __init__(self): self.datasets = {} file_name = "wqflask/static/new/javascript/dataset_menu_structure.json" with open(file_name, 'r') as fh: data = json.load(fh) print("*" * 70) for species in data['datasets']: for group in data['datasets'][species]: for dataset_type in data['datasets'][species][group]: for dataset in data['datasets'][species][group][dataset_type]: short_dataset_name = dataset[1] if dataset_type == "Phenotypes": new_type = "Publish" elif dataset_type == "Genotypes": new_type = "Geno" else: new_type = "ProbeSet" self.datasets[short_dataset_name] = new_type def __call__(self, name): return self.datasets[name] # Do the intensive work at startup one time only Dataset_Getter = Dataset_Types() def create_datasets_list(): key = "all_datasets" result = Redis.get(key) if result: print("Cache hit!!!") datasets = pickle.loads(result) else: datasets = list() with Bench("Creating DataSets object"): type_dict = {'Publish': 'PublishFreeze', 'ProbeSet': 'ProbeSetFreeze', 'Geno': 'GenoFreeze'} for dataset_type in type_dict: query = "SELECT Name FROM {}".format(type_dict[dataset_type]) for result in g.db.execute(query).fetchall(): #The query at the beginning of this function isn't necessary here, but still would #rather just reuse it #print("type: {}\tname: {}".format(dataset_type, result.Name)) dataset = create_dataset(result.Name, dataset_type) datasets.append(dataset) Redis.set(key, pickle.dumps(datasets, pickle.HIGHEST_PROTOCOL)) Redis.expire(key, 60*60) return datasets def create_in_clause(items): """Create an in clause for mysql""" in_clause = ', '.join("'{}'".format(x) for x in mescape(*items)) in_clause = '( {} )'.format(in_clause) return in_clause def mescape(*items): """Multiple escape""" escaped = [escape(str(item)) for item in items] #print("escaped is:", escaped) return escaped class Markers(object): """Todo: Build in cacheing so it saves us reading the same file more than once""" def __init__(self, name): json_data_fh = 
open(locate(name + '.json','genotype/json')) try: markers = json.load(json_data_fh) except: markers = [] for marker in markers: if (marker['chr'] != "X") and (marker['chr'] != "Y"): marker['chr'] = int(marker['chr']) # print("Mb:", marker['Mb']) marker['Mb'] = float(marker['Mb']) self.markers = markers #print("self.markers:", self.markers) def add_pvalues(self, p_values): print("length of self.markers:", len(self.markers)) print("length of p_values:", len(p_values)) if type(p_values) is list: # THIS IS only needed for the case when we are limiting the number of p-values calculated #if len(self.markers) > len(p_values): # self.markers = self.markers[:len(p_values)] for marker, p_value in itertools.izip(self.markers, p_values): if not p_value: continue marker['p_value'] = float(p_value) if math.isnan(marker['p_value']) or marker['p_value'] <= 0: marker['lod_score'] = 0 marker['lrs_value'] = 0 else: marker['lod_score'] = -math.log10(marker['p_value']) #Using -log(p) for the LRS; need to ask Rob how he wants to get LRS from p-values marker['lrs_value'] = -math.log10(marker['p_value']) * 4.61 elif type(p_values) is dict: filtered_markers = [] for marker in self.markers: #print("marker[name]", marker['name']) #print("p_values:", p_values) if marker['name'] in p_values: #print("marker {} IS in p_values".format(i)) marker['p_value'] = p_values[marker['name']] if math.isnan(marker['p_value']) or (marker['p_value'] <= 0): marker['lod_score'] = 0 marker['lrs_value'] = 0 else: marker['lod_score'] = -math.log10(marker['p_value']) #Using -log(p) for the LRS; need to ask Rob how he wants to get LRS from p-values marker['lrs_value'] = -math.log10(marker['p_value']) * 4.61 filtered_markers.append(marker) #else: #print("marker {} NOT in p_values".format(i)) #self.markers.remove(marker) #del self.markers[i] self.markers = filtered_markers class HumanMarkers(Markers): def __init__(self, name, specified_markers = []): marker_data_fh = open(locate('genotype') + '/' + name + '.bim') 
self.markers = [] for line in marker_data_fh: splat = line.strip().split() #print("splat:", splat) if len(specified_markers) > 0: if splat[1] in specified_markers: marker = {} marker['chr'] = int(splat[0]) marker['name'] = splat[1] marker['Mb'] = float(splat[3]) / 1000000 else: continue else: marker = {} marker['chr'] = int(splat[0]) marker['name'] = splat[1] marker['Mb'] = float(splat[3]) / 1000000 self.markers.append(marker) #print("markers is: ", pf(self.markers)) def add_pvalues(self, p_values): super(HumanMarkers, self).add_pvalues(p_values) class DatasetGroup(object): """ Each group has multiple datasets; each species has multiple groups. For example, Mouse has multiple groups (BXD, BXA, etc), and each group has multiple datasets associated with it. """ def __init__(self, dataset): """This sets self.group and self.group_id""" print("DATASET NAME2:", dataset.name) self.name, self.id = g.db.execute(dataset.query_for_group).fetchone() if self.name == 'BXD300': self.name = "BXD" self.f1list = None self.parlist = None self.get_f1_parent_strains() #print("parents/f1s: {}:{}".format(self.parlist, self.f1list)) self.species = webqtlDatabaseFunction.retrieve_species(self.name) self.incparentsf1 = False self.allsamples = None self._datasets = None def get_specified_markers(self, markers = []): self.markers = HumanMarkers(self.name, markers) def get_markers(self): #print("self.species is:", self.species) if self.species == "human": marker_class = HumanMarkers else: marker_class = Markers self.markers = marker_class(self.name) def datasets(self): key = "group_dataset_menu:v2:" + self.name print("key is2:", key) dataset_menu = [] print("[tape4] webqtlConfig.PUBLICTHRESH:", webqtlConfig.PUBLICTHRESH) print("[tape4] type webqtlConfig.PUBLICTHRESH:", type(webqtlConfig.PUBLICTHRESH)) results = g.db.execute(''' (SELECT '#PublishFreeze',PublishFreeze.FullName,PublishFreeze.Name FROM PublishFreeze,InbredSet WHERE PublishFreeze.InbredSetId = InbredSet.Id and InbredSet.Name = %s 
and PublishFreeze.public > %s) UNION (SELECT '#GenoFreeze',GenoFreeze.FullName,GenoFreeze.Name FROM GenoFreeze, InbredSet WHERE GenoFreeze.InbredSetId = InbredSet.Id and InbredSet.Name = %s and GenoFreeze.public > %s) UNION (SELECT Tissue.Name, ProbeSetFreeze.FullName,ProbeSetFreeze.Name FROM ProbeSetFreeze, ProbeFreeze, InbredSet, Tissue WHERE ProbeSetFreeze.ProbeFreezeId = ProbeFreeze.Id and ProbeFreeze.TissueId = Tissue.Id and ProbeFreeze.InbredSetId = InbredSet.Id and InbredSet.Name like %s and ProbeSetFreeze.public > %s ORDER BY Tissue.Name, ProbeSetFreeze.CreateTime desc, ProbeSetFreeze.AvgId) ''', (self.name, webqtlConfig.PUBLICTHRESH, self.name, webqtlConfig.PUBLICTHRESH, "%" + self.name + "%", webqtlConfig.PUBLICTHRESH)) the_results = results.fetchall() #for tissue_name, dataset in itertools.groupby(the_results, itemgetter(0)): for dataset_item in the_results: tissue_name = dataset_item[0] dataset = dataset_item[1] dataset_short = dataset_item[2] if tissue_name in ['#PublishFreeze', '#GenoFreeze']: dataset_menu.append(dict(tissue=None, datasets=[(dataset, dataset_short)])) else: dataset_sub_menu = [item[1:] for item in dataset] tissue_already_exists = False tissue_position = None for i, tissue_dict in enumerate(dataset_menu): if tissue_dict['tissue'] == tissue_name: tissue_already_exists = True tissue_position = i break if tissue_already_exists: print("dataset_menu:", dataset_menu[i]['datasets']) dataset_menu[i]['datasets'].append((dataset, dataset_short)) else: dataset_menu.append(dict(tissue=tissue_name, datasets=[(dataset, dataset_short)])) Redis.set(key, pickle.dumps(dataset_menu, pickle.HIGHEST_PROTOCOL)) Redis.expire(key, 60*5) self._datasets = dataset_menu return self._datasets def get_f1_parent_strains(self): try: # NL, 07/27/2010. 
ParInfo has been moved from webqtlForm.py to webqtlUtil.py; f1, f12, maternal, paternal = webqtlUtil.ParInfo[self.name] except KeyError: f1 = f12 = maternal = paternal = None if f1 and f12: self.f1list = [f1, f12] if maternal and paternal: self.parlist = [maternal, paternal] def get_samplelist(self): key = "samplelist:v2:" + self.name print("key is:", key) with Bench("Loading cache"): result = Redis.get(key) if result: print("Sample List Cache hit!!!") print("Before unjsonifying {}: {}".format(type(result), result)) self.samplelist = json.loads(result) print(" type: ", type(self.samplelist)) print(" self.samplelist: ", self.samplelist) else: print("Cache not hit for", self.name) genotype_fn = locate_ignore_error(self.name+".geno",'genotype') mapping_fn = locate_ignore_error(self.name+".fam",'mapping') if mapping_fn: self.samplelist = get_group_samplelists.get_samplelist("plink", mapping_fn) elif genotype_fn: self.samplelist = get_group_samplelists.get_samplelist("geno", genotype_fn) else: self.samplelist = None print("Sample list: ",self.samplelist) Redis.set(key, json.dumps(self.samplelist)) Redis.expire(key, 60*5) def all_samples_ordered(self): result = [] lists = (self.parlist, self.f1list, self.samplelist) [result.extend(l) for l in lists if l] return result def read_genotype_file(self): '''Read genotype from .geno file instead of database''' #genotype_1 is Dataset Object without parents and f1 #genotype_2 is Dataset Object with parents and f1 (not for intercross) genotype_1 = reaper.Dataset() # reaper barfs on unicode filenames, so here we ensure it's a string full_filename = str(locate(self.name+'.geno','genotype')) genotype_1.read(full_filename) if genotype_1.type == "group" and self.parlist: genotype_2 = genotype_1.add(Mat=self.parlist[0], Pat=self.parlist[1]) #, F1=_f1) else: genotype_2 = genotype_1 #determine default genotype object if self.incparentsf1 and genotype_1.type != "intercross": genotype = genotype_2 else: self.incparentsf1 = 0 genotype = 
genotype_1 self.samplelist = list(genotype.prgy) return genotype class DataSet(object): """ DataSet class defines a dataset in webqtl, can be either Microarray, Published phenotype, genotype, or user input dataset(temp) """ def __init__(self, name): assert name, "Need a name" self.name = name self.id = None self.shortname = None self.fullname = None self.type = None self.setup() self.check_confidentiality() self.retrieve_other_names() self.group = DatasetGroup(self) # sets self.group and self.group_id and gets genotype self.group.get_samplelist() self.species = species.TheSpecies(self) def get_desc(self): """Gets overridden later, at least for Temp...used by trait's get_given_name""" return None # Delete this eventually @property def riset(): Weve_Renamed_This_As_Group def retrieve_other_names(self): """ If the data set name parameter is not found in the 'Name' field of the data set table, check if it is actually the FullName or ShortName instead. This is not meant to retrieve the data set info if no name at all is passed. 
""" try: if self.type == "ProbeSet": query_args = tuple(escape(x) for x in ( str(webqtlConfig.PUBLICTHRESH), self.name, self.name, self.name)) self.id, self.name, self.fullname, self.shortname, self.tissue = g.db.execute(""" SELECT ProbeSetFreeze.Id, ProbeSetFreeze.Name, ProbeSetFreeze.FullName, ProbeSetFreeze.ShortName, Tissue.Name FROM ProbeSetFreeze, ProbeFreeze, Tissue WHERE ProbeSetFreeze.public > %s AND ProbeSetFreeze.ProbeFreezeId = ProbeFreeze.Id AND ProbeFreeze.TissueId = Tissue.Id AND (ProbeSetFreeze.Name = '%s' OR ProbeSetFreeze.FullName = '%s' OR ProbeSetFreeze.ShortName = '%s') """ % (query_args)).fetchone() else: query_args = tuple(escape(x) for x in ( (self.type + "Freeze"), str(webqtlConfig.PUBLICTHRESH), self.name, self.name, self.name)) self.tissue = "N/A" self.id, self.name, self.fullname, self.shortname = g.db.execute(""" SELECT Id, Name, FullName, ShortName FROM %s WHERE public > %s AND (Name = '%s' OR FullName = '%s' OR ShortName = '%s') """ % (query_args)).fetchone() except TypeError: print("Dataset {} is not yet available in GeneNetwork.".format(self.name)) pass def get_trait_data(self, sample_list=None): if sample_list: self.samplelist = sample_list else: self.samplelist = self.group.samplelist if self.group.parlist != None and self.group.f1list != None: if (self.group.parlist + self.group.f1list) in self.samplelist: self.samplelist += self.group.parlist + self.group.f1list query = """ SELECT Strain.Name, Strain.Id FROM Strain, Species WHERE Strain.Name IN {} and Strain.SpeciesId=Species.Id and Species.name = '{}' """.format(create_in_clause(self.samplelist), *mescape(self.group.species)) results = dict(g.db.execute(query).fetchall()) sample_ids = [results[item] for item in self.samplelist] # MySQL limits the number of tables that can be used in a join to 61, # so we break the sample ids into smaller chunks # Postgres doesn't have that limit, so we can get rid of this after we transition chunk_size = 50 number_chunks = 
int(math.ceil(len(sample_ids) / chunk_size)) trait_sample_data = [] for sample_ids_step in chunks.divide_into_chunks(sample_ids, number_chunks): if self.type == "Publish": dataset_type = "Phenotype" else: dataset_type = self.type temp = ['T%s.value' % item for item in sample_ids_step] if self.type == "Publish": query = "SELECT {}XRef.Id,".format(escape(self.type)) else: query = "SELECT {}.Name,".format(escape(dataset_type)) data_start_pos = 1 query += string.join(temp, ', ') query += ' FROM ({}, {}XRef, {}Freeze) '.format(*mescape(dataset_type, self.type, self.type)) for item in sample_ids_step: query += """ left join {}Data as T{} on T{}.Id = {}XRef.DataId and T{}.StrainId={}\n """.format(*mescape(self.type, item, item, self.type, item, item)) if self.type == "Publish": query += """ WHERE {}XRef.InbredSetId = {}Freeze.InbredSetId and {}Freeze.Name = '{}' and {}.Id = {}XRef.{}Id order by {}.Id """.format(*mescape(self.type, self.type, self.type, self.name, dataset_type, self.type, dataset_type, dataset_type)) else: query += """ WHERE {}XRef.{}FreezeId = {}Freeze.Id and {}Freeze.Name = '{}' and {}.Id = {}XRef.{}Id order by {}.Id """.format(*mescape(self.type, self.type, self.type, self.type, self.name, dataset_type, self.type, self.type, dataset_type)) #print("trait data query: ", query) results = g.db.execute(query).fetchall() #print("query results:", results) trait_sample_data.append(results) trait_count = len(trait_sample_data[0]) self.trait_data = collections.defaultdict(list) # put all of the separate data together into a dictionary where the keys are # trait names and values are lists of sample values for trait_counter in range(trait_count): trait_name = trait_sample_data[0][trait_counter][0] for chunk_counter in range(int(number_chunks)): self.trait_data[trait_name] += ( trait_sample_data[chunk_counter][trait_counter][data_start_pos:]) class PhenotypeDataSet(DataSet): DS_NAME_MAP['Publish'] = 'PhenotypeDataSet' def setup(self): print("IS A PHENOTYPEDATASET") 
# Fields in the database table self.search_fields = ['Phenotype.Post_publication_description', 'Phenotype.Pre_publication_description', 'Phenotype.Pre_publication_abbreviation', 'Phenotype.Post_publication_abbreviation', 'Phenotype.Lab_code', 'Publication.PubMed_ID', 'Publication.Abstract', 'Publication.Title', 'Publication.Authors', 'PublishXRef.Id'] # Figure out what display_fields is self.display_fields = ['name', 'pubmed_id', 'pre_publication_description', 'post_publication_description', 'original_description', 'pre_publication_abbreviation', 'post_publication_abbreviation', 'lab_code', 'submitter', 'owner', 'authorized_users', 'authors', 'title', 'abstract', 'journal', 'volume', 'pages', 'month', 'year', 'sequence', 'units', 'comments'] # Fields displayed in the search results table header self.header_fields = ['Index', 'Record', 'Description', 'Authors', 'Year', 'Max LRS', 'Max LRS Location', 'Additive Effect'] self.type = 'Publish' self.query_for_group = ''' SELECT InbredSet.Name, InbredSet.Id FROM InbredSet, PublishFreeze WHERE PublishFreeze.InbredSetId = InbredSet.Id AND PublishFreeze.Name = "%s" ''' % escape(self.name) def check_confidentiality(self): # (Urgently?) 
Need to write this pass def get_trait_list(self): query = """ select PublishXRef.Id from PublishXRef, PublishFreeze where PublishFreeze.InbredSetId=PublishXRef.InbredSetId and PublishFreeze.Id = {} """.format(escape(str(self.id))) results = g.db.execute(query).fetchall() trait_data = {} for trait in results: trait_data[trait[0]] = self.retrieve_sample_data(trait[0]) return trait_data def get_trait_info(self, trait_list, species = ''): for this_trait in trait_list: if not this_trait.haveinfo: this_trait.retrieve_info(get_qtl_info=True) description = this_trait.post_publication_description #If the dataset is confidential and the user has access to confidential #phenotype traits, then display the pre-publication description instead #of the post-publication description if this_trait.confidential: this_trait.description_display = "" continue # for now if not webqtlUtil.hasAccessToConfidentialPhenotypeTrait( privilege=self.privilege, userName=self.userName, authorized_users=this_trait.authorized_users): description = this_trait.pre_publication_description if len(description) > 0: this_trait.description_display = description.strip() else: this_trait.description_display = "" if not this_trait.year.isdigit(): this_trait.pubmed_text = "N/A" else: this_trait.pubmed_text = this_trait.year if this_trait.pubmed_id: this_trait.pubmed_link = webqtlConfig.PUBMEDLINK_URL % this_trait.pubmed_id #LRS and its location this_trait.LRS_score_repr = "N/A" this_trait.LRS_score_value = 0 this_trait.LRS_location_repr = "N/A" this_trait.LRS_location_value = 1000000 if this_trait.lrs: result = g.db.execute(""" select Geno.Chr, Geno.Mb from Geno, Species where Species.Name = %s and Geno.Name = %s and Geno.SpeciesId = Species.Id """, (species, this_trait.locus)).fetchone() if result: if result[0] and result[1]: LRS_Chr = result[0] LRS_Mb = result[1] #XZ: LRS_location_value is used for sorting try: LRS_location_value = int(LRS_Chr)*1000 + float(LRS_Mb) except: if LRS_Chr.upper() == 'X': 
LRS_location_value = 20*1000 + float(LRS_Mb) else: LRS_location_value = ord(str(LRS_chr).upper()[0])*1000 + float(LRS_Mb) this_trait.LRS_score_repr = LRS_score_repr = '%3.1f' % this_trait.lrs this_trait.LRS_score_value = LRS_score_value = this_trait.lrs this_trait.LRS_location_repr = LRS_location_repr = 'Chr%s: %.6f' % (LRS_Chr, float(LRS_Mb)) def retrieve_sample_data(self, trait): query = """ SELECT Strain.Name, PublishData.value, PublishSE.error, NStrain.count FROM (PublishData, Strain, PublishXRef, PublishFreeze) left join PublishSE on (PublishSE.DataId = PublishData.Id AND PublishSE.StrainId = PublishData.StrainId) left join NStrain on (NStrain.DataId = PublishData.Id AND NStrain.StrainId = PublishData.StrainId) WHERE PublishXRef.InbredSetId = PublishFreeze.InbredSetId AND PublishData.Id = PublishXRef.DataId AND PublishXRef.Id = %s AND PublishFreeze.Id = %s AND PublishData.StrainId = Strain.Id Order BY Strain.Name """ results = g.db.execute(query, (trait, self.id)).fetchall() return results class GenotypeDataSet(DataSet): DS_NAME_MAP['Geno'] = 'GenotypeDataSet' def setup(self): # Fields in the database table self.search_fields = ['Name', 'Chr'] # Find out what display_fields is self.display_fields = ['name', 'chr', 'mb', 'source2', 'sequence'] # Fields displayed in the search results table header self.header_fields = ['Index', 'ID', 'Location'] # Todo: Obsolete or rename this field self.type = 'Geno' self.query_for_group = ''' SELECT InbredSet.Name, InbredSet.Id FROM InbredSet, GenoFreeze WHERE GenoFreeze.InbredSetId = InbredSet.Id AND GenoFreeze.Name = "%s" ''' % escape(self.name) def check_confidentiality(self): return geno_mrna_confidentiality(self) def get_trait_list(self): query = """ select Geno.Name from Geno, GenoXRef where GenoXRef.GenoId = Geno.Id and GenoFreezeId = {} """.format(escape(str(self.id))) results = g.db.execute(query).fetchall() trait_data = {} for trait in results: trait_data[trait[0]] = self.retrieve_sample_data(trait[0]) return 
trait_data def get_trait_info(self, trait_list, species=None): for this_trait in trait_list: if not this_trait.haveinfo: this_trait.retrieveInfo() #XZ: trait_location_value is used for sorting trait_location_repr = 'N/A' trait_location_value = 1000000 if this_trait.chr and this_trait.mb: try: trait_location_value = int(this_trait.chr)*1000 + this_trait.mb except: if this_trait.chr.upper() == 'X': trait_location_value = 20*1000 + this_trait.mb else: trait_location_value = ord(str(this_trait.chr).upper()[0])*1000 + this_trait.mb <|fim▁hole|> def retrieve_sample_data(self, trait): query = """ SELECT Strain.Name, GenoData.value, GenoSE.error, GenoData.Id FROM (GenoData, GenoFreeze, Strain, Geno, GenoXRef) left join GenoSE on (GenoSE.DataId = GenoData.Id AND GenoSE.StrainId = GenoData.StrainId) WHERE Geno.SpeciesId = %s AND Geno.Name = %s AND GenoXRef.GenoId = Geno.Id AND GenoXRef.GenoFreezeId = GenoFreeze.Id AND GenoFreeze.Name = %s AND GenoXRef.DataId = GenoData.Id AND GenoData.StrainId = Strain.Id Order BY Strain.Name """ results = g.db.execute(query, (webqtlDatabaseFunction.retrieve_species_id(self.group.name), trait, self.name)).fetchall() return results class MrnaAssayDataSet(DataSet): ''' An mRNA Assay is a quantitative assessment (assay) associated with an mRNA trait This used to be called ProbeSet, but that term only refers specifically to the Affymetrix platform and is far too specific. 
''' DS_NAME_MAP['ProbeSet'] = 'MrnaAssayDataSet' def setup(self): # Fields in the database table self.search_fields = ['Name', 'Description', 'Probe_Target_Description', 'Symbol', 'Alias', 'GenbankId', 'UniGeneId', 'RefSeq_TranscriptId'] # Find out what display_fields is self.display_fields = ['name', 'symbol', 'description', 'probe_target_description', 'chr', 'mb', 'alias', 'geneid', 'genbankid', 'unigeneid', 'omim', 'refseq_transcriptid', 'blatseq', 'targetseq', 'chipid', 'comments', 'strand_probe', 'strand_gene', 'probe_set_target_region', 'probe_set_specificity', 'probe_set_blat_score', 'probe_set_blat_mb_start', 'probe_set_blat_mb_end', 'probe_set_strand', 'probe_set_note_by_rw', 'flag'] # Fields displayed in the search results table header self.header_fields = ['Index', 'Record', 'Symbol', 'Description', 'Location', 'Mean', 'Max LRS', 'Max LRS Location', 'Additive Effect'] # Todo: Obsolete or rename this field self.type = 'ProbeSet' self.query_for_group = ''' SELECT InbredSet.Name, InbredSet.Id FROM InbredSet, ProbeSetFreeze, ProbeFreeze WHERE ProbeFreeze.InbredSetId = InbredSet.Id AND ProbeFreeze.Id = ProbeSetFreeze.ProbeFreezeId AND ProbeSetFreeze.Name = "%s" ''' % escape(self.name) def check_confidentiality(self): return geno_mrna_confidentiality(self) def get_trait_list_1(self): query = """ select ProbeSet.Name from ProbeSet, ProbeSetXRef where ProbeSetXRef.ProbeSetId = ProbeSet.Id and ProbeSetFreezeId = {} """.format(escape(str(self.id))) results = g.db.execute(query).fetchall() trait_data = {} for trait in results: print("Retrieving sample_data for ", trait[0]) trait_data[trait[0]] = self.retrieve_sample_data(trait[0]) return trait_data def get_trait_info(self, trait_list=None, species=''): # Note: setting trait_list to [] is probably not a great idea. 
if not trait_list: trait_list = [] for this_trait in trait_list: if not this_trait.haveinfo: this_trait.retrieveInfo(QTL=1) if not this_trait.symbol: this_trait.symbol = "N/A" #XZ, 12/08/2008: description #XZ, 06/05/2009: Rob asked to add probe target description description_string = unicode(str(this_trait.description).strip(codecs.BOM_UTF8), 'utf-8') target_string = unicode(str(this_trait.probe_target_description).strip(codecs.BOM_UTF8), 'utf-8') if len(description_string) > 1 and description_string != 'None': description_display = description_string else: description_display = this_trait.symbol if (len(description_display) > 1 and description_display != 'N/A' and len(target_string) > 1 and target_string != 'None'): description_display = description_display + '; ' + target_string.strip() # Save it for the jinja2 template this_trait.description_display = description_display #XZ: trait_location_value is used for sorting trait_location_repr = 'N/A' trait_location_value = 1000000 if this_trait.chr and this_trait.mb: #Checks if the chromosome number can be cast to an int (i.e. 
isn't "X" or "Y") #This is so we can convert the location to a number used for sorting trait_location_value = self.convert_location_to_value(this_trait.chr, this_trait.mb) #try: # trait_location_value = int(this_trait.chr)*1000 + this_trait.mb #except ValueError: # if this_trait.chr.upper() == 'X': # trait_location_value = 20*1000 + this_trait.mb # else: # trait_location_value = (ord(str(this_trait.chr).upper()[0])*1000 + # this_trait.mb) #ZS: Put this in function currently called "convert_location_to_value" this_trait.location_repr = 'Chr%s: %.6f' % (this_trait.chr, float(this_trait.mb)) this_trait.location_value = trait_location_value #Get mean expression value query = ( """select ProbeSetXRef.mean from ProbeSetXRef, ProbeSet where ProbeSetXRef.ProbeSetFreezeId = %s and ProbeSet.Id = ProbeSetXRef.ProbeSetId and ProbeSet.Name = '%s' """ % (escape(str(this_trait.dataset.id)), escape(this_trait.name))) #print("query is:", pf(query)) result = g.db.execute(query).fetchone() mean = result[0] if result else 0 if mean: this_trait.mean = "%2.3f" % mean #LRS and its location this_trait.LRS_score_repr = 'N/A' this_trait.LRS_score_value = 0 this_trait.LRS_location_repr = 'N/A' this_trait.LRS_location_value = 1000000 #Max LRS and its Locus location if this_trait.lrs and this_trait.locus: query = """ select Geno.Chr, Geno.Mb from Geno, Species where Species.Name = '{}' and Geno.Name = '{}' and Geno.SpeciesId = Species.Id """.format(species, this_trait.locus) result = g.db.execute(query).fetchone() if result: lrs_chr, lrs_mb = result #XZ: LRS_location_value is used for sorting lrs_location_value = self.convert_location_to_value(lrs_chr, lrs_mb) this_trait.LRS_score_repr = '%3.1f' % this_trait.lrs this_trait.LRS_score_value = this_trait.lrs this_trait.LRS_location_repr = 'Chr%s: %.6f' % (lrs_chr, float(lrs_mb)) def convert_location_to_value(self, chromosome, mb): try: location_value = int(chromosome)*1000 + float(mb) except ValueError: if chromosome.upper() == 'X': 
location_value = 20*1000 + float(mb) else: location_value = (ord(str(chromosome).upper()[0])*1000 + float(mb)) return location_value def get_sequence(self): query = """ SELECT ProbeSet.BlatSeq FROM ProbeSet, ProbeSetFreeze, ProbeSetXRef WHERE ProbeSet.Id=ProbeSetXRef.ProbeSetId and ProbeSetFreeze.Id = ProbeSetXRef.ProbSetFreezeId and ProbeSet.Name = %s ProbeSetFreeze.Name = %s """ % (escape(self.name), escape(self.dataset.name)) results = g.db.execute(query).fetchone() return results[0] def retrieve_sample_data(self, trait): query = """ SELECT Strain.Name, ProbeSetData.value, ProbeSetSE.error, ProbeSetData.Id FROM (ProbeSetData, ProbeSetFreeze, Strain, ProbeSet, ProbeSetXRef) left join ProbeSetSE on (ProbeSetSE.DataId = ProbeSetData.Id AND ProbeSetSE.StrainId = ProbeSetData.StrainId) WHERE ProbeSet.Name = '%s' AND ProbeSetXRef.ProbeSetId = ProbeSet.Id AND ProbeSetXRef.ProbeSetFreezeId = ProbeSetFreeze.Id AND ProbeSetFreeze.Name = '%s' AND ProbeSetXRef.DataId = ProbeSetData.Id AND ProbeSetData.StrainId = Strain.Id Order BY Strain.Name """ % (escape(trait), escape(self.name)) results = g.db.execute(query).fetchall() #print("RETRIEVED RESULTS HERE:", results) return results def retrieve_genes(self, column_name): query = """ select ProbeSet.Name, ProbeSet.%s from ProbeSet,ProbeSetXRef where ProbeSetXRef.ProbeSetFreezeId = %s and ProbeSetXRef.ProbeSetId=ProbeSet.Id; """ % (column_name, escape(str(self.id))) results = g.db.execute(query).fetchall() return dict(results) class TempDataSet(DataSet): '''Temporary user-generated data set''' def setup(self): self.search_fields = ['name', 'description'] self.display_fields = ['name', 'description'] self.header_fields = ['Name', 'Description'] self.type = 'Temp' # Need to double check later how these are used self.id = 1 self.fullname = 'Temporary Storage' self.shortname = 'Temp' @staticmethod def handle_pca(desc): if 'PCA' in desc: # Todo: Modernize below lines desc = desc[desc.rindex(':')+1:].strip() else: desc = 
desc[:desc.index('entered')].strip() return desc def get_desc(self): g.db.execute('SELECT description FROM Temp WHERE Name=%s', self.name) desc = g.db.fetchone()[0] desc = self.handle_pca(desc) return desc def get_group(self): self.cursor.execute(""" SELECT InbredSet.Name, InbredSet.Id FROM InbredSet, Temp WHERE Temp.InbredSetId = InbredSet.Id AND Temp.Name = "%s" """, self.name) self.group, self.group_id = self.cursor.fetchone() #return self.group def retrieve_sample_data(self, trait): query = """ SELECT Strain.Name, TempData.value, TempData.SE, TempData.NStrain, TempData.Id FROM TempData, Temp, Strain WHERE TempData.StrainId = Strain.Id AND TempData.Id = Temp.DataId AND Temp.name = '%s' Order BY Strain.Name """ % escape(trait.name) results = g.db.execute(query).fetchall() def geno_mrna_confidentiality(ob): dataset_table = ob.type + "Freeze" #print("dataset_table [%s]: %s" % (type(dataset_table), dataset_table)) query = '''SELECT Id, Name, FullName, confidentiality, AuthorisedUsers FROM %s WHERE Name = %%s''' % (dataset_table) result = g.db.execute(query, ob.name) (dataset_id, name, full_name, confidential, authorized_users) = result.fetchall()[0] if confidential: return True<|fim▁end|>
this_trait.location_repr = 'Chr%s: %.6f' % (this_trait.chr, float(this_trait.mb) ) this_trait.location_value = trait_location_value
<|file_name|>variadic.js<|end_file_name|><|fim▁begin|>/** * Generate a function that accepts a variable number of arguments as the last * function argument. * * @param {Function} fn * @return {Function} */ module.exports = function (fn) { var count = Math.max(fn.length - 1, 0); return function () { var args = new Array(count); var index = 0; // https://github.com/petkaantonov/bluebird/wiki/Optimization-killers#3-managing-arguments for (; index < count; index++) { args[index] = arguments[index]; } <|fim▁hole|> variadic.push(arguments[index]); } return fn.apply(this, args); }; };<|fim▁end|>
var variadic = args[count] = []; for (; index < arguments.length; index++) {
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015 Amazon.com, Inc. or its affiliates. # All Rights Reserved # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. # from boto.regioninfo import RegionInfo, get_regions from boto.regioninfo import connect def regions(): """ Get all available regions for the AWS CloudHSM service. :rtype: list :return: A list of :class:`boto.regioninfo.RegionInfo` """ from boto.cloudhsm.layer1 import CloudHSMConnection return get_regions('cloudhsm', connection_cls=CloudHSMConnection) def connect_to_region(region_name, **kw_params): from boto.cloudhsm.layer1 import CloudHSMConnection return connect('cloudhsm', region_name, connection_cls=CloudHSMConnection,<|fim▁hole|><|fim▁end|>
**kw_params)
<|file_name|>people_result.cc<|end_file_name|><|fim▁begin|>// Copyright 2013 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "chrome/browser/ui/app_list/search/people/people_result.h" #include <vector> #include "base/bind.h" #include "base/memory/ref_counted.h" #include "base/strings/utf_string_conversions.h" #include "chrome/browser/profiles/profile.h" #include "chrome/browser/signin/profile_oauth2_token_service_factory.h" #include "chrome/browser/signin/signin_manager_factory.h" #include "chrome/browser/ui/app_list/search/common/url_icon_source.h" #include "chrome/browser/ui/app_list/search/people/person.h" #include "chrome/browser/ui/browser_navigator.h" #include "chrome/common/extensions/api/hangouts_private.h" #include "components/signin/core/browser/profile_oauth2_token_service.h" #include "components/signin/core/browser/signin_manager.h" #include "content/public/browser/user_metrics.h" #include "extensions/browser/event_router.h" #include "grit/generated_resources.h" #include "grit/theme_resources.h" #include "ui/base/l10n/l10n_util.h" #include "ui/base/resource/resource_bundle.h"<|fim▁hole|> namespace OnHangoutRequested = extensions::api::hangouts_private::OnHangoutRequested; using extensions::api::hangouts_private::User; using extensions::api::hangouts_private::HangoutRequest; namespace { const int kIconSize = 32; const char kImageSizePath[] = "s64-p/"; const char kEmailUrlPrefix[] = "mailto:"; const char* const kHangoutsExtensionIds[] = { "nckgahadagoaajjgafhacjanaoiihapd", "ljclpkphhpbpinifbeabbhlfddcpfdde", "ppleadejekpmccmnpjdimmlfljlkdfej", "eggnbpckecmjlblplehfpjjdhhidfdoj", "jfjjdfefebklmdbmenmlehlopoocnoeh", "knipolnnllmklapflnccelgolnpehhpl" }; // Add a query parameter to specify the size to fetch the image in. 
The // original profile image can be of an arbitrary size, we ask the server to // crop it to a square 64x64 using its smart cropping algorithm. GURL GetImageUrl(const GURL& url) { std::string image_filename = url.ExtractFileName(); if (image_filename.empty()) return url; return url.Resolve(kImageSizePath + image_filename); } } // namespace namespace app_list { PeopleResult::PeopleResult(Profile* profile, scoped_ptr<Person> person) : profile_(profile), person_(person.Pass()), weak_factory_(this) { set_id(person_->id); set_title(base::UTF8ToUTF16(person_->display_name)); set_relevance(person_->interaction_rank); set_details(base::UTF8ToUTF16(person_->email)); RefreshHangoutsExtensionId(); SetDefaultActions(); image_ = gfx::ImageSkia( new UrlIconSource(base::Bind(&PeopleResult::OnIconLoaded, weak_factory_.GetWeakPtr()), profile_->GetRequestContext(), GetImageUrl(person_->image_url), kIconSize, IDR_PROFILE_PICTURE_LOADING), gfx::Size(kIconSize, kIconSize)); SetIcon(image_); } PeopleResult::~PeopleResult() { } void PeopleResult::Open(int event_flags) { // Action 0 will always be our default action. InvokeAction(0, event_flags); } void PeopleResult::InvokeAction(int action_index, int event_flags) { if (hangouts_extension_id_.empty()) { // If the hangouts app is not available, the only option we are showing // to the user is 'Send Email'. SendEmail(); } else { switch (action_index) { case 0: OpenChat(); break; case 1: SendEmail(); break; default: LOG(ERROR) << "Invalid people search action: " << action_index; } } } scoped_ptr<ChromeSearchResult> PeopleResult::Duplicate() { return scoped_ptr<ChromeSearchResult>( new PeopleResult(profile_, person_->Duplicate().Pass())).Pass(); } void PeopleResult::OnIconLoaded() { // Remove the existing image reps since the icon data is loaded and they // need to be re-created. 
const std::vector<gfx::ImageSkiaRep>& image_reps = image_.image_reps(); for (size_t i = 0; i < image_reps.size(); ++i) image_.RemoveRepresentation(image_reps[i].scale()); SetIcon(image_); } void PeopleResult::SetDefaultActions() { Actions actions; ui::ResourceBundle& bundle = ui::ResourceBundle::GetSharedInstance(); if (!hangouts_extension_id_.empty()) { actions.push_back(Action( *bundle.GetImageSkiaNamed(IDR_PEOPLE_SEARCH_ACTION_CHAT), *bundle.GetImageSkiaNamed(IDR_PEOPLE_SEARCH_ACTION_CHAT_HOVER), *bundle.GetImageSkiaNamed(IDR_PEOPLE_SEARCH_ACTION_CHAT_PRESSED), l10n_util::GetStringUTF16(IDS_PEOPLE_SEARCH_ACTION_CHAT_TOOLTIP))); } actions.push_back(Action( *bundle.GetImageSkiaNamed(IDR_PEOPLE_SEARCH_ACTION_EMAIL), *bundle.GetImageSkiaNamed(IDR_PEOPLE_SEARCH_ACTION_EMAIL_HOVER), *bundle.GetImageSkiaNamed(IDR_PEOPLE_SEARCH_ACTION_EMAIL_PRESSED), l10n_util::GetStringUTF16(IDS_PEOPLE_SEARCH_ACTION_EMAIL_TOOLTIP))); SetActions(actions); } void PeopleResult::OpenChat() { HangoutRequest request; request.type = extensions::api::hangouts_private::HANGOUT_TYPE_CHAT; // from: the user this chat request is originating from. SigninManagerBase* signin_manager = SigninManagerFactory::GetInstance()->GetForProfile(profile_); DCHECK(signin_manager); request.from = signin_manager->GetAuthenticatedAccountId(); // to: list of users with whom to start this hangout is with. linked_ptr<User> target(new User()); target->id = person_->owner_id; request.to.push_back(target); scoped_ptr<extensions::Event> event( new extensions::Event(OnHangoutRequested::kEventName, OnHangoutRequested::Create(request))); // TODO(rkc): Change this once we remove the hangoutsPrivate API. 
// See crbug.com/306672 extensions::EventRouter::Get(profile_) ->DispatchEventToExtension(hangouts_extension_id_, event.Pass()); content::RecordAction(base::UserMetricsAction("PeopleSearch_OpenChat")); } void PeopleResult::SendEmail() { chrome::NavigateParams params(profile_, GURL(kEmailUrlPrefix + person_->email), content::PAGE_TRANSITION_LINK); // If no window exists, this will open a new window this one tab. params.disposition = NEW_FOREGROUND_TAB; chrome::Navigate(&params); content::RecordAction(base::UserMetricsAction("PeopleSearch_SendEmail")); } void PeopleResult::RefreshHangoutsExtensionId() { // TODO(rkc): Change this once we remove the hangoutsPrivate API. // See crbug.com/306672 for (size_t i = 0; i < arraysize(kHangoutsExtensionIds); ++i) { if (extensions::EventRouter::Get(profile_)->ExtensionHasEventListener( kHangoutsExtensionIds[i], OnHangoutRequested::kEventName)) { hangouts_extension_id_ = kHangoutsExtensionIds[i]; return; } } hangouts_extension_id_.clear(); } ChromeSearchResultType PeopleResult::GetType() { return SEARCH_PEOPLE_SEARCH_RESULT; } } // namespace app_list<|fim▁end|>
<|file_name|>reverse_complex1.rs<|end_file_name|><|fim▁begin|>fn main() { if !(!(a && b && c) <caret>&& !d) { <|fim▁hole|>}<|fim▁end|>
}
<|file_name|>test_parse_jats.py<|end_file_name|><|fim▁begin|># coding=utf-8 import json import os import unittest from collections import OrderedDict from bs4 import BeautifulSoup from ddt import ddt, data, unpack from elifetools import parseJATS as parser from elifetools import rawJATS as raw_parser from elifetools.utils import date_struct from tests.file_utils import ( sample_xml, json_expected_file, read_fixture, read_sample_xml, ) from tests import soup_body @ddt class TestParseJats(unittest.TestCase): def setUp(self): pass def soup(self, filename): # return soup return parser.parse_document(sample_xml(filename)) def json_expected(self, filename, function_name): json_expected = None json_file = json_expected_file(filename, function_name) try: with open(json_file, "rb") as json_file_fp: json_expected = json.loads(json_file_fp.read().decode("utf-8")) except IOError: # file may not exist or the value is None for this article pass return json_expected @data("elife-kitchen-sink.xml") def test_parse_document(self, filename): soup = parser.parse_document(sample_xml(filename)) self.assertTrue(isinstance(soup, BeautifulSoup)) """ Quick test cases during development checking syntax errors and coverage """ @unpack @data( ( "elife04493.xml", "Neuron hemilineages provide the functional ground plan for the <i>Drosophila</i> ventral nervous system", ) ) def test_full_title_json(self, filename, expected): full_title_json = parser.full_title_json(self.soup(filename)) self.assertEqual(expected, full_title_json) @unpack @data( ( "elife04490.xml", "Both the frequency of sesquiterpene-emitting individuals and the defense capacity of individual plants determine the consequences of sesquiterpene volatile emission for individuals and their neighbors in populations of the wild tobacco <i>Nicotiana attenuata</i>.", ), ("elife_poa_e06828.xml", ""), ) def test_impact_statement_json(self, filename, expected): impact_statement_json = parser.impact_statement_json(self.soup(filename)) 
self.assertEqual(expected, impact_statement_json) @unpack @data(("elife-kitchen-sink.xml", 6), ("elife-02833-v2.xml", 0)) def test_ethics_json_by_file(self, filename, expected_length): soup = parser.parse_document(sample_xml(filename)) self.assertEqual(len(parser.ethics_json(soup)), expected_length) @unpack @data( ( read_fixture("test_ethics_json", "content_01.xml"), read_fixture("test_ethics_json", "content_01_expected.py"), ), ) def test_ethics_json(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.ethics_json(soup_body(soup)) self.assertEqual(expected, tag_content) @unpack @data(("elife-kitchen-sink.xml", list), ("elife_poa_e06828.xml", None)) def test_acknowledgements_json_by_file(self, filename, expected): acknowledgements_json = parser.acknowledgements_json(self.soup(filename)) if expected is None: self.assertEqual(expected, acknowledgements_json) else: self.assertEqual(expected, type(acknowledgements_json)) @unpack @data(("elife04490.xml", 3)) def test_appendices_json_by_file(self, filename, expected_len): soup = parser.parse_document(sample_xml(filename)) tag_content = parser.appendices_json(soup) self.assertEqual(len(tag_content), expected_len) @unpack @data( # example based on 14093 v1 with many sections and content ( read_fixture("test_appendices_json", "content_01.xml"), read_fixture("test_appendices_json", "content_01_expected.py"), ), # example based on 14022 v3 having a section with no title in it, with some additional scenarios ( read_fixture("test_appendices_json", "content_02.xml"), read_fixture("test_appendices_json", "content_02_expected.py"), ), # appendix with no sections, based on 00666 kitchen sink ( read_fixture("test_appendices_json", "content_03.xml"), read_fixture("test_appendices_json", "content_03_expected.py"), ), # appendix with a section and a box, also based on 00666 kitchen sink ( read_fixture("test_appendices_json", "content_04.xml"), read_fixture("test_appendices_json", 
"content_04_expected.py"), ), # appendix with a boxed-text in a subsequent section based on article ( read_fixture("test_appendices_json", "content_05.xml"), read_fixture("test_appendices_json", "content_05_expected.py"), ), ) def test_appendices_json(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.appendices_json(soup_body(soup)) self.assertEqual(expected, tag_content) @unpack @data( # appendix with inline-graphic, based on 17092 v1 ( read_fixture("test_appendices_json_base_url", "content_01.xml"), None, read_fixture("test_appendices_json_base_url", "content_01_expected.py"), ), # appendix with inline-graphic, based on 17092 v1 ( read_fixture("test_appendices_json_base_url", "content_02.xml"), "https://example.org/", read_fixture("test_appendices_json_base_url", "content_02_expected.py"), ), ) def test_appendices_json_with_base_url(self, xml_content, base_url, expected): soup = parser.parse_xml(xml_content) tag_content = parser.appendices_json(soup_body(soup), base_url) self.assertEqual(expected, tag_content) @unpack @data( ( "elife04490.xml", [ "<i>Nicotiana attenuata</i>", "<i>Manduca sexta</i>", u"Geocoris spp.", "<i>Trichobaris mucorea</i>", u"direct and indirect defense", u"diversity", ], ), ("elife07586.xml", []), ) def test_keywords_json(self, filename, expected): keywords_json = parser.keywords_json(self.soup(filename)) self.assertEqual(expected, keywords_json) @unpack @data( ('<root xmlns:xlink="http://www.w3.org/1999/xlink"/>', []), ( '<root xmlns:xlink="http://www.w3.org/1999/xlink"><kwd-group kwd-group-type="research-organism"><title>Research organism</title><kwd><italic>A. thaliana</italic></kwd><kwd>Other</kwd></kwd-group></root>', ["<i>A. 
thaliana</i>"], ), ( '<root xmlns:xlink="http://www.w3.org/1999/xlink"><kwd-group kwd-group-type="research-organism"><title>Research organism</title><kwd>None</kwd></kwd-group></root>', [], ), ) def test_research_organism_json(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.research_organism_json(soup_body(soup)) self.assertEqual(expected, tag_content) @unpack @data( ("<root></root>", None), ("<root><ack></ack></root>", None), ( "<root><ack><title>Acknowledgements</title><p>Paragraph</p></ack></root>", [OrderedDict([("type", "paragraph"), ("text", u"Paragraph")])], ), ( "<root><ack><title>Acknowledgements</title><p>Paragraph</p><p><italic>italic</italic></p></ack></root>", [ OrderedDict([("type", "paragraph"), ("text", u"Paragraph")]), OrderedDict([("type", "paragraph"), ("text", u"<i>italic</i>")]), ], ), ) def test_acknowledgements_json(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.acknowledgements_json(soup_body(soup)) self.assertEqual(expected, tag_content) @unpack @data( ("elife02304.xml", 2), ("elife02935.xml", 2), ) def test_datasets_json_by_file(self, filename, expected_len): soup = parser.parse_document(sample_xml(filename)) tag_content = parser.datasets_json(soup) self.assertEqual(len(tag_content), expected_len) @unpack @data( # Datasets from 00825 v1, has generated, used, etal and italic tags ( read_fixture("test_datasets_json", "content_01.xml"), read_fixture("test_datasets_json", "content_01_expected.py"), ), # Datasets from 00666 kitchen sink, includes a DOI ( read_fixture("test_datasets_json", "content_02.xml"), read_fixture("test_datasets_json", "content_02_expected.py"), ), # 10856 v2, excerpt, for adding dates to datasets missing a year value ( read_fixture("test_datasets_json", "content_03.xml"), read_fixture("test_datasets_json", "content_03_expected.py"), ), # Datasets example with section sec-type data-availability ( read_fixture("test_datasets_json", 
"content_04.xml"), read_fixture("test_datasets_json", "content_04_expected.py"), ), # Datasets example with a blank paragraph on some PoA XML files based 33420 v1 ( read_fixture("test_datasets_json", "content_05.xml"), read_fixture("test_datasets_json", "content_05_expected.py"), ), # Datasets example with section sec-type data-availability and using element-citation tag ( read_fixture("test_datasets_json", "content_06.xml"), read_fixture("test_datasets_json", "content_06_expected.py"), ), # Datasets example for PoA XML in new style tagging, based 33420 v1 ( read_fixture("test_datasets_json", "content_07.xml"), read_fixture("test_datasets_json", "content_07_expected.py"), ), # Datasets example with new pub-id uri tagging ( read_fixture("test_datasets_json", "content_08.xml"), read_fixture("test_datasets_json", "content_08_expected.py"), ), # Datasets example with multiple datasets availability paragraphs ( read_fixture("test_datasets_json", "content_09.xml"), read_fixture("test_datasets_json", "content_09_expected.py"), ), ) def test_datasets_json(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.datasets_json(soup_body(soup)) self.assertEqual(expected, tag_content) @unpack @data( # Example of fn-group and fn tags included and ignored in footnotes_json output ( read_fixture("test_footnotes_json", "content_01.xml"), read_fixture("test_footnotes_json", "content_01_expected.py"), ), # Test for no back tag ("<article/>", None), # Test for no fn-group tags ("<article><back/></article>", None), # Only fn-group tag with a content-type ( '<article><back><fn-group content-type="competing-interest"/></back></article>', None, ), )<|fim▁hole|> tag_content = parser.footnotes_json(soup_body(soup)) self.assertEqual(expected, tag_content) @unpack @data( ("elife02304.xml", 2), ("elife02935.xml", 6), ) def test_supplementary_files_json_by_file(self, filename, expected_len): soup = parser.parse_document(sample_xml(filename)) tag_content = 
parser.supplementary_files_json(soup) self.assertEqual(len(tag_content), expected_len) @unpack @data( # Datasets from 16996 v1 PoA ( read_fixture("test_supplementary_files_json", "content_01.xml"), read_fixture("test_supplementary_files_json", "content_01_expected.py"), ), # Datasets from 08477 v1 VoR ( read_fixture("test_supplementary_files_json", "content_02.xml"), read_fixture("test_supplementary_files_json", "content_02_expected.py"), ), # 02184 v1, older style PoA has supplementary files directly in the article-meta ( read_fixture("test_supplementary_files_json", "content_03.xml"), read_fixture("test_supplementary_files_json", "content_03_expected.py"), ), # 04493 v1 PoA, multiple old style supplementary files ( read_fixture("test_supplementary_files_json", "content_04.xml"), read_fixture("test_supplementary_files_json", "content_04_expected.py"), ), # 10110 v1 excerpt, should only extract the supplementary-material from the back matter ( read_fixture("test_supplementary_files_json", "content_05.xml"), read_fixture("test_supplementary_files_json", "content_05_expected.py"), ), # 03405 v1, label and no title tag ( read_fixture("test_supplementary_files_json", "content_06.xml"), read_fixture("test_supplementary_files_json", "content_06_expected.py"), ), # 00333 v1, mimetype contains a slash so ignore sub-mimetype ( read_fixture("test_supplementary_files_json", "content_07.xml"), read_fixture("test_supplementary_files_json", "content_07_expected.py"), ), # 26759 v2, example of title tag and no label tag ( read_fixture("test_supplementary_files_json", "content_08.xml"), read_fixture("test_supplementary_files_json", "content_08_expected.py"), ), ) def test_supplementary_files_json(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.supplementary_files_json(soup_body(soup)) self.assertEqual(expected, tag_content) @unpack @data( ( "elife02304.xml", "The funders had no role in study design, data collection and interpretation, or the 
decision to submit the work for publication.", ) ) def test_funding_statement_json_by_file(self, filename, expected): soup = parser.parse_document(sample_xml(filename)) tag_content = parser.funding_statement_json(soup) self.assertEqual(tag_content, expected) @unpack @data( ('<root xmlns:xlink="http://www.w3.org/1999/xlink"></root>', None), ( '<root xmlns:xlink="http://www.w3.org/1999/xlink"><funding-statement>Funding statement</funding-statement></root>', "Funding statement", ), ( '<root xmlns:xlink="http://www.w3.org/1999/xlink"><funding-statement><italic>Special</italic> funding statement</funding-statement></root>', "<i>Special</i> funding statement", ), ) def test_funding_statement_json(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.funding_statement_json(soup) self.assertEqual(tag_content, expected) @unpack @data( ("elife02304.xml", 3), ("elife02935.xml", 16), ) def test_funding_awards_json_by_file(self, filename, expected_len): soup = parser.parse_document(sample_xml(filename)) tag_content = parser.funding_awards_json(soup) self.assertEqual(len(tag_content), expected_len) @unpack @data( # 07383 v1 has an institution as the recipient ( read_fixture("test_funding_awards_json", "content_01.xml"), read_fixture("test_funding_awards_json", "content_01_expected.py"), ), # Funding from new kitchen sink ( read_fixture("test_funding_awards_json", "content_02.xml"), read_fixture("test_funding_awards_json", "content_02_expected.py"), ), # 08245 v1 edge case, unusual principal-award-recipient ( read_fixture("test_funding_awards_json", "content_03.xml"), read_fixture("test_funding_awards_json", "content_03_expected.py"), ), # 00801 v1 edge case, rewrite funding award ( read_fixture("test_funding_awards_json", "content_04.xml"), read_fixture("test_funding_awards_json", "content_04_expected.py"), ), # 04250 v1 edge case, rewrite to add funding award recipients ( read_fixture("test_funding_awards_json", "content_05.xml"), 
read_fixture("test_funding_awards_json", "content_05_expected.py"),
        ),
        # 06412 v2 edge case, rewrite to add funding award recipients
        (
            read_fixture("test_funding_awards_json", "content_06.xml"),
            read_fixture("test_funding_awards_json", "content_06_expected.py"),
        ),
        # 03609 v1 example funding award with multiple recipients
        (
            read_fixture("test_funding_awards_json", "content_07.xml"),
            read_fixture("test_funding_awards_json", "content_07_expected.py"),
        ),
    )
    def test_funding_awards_json(self, xml_content, expected):
        """funding_awards_json output should equal the fixture for each XML excerpt"""
        soup = parser.parse_xml(xml_content)
        tag_content = parser.funding_awards_json(soup)
        self.assertEqual(tag_content, expected)

    @unpack
    @data(
        # test for no sub-article
        (
            "<root/>",
            OrderedDict(),
        ),
        # example from elife 00666 kitchen sink XML
        (
            read_fixture("test_editor_evaluation", "content_01.xml"),
            read_fixture("test_editor_evaluation", "content_01_expected.py"),
        ),
    )
    def test_editor_evaluation(self, xml_content, expected):
        """editor_evaluation output should equal the fixture for each XML excerpt"""
        soup = parser.parse_xml(xml_content)
        tag_content = parser.editor_evaluation(soup)
        self.assertEqual(expected, tag_content)

    @unpack
    @data(
        # VoR sample yields non-empty content; PoA sample yields an empty OrderedDict
        ("elife-kitchen-sink.xml", None, OrderedDict()),
        ("elife_poa_e06828.xml", OrderedDict(), None),
    )
    def test_decision_letter(self, filename, expected, not_expected):
        """check decision_letter on whole sample files, by equality or inequality"""
        sub_article_content = parser.decision_letter(self.soup(filename))
        if expected is not None:
            self.assertEqual(expected, sub_article_content)
        if not_expected is not None:
            self.assertNotEqual(not_expected, sub_article_content)

    @unpack
    @data(
        # 04871 v2, excerpt, remove unwanted sections
        (
            read_fixture("test_decision_letter", "content_01.xml"),
            read_fixture("test_decision_letter", "content_01_expected.py"),
        ),
        # 10856 v2, excerpt, add missing description via a rewrite
        (
            read_fixture("test_decision_letter", "content_02.xml"),
            read_fixture("test_decision_letter", "content_02_expected.py"),
        ),
    )
    def test_decision_letter_edge_cases(self, xml_content, expected):
        """decision_letter output should equal the fixture for each XML excerpt"""
        soup = parser.parse_xml(xml_content)
        tag_content = parser.decision_letter(soup)
self.assertEqual(expected, tag_content) @unpack @data( ("elife-kitchen-sink.xml", None, OrderedDict()), ("elife_poa_e06828.xml", OrderedDict(), None), ) def test_author_response(self, filename, expected, not_expected): sub_article_content = parser.author_response(self.soup(filename)) if expected is not None: self.assertEqual(expected, sub_article_content) if not_expected is not None: self.assertNotEqual(not_expected, sub_article_content) @unpack @data( # 04871 v2, excerpt, remove unwanted sections ( read_fixture("test_author_response", "content_01.xml"), read_fixture("test_author_response", "content_01_expected.py"), ), ) def test_author_response_edge_cases(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.author_response(soup) self.assertEqual(expected, tag_content) @unpack @data(("elife-kitchen-sink.xml", None, []), ("elife_poa_e06828.xml", [], None)) def test_body(self, filename, expected, not_expected): body = parser.body(self.soup(filename)) if expected is not None: self.assertEqual(expected, body) if not_expected is not None: self.assertNotEqual(not_expected, body) @unpack @data( # very simple body, wrap in a section ( read_fixture("test_body_json", "content_01.xml"), read_fixture("test_body_json", "content_01_expected.py"), ), # normal boxed-text and section, keep these ( read_fixture("test_body_json", "content_02.xml"), read_fixture("test_body_json", "content_02_expected.py"), ), # boxed-text paragraphs inside the caption tag, based on 05519 v2 ( read_fixture("test_body_json", "content_03.xml"), read_fixture("test_body_json", "content_03_expected.py"), ), # 00301 v1 do not keep boxed-text and wrap in section ( read_fixture("test_body_json", "content_04.xml"), read_fixture("test_body_json", "content_04_expected.py"), ), # 00646 v1 boxed text to keep, and wrap in section ( read_fixture("test_body_json", "content_05.xml"), read_fixture("test_body_json", "content_05_expected.py"), ), # 02945 v1, correction article keep the 
boxed-text ( read_fixture("test_body_json", "content_06.xml"), read_fixture("test_body_json", "content_06_expected.py"), ), # 12844 v1, based on, edge case to rewrite unacceptable sections that have no titles ( read_fixture("test_body_json", "content_07.xml"), read_fixture("test_body_json", "content_07_expected.py"), ), # 09977 v2, based on, edge case to remove a specific section with no content ( read_fixture("test_body_json", "content_08.xml"), read_fixture("test_body_json", "content_08_expected.py"), ), # 09977 v3, based on, edge case to keep a specific section that does have content ( read_fixture("test_body_json", "content_09.xml"), read_fixture("test_body_json", "content_09_expected.py"), ), # 05519 v2, based on, edge case to remove an unwanted section ( read_fixture("test_body_json", "content_10.xml"), read_fixture("test_body_json", "content_10_expected.py"), ), # 00013 v1, excerpt, add an id to a section ( read_fixture("test_body_json", "content_11.xml"), read_fixture("test_body_json", "content_11_expected.py"), ), # 04232 v2, excerpt, remove an unwanted section ( read_fixture("test_body_json", "content_12.xml"), read_fixture("test_body_json", "content_12_expected.py"), ), # 07157 v1, add title to a section ( read_fixture("test_body_json", "content_13.xml"), read_fixture("test_body_json", "content_13_expected.py"), ), # excerpt of 23383 v1 where there is a boxed-text that was formerly stripped away, check it remains ( read_fixture("test_body_json", "content_14.xml"), read_fixture("test_body_json", "content_14_expected.py"), ), ) def test_body_json(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.body_json(soup) self.assertEqual(expected, tag_content) @unpack @data( ( read_fixture("test_body_json_base_url", "content_01.xml"), None, read_fixture("test_body_json_base_url", "content_01_expected.py"), ), ( read_fixture("test_body_json_base_url", "content_02.xml"), "https://example.org/", 
read_fixture("test_body_json_base_url", "content_02_expected.py"),
        ),
        (
            read_fixture("test_body_json_base_url", "content_03.xml"),
            None,
            read_fixture("test_body_json_base_url", "content_03_expected.py"),
        ),
        (
            read_fixture("test_body_json_base_url", "content_04.xml"),
            "https://example.org/",
            read_fixture("test_body_json_base_url", "content_04_expected.py"),
        ),
    )
    def test_body_json_with_base_url(self, xml_content, base_url, expected):
        """body_json with an optional base_url argument should equal the fixture"""
        soup = parser.parse_xml(xml_content)
        tag_content = parser.body_json(soup, base_url=base_url)
        self.assertEqual(expected, tag_content)

    @unpack
    @data(
        # 08647 v1 PoA editor has blank string in the affiliation tags
        (
            read_fixture("test_editors_json", "content_01.xml"),
            read_fixture("test_editors_json", "content_01_expected.py"),
        ),
        # 09560 v1 example, has two editors
        (
            read_fixture("test_editors_json", "content_02.xml"),
            read_fixture("test_editors_json", "content_02_expected.py"),
        ),
        # 23804 v3 example, has no role tag and is rewritten
        (
            read_fixture("test_editors_json", "content_03.xml"),
            read_fixture("test_editors_json", "content_03_expected.py"),
        ),
        # 22028 v1 example, has a country but no institution
        (
            read_fixture("test_editors_json", "content_04.xml"),
            read_fixture("test_editors_json", "content_04_expected.py"),
        ),
        # kitchen sink example, has senior editor and reviewers
        (
            read_fixture("test_editors_json", "content_05.xml"),
            read_fixture("test_editors_json", "content_05_expected.py"),
        ),
        # kitchen sink example, reviewing editor and senior editor is the same person
        (
            read_fixture("test_editors_json", "content_06.xml"),
            read_fixture("test_editors_json", "content_06_expected.py"),
        ),
        # reviewing editor and senior editor is the same person in both mentions plus a reviewer
        (
            read_fixture("test_editors_json", "content_07.xml"),
            read_fixture("test_editors_json", "content_07_expected.py"),
        ),
    )
    def test_editors_json_edge_cases(self, xml_content, expected):
        """editors_json output should equal the fixture for each XML excerpt"""
        soup = parser.parse_xml(xml_content)
        tag_content = parser.editors_json(soup_body(soup))
self.assertEqual(expected, tag_content) @unpack @data( # Author with phone number, 02833 v2 ( read_fixture("test_authors_json", "content_01.xml"), read_fixture("test_authors_json", "content_01_expected.py"), ), # 02935 v1, group authors (collab) but no members of those groups ( read_fixture("test_authors_json", "content_02.xml"), read_fixture("test_authors_json", "content_02_expected.py"), ), # 02935 v2, excerpt for group author parsing ( read_fixture("test_authors_json", "content_03.xml"), read_fixture("test_authors_json", "content_03_expected.py"), ), # 09376 v1, excerpt to rewrite an author ORCID ( read_fixture("test_authors_json", "content_04.xml"), read_fixture("test_authors_json", "content_04_expected.py"), ), # 06956 v1, excerpt, add an affiliation name to an author ( read_fixture("test_authors_json", "content_05.xml"), read_fixture("test_authors_json", "content_05_expected.py"), ), # 21337 v1, example to pick up the email of corresponding authors from authors notes ( read_fixture("test_authors_json", "content_06.xml"), read_fixture("test_authors_json", "content_06_expected.py"), ), # 00007 v1, example with a present address ( read_fixture("test_authors_json", "content_07.xml"), read_fixture("test_authors_json", "content_07_expected.py"), ), # 00666 kitchen sink (with extra whitespace removed), example to pick up the email of corresponding author ( read_fixture("test_authors_json", "content_08.xml"), read_fixture("test_authors_json", "content_08_expected.py"), ), # 09594 v2 example of non-standard footnote fn-type other and id starting with 'fn' ( read_fixture("test_authors_json", "content_09.xml"), read_fixture("test_authors_json", "content_09_expected.py"), ), # 21230 v1 example of author role to parse ( read_fixture("test_authors_json", "content_10.xml"), read_fixture("test_authors_json", "content_10_expected.py"), ), # 21230 v1 as an example of competing interests rewrite rule for elife ( read_fixture("test_authors_json", "content_11.xml"), 
read_fixture("test_authors_json", "content_11_expected.py"), ), # 00351 v1 example of author role to parse ( read_fixture("test_authors_json", "content_12.xml"), read_fixture("test_authors_json", "content_12_expected.py"), ), # 21723 v1 another example of author role ( read_fixture("test_authors_json", "content_13.xml"), read_fixture("test_authors_json", "content_13_expected.py"), ), # author with a bio based on kitchen sink 00777 ( read_fixture("test_authors_json", "content_14.xml"), read_fixture("test_authors_json", "content_14_expected.py"), ), # 02273 v1 example, equal contribution to parse ( read_fixture("test_authors_json", "content_15.xml"), read_fixture("test_authors_json", "content_15_expected.py"), ), # 09148 v1 example, and author with two email addresses ( read_fixture("test_authors_json", "content_16.xml"), read_fixture("test_authors_json", "content_16_expected.py"), ), # example of new kitchen sink group authors with multiple email addresses, based on 17044 v1 ( read_fixture("test_authors_json", "content_17.xml"), read_fixture("test_authors_json", "content_17_expected.py"), ), # example of inline email address ( read_fixture("test_authors_json", "content_18.xml"), read_fixture("test_authors_json", "content_18_expected.py"), ), ) def test_authors_json_edge_cases(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.authors_json(soup_body(soup)) self.assertEqual(expected, tag_content) @unpack @data( # 00855 v1, example of just person authors ( read_fixture("test_author_line", "content_01.xml"), u"Randy Schekman, Mark Patterson", ), # 08714 v1, group authors only ( read_fixture("test_author_line", "content_02.xml"), u"MalariaGEN Plasmodium falciparum Community Project", ), # elife00351.xml, one author ( read_fixture("test_author_line", "content_03.xml"), "Richard Smith", ), # elife_poa_e06828.xml, multiple authors adds et al. 
( read_fixture("test_author_line", "content_04.xml"), "Michael S Fleming et al.", ), ) def test_author_line_edge_cases(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.author_line(soup_body(soup)) self.assertEqual(expected, tag_content) @unpack @data( (None, None), (["Randy Schekman"], "Randy Schekman"), (["Randy Schekman", "Mark Patterson"], "Randy Schekman, Mark Patterson"), (["Randy Schekman", "Mark Patterson", "eLife"], "Randy Schekman et al."), ) def test_format_author_line(self, author_names, expected): self.assertEqual(parser.format_author_line(author_names), expected) @data( # aff tag linked via an rid to id attribute ( read_fixture("test_format_aff", "content_01.xml"), read_fixture("test_format_aff", "content_01_expected.py"), ), # inline aff tag example, no id attribute ( read_fixture("test_format_aff", "content_02.xml"), read_fixture("test_format_aff", "content_02_expected.py"), ), # aff example mostly just text with no subtags ( read_fixture("test_format_aff", "content_03.xml"), read_fixture("test_format_aff", "content_03_expected.py"), ), # aff example with ror institution-id ( read_fixture("test_format_aff", "content_04.xml"), read_fixture("test_format_aff", "content_04_expected.py"), ), # edge case, no aff tag or the rid idoes not match an aff id (None, (None, {})), ) @unpack def test_format_aff_edge_cases(self, xml_content, expected): if xml_content: soup = parser.parse_xml(xml_content) aff_tag = soup_body(soup) else: # where the tag is None aff_tag = xml_content tag_content = parser.format_aff(aff_tag) self.assertEqual(expected, tag_content) @unpack @data( (None, []), ( [ {"name": {"preferred": "Randy Schekman"}}, {"name": {"preferred": "Mark Patterson"}}, {"name": "eLife"}, ], ["Randy Schekman", "Mark Patterson", "eLife"], ), ) def test_extract_author_line_names(self, authors_json, expected): self.assertEqual(parser.extract_author_line_names(authors_json), expected) @data( # standard expected author with name 
tag ( read_fixture("test_format_contributor", "content_01.xml"), read_fixture("test_format_contributor", "content_01_expected.py"), ), # edge case, no valid contrib tags ( read_fixture("test_format_contributor", "content_02.xml"), read_fixture("test_format_contributor", "content_02_expected.py"), ), # edge case, string-name wrapper ( read_fixture("test_format_contributor", "content_03.xml"), read_fixture("test_format_contributor", "content_03_expected.py"), ), # edge case, incorrect aff tag xref values will not cause an error if aff tag is not found ( read_fixture("test_format_contributor", "content_04.xml"), read_fixture("test_format_contributor", "content_04_expected.py"), ), ) @unpack def test_format_contributor_edge_cases(self, xml_content, expected): soup = parser.parse_xml(xml_content) contrib_tag = raw_parser.article_contributors(soup)[0] tag_content = parser.format_contributor(contrib_tag, soup) self.assertEqual(expected, tag_content) @unpack @data(("(+1) 800-555-5555", "+18005555555")) def test_phone_number_json(self, phone, expected): self.assertEqual(parser.phone_number_json(phone), expected) @unpack @data( (None, OrderedDict(), OrderedDict()), # example of clinical trial contributors ( read_fixture("test_references_json_authors", "content_01.py"), OrderedDict([("type", u"clinical-trial")]), read_fixture("test_references_json_authors", "content_01_expected.py"), ), # example of patent contributors ( read_fixture("test_references_json_authors", "content_02.py"), OrderedDict([("type", u"patent")]), read_fixture("test_references_json_authors", "content_02_expected.py"), ), # example of thesis contributors ( read_fixture("test_references_json_authors", "content_03.py"), OrderedDict([("type", u"thesis")]), read_fixture("test_references_json_authors", "content_03_expected.py"), ), ) def test_references_json_authors(self, ref_authors, ref_content, expected): references_json = parser.references_json_authors(ref_authors, ref_content) self.assertEqual(expected, 
references_json) @data( # Web reference with no title, use the uri from 01892 ( read_fixture("test_references_json", "content_01.xml"), read_fixture("test_references_json", "content_01_expected.py"), ), # Thesis title from 00626, also in converted to unknown because of its comment tag ( read_fixture("test_references_json", "content_02.xml"), read_fixture("test_references_json", "content_02_expected.py"), ), # fpage value with usual characters from 00170 ( read_fixture("test_references_json", "content_03.xml"), read_fixture("test_references_json", "content_03_expected.py"), ), # fpage contains dots, 00569 ( read_fixture("test_references_json", "content_04.xml"), read_fixture("test_references_json", "content_04_expected.py"), ), # pages value of in press, 00109 ( read_fixture("test_references_json", "content_05.xml"), read_fixture("test_references_json", "content_05_expected.py"), ), # year value of in press, 02535 ( read_fixture("test_references_json", "content_06.xml"), read_fixture("test_references_json", "content_06_expected.py"), ), # conference, 03532 v3 ( read_fixture("test_references_json", "content_07.xml"), read_fixture("test_references_json", "content_07_expected.py"), ), # web with doi but no uri, 04775 v2 ( read_fixture("test_references_json", "content_08.xml"), read_fixture("test_references_json", "content_08_expected.py"), ), # Clinical trial example, from new kitchen sink 00666 ( read_fixture("test_references_json", "content_09.xml"), read_fixture("test_references_json", "content_09_expected.py"), ), # Journal reference with no article-title, 05462 v3 ( read_fixture("test_references_json", "content_10.xml"), read_fixture("test_references_json", "content_10_expected.py"), ), # book reference, gets converted to book-chapter, and has no editors, 16412 v1 ( read_fixture("test_references_json", "content_11.xml"), read_fixture("test_references_json", "content_11_expected.py"), ), # journal reference with no article title and no lpage, 11282 v2 ( 
read_fixture("test_references_json", "content_12.xml"), read_fixture("test_references_json", "content_12_expected.py"), ), # reference with no title uses detail as the titel, 00311 v1 ( read_fixture("test_references_json", "content_13.xml"), read_fixture("test_references_json", "content_13_expected.py"), ), # reference of type other with no details, 15266 v1 ( read_fixture("test_references_json", "content_14.xml"), read_fixture("test_references_json", "content_14_expected.py"), ), # reference of type journal with no journal name, 00340 v1 ( read_fixture("test_references_json", "content_15.xml"), read_fixture("test_references_json", "content_15_expected.py"), ), # reference of type book with no source, 00051 v1 ( read_fixture("test_references_json", "content_16.xml"), read_fixture("test_references_json", "content_16_expected.py"), ), # reference of type book with no publisher, 00031 v1 ( read_fixture("test_references_json", "content_17.xml"), read_fixture("test_references_json", "content_17_expected.py"), ), # reference of type book with no bookTitle, 03069 v2 ( read_fixture("test_references_json", "content_18.xml"), read_fixture("test_references_json", "content_18_expected.py"), ), # reference with unicode in collab tag, also gets turned into unknown type, 18023 v1 ( read_fixture("test_references_json", "content_19.xml"), read_fixture("test_references_json", "content_19_expected.py"), ), # data reference with no source, 16800 v2 ( read_fixture("test_references_json", "content_20.xml"), read_fixture("test_references_json", "content_20_expected.py"), ), # reference with an lpage and not fpage still gets pages value set, 13905 v2 ( read_fixture("test_references_json", "content_21.xml"), read_fixture("test_references_json", "content_21_expected.py"), ), # Reference with a collab using italic tag, from 05423 v2 ( read_fixture("test_references_json", "content_22.xml"), read_fixture("test_references_json", "content_22_expected.py"), ), # Reference with a non-numeric year, 
from 09215 v1 ( read_fixture("test_references_json", "content_23.xml"), read_fixture("test_references_json", "content_23_expected.py"), ), # no year value, 00051, with json rewriting enabled by adding elife XML metadata ( read_fixture("test_references_json", "content_24.xml"), read_fixture("test_references_json", "content_24_expected.py"), ), # elife 12125 v3 bib11 will get deleted in the JSON rewriting ( read_fixture("test_references_json", "content_25.xml"), read_fixture("test_references_json", "content_25_expected.py"), ), # 19532 v2 bib27 has a date-in-citation and no year tag, will get rewritten ( read_fixture("test_references_json", "content_26.xml"), read_fixture("test_references_json", "content_26_expected.py"), ), # 00666 kitchen sink reference of type patent ( read_fixture("test_references_json", "content_27.xml"), read_fixture("test_references_json", "content_27_expected.py"), ), # 20352 v2 reference of type patent, with a rewrite of the country value ( read_fixture("test_references_json", "content_28.xml"), read_fixture("test_references_json", "content_28_expected.py"), ), # 20492 v3, report type reference with no publisher-name gets converted to unknown ( read_fixture("test_references_json", "content_29.xml"), read_fixture("test_references_json", "content_29_expected.py"), ), # 15504 v2, reference with a pmid ( read_fixture("test_references_json", "content_30.xml"), read_fixture("test_references_json", "content_30_expected.py"), ), # 15504 v2, reference with an isbn ( read_fixture("test_references_json", "content_31.xml"), read_fixture("test_references_json", "content_31_expected.py"), ), # 18296 v3, reference of type preprint ( read_fixture("test_references_json", "content_32.xml"), read_fixture("test_references_json", "content_32_expected.py"), ), # 16394 v2, reference of type thesis with no publisher, convert to unknown ( read_fixture("test_references_json", "content_33.xml"), read_fixture("test_references_json", "content_33_expected.py"), ), # 
09672 v2, reference of type conference-proceeding with no conference ( read_fixture("test_references_json", "content_34.xml"), read_fixture("test_references_json", "content_34_expected.py"), ), # 07460 v1, reference rewriting test, rewrites date and authors ( read_fixture("test_references_json", "content_35.xml"), read_fixture("test_references_json", "content_35_expected.py"), ), # 20522 v1, reference rewriting year value in json output ( read_fixture("test_references_json", "content_36.xml"), read_fixture("test_references_json", "content_36_expected.py"), ), # 09520 v2, reference rewriting conference data ( read_fixture("test_references_json", "content_37.xml"), read_fixture("test_references_json", "content_37_expected.py"), ), # from 00666 kitchen sink example, will add a uri to the references json from the doi value ( read_fixture("test_references_json", "content_38.xml"), read_fixture("test_references_json", "content_38_expected.py"), ), # from 00666 kitchen sink example, reference of type periodical ( read_fixture("test_references_json", "content_39.xml"), read_fixture("test_references_json", "content_39_expected.py"), ), # 00666 kitchen sink example with a version tag ( read_fixture("test_references_json", "content_40.xml"), read_fixture("test_references_json", "content_40_expected.py"), ), # 23193 v2 book references has editors and no authors ( read_fixture("test_references_json", "content_41.xml"), read_fixture("test_references_json", "content_41_expected.py"), ), # example of data citation with a pub-id accession, based on article 07836 ( read_fixture("test_references_json", "content_42.xml"), read_fixture("test_references_json", "content_42_expected.py"), ), # example of data citation with a object-id tag accession, gets converted to unknown because of the comment tag, based on article 07048 ( read_fixture("test_references_json", "content_43.xml"), read_fixture("test_references_json", "content_43_expected.py"), ), # example of data citation with a pub-id 
pub-id-type="archive", parse it as an accession number, based on 00666 kitchen sink example ( read_fixture("test_references_json", "content_44.xml"), read_fixture("test_references_json", "content_44_expected.py"), ), # example of ref of type webpage, based on article 10.5334/sta.606, note: does not parse author names ( read_fixture("test_references_json", "content_45.xml"), read_fixture("test_references_json", "content_45_expected.py"), ), # example of ref of type report, with a doi but no uri, uri gets filled in ( read_fixture("test_references_json", "content_46.xml"), read_fixture("test_references_json", "content_46_expected.py"), ), # example of ref of type journal with no pages ( read_fixture("test_references_json", "content_47.xml"), read_fixture("test_references_json", "content_47_expected.py"), ), # example of ref author having a suffix from the elife 00666 kitchen sink XML ( read_fixture("test_references_json", "content_48.xml"), read_fixture("test_references_json", "content_48_expected.py"), ), # example of ref with an elocation-id, no pages, from elife-kitchen-sink.xml ( read_fixture("test_references_json", "content_49.xml"), read_fixture("test_references_json", "content_49_expected.py"), ), # example of ref with a strange year tag value, json_rewrite invoked, from elife-09215-v1.xml ( read_fixture("test_references_json", "content_50.xml"), read_fixture("test_references_json", "content_50_expected.py"), ), # example of thesis ref with a doi, its uri will be populated from the doi ( read_fixture("test_references_json", "content_51.xml"), read_fixture("test_references_json", "content_51_expected.py"), ), ) @unpack def test_references_json_edge_cases(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.references_json(soup) self.assertEqual(expected, tag_content) @unpack @data((None, None, None)) def test_references_publisher(self, publisher_name, publisher_loc, expected): self.assertEqual( 
parser.references_publisher(publisher_name, publisher_loc), expected
        )

    @unpack
    @data(
        (None, 0),
        ("<root><italic></italic></root>", 0),
        ("<root><sec><p>Content</p></sec></root>", 1),
    )
    def test_body_blocks(self, xml_content, expected_len):
        """body_blocks should return the expected number of block tags, tolerating None input"""
        if xml_content:
            soup = parser.parse_xml(xml_content)
            # unwrap twice to reach the tag inside the root element
            body_tag = soup_body(soup_body(soup))
        else:
            body_tag = xml_content
        body_block_tags = parser.body_blocks(body_tag)
        self.assertEqual(len(body_block_tags), expected_len)

    @unpack
    @data(
        # columns: title, label, caption, set_caption, prefer_title, prefer_label,
        # then the expected title, label and caption values
        (None, None, None, None, None, None, None, None, None),
        ("title", None, None, None, None, None, "title", None, None),
        (None, "label", None, None, None, None, None, "label", None),
        ("title", "label", "caption", None, None, None, "title", "label", None),
        ("title", "label", "caption", True, None, None, "title", "label", "caption"),
        (None, "label", None, None, True, None, "label", None, None),
        (None, "label", None, None, True, True, "label", None, None),
        ("title", None, None, None, True, True, None, "title", None),
        ("title", None, None, None, None, True, None, "title", None),
        ("title", "label", None, None, True, None, "title", "label", None),
        ("title.", None, None, None, None, True, None, "title", None),
        (None, "label:", None, None, True, None, "label:", None, None),
    )
    def test_body_block_title_label_caption(
        self,
        title_value,
        label_value,
        caption_content,
        set_caption,
        prefer_title,
        prefer_label,
        expected_title,
        expected_label,
        expected_caption,
    ):
        """check how title, label and caption are set for the various preference flags"""
        tag_content = OrderedDict()
        parser.body_block_title_label_caption(
            tag_content,
            title_value,
            label_value,
            caption_content,
            set_caption,
            prefer_title,
            prefer_label,
        )
        self.assertEqual(tag_content.get("label"), expected_label)
        self.assertEqual(tag_content.get("title"), expected_title)
        self.assertEqual(tag_content.get("caption"), expected_caption)

    @unpack
    @data(
        (
            read_fixture("test_body_block_content", "content_01.xml"),
            read_fixture("test_body_block_content", "content_01_expected.py"),
        ),
        (
            read_fixture("test_body_block_content", "content_02.xml"),
read_fixture("test_body_block_content", "content_02_expected.py"), ), ( read_fixture("test_body_block_content", "content_03.xml"), read_fixture("test_body_block_content", "content_03_expected.py"), ), ( read_fixture("test_body_block_content", "content_04.xml"), read_fixture("test_body_block_content", "content_04_expected.py"), ), ( read_fixture("test_body_block_content", "content_05.xml"), read_fixture("test_body_block_content", "content_05_expected.py"), ), ( read_fixture("test_body_block_content", "content_06.xml"), read_fixture("test_body_block_content", "content_06_expected.py"), ), ( read_fixture("test_body_block_content", "content_07.xml"), read_fixture("test_body_block_content", "content_07_expected.py"), ), ( read_fixture("test_body_block_content", "content_08.xml"), read_fixture("test_body_block_content", "content_08_expected.py"), ), ( read_fixture("test_body_block_content", "content_09.xml"), read_fixture("test_body_block_content", "content_09_expected.py"), ), ( read_fixture("test_body_block_content", "content_10.xml"), read_fixture("test_body_block_content", "content_10_expected.py"), ), ( read_fixture("test_body_block_content", "content_11.xml"), read_fixture("test_body_block_content", "content_11_expected.py"), ), ( read_fixture("test_body_block_content", "content_12.xml"), read_fixture("test_body_block_content", "content_12_expected.py"), ), ( read_fixture("test_body_block_content", "content_13.xml"), read_fixture("test_body_block_content", "content_13_expected.py"), ), ( read_fixture("test_body_block_content", "content_14.xml"), read_fixture("test_body_block_content", "content_14_expected.py"), ), ( read_fixture("test_body_block_content", "content_15.xml"), read_fixture("test_body_block_content", "content_15_expected.py"), ), # example of copyright statements ending in a full stop, based on article 27041 ( read_fixture("test_body_block_content", "content_16.xml"), read_fixture("test_body_block_content", "content_16_expected.py"), ), # example of 
video with attributions, based on article 17243 ( read_fixture("test_body_block_content", "content_17.xml"), read_fixture("test_body_block_content", "content_17_expected.py"), ), ( read_fixture("test_body_block_content", "content_18.xml"), read_fixture("test_body_block_content", "content_18_expected.py"), ), ( read_fixture("test_body_block_content", "content_19.xml"), read_fixture("test_body_block_content", "content_19_expected.py"), ), ( read_fixture("test_body_block_content", "content_20.xml"), read_fixture("test_body_block_content", "content_20_expected.py"), ), ( read_fixture("test_body_block_content", "content_21.xml"), read_fixture("test_body_block_content", "content_21_expected.py"), ), ( read_fixture("test_body_block_content", "content_22.xml"), read_fixture("test_body_block_content", "content_22_expected.py"), ), ( read_fixture("test_body_block_content", "content_23.xml"), read_fixture("test_body_block_content", "content_23_expected.py"), ), ( read_fixture("test_body_block_content", "content_24.xml"), read_fixture("test_body_block_content", "content_24_expected.py"), ), # media tag that is not a video ( read_fixture("test_body_block_content", "content_25.xml"), read_fixture("test_body_block_content", "content_25_expected.py"), ), ( read_fixture("test_body_block_content", "content_26.xml"), read_fixture("test_body_block_content", "content_26_expected.py"), ), ( read_fixture("test_body_block_content", "content_27.xml"), read_fixture("test_body_block_content", "content_27_expected.py"), ), ( read_fixture("test_body_block_content", "content_28.xml"), read_fixture("test_body_block_content", "content_28_expected.py"), ), # disp-quote content-type="editor-comment" is turned into an excerpt block ( read_fixture("test_body_block_content", "content_29.xml"), read_fixture("test_body_block_content", "content_29_expected.py"), ), # 00109 v1, figure with a supplementary file that has multiple caption paragraphs ( read_fixture("test_body_block_content", 
"content_30.xml"), read_fixture("test_body_block_content", "content_30_expected.py"), ), # code block, based on elife 20352 v2, contains new lines too ( read_fixture("test_body_block_content", "content_31.xml"), read_fixture("test_body_block_content", "content_31_expected.py"), ), # example of a table with a break tag, based on 7141 v1 ( read_fixture("test_body_block_content", "content_32.xml"), read_fixture("test_body_block_content", "content_32_expected.py"), ), # example of a figure, based on 00007 v1 ( read_fixture("test_body_block_content", "content_33.xml"), read_fixture("test_body_block_content", "content_33_expected.py"), ), # example table with a caption and no title needs a title added, based on 05604 v1 ( read_fixture("test_body_block_content", "content_34.xml"), read_fixture("test_body_block_content", "content_34_expected.py"), ), # example video with only the DOI in the caption paragraph, based on 02277 v1 ( read_fixture("test_body_block_content", "content_35.xml"), read_fixture("test_body_block_content", "content_35_expected.py"), ), # example animated gif as a video in 00666 kitchen sink ( read_fixture("test_body_block_content", "content_36.xml"), read_fixture("test_body_block_content", "content_36_expected.py"), ), # example of named-content to be converted to HTML from new kitchen sink 00666 ( read_fixture("test_body_block_content", "content_37.xml"), read_fixture("test_body_block_content", "content_37_expected.py"), ), # example of table author-callout-style styles to replace as a class attribute, based on 24231 v1 ( read_fixture("test_body_block_content", "content_38.xml"), read_fixture("test_body_block_content", "content_38_expected.py"), ), # example inline table adapted from 00666 kitchen sink ( read_fixture("test_body_block_content", "content_39.xml"), read_fixture("test_body_block_content", "content_39_expected.py"), ), # example key resources inline table that has a label will be turned into a figure block ( 
read_fixture("test_body_block_content", "content_40.xml"), read_fixture("test_body_block_content", "content_40_expected.py"), ), # test for stripping out comment tag content when it is inside a paragraph tag ( read_fixture("test_body_block_content", "content_41.xml"), read_fixture("test_body_block_content", "content_41_expected.py"), ), # test ignoring nested fig title as a box-text title ( read_fixture("test_body_block_content", "content_42.xml"), read_fixture("test_body_block_content", "content_42_expected.py"), ), ) def test_body_block_content(self, xml_content, expected): soup = parser.parse_xml(xml_content) # find the first tag in the root with a name for child in soup.root.children: if child.name: body_tag = child break tag_content = parser.body_block_content(body_tag) self.assertEqual(expected, tag_content) @unpack @data( ( read_fixture("test_body_block_content_render", "content_01.xml"), read_fixture("test_body_block_content_render", "content_01_expected.py"), ), ( read_fixture("test_body_block_content_render", "content_02.xml"), read_fixture("test_body_block_content_render", "content_02_expected.py"), ), ( read_fixture("test_body_block_content_render", "content_03.xml"), read_fixture("test_body_block_content_render", "content_03_expected.py"), ), ( read_fixture("test_body_block_content_render", "content_04.xml"), read_fixture("test_body_block_content_render", "content_04_expected.py"), ), ( read_fixture("test_body_block_content_render", "content_05.xml"), read_fixture("test_body_block_content_render", "content_05_expected.py"), ), ( read_fixture("test_body_block_content_render", "content_06.xml"), read_fixture("test_body_block_content_render", "content_06_expected.py"), ), ( read_fixture("test_body_block_content_render", "content_07.xml"), read_fixture("test_body_block_content_render", "content_07_expected.py"), ), ( read_fixture("test_body_block_content_render", "content_08.xml"), read_fixture("test_body_block_content_render", "content_08_expected.py"), 
), ( read_fixture("test_body_block_content_render", "content_09.xml"), read_fixture("test_body_block_content_render", "content_09_expected.py"), ), ( read_fixture("test_body_block_content_render", "content_10.xml"), read_fixture("test_body_block_content_render", "content_10_expected.py"), ), ( read_fixture("test_body_block_content_render", "content_11.xml"), read_fixture("test_body_block_content_render", "content_11_expected.py"), ), ( read_fixture("test_body_block_content_render", "content_12.xml"), read_fixture("test_body_block_content_render", "content_12_expected.py"), ), ( read_fixture("test_body_block_content_render", "content_13.xml"), read_fixture("test_body_block_content_render", "content_13_expected.py"), ), # disp-quote content-type="editor-comment" is turned into an excerpt block ( read_fixture("test_body_block_content_render", "content_14.xml"), read_fixture("test_body_block_content_render", "content_14_expected.py"), ), # Boxed text with no title tag uses the first sentence of the caption paragraph, 00288 v1 ( read_fixture("test_body_block_content_render", "content_15.xml"), read_fixture("test_body_block_content_render", "content_15_expected.py"), ), # Example of boxed-text with content inside its caption tag ( read_fixture("test_body_block_content_render", "content_16.xml"), read_fixture("test_body_block_content_render", "content_16_expected.py"), ), # code block, based on elife 20352 v2, contains new lines too ( read_fixture("test_body_block_content_render", "content_17.xml"), read_fixture("test_body_block_content_render", "content_17_expected.py"), ), # Example of monospace tags ( read_fixture("test_body_block_content_render", "content_18.xml"), read_fixture("test_body_block_content_render", "content_18_expected.py"), ), # example of a table to not pickup a child element title, based on 22264 v2 ( read_fixture("test_body_block_content_render", "content_19.xml"), read_fixture("test_body_block_content_render", "content_19_expected.py"), ), # example 
table: a label, no title, no caption ( read_fixture("test_body_block_content_render", "content_20.xml"), read_fixture("test_body_block_content_render", "content_20_expected.py"), ), # example table: a label, a title, no caption ( read_fixture("test_body_block_content_render", "content_21.xml"), read_fixture("test_body_block_content_render", "content_21_expected.py"), ), # example table: a label, no title, a caption ( read_fixture("test_body_block_content_render", "content_22.xml"), read_fixture("test_body_block_content_render", "content_22_expected.py"), ), # example table: a label, a title, and a caption ( read_fixture("test_body_block_content_render", "content_23.xml"), read_fixture("test_body_block_content_render", "content_23_expected.py"), ), # example table: no label, no title, and a caption ( read_fixture("test_body_block_content_render", "content_24.xml"), read_fixture("test_body_block_content_render", "content_24_expected.py"), ), # example fig with a caption and no title, based on 00281 v1 ( read_fixture("test_body_block_content_render", "content_25.xml"), read_fixture("test_body_block_content_render", "content_25_expected.py"), ), # example media with a label and no title, based on 00007 v1 ( read_fixture("test_body_block_content_render", "content_26.xml"), read_fixture("test_body_block_content_render", "content_26_expected.py"), ), # example test from 02935 v2 of list within a list to not add child list-item to the parent list twice ( read_fixture("test_body_block_content_render", "content_27.xml"), read_fixture("test_body_block_content_render", "content_27_expected.py"), ), # example list from 00666 kitchen sink with paragraphs and list inside a list-item ( read_fixture("test_body_block_content_render", "content_28.xml"), read_fixture("test_body_block_content_render", "content_28_expected.py"), ), # example of a video inside a fig group based on 00666 kitchen sink ( read_fixture("test_body_block_content_render", "content_29.xml"), 
read_fixture("test_body_block_content_render", "content_29_expected.py"), ), # example of a video as supplementary material inside a fig-group based on 06726 v2 ( read_fixture("test_body_block_content_render", "content_30.xml"), read_fixture("test_body_block_content_render", "content_30_expected.py"), ), # example of fig supplementary-material with only a title tag, based on elife-26759-v1.xml except # this example is fixed so the caption tag wraps the entire caption, and values are abbreviated # to test how the punctuation is stripped from the end of the supplementary-material title value # when it is converted to a label ( read_fixture("test_body_block_content_render", "content_31.xml"), read_fixture("test_body_block_content_render", "content_31_expected.py"), ), # example of disp-formula inside a disp-quote based on 55588 ( read_fixture("test_body_block_content_render", "content_32.xml"), read_fixture("test_body_block_content_render", "content_32_expected.py"), ), ) def test_body_block_content_render(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.body_block_content_render(soup_body(soup)) self.assertEqual(expected, tag_content) @unpack @data( ( read_fixture("test_render_raw_body", "content_01.xml"), read_fixture("test_render_raw_body", "content_01_expected.py"), ), ( read_fixture("test_render_raw_body", "content_02.xml"), read_fixture("test_render_raw_body", "content_02_expected.py"), ), ( read_fixture("test_render_raw_body", "content_03.xml"), read_fixture("test_render_raw_body", "content_03_expected.py"), ), # Below when there is a space between paragraph tags, it should not render as a paragraph ( read_fixture("test_render_raw_body", "content_04.xml"), read_fixture("test_render_raw_body", "content_04_expected.py"), ), ( read_fixture("test_render_raw_body", "content_05.xml"), read_fixture("test_render_raw_body", "content_05_expected.py"), ), ( read_fixture("test_render_raw_body", "content_06.xml"), 
read_fixture("test_render_raw_body", "content_06_expected.py"), ), ( read_fixture("test_render_raw_body", "content_07.xml"), read_fixture("test_render_raw_body", "content_07_expected.py"), ), ( read_fixture("test_render_raw_body", "content_08.xml"), read_fixture("test_render_raw_body", "content_08_expected.py"), ), ( read_fixture("test_render_raw_body", "content_09.xml"), read_fixture("test_render_raw_body", "content_09_expected.py"), ), # excerpt from 00646 v1 with a boxed-text inline-graphic ( read_fixture("test_render_raw_body", "content_10.xml"), read_fixture("test_render_raw_body", "content_10_expected.py"), ), ) def test_render_raw_body(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.render_raw_body(soup_body(soup)) self.assertEqual(expected, tag_content) @unpack @data( ( read_fixture("test_abstract_json", "content_01.xml"), read_fixture("test_abstract_json", "content_01_expected.py"), ), # executive-summary will return None ( read_fixture("test_abstract_json", "content_02.xml"), read_fixture("test_abstract_json", "content_02_expected.py"), ), # test lots of inline tagging ( read_fixture("test_abstract_json", "content_03.xml"), read_fixture("test_abstract_json", "content_03_expected.py"), ), # structured abstract example based on BMJ Open bmjopen-4-e003269.xml ( read_fixture("test_abstract_json", "content_04.xml"), read_fixture("test_abstract_json", "content_04_expected.py"), ), # structured abstract elife example ( read_fixture("test_abstract_json", "content_05.xml"), read_fixture("test_abstract_json", "content_05_expected.py"), ), ) def test_abstract_json(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.abstract_json(soup_body(soup)) self.assertEqual(expected, tag_content) @unpack @data( ( read_fixture("test_digest_json", "content_01.xml"), read_fixture("test_digest_json", "content_01_expected.py"), ), ) def test_digest_json(self, xml_content, expected): soup = 
parser.parse_xml(xml_content) tag_content = parser.digest_json(soup_body(soup)) self.assertEqual(expected, tag_content) """ Unit test small or special cases """ @unpack @data( # snippet of XML from elife-kitchen-sink.xml (read_fixture("", "article_dates.xml"), "pub", ("28", "02", "2014")), ) def test_ymd(self, xml_content, test_date_type, expected): soup = parser.parse_xml(xml_content) date_tag = raw_parser.pub_date(soup, date_type=test_date_type)[0] self.assertEqual(expected, parser.ymd(date_tag)) @unpack @data( # snippet of XML from elife-kitchen-sink.xml (read_fixture("", "article_dates.xml"), "received", date_struct(2012, 6, 22)), (read_fixture("", "article_dates.xml"), None, None), (read_fixture("", "article_dates.xml"), "not_a_date_type", None), ) def test_history_date(self, xml_content, date_type, expected): soup = parser.parse_xml(xml_content) self.assertEqual(expected, parser.history_date(soup, date_type)) """ Functions that require more than one argument to test against json output """ @unpack @data( # typical eLife format ( read_fixture("test_journal_issn", "content_01.xml"), "electronic", None, "2050-084X", ), # eLife format with specifying the publication format (read_fixture("test_journal_issn", "content_02.xml"), None, None, "2050-084X"), # a non-eLife format ( read_fixture("test_journal_issn", "content_03.xml"), None, "epub", "2057-4991", ), ) def test_journal_issn(self, xml_content, pub_format, pub_type, expected): soup = parser.parse_xml(xml_content) tag_content = parser.journal_issn( soup_body(soup), pub_format=pub_format, pub_type=pub_type ) self.assertEqual(expected, tag_content) @unpack @data( ( "<article/>", None, ), ( read_fixture("test_author_contributions", "content_01.xml"), read_fixture("test_author_contributions", "content_01_expected.py"), ), ) def test_author_contributions(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.author_contributions(soup, "con") self.assertEqual(expected, tag_content) 
@unpack @data( ( # snippet from elife-kitchen-sink.xml read_fixture("test_competing_interests", "content_01.xml"), read_fixture("test_competing_interests", "content_01_expected.py"), ), ( # snippet from elife00190.xml read_fixture("test_competing_interests", "content_02.xml"), read_fixture("test_competing_interests", "content_02_expected.py"), ), ( # snippet from elife-00666.xml read_fixture("test_competing_interests", "content_03.xml"), read_fixture("test_competing_interests", "content_03_expected.py"), ), ) def test_competing_interests(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.competing_interests(soup, ["conflict", "COI-statement"]) self.assertEqual(expected, tag_content) @unpack @data( # example with no author notes ( "<article/>", None, ), # example from elife-kitchen-sink.xml ( read_fixture("", "article_author_notes.xml"), read_fixture("test_full_author_notes", "content_01_expected.py"), ), ) def test_full_author_notes(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.full_author_notes(soup) self.assertEqual(expected, tag_content) """ Functions that only need soup to test them against json output """ @unpack @data( # example with no abstracts, such as a correction article ( "<article/>", [], ), # example from elife-kitchen-sink.xml ( read_sample_xml("elife-kitchen-sink.xml"), read_fixture("test_abstracts", "content_01_expected.py"), ), # example from elife00013.xml ( read_sample_xml("elife00013.xml"), read_fixture("test_abstracts", "content_02_expected.py"), ), # example from elife_poa_e06828.xml ( read_sample_xml("elife_poa_e06828.xml"), read_fixture("test_abstracts", "content_03_expected.py"), ), ) def test_abstracts(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.abstracts(soup) self.assertEqual(expected, tag_content) @unpack @data( # example based on eLife format ( read_fixture("test_abstract", "content_01.xml"), 
read_fixture("test_abstract", "content_01_expected.py"), ), # example based on BMJ Open bmjopen-4-e003269.xml ( read_fixture("test_abstract", "content_02.xml"), read_fixture("test_abstract", "content_02_expected.py"), ), # example with no abstract, such as a correction article ( "<article/>", None, ), ) def test_abstract_edge_cases(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.abstract(soup) self.assertEqual(expected, tag_content) @unpack @data( ( # very basic abstract read_fixture("test_abstract_xml", "content_01.xml"), read_fixture("test_abstract_xml", "content_01_expected.py"), ), ( # abstract tag with id attribute and mathml tags read_fixture("test_abstract_xml", "content_02.xml"), read_fixture("test_abstract_xml", "content_02_expected.py"), ), ( # structured abstract example read_fixture("test_abstract_xml", "content_03.xml"), read_fixture("test_abstract_xml", "content_03_expected.py"), ), ( # no abstract tag read_fixture("test_abstract_xml", "content_04.xml"), read_fixture("test_abstract_xml", "content_04_expected.py"), ), ) def test_abstract_xml(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.abstract_xml(soup_body(soup)) self.assertEqual(expected, tag_content) @unpack @data( # snippet of XML from elife-kitchen-sink.xml ( read_fixture("", "article_dates.xml"), "July 18, 2012", ), ) @data("elife-kitchen-sink.xml") def test_accepted_date_date(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.accepted_date_date(soup) self.assertEqual(expected, tag_content) @unpack @data( # snippet of XML from elife-kitchen-sink.xml ( read_fixture("", "article_dates.xml"), 18, ), ) @data("elife-kitchen-sink.xml") def test_accepted_date_day(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.accepted_date_day(soup) self.assertEqual(expected, tag_content) @unpack @data( # snippet of XML from elife-kitchen-sink.xml ( 
read_fixture("", "article_dates.xml"), 7, ), ) @data("elife-kitchen-sink.xml") def test_accepted_date_month(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.accepted_date_month(soup) self.assertEqual(expected, tag_content) @unpack @data( # snippet of XML from elife-kitchen-sink.xml ( read_fixture("", "article_dates.xml"), 1342569600, ), ) @data("elife-kitchen-sink.xml") def test_accepted_date_timestamp(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.accepted_date_timestamp(soup) self.assertEqual(expected, tag_content) @unpack @data( # snippet of XML from elife-kitchen-sink.xml ( read_fixture("", "article_dates.xml"), 2012, ), ) @data("elife-kitchen-sink.xml") def test_accepted_date_year(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.accepted_date_year(soup) self.assertEqual(expected, tag_content) @unpack @data( # example with no data ( "<article/>", None, ), # example from elife-kitchen-sink.xml ( read_sample_xml("elife-kitchen-sink.xml"), """Acknowledgements We thank Michael Fischbach, Richard Losick, and Russell Vance for critical reading of the manuscript. NK is a Fellow in the Integrated Microbial Biodiversity Program of the Canadian Institute for Advanced Research.""", ), ) def test_ack(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.ack(soup) self.assertEqual(expected, tag_content) @unpack @data( # example with no data ( "<article/>", None, ), # example from elife-kitchen-sink.xml ( read_sample_xml("elife-kitchen-sink.xml"), """Acknowledgements We thank Michael Fischbach, Richard Losick, and Russell Vance for critical reading of the manuscript. 
NK is a Fellow in the Integrated Microbial Biodiversity Program of the Canadian Institute for Advanced Research.""", ), ) def test_acknowledgements(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.acknowledgements(soup) self.assertEqual(expected, tag_content) @unpack @data( ( ( '<article xmlns:mml="http://www.w3.org/1998/Math/MathML" ' 'xmlns:xlink="http://www.w3.org/1999/xlink" ' 'article-type="research-article" dtd-version="1.1d3">' ), "research-article", ), ) def test_article_type(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.article_type(soup) self.assertEqual(expected, tag_content) @unpack @data( # example with no author notes ( "<article/>", None, ), # example from elife-kitchen-sink.xml ( read_fixture("", "article_author_notes.xml"), [ "\n†\nThese authors contributed equally to this work\n", "\n‡\nThese authors also contributed equally to this work\n", "\n**\nDeceased\n", ], ), ) def test_author_notes(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.author_notes(soup) self.assertEqual(expected, tag_content) @unpack @data( ( sample_xml("elife-kitchen-sink.xml"), read_fixture("test_authors", "content_01_expected.py"), ), ( sample_xml("elife00013.xml"), read_fixture("test_authors", "content_02_expected.py"), ), ( sample_xml("elife_poa_e06828.xml"), read_fixture("test_authors", "content_03_expected.py"), ), ( sample_xml("elife02935.xml"), read_fixture("test_authors", "content_04_expected.py"), ), ( sample_xml("elife00270.xml"), read_fixture("test_authors", "content_05_expected.py"), ), ( sample_xml("elife00351.xml"), read_fixture("test_authors", "content_06_expected.py"), ), ( sample_xml("elife-00666.xml"), read_fixture("test_authors", "content_07_expected.py"), ), ) def test_authors(self, filename, expected): soup = parser.parse_document(filename) tag_content = parser.authors(soup) self.assertEqual(expected, tag_content) @unpack @data( ( 
read_sample_xml("elife-kitchen-sink.xml"), read_fixture("test_authors_non_byline", "content_01_expected.py"), ), ( read_sample_xml("elife-00666.xml"), read_fixture("test_authors_non_byline", "content_02_expected.py"), ), ) def test_authors_non_byline(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.authors_non_byline(soup) self.assertEqual(expected, tag_content) @unpack @data( # 07383 v1 has a institution in the principal award recipient ( read_fixture("test_award_groups", "content_01.xml"), read_fixture("test_award_groups", "content_01_expected.py"), ), # example from elife-kitchen-sink.xml ( read_fixture("test_award_groups", "content_02.xml"), read_fixture("test_award_groups", "content_02_expected.py"), ), # example from elife-09215-v1.xml ( read_fixture("test_award_groups", "content_03.xml"), read_fixture("test_award_groups", "content_03_expected.py"), ), # example from elife00013.xml ( read_fixture("test_award_groups", "content_04.xml"), read_fixture("test_award_groups", "content_04_expected.py"), ), # example from elife-00666.xml ( read_fixture("test_award_groups", "content_05.xml"), read_fixture("test_award_groups", "content_05_expected.py"), ), ) def test_award_groups(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.award_groups(soup_body(soup)) self.assertEqual(expected, tag_content) @unpack @data( ( "<article/>", [], ), ( read_fixture("", "article_meta.xml"), ["Cell biology", "Computational and systems biology"], ), ) def test_category(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.category(soup) self.assertEqual(expected, tag_content) @unpack @data( # snippet of XML from elife-kitchen-sink.xml ( read_fixture("", "article_dates.xml"), 2014, ), # poa XML has no collection date ( "<article/>", None, ), ) def test_collection_year(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.collection_year(soup) 
self.assertEqual(expected, tag_content) @unpack @data( ("<root></root>", None), ( """ <root> <pub-date pub-type="collection"> <year>2016</year> </pub-date> </root>""", 2016, ), ( """ <root> <pub-date date-type="collection"> <year>2016</year> </pub-date> </root>""", 2016, ), ) def test_collection_year_edge_cases(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.collection_year(soup_body(soup)) self.assertEqual(expected, tag_content) @unpack @data( ( "<article/>", [], ), ( read_sample_xml("elife-kitchen-sink.xml"), read_fixture("test_component_doi", "content_01_expected.py"), ), ) def test_component_doi(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.component_doi(soup) self.assertEqual(expected, tag_content) @unpack @data( ( sample_xml("elife-kitchen-sink.xml"), read_fixture("test_components", "content_01_expected.py"), ), ( sample_xml("elife02304.xml"), read_fixture("test_components", "content_02_expected.py"), ), ( sample_xml("elife05502.xml"), read_fixture("test_components", "content_03_expected.py"), ), ( sample_xml("elife04490.xml"), read_fixture("test_components", "content_04_expected.py"), ), ( sample_xml("elife-14093-v1.xml"), read_fixture("test_components", "content_05_expected.py"), ), ( sample_xml("elife-00666.xml"), read_fixture("test_components", "content_06_expected.py"), ), ) def test_components(self, filename, expected): soup = parser.parse_document(filename) tag_content = parser.components(soup) self.assertEqual(expected, tag_content) @unpack @data( ( "<article/>", None, ), # example from elife-kitchen-sink.xml ( read_fixture("test_conflict", "content_01.xml"), read_fixture("test_conflict", "content_01_expected.py"), ), ) def test_conflict(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.conflict(soup) self.assertEqual(expected, tag_content) @unpack @data( # example from elife-kitchen-sink.xml ( 
read_sample_xml("elife-kitchen-sink.xml"), read_fixture("test_contributors", "content_01_expected.py"), ), # example from elife-02833-v2.xml ( read_sample_xml("elife-02833-v2.xml"), read_fixture("test_contributors", "content_02_expected.py"), ), # example from elife-00666.xml ( read_sample_xml("elife-00666.xml"), read_fixture("test_contributors", "content_03_expected.py"), ), ) def test_contributors(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.contributors(soup) self.assertEqual(expected, tag_content) @data( # edge case, no permissions tag ("<root><article></article></root>", None), # example from elife-kitchen-sink.xml ( read_fixture("", "article_permissions.xml"), "Alegado et al", ), ) @unpack def test_copyright_holder(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.copyright_holder(soup) self.assertEqual(expected, tag_content) @data( # edge case, no permissions tag ("<root><article></article></root>", None), # example from elife-kitchen-sink.xml ( read_fixture("", "article_permissions.xml"), "Alegado et al.", ), # example from elife00240.xml ( read_fixture("test_copyright_holder_json", "content_01.xml"), "Pickett", ), # example from elife09853.xml ( read_fixture("test_copyright_holder_json", "content_02.xml"), "Becker and Gitler", ), # example from elife02935.xml which is CC0 license ( read_fixture("test_copyright_holder_json", "content_03.xml"), None, ), ) @unpack def test_copyright_holder_json(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.copyright_holder_json(soup) self.assertEqual(expected, tag_content) @data( # edge case, no permissions tag ("<root><article></article></root>", None), # example from elife-kitchen-sink.xml ( read_fixture("", "article_permissions.xml"), "© 2012, Alegado et al", ), ) @unpack def test_copyright_statement(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = 
parser.copyright_statement(soup) self.assertEqual(expected, tag_content) @data( # edge case, no permissions tag ("<root><article></article></root>", None), # example from elife-kitchen-sink.xml ( read_fixture("", "article_permissions.xml"), 2012, ), ) @unpack def test_copyright_year_edge_cases(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.copyright_year(soup) self.assertEqual(expected, tag_content) @unpack @data( # example with no author notes ( "<article/>", [], ), # example from elife-kitchen-sink.xml ( read_fixture("", "article_author_notes.xml"), [ "*For correspondence: [email protected](JC);", "[email protected](NK);", "[email protected](MH)", ], ), ) def test_correspondence(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.correspondence(soup) self.assertEqual(expected, tag_content) @unpack @data( # example with no abstracts, such as a correction article ( "<article/>", None, ), # example from elife-kitchen-sink.xml ( read_sample_xml("elife-kitchen-sink.xml"), read_fixture("test_digest", "content_01_expected.py"), ), # example from elife_poa_e06828.xml (read_sample_xml("elife_poa_e06828.xml"), None), ) def test_digest(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.digest(soup) self.assertEqual(expected, tag_content) @unpack @data( ( "<article/>", [], ), ( read_fixture("", "article_meta.xml"), ["Research article"], ), ) def test_display_channel(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.display_channel(soup) self.assertEqual(expected, tag_content) @unpack @data( # example from elife-kitchen-sink.xml (read_fixture("", "article_meta.xml"), "10.7554/eLife.00013"), ) @data("elife-kitchen-sink.xml") def test_doi(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.doi(soup) self.assertEqual(expected, tag_content) @unpack @data( # example from elife-kitchen-sink.xml 
(read_fixture("", "article_meta.xml"), "e00013"), ) @data("elife-kitchen-sink.xml") def test_elocation_id(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.elocation_id(soup) self.assertEqual(expected, tag_content) @unpack @data( # example with no abstracts, such as a correction article ( "<article/>", None, ), # example from elife-kitchen-sink.xml ( read_sample_xml("elife-kitchen-sink.xml"), read_fixture("test_full_abstract", "content_01_expected.py"), ), # example from elife00013.xml ( read_sample_xml("elife00013.xml"), read_fixture("test_full_abstract", "content_02_expected.py"), ), # example from elife_poa_e06828.xml ( read_sample_xml("elife_poa_e06828.xml"), read_fixture("test_full_abstract", "content_03_expected.py"), ), ) def test_full_abstract(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.full_abstract(soup) self.assertEqual(expected, tag_content) @unpack @data( # example with no affs ( "<article/>", [], ), # example from elife-kitchen-sink.xml ( read_sample_xml("elife-kitchen-sink.xml"), read_fixture("test_full_affiliation", "content_01_expected.py"), ), ) def test_full_affiliation(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.full_affiliation(soup) self.assertEqual(expected, tag_content) @data( # elife-kitchen-sink.xml example ( read_sample_xml("elife-kitchen-sink.xml"), read_fixture( "test_full_award_group_funding_source", "content_01_expected.py" ), ), ) @unpack def test_full_award_group_funding_source(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.full_award_group_funding_source(soup) self.assertEqual(expected, tag_content) @data( # edge case, no id attribute on award-group tag, and id will be generated, based on 10.1098/rsob.150230 ( read_fixture("test_full_award_groups", "content_01.xml"), read_fixture("test_full_award_groups", "content_01_expected.py"), ), # elife-kitchen-sink.xml example 
( read_fixture("test_full_award_groups", "content_02.xml"), read_fixture("test_full_award_groups", "content_02_expected.py"), ), ) @unpack def test_full_award_groups(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.full_award_groups(soup_body(soup)) self.assertEqual(expected, tag_content) @data( # edge case, no id attribute on corresp tag, will be empty but not cause an error, based on 10.2196/resprot.3838 ( "<article><author-notes><corresp>Corresponding Author: Elisa J Gordon<email>[email protected]</email></corresp></author-notes></article>", {}, ), # example with no author notes ( "<article/>", {}, ), # example from elife-kitchen-sink.xml ( read_fixture("", "article_author_notes.xml"), { "cor1": ["[email protected]"], "cor2": ["[email protected]"], "cor3": ["[email protected]"], }, ), # example elife-02833-v2.xml ( read_sample_xml("elife-02833-v2.xml"), { "cor1": ["[email protected]"], "cor2": ["(+1) 617-432-1906"], }, ), ) @unpack def test_full_correspondence(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.full_correspondence(soup) self.assertEqual(expected, tag_content) @unpack @data( # example with no abstracts, such as a correction article ( "<article/>", None, ), # example from elife-kitchen-sink.xml ( read_sample_xml("elife-kitchen-sink.xml"), read_fixture("test_full_digest", "content_01_expected.py"), ), # example from elife_poa_e06828.xml (read_sample_xml("elife_poa_e06828.xml"), None), ) def test_full_digest(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.full_digest(soup) self.assertEqual(expected, tag_content) @unpack @data( # example from elife-kitchen-sink.xml ( read_fixture("test_full_funding_statement", "content_01.xml"), read_fixture("test_full_funding_statement", "content_01_expected.py"), ), ) def test_full_funding_statement(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = 
parser.full_funding_statement(soup) self.assertEqual(expected, tag_content) @unpack @data( ( "<article/>", {}, ), ( read_fixture("", "article_meta.xml"), { "author-keywords": [ "<italic>Salpingoeca rosetta</italic>", "Algoriphagus", "bacterial sulfonolipid", "multicellular development", ], "research-organism": ["Mouse", "<italic>C. elegans</italic>", "Other"], }, ), ( read_sample_xml("elife_poa_e06828.xml"), { "author-keywords": [ "neurotrophins", "RET signaling", "DRG neuron development", "cis and trans activation", ], "research-organism": ["Mouse"], }, ), ) def test_full_keyword_groups(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.full_keyword_groups(soup) self.assertEqual(expected, tag_content) @unpack @data( ( "<article/>", [], ), ( read_fixture("", "article_meta.xml"), [ "<italic>Salpingoeca rosetta</italic>", "Algoriphagus", "bacterial sulfonolipid", "multicellular development", ], ), ( read_sample_xml("elife_poa_e06828.xml"), [ "neurotrophins", "RET signaling", "DRG neuron development", "cis and trans activation", ], ), ) def test_full_keywords(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.full_keywords(soup) self.assertEqual(expected, tag_content) @data( # edge case, no permissions tag ("<root><article></article></root>", None), # example from elife-kitchen-sink.xml ( read_fixture("", "article_permissions.xml"), ( "This article is distributed under the terms of the " '<ext-link ext-link-type="uri" ' 'xlink:href="http://creativecommons.org/licenses/by/4.0/">' "Creative Commons Attribution License</ext-link>, which permits " "unrestricted use and redistribution provided that the original " "author and source are credited." 
), ), ) @unpack def test_full_license(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.full_license(soup_body(soup)) self.assertEqual(expected, tag_content) @unpack @data( ( "<article/>", [], ), ( read_fixture("", "article_meta.xml"), ["Mouse", "<italic>C. elegans</italic>", "Other"], ), ) def test_full_research_organism(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.full_research_organism(soup) self.assertEqual(expected, tag_content) @unpack @data( ( "<article/>", {}, ), ( read_fixture("", "article_meta.xml"), { "display-channel": ["Research article"], "heading": ["Cell biology", "Computational and systems biology"], }, ), ) def test_full_subject_area(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.full_subject_area(soup) self.assertEqual(expected, tag_content) @unpack @data( # example from elife-kitchen-sink.xml ( read_fixture("", "article_meta.xml"), ( "Bacterial regulation of colony development in the closest " "living relatives of animals" ), ), ( read_sample_xml("elife_poa_e06828.xml"), ( "<italic>Cis</italic> and <italic>trans</italic> RET signaling control the " "survival and central projection growth of rapidly adapting mechanoreceptors" ), ), ) @data("elife-kitchen-sink.xml") def test_full_title(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.full_title(soup) self.assertEqual(expected, tag_content) @unpack @data( # example from elife-kitchen-sink.xml ( read_fixture("test_funding_statement", "content_01.xml"), read_fixture("test_funding_statement", "content_01_expected.py"), ), ) def test_funding_statement(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.funding_statement(soup) self.assertEqual(expected, tag_content) @unpack @data( ( sample_xml("elife-kitchen-sink.xml"), read_fixture("test_graphics", "content_01_expected.py"), ), ( sample_xml("elife00013.xml"), 
read_fixture("test_graphics", "content_02_expected.py"), ), ( sample_xml("elife00240.xml"), read_fixture("test_graphics", "content_03_expected.py"), ), ( sample_xml("elife04953.xml"), read_fixture("test_graphics", "content_04_expected.py"), ), ( sample_xml("elife00133.xml"), read_fixture("test_graphics", "content_05_expected.py"), ), ) def test_graphics(self, filename, expected): soup = parser.parse_document(filename) tag_content = parser.graphics(soup) self.assertEqual(expected, tag_content) @unpack @data( ( "<article/>", "", ), ( read_sample_xml("elife-kitchen-sink.xml"), "The chemical nature of RIF-1 may reveal a new class of bacterial signaling molecules.", ), ( read_sample_xml("elife_poa_e06828.xml"), "", ), ) def test_impact_statement(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.impact_statement(soup) self.assertEqual(expected, tag_content) @unpack @data( ( sample_xml("elife-kitchen-sink.xml"), read_fixture("test_inline_graphics", "content_01_expected.py"), ), ( sample_xml("elife00240.xml"), read_fixture("test_inline_graphics", "content_02_expected.py"), ), ) def test_inline_graphics(self, filename, expected): soup = parser.parse_document(filename) tag_content = parser.inline_graphics(soup) self.assertEqual(expected, tag_content) @unpack @data( ( "<article/>", True, ), ( read_fixture("", "article_meta.xml"), False, ), ( read_sample_xml("elife_poa_e06828.xml"), True, ), ) def test_is_poa(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.is_poa(soup) self.assertEqual(expected, tag_content) @unpack @data( # example from elife-kitchen-sink.xml ( read_fixture("", "article_meta.xml"), "eLife", ), ) def test_journal_id(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.journal_id(soup) self.assertEqual(expected, tag_content) @unpack @data( # example from elife-kitchen-sink.xml ( read_fixture("", "article_meta.xml"), "eLife", ), ) def 
test_journal_title(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.journal_title(soup) self.assertEqual(expected, tag_content) @unpack @data( ( "<article/>", [], ), ( read_fixture("", "article_meta.xml"), [ "Salpingoeca rosetta", "Algoriphagus", "bacterial sulfonolipid", "multicellular development", ], ), ( read_sample_xml("elife_poa_e06828.xml"), [ "neurotrophins", "RET signaling", "DRG neuron development", "cis and trans activation", ], ), ) def test_keywords(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.keywords(soup) self.assertEqual(expected, tag_content) @data( # edge case, no permissions tag ("<root><article></article></root>", None), # example from elife-kitchen-sink.xml ( read_fixture("", "article_permissions.xml"), ( "This article is distributed under the terms of the " "Creative Commons Attribution License, which permits " "unrestricted use and redistribution provided that " "the original author and source are credited." 
), ), ) @unpack def test_license(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.license(soup_body(soup)) self.assertEqual(expected, tag_content) @unpack @data( # example license from 00666 ( read_fixture("test_license_json", "content_01.xml"), read_fixture("test_license_json", "content_01_expected.py"), ), # edge case, no permissions tag ("<root><article></article></root>", None), ) def test_license_json(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.license_json(soup) self.assertEqual(expected, tag_content) @data( # edge case, no permissions tag ("<root><article></article></root>", None), # example from elife-kitchen-sink.xml ( read_fixture("", "article_permissions.xml"), ("http://creativecommons.org/licenses/by/4.0/"), ), ) @unpack def test_license_url(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.license_url(soup_body(soup)) self.assertEqual(expected, tag_content) @unpack @data( ( sample_xml("elife-kitchen-sink.xml"), read_fixture("test_media", "content_01_expected.py"), ), ( sample_xml("elife02304.xml"), read_fixture("test_media", "content_02_expected.py"), ), ( sample_xml("elife00007.xml"), read_fixture("test_media", "content_03_expected.py"), ), ( sample_xml("elife04953.xml"), read_fixture("test_media", "content_04_expected.py"), ), ( sample_xml("elife00005.xml"), read_fixture("test_media", "content_05_expected.py"), ), ( sample_xml("elife05031.xml"), read_fixture("test_media", "content_06_expected.py"), ), ( sample_xml("elife04493.xml"), read_fixture("test_media", "content_07_expected.py"), ), ( sample_xml("elife06726.xml"), read_fixture("test_media", "content_08_expected.py"), ), ) def test_media(self, filename, expected): soup = parser.parse_document(filename) tag_content = parser.media(soup) self.assertEqual(expected, tag_content) @unpack @data( # pub-date values from 00666 kitchen sink ( read_fixture("test_pub_dates", 
"content_01.xml"), read_fixture("test_pub_dates", "content_01_expected.py"), ), # example from cstp77 ( read_fixture("test_pub_dates", "content_02.xml"), read_fixture("test_pub_dates", "content_02_expected.py"), ), # example from bmjopen-2013-003269 ( read_fixture("test_pub_dates", "content_03.xml"), read_fixture("test_pub_dates", "content_03_expected.py"), ), ) def test_pub_dates_edge_cases(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.pub_dates(soup) self.assertEqual(expected, tag_content) @unpack @data( # snippet of XML from elife-kitchen-sink.xml ( read_fixture("", "article_dates.xml"), 1393545600, ), # poa XML before pub-date is added ( "<article/>", None, ), ) def test_pub_date_timestamp(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.pub_date_timestamp(soup) self.assertEqual(expected, tag_content) @unpack @data( # snippet of XML from elife-kitchen-sink.xml ( read_fixture("", "article_dates.xml"), "February 28, 2014", ), # poa XML before pub-date is added ( "<article/>", None, ), ) def test_pub_date_date(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.pub_date_date(soup) self.assertEqual(expected, tag_content) @unpack @data( # snippet of XML from elife-kitchen-sink.xml ( read_fixture("", "article_dates.xml"), 28, ), # poa XML before pub-date is added ( "<article/>", None, ), ) def test_pub_date_day(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.pub_date_day(soup) self.assertEqual(expected, tag_content) @unpack @data( # snippet of XML from elife-kitchen-sink.xml ( read_fixture("", "article_dates.xml"), 2, ), # poa XML before pub-date is added ( "<article/>", None, ), ) def test_pub_date_month(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.pub_date_month(soup) self.assertEqual(expected, tag_content) @unpack @data( # snippet of XML from elife-kitchen-sink.xml ( 
read_fixture("", "article_dates.xml"), 2014, ), # poa XML before pub-date is added ( "<article/>", None, ), ) def test_pub_date_year(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.pub_date_year(soup) self.assertEqual(expected, tag_content) @unpack @data( # example from elife-kitchen-sink.xml ( read_fixture("", "article_meta.xml"), "eLife Sciences Publications, Ltd", ), ) def test_publisher(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.publisher(soup) self.assertEqual(expected, tag_content) @unpack @data( # example from elife-kitchen-sink.xml ( read_fixture("", "article_meta.xml"), "00013", ), ) def test_publisher_id(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.publisher_id(soup) self.assertEqual(expected, tag_content) @unpack @data( # snippet of XML from elife-kitchen-sink.xml ( read_fixture("", "article_dates.xml"), "June 22, 2012", ), ) @data("elife-kitchen-sink.xml") def test_received_date_date(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.received_date_date(soup) self.assertEqual(expected, tag_content) @unpack @data( # snippet of XML from elife-kitchen-sink.xml ( read_fixture("", "article_dates.xml"), 22, ), ) @data("elife-kitchen-sink.xml") def test_received_date_day(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.received_date_day(soup) self.assertEqual(expected, tag_content) @unpack @data( # snippet of XML from elife-kitchen-sink.xml ( read_fixture("", "article_dates.xml"), 6, ), ) @data("elife-kitchen-sink.xml") def test_received_date_month(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.received_date_month(soup) self.assertEqual(expected, tag_content) @unpack @data( # snippet of XML from elife-kitchen-sink.xml ( read_fixture("", "article_dates.xml"), 1340323200, ), ) @data("elife-kitchen-sink.xml") def 
test_received_date_timestamp(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.received_date_timestamp(soup) self.assertEqual(expected, tag_content) @unpack @data( # snippet of XML from elife-kitchen-sink.xml ( read_fixture("", "article_dates.xml"), 2012, ), ) @data("elife-kitchen-sink.xml") def test_received_date_year(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.received_date_year(soup) self.assertEqual(expected, tag_content) def test_references(self): # Alias of refs soup = parser.parse_xml("<article/>") self.assertEqual(parser.references(soup), []) @unpack @data( # non-elife example with issue tag from cstp77 ( read_fixture("test_refs", "content_01.xml"), read_fixture("test_refs", "content_01_expected.py"), ), # mixed-citation example 1 from bmjopen ( read_fixture("test_refs", "content_02.xml"), read_fixture("test_refs", "content_02_expected.py"), ), # mixed-citation example 2 from bmjopen ( read_fixture("test_refs", "content_03.xml"), read_fixture("test_refs", "content_03_expected.py"), ), # mixed-citation example 3 from bmjopen ( read_fixture("test_refs", "content_04.xml"), read_fixture("test_refs", "content_04_expected.py"), ), # citation example from redalyc - udea ( read_fixture("test_refs", "content_05.xml"), read_fixture("test_refs", "content_05_expected.py"), ), # example of data citation with a pub-id accession, based on article 07836 ( read_fixture("test_refs", "content_06.xml"), read_fixture("test_refs", "content_06_expected.py"), ), # example of data citation with a object-id tag accession, based on article 07048 ( read_fixture("test_refs", "content_07.xml"), read_fixture("test_refs", "content_07_expected.py"), ), # example of mixed-citation with string-name, based on non-elife article ( read_fixture("test_refs", "content_08.xml"), read_fixture("test_refs", "content_08_expected.py"), ), # example of data citation with a pub-id pub-id-type="archive", parse it as an 
accession number, based on 00666 kitchen sink example ( read_fixture("test_refs", "content_09.xml"), read_fixture("test_refs", "content_09_expected.py"), ), # example of citation with a pub-id pub-id-type="pmid", from elife-kitchen-sink.xml ( read_fixture("test_refs", "content_10.xml"), read_fixture("test_refs", "content_10_expected.py"), ), # example of person-group with a collab, from elife-kitchen-sink.xml ( read_fixture("test_refs", "content_11.xml"), read_fixture("test_refs", "content_11_expected.py"), ), ) def test_refs_edge_cases(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.refs(soup) self.assertEqual(expected, tag_content) @unpack @data( # example from elife-kitchen-sink.xml ( read_fixture("", "article_meta.xml"), [ { "ext_link_type": "doi", "related_article_type": "commentary", "xlink_href": None, } ], ), ) def test_related_article(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.related_article(soup) self.assertEqual(expected, tag_content) @unpack @data( ( read_fixture("test_sub_articles", "content_01.xml"), read_fixture("test_sub_articles", "content_01_expected.py"), ), # editor evaluation sub-article parsing ( read_fixture("test_sub_articles", "content_02.xml"), read_fixture("test_sub_articles", "content_02_expected.py"), ), ) def test_sub_articles(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.sub_articles(soup) self.assertEqual(expected, tag_content) @unpack @data( ( read_sample_xml("elife-kitchen-sink.xml"), {"dataro1": {}, "dataro2": {}, "dataro3": {}}, ), ( read_sample_xml("elife_poa_e06828.xml"), {}, ), ) def test_related_object_ids(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.related_object_ids(soup) self.assertEqual(expected, tag_content) @unpack @data( ( "<article/>", [], ), ( read_fixture("", "article_meta.xml"), ["Mouse", "C. 
elegans", "Other"], ), ) def test_research_organism(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.research_organism(soup) self.assertEqual(expected, tag_content) @unpack @data( # example from elife-kitchen-sink.xml ( read_fixture("", "article_meta.xml"), [{"content-type": "pdf", "type": "self-uri", "position": 1, "ordinal": 1}], ), ( read_sample_xml("elife_poa_e06828.xml"), [], ), ) def test_self_uri(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.self_uri(soup) self.assertEqual(expected, tag_content) @unpack @data( ( "<article/>", [], ), ( read_fixture("", "article_meta.xml"), ["Research article", "Cell biology", "Computational and systems biology"], ), ) def test_subject_area(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.subject_area(soup) self.assertEqual(expected, tag_content) @unpack @data( # example with no data ( "<article/>", [], ), ( read_sample_xml("elife-kitchen-sink.xml"), read_fixture("test_supplementary_material", "content_01_expected.py"), ), ( read_sample_xml("elife02304.xml"), read_fixture("test_supplementary_material", "content_02_expected.py"), ), ) def test_supplementary_material(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.supplementary_material(soup) self.assertEqual(expected, tag_content) @unpack @data( # example from elife-kitchen-sink.xml ( read_fixture("", "article_meta.xml"), ( "Bacterial regulation of colony development in the closest " "living relatives of animals" ), ), ) @data("elife-kitchen-sink.xml") def test_title(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.title(soup) self.assertEqual(expected, tag_content) @unpack @data( # example with no title prefix ( read_fixture("test_title_prefix", "content_01.xml"), read_fixture("test_title_prefix", "content_01_expected.py"), ), # example from elife00240.xml ( 
read_fixture("test_title_prefix", "content_02.xml"), read_fixture("test_title_prefix", "content_02_expected.py"), ), # example from elife00270.xml ( read_fixture("test_title_prefix", "content_03.xml"), read_fixture("test_title_prefix", "content_03_expected.py"), ), # example from elife00351.xml ( read_fixture("test_title_prefix", "content_04.xml"), read_fixture("test_title_prefix", "content_04_expected.py"), ), ) def test_title_prefix(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.title_prefix(soup) self.assertEqual(expected, tag_content) @unpack @data( ('<root xmlns:xlink="http://www.w3.org/1999/xlink"></root>', None), (read_fixture("test_title_prefix_json", "content_01.xml"), u"Breast Cancer"), ( read_fixture("test_title_prefix_json", "content_02.xml"), u"The Natural History of Model Organisms", ), ( read_fixture("test_title_prefix_json", "content_03.xml"), u"p53 Family Proteins", ), (read_fixture("test_title_prefix_json", "content_04.xml"), u"TOR Signaling"), # example from elife-27438-v1.xml has no sub-display-channel and title_prefix is None (read_fixture("test_title_prefix_json", "content_05.xml"), None), # example from elife-27438-v2.xml which does have a title_prefix and it rewritten (read_fixture("test_title_prefix_json", "content_06.xml"), "Point of View"), ) def test_title_prefix_json(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.title_prefix_json(soup_body(soup)) self.assertEqual(expected, tag_content) @unpack @data( # example from elife-kitchen-sink.xml (read_fixture("", "article_meta.xml"), "Bacterial regulation"), ) @data("elife-kitchen-sink.xml") def test_title_short(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.title_short(soup) self.assertEqual(expected, tag_content) @unpack @data( # example from elife-kitchen-sink.xml ( read_fixture("", "article_meta.xml"), 
"bacterial-regulation-of-colony-development-in-the-closest-living-relatives-of-animals", ), ) @data("elife-kitchen-sink.xml") def test_title_slug(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.title_slug(soup) self.assertEqual(expected, tag_content) @unpack @data( # example from elife-kitchen-sink.xml ( read_fixture("", "article_meta.xml"), "3", ), ( read_sample_xml("elife_poa_e06828.xml"), None, ), ) def test_volume(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.volume(soup) self.assertEqual(expected, tag_content) @unpack @data( # example issue from a non-eLife article (read_fixture("test_issue", "content_01.xml"), "1"), # example of no article issue (read_fixture("test_issue", "content_02.xml"), None), ) def test_issue(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.issue(soup) self.assertEqual(expected, tag_content) @unpack @data( # example fpage from a non-eLife article (read_fixture("test_fpage", "content_01.xml"), "1"), # example of no article fpage (read_fixture("test_fpage", "content_02.xml"), None), ) def test_fpage(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.fpage(soup) self.assertEqual(expected, tag_content) @unpack @data( # example lpage from a non-eLife article (read_fixture("test_lpage", "content_01.xml"), "2"), # example of no article lpage (read_fixture("test_lpage", "content_02.xml"), None), ) def test_lpage(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.lpage(soup) self.assertEqual(expected, tag_content) def test_parse_mixed_citations(self): data = parser.mixed_citations(self.soup("elife-kitchen-sink.xml")) expected = read_fixture("test_parse_mixed_citations", "content_01_expected.py") self.assertEqual(expected, data) @unpack @data( # example with no history ( read_fixture("test_version_history", "content_01.xml"), 
read_fixture("test_version_history", "content_01_expected.py"), ), # example based on 00666 kitchen sink ( read_fixture("test_version_history", "content_02.xml"), read_fixture("test_version_history", "content_02_expected.py"), ), ) def test_version_history(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.version_history(soup) self.assertEqual(expected, tag_content) @unpack @data( ( read_fixture("test_clinical_trials", "content_01.xml"), read_fixture("test_clinical_trials", "content_01_expected.py"), ), # eLife example ( read_fixture("test_clinical_trials", "content_02.xml"), read_fixture("test_clinical_trials", "content_02_expected.py"), ), # example with all tag attributes and a related-object tag to ignore ( read_fixture("test_clinical_trials", "content_03.xml"), read_fixture("test_clinical_trials", "content_03_expected.py"), ), ) def test_clinical_trials(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.clinical_trials(soup_body(soup)) self.assertEqual(expected, tag_content) @unpack @data( ("", []), ( read_fixture("test_pub_history", "content_01.xml"), read_fixture("test_pub_history", "content_01_expected.py"), ), ( read_fixture("test_pub_history", "content_02.xml"), read_fixture("test_pub_history", "content_02_expected.py"), ), ( read_fixture("test_pub_history", "content_03.xml"), read_fixture("test_pub_history", "content_03_expected.py"), ), ) def test_pub_history(self, xml_content, expected): soup = parser.parse_xml(xml_content) tag_content = parser.pub_history(soup) self.assertEqual(expected, tag_content) if __name__ == "__main__": unittest.main()<|fim▁end|>
def test_footnotes_json(self, xml_content, expected): soup = parser.parse_xml(xml_content)
<|file_name|>options.js<|end_file_name|><|fim▁begin|>// Saves options to chrome.storage function save_options () { var saveDict = [] var i = 1 $('input').map(function () { var dict = { id: 'scbcc' + i, value: this.value } i++ console.log('save: ', dict) ga('send', 'event', 'setting', 'save', this.value) saveDict.push(dict) }).get() chrome.storage.sync.set({ scbccRegexDict: saveDict }) } // Restores select box and checkbox state using the preferences // stored in chrome.storage. function restore_options () { chrome.storage.sync.get({ scbccRegexDict: [] }, function (items) { $('#field1').attr('value', items.scbccRegexDict[0].value) for (var i = 0; i < items.scbccRegexDict.length; i++) { var value = items.scbccRegexDict[i].value var next = i var addto = '#remove' + next var addRemove = '#field' + (next + 1) next = next + 1 var newIn = '<input autocomplete="off" placeholder="e.g. /this is test/g" id="field' + next + '" name="field' + next + '" type="text" tabindex="1" value=' + value + '>' var newInput = $(newIn) var removeBtn = '<button id="remove' + (next) + '" class="btn btn-danger remove-me" >-</button>' var removeButton = $(removeBtn)<|fim▁hole|> } $('#count').val(next) $('.remove-me').click(function (e) { e.preventDefault() ga('send', 'event', 'setting', 'remove_regex') var fieldNum = this.id.charAt(this.id.length - 1) var fieldID = '#field' + fieldNum $(this).remove() $(fieldID).remove() $('#style').attr('href', 'extra/styles.css') }) } var next = items.scbccRegexDict.length || 1 $('.add-more').click(function (e) { ga('send', 'event', 'setting', 'add_regex') e.preventDefault() var addto = '#remove' + next var addRemove = '#field' + (next + 1) next = next + 1 var newIn = '<input autocomplete="off" placeholder="e.g. 
/this is test/g" id="field' + next + '" name="field' + next + '" type="text" tabindex="1">' var newInput = $(newIn) var removeBtn = '<button id="remove' + (next) + '" class="btn btn-danger remove-me" >-</button>' var removeButton = $(removeBtn) $(addto).after(newInput) $(addRemove).after(removeButton) $('#count').val(next) $('.remove-me').click(function (e) { e.preventDefault() ga('send', 'event', 'setting', 'remove_regex') var fieldNum = this.id.charAt(this.id.length - 1) var fieldID = '#field' + fieldNum $(this).remove() $(fieldID).remove() $('#style').attr('href', 'extra/styles.css') }) }) }) } document.addEventListener('DOMContentLoaded', restore_options) document.getElementById('save').addEventListener('click', save_options)<|fim▁end|>
$(addto).after(newInput) if (i !== 0) { $(addRemove).after(removeButton)
<|file_name|>Execute.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # # Copyright (c) 2001 - 2016 The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # __revision__ = "test/Execute.py rel_2.5.1:3735:9dc6cee5c168 2016/11/03 14:02:02 bdbaddog" """ Test the Execute() function for executing actions directly. 
""" import TestSCons <|fim▁hole|> test.write('my_copy.py', """\ import sys open(sys.argv[2], 'wb').write(open(sys.argv[1], 'rb').read()) try: exitval = int(sys.argv[3]) except IndexError: exitval = 0 sys.exit(exitval) """) test.write('SConstruct', """\ Execute(r'%(_python_)s my_copy.py a.in a.out') Execute(Action(r'%(_python_)s my_copy.py b.in b.out')) env = Environment(COPY = 'my_copy.py') env.Execute(r'%(_python_)s my_copy.py c.in c.out') env.Execute(Action(r'%(_python_)s my_copy.py d.in d.out')) v = env.Execute(r'%(_python_)s $COPY e.in e.out') assert v == 0, v v = env.Execute(Action(r'%(_python_)s $COPY f.in f.out')) assert v == 0, v v = env.Execute(r'%(_python_)s $COPY g.in g.out 1') assert v == 1, v v = env.Execute(Action(r'%(_python_)s $COPY h.in h.out 2')) assert v == 2, v import shutil Execute(lambda target, source, env: shutil.copy('i.in', 'i.out')) Execute(Action(lambda target, source, env: shutil.copy('j.in', 'j.out'))) env.Execute(lambda target, source, env: shutil.copy('k.in', 'k.out')) env.Execute(Action(lambda target, source, env: shutil.copy('l.in', 'l.out'))) Execute(Copy('m.out', 'm.in')) Execute(Copy('nonexistent.out', 'nonexistent.in')) """ % locals()) test.write('a.in', "a.in\n") test.write('b.in', "b.in\n") test.write('c.in', "c.in\n") test.write('d.in', "d.in\n") test.write('e.in', "e.in\n") test.write('f.in', "f.in\n") test.write('g.in', "g.in\n") test.write('h.in', "h.in\n") test.write('i.in', "i.in\n") test.write('j.in', "j.in\n") test.write('k.in', "k.in\n") test.write('l.in', "l.in\n") test.write('m.in', "m.in\n") import sys if sys.platform == 'win32': expect = r"""scons: \*\*\* Error 1 scons: \*\*\* Error 2 scons: \*\*\* nonexistent.in/\*\.\*: (The system cannot find the path specified|Das System kann den angegebenen Pfad nicht finden)""" else: expect = r"""scons: \*\*\* Error 1 scons: \*\*\* Error 2 scons: \*\*\* nonexistent\.in: No such file or directory""" test.run(arguments = '.', stdout = None, stderr = None) 
test.must_contain_all_lines(test.stderr(), expect.splitlines(), find=TestSCons.search_re) test.must_match('a.out', "a.in\n") test.must_match('b.out', "b.in\n") test.must_match('c.out', "c.in\n") test.must_match('d.out', "d.in\n") test.must_match('e.out', "e.in\n") test.must_match('f.out', "f.in\n") test.must_match('g.out', "g.in\n") test.must_match('h.out', "h.in\n") test.must_match('i.out', "i.in\n") test.must_match('j.out', "j.in\n") test.must_match('k.out', "k.in\n") test.must_match('l.out', "l.in\n") test.must_match('m.out', "m.in\n") test.pass_test() # Local Variables: # tab-width:4 # indent-tabs-mode:nil # End: # vim: set expandtab tabstop=4 shiftwidth=4:<|fim▁end|>
_python_ = TestSCons._python_ test = TestSCons.TestSCons()
<|file_name|>param_dict.py<|end_file_name|><|fim▁begin|>#!/user/bin/env python """ @package mi.dataset.param_dict @file mi/dataset/param_dict.py @author Emily Hahn @brief Extend the protocol param dict to handle dataset encoding exceptions """ import re from mi.core.instrument.protocol_param_dict import ProtocolParameterDict, ParameterDescription from mi.core.instrument.protocol_param_dict import ParameterValue, ParameterDictVisibility from mi.core.log import get_logger ; log = get_logger() class DatasetParameterValue(ParameterValue): def clear_value(self): """ Ensure value is cleared to None """ self.value = None class Parameter(object): """ A parameter dictionary item. """ def __init__(self, name, f_format, value=None, expiration=None): """ Parameter value constructor. @param name The parameter name. @param f_format The function that formats the parameter value for a set command. @param value The parameter value (initializes to None). """ self.description = ParameterDescription(name, menu_path_read=None, submenu_read=None, menu_path_write=None, submenu_write=None, multi_match=False, visibility=ParameterDictVisibility.READ_WRITE, direct_access=False, startup_param=False, default_value=None, init_value=None, get_timeout=10, set_timeout=10, display_name=None, description=None, type=None, units=None, value_description=None) self.value = DatasetParameterValue(name, f_format, value=value, expiration=expiration) self.name = name def update(self, input):<|fim▁hole|> will be new new value. In subclasses, this must be updated to handle a real string of data appropriately. @param input A string that is the parameter value. @retval True if an update was successful, False otherwise. """ self.value.set_value(input) return True def get_value(self, timestamp=None): """ Get the value of the parameter that has been stored in the ParameterValue object. 
@param timestamp timestamp to use for expiration calculation @retval The actual data value if it is valid @raises InstrumentParameterExpirationException If the value has expired """ return self.value.get_value(timestamp) def clear_value(self): """ Clear the value in the parameter by setting it to None """ self.value.clear_value() class RegexParameter(Parameter): def __init__(self, name, pattern, f_getval, f_format, value=None, regex_flags=None, expiration=None): """ Parameter value constructor. @param name The parameter name. @param pattern The regex that matches the parameter in line output. @param f_getval The fuction that extracts the value from a regex match. @param f_format The function that formats the parameter value for a set command. @param value The parameter value (initializes to None). @param regex_flags Flags that should be passed to the regex in this parameter. Should comply with regex compile() interface (XORed flags). @throws TypeError if regex flags are bad @see ProtocolParameterDict.add() for details of parameters """ Parameter.__init__(self, name, f_format, value=value, expiration=expiration) self.pattern = pattern if regex_flags == None: self.regex = re.compile(pattern) else: self.regex = re.compile(pattern, regex_flags) self.f_getval = f_getval def update(self, input): """ Attempt to update a parameter value. If the input string matches the value regex, extract and update the dictionary value. @param input A string possibly containing the parameter value. @retval True if an update was successful, False otherwise. """ if not (isinstance(input, str)): match = self.regex.search(str(input)) else: match = self.regex.search(input) if match: self.value.set_value(self.f_getval(match)) return True else: return False class DatasetParameterDict(ProtocolParameterDict): """ Dataset parameter dictionary. Manages, matches and formats parameters. """ def __init__(self): """ Constructor. 
""" super(DatasetParameterDict, self).__init__() self._encoding_errors = [] def add(self, name, pattern, f_getval, f_format, value=None, regex_flags=None): """ Add a parameter object to the dictionary using a regex for extraction. @param name The parameter name. @param pattern The regex that matches the parameter in line output. @param f_getval The fuction that extracts the value from a regex match. @param f_format The function that formats the parameter value for a set command. @param regex_flags Flags that should be passed to the regex in this parameter. Should comply with regex compile() interface (XORed flags). """ val = RegexParameter(name, pattern, f_getval, f_format, value=value, regex_flags=regex_flags) self._param_dict[name] = val def update(self, in_data): """ Update the dictionaray with a line input. Iterate through all objects and attempt to match and update a parameter. Only updates the first match encountered. If we pass in a target params list then will will only iterate through those allowing us to limit upstate to only specific parameters. @param in_data A set of data to match to a dictionary object. @raise InstrumentParameterException on invalid target prams @raise KeyError on invalid parameter name """ params = self._param_dict.keys() for name in params: log.trace("update param dict name: %s", name) try: val = self._param_dict[name] val.update(in_data) except Exception as e: # set the value to None if we failed val.clear_value() log.error("Dataset parameter dict error encoding Name:%s, set to None", name) self._encoding_errors.append({name: None}) def get_encoding_errors(self): """ Return the encoding errors list """ return self._encoding_errors<|fim▁end|>
""" Attempt to udpate a parameter value. By default, this assumes the input
<|file_name|>controller.js<|end_file_name|><|fim▁begin|>/* Copyright 2013 Northern Arizona University This file is part of Sweet Jumps. Sweet Jumps is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Sweet Jumps is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with Sweet Jumps. If not, see <http://www.gnu.org/licenses/>. */ 'use strict' var logger = require('log4js').getLogger('[ctrl] {%= name %}') /** * {%= name %} controller initialization function.<|fim▁hole|> * @param {object} options Configuration if it exists * @param {[type]} context The instance of the SweetJumps class. Preferably you would not use this unless necessary (to getModule for instance). */ module.exports = function (app, options, context) { logger.info('{%= name %} controller init') app.get('/', function (req, res) { res.render('{%= filename %}/index', { 'message': 'Controller: {%= name %}' }) }) }<|fim▁end|>
* @param {express} app The global express app. Preferably mount a subapp or router for all of this controller's routes.
<|file_name|>api_client.py<|end_file_name|><|fim▁begin|>import math from service.fake_api_results import ALL_TITLES, OFFICIAL_COPY_RESULT, SELECTED_FULL_RESULTS SEARCH_RESULTS_PER_PAGE = 20 def get_title(title_number): return SELECTED_FULL_RESULTS.get(title_number) <|fim▁hole|> number_pages = math.ceil(nof_results / SEARCH_RESULTS_PER_PAGE) start_index = page_number * SEARCH_RESULTS_PER_PAGE end_index = start_index + SEARCH_RESULTS_PER_PAGE return { 'number_pages': number_pages, 'number_results': nof_results, 'page_number': page_number, 'titles': ALL_TITLES[start_index:end_index], } def get_titles_by_postcode(postcode, page_number): return _get_titles(page_number) def get_titles_by_address(address, page_number): return _get_titles(page_number) def get_official_copy_data(title_number): return OFFICIAL_COPY_RESULT<|fim▁end|>
def _get_titles(page_number): nof_results = len(ALL_TITLES)
<|file_name|>first_steps_1_simple_line.py<|end_file_name|><|fim▁begin|>from bokeh.plotting import figure, show # prepare some data x = [1, 2, 3, 4, 5] y = [6, 7, 2, 4, 5] # create a new plot with a title and axis labels p = figure(title="Simple line example", x_axis_label="x", y_axis_label="y") # add a line renderer with legend and line thickness<|fim▁hole|># show the results show(p)<|fim▁end|>
p.line(x, y, legend_label="Temp.", line_width=2)
<|file_name|>cm_tooltip.js<|end_file_name|><|fim▁begin|>/** * Attr: cmTooltip and cmTooltipContent */ myApp.directive('cmTooltip', function() { return function (scope, iElement, iAttrs) { console.log("appling cm tooltip");<|fim▁hole|> iElement.tooltip({ "animation": true, "placement": "top", "title": value }); currentValue = value; } }); } });<|fim▁end|>
var currentValue = ""; iAttrs.$observe('cmTooltipContent', function(value) { if(value != currentValue && value != "") {
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from bar import path<|fim▁end|>
<|file_name|>si_units.rs<|end_file_name|><|fim▁begin|>//! simple_units: a simple unit system for Rust //! //! Written by Willi Kappler, Version 0.1 (2017.02.22) //! //! Repository: https://github.com/willi-kappler/simple_units //! //! License: MIT //! use std::ops::Add; use std::ops::Sub; use std::ops::Mul; use std::ops::Div; use std::cmp::PartialEq; init_unit_and_inverse!(Meter, PerMeter); init_unit_and_inverse!(Meter2, PerMeter2); init_unit_and_inverse!(Meter3, PerMeter3); power3_unit!(Meter, Meter2, Meter3, PerMeter, PerMeter2, PerMeter3); init_unit_and_inverse!(Second, PerSecond); init_unit_and_inverse!(Second2, PerSecond2); init_unit_and_inverse!(Second3, PerSecond3); power3_unit!(Second, Second2, Second3, PerSecond, PerSecond2, PerSecond3); init_unit_and_inverse!(MeterPerSecond, SecondPerMeter); init_unit_and_inverse!(MeterPerSecond2, Second2PerMeter); init_unit_and_inverse!(Meter2PerSecond, SecondPerMeter2); init_unit_and_inverse!(Meter2PerSecond2, Second2PerMeter2); init_unit_and_inverse!(MeterSecond, PerMeterSecond); init_unit_and_inverse!(Meter2Second, PerMeter2Second); init_unit_and_inverse!(MeterSecond2, PerMeterSecond2); init_unit_and_inverse!(Meter2Second2, PerMeter2Second2); combine_unit!(Meter, PerMeter, Second, PerSecond, MeterSecond, MeterPerSecond, SecondPerMeter, PerMeterSecond); combine_unit!(Meter2, PerMeter2, Second, PerSecond, Meter2Second, Meter2PerSecond, SecondPerMeter2, PerMeter2Second); combine_unit!(Meter, PerMeter, Second2, PerSecond2, MeterSecond2, MeterPerSecond2, Second2PerMeter, PerMeterSecond2); combine_unit!(Meter2, PerMeter2, Second2, PerSecond2, Meter2Second2, Meter2PerSecond2, Second2PerMeter2, PerMeter2Second2); mul_div_unit!(PerSecond2, Meter2Second, Meter2PerSecond); init_unit_and_inverse!(Kilogram, PerKilogram); // momentum, impulse init_unit_and_inverse!(KilogramMeterPerSecond, SecondPerKilogramMeter); mul_div_unit!(Kilogram, MeterPerSecond, KilogramMeterPerSecond); init_unit_and_inverse!(Newton, PerNewton); 
mul_div_unit!(Kilogram, MeterPerSecond2, Newton); mul_div_unit!(Newton, Second, KilogramMeterPerSecond); init_unit_and_inverse!(Pascal, PerPascal); mul_div_unit!(Pascal, Meter2, Newton); init_unit_and_inverse!(Joule, PerJoule); mul_div_unit!(Joule, Meter, Newton); init_unit_and_inverse!(Watt, PerWatt); mul_div_unit!(Watt, Second, Joule); init_unit_and_inverse!(DegC, PerDegC); init_unit_and_inverse!(Kelvin, PerKelvin); init_unit_and_inverse!(Kelvin2, PerKelvin2); mul_div_unit!(Kelvin, Kelvin2); init_unit_and_inverse!(KelvinPerSecond, SecondPerKelvin); mul_div_unit!(KelvinPerSecond, Second, Kelvin); <|fim▁hole|>init_unit!(JouleDegCPerSecond); mul_div_unit!(DegCPerSecond, Joule, JouleDegCPerSecond); init_unit!(JouleKelvinPerSecond); mul_div_unit!(KelvinPerSecond, Joule, JouleKelvinPerSecond); init_unit_and_inverse!(Mol, PerMol); init_unit_and_inverse!(JoulePerKelvin, KelvinPerJoule); init_unit_and_inverse!(JoulePerMol, MolPerJoule); init_unit_and_inverse!(JoulePerKelvinMol, KelvinMolPerJoule); init_unit_and_inverse!(JouleKelvinPerMol, MolPerJouleKelvin); mul_div_unit!(JoulePerKelvinMol, Kelvin2, JouleKelvinPerMol); init_unit_and_inverse!(SecondPerMol, MolPerSecond); init_unit_and_inverse!(JouleKelvinPerMolSecond, MolSecondPerJouleKelvin); mul_div_unit!(SecondPerMol, JouleKelvinPerSecond, JouleKelvinPerMol); mul_div_unit!(Mol, SecondPerMol, Second); mul_div_unit!(Mol, JoulePerKelvinMol, JoulePerKelvin); mul_div_unit!(Kelvin, JoulePerKelvin, Joule); mul_div_unit!(JoulePerMol, KelvinPerSecond, JouleKelvinPerMolSecond); mul_div_unit!(Kelvin, JoulePerKelvinMol, JoulePerMol); mul_div_unit!(Second, JouleKelvinPerMolSecond, JouleKelvinPerMol); // TODO: add more units...<|fim▁end|>
init_unit_and_inverse!(DegCPerSecond, SecondPerDegC); mul_div_unit!(DegCPerSecond, Second, DegC);
<|file_name|>logger.cpp<|end_file_name|><|fim▁begin|>//============================================================================== // Brief : Logging Facilities // Authors : Bruno Santos <[email protected]> //------------------------------------------------------------------------------ // ODTONE - Open Dot Twenty One // // Copyright (C) 2009-2012 Universidade Aveiro // Copyright (C) 2009-2012 Instituto de Telecomunicações - Pólo Aveiro // // This software is distributed under a license. The full license // agreement can be found in the file LICENSE in this distribution. // This software may not be copied, modified, sold or distributed // other than expressed in the named license agreement. // // This software is distributed without any warranty. //============================================================================== #include <odtone/debug.hpp> #include <odtone/logger.hpp> #include <boost/make_shared.hpp> /////////////////////////////////////////////////////////////////////////////// namespace odtone { /////////////////////////////////////////////////////////////////////////////// /** * Construct a logger by copying it from another logger. * * @param name Logger's name. * @param log Logger to copy. */ logger::logger(char const* const name, logger& log) : _lock(log._lock), _name(name), _sink(log._sink), _level(log._level) { ODTONE_ASSERT(name); } /** * Construct a logger. * * @param name Logger's name. * @param sink std::ostream which defines how the logger will write and<|fim▁hole|>logger::logger(char const* const name, std::ostream& sink) : _lock(boost::make_shared<boost::mutex>()), _name(name), _sink(sink), _level(0) { ODTONE_ASSERT(name); } /** * Destruct a logger. */ logger::~logger() { } /** * Set the output level. Each logger has a level associated with it. * This reflects the maximum level that the logger cares about. So, if the * logger level is set to 2 it only cares about log messages belonging * to level 0, 1 and 2. * * @param n Logger level. 
*/ void logger::level(uint n) { _level = n; } /** * Get the level configuration. Each logger has a level associated with it. * This reflects the maximum level that the logger cares about. So, if the * logger level is set to 2 it only cares about log messages belonging * to level 0, 1 and 2. * * @return The logger level. */ uint logger::level() const { return _level; } /** * Get the std::ostream associated with the logger. * * @return The std::ostream associated with the logger. */ std::ostream& logger::sink() const { return _sink; } /////////////////////////////////////////////////////////////////////////////// } /* namespace odtone */ // EOF ////////////////////////////////////////////////////////////////////////<|fim▁end|>
* format output. */
<|file_name|>test-stream-transform-constructor-set-methods.js<|end_file_name|><|fim▁begin|>'use strict'; var common = require('../common'); var assert = require('assert'); var Transform = require('stream').Transform; var _transformCalled = false; function _transform(d, e, n) { _transformCalled = true; n(); } var _flushCalled = false; function _flush(n) { _flushCalled = true; n(); } var t = new Transform({<|fim▁hole|> t.end(new Buffer('blerg')); t.resume(); process.on('exit', function() { assert.equal(t._transform, _transform); assert.equal(t._flush, _flush); assert(_transformCalled); assert(_flushCalled); });<|fim▁end|>
transform: _transform, flush: _flush });
<|file_name|>co.test.runner.js<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2015 Juniper Networks, Inc. All rights reserved. */ define([ 'jquery', 'underscore', 'co-test-utils', 'co-test-constants' ], function ($, _, cotu, cotc) { var defaultTestSuiteConfig = { class: '', groups: [], severity: cotc.SEVERITY_LOW }; var createTestSuiteConfig = function (testClass, groups, severity) { severity = ifNull(severity, cotc.SEVERITY_LOW); groups = ifNull(groups, ['all']); return $.extend({}, defaultTestSuiteConfig, {class: testClass, groups: groups, severity: severity}); }; var createViewTestConfig = function (viewId, testSuiteConfig, mokDataConfig) { var viewTestConfig = {};<|fim▁hole|> viewTestConfig.testSuites.push(suiteConfig); }); } viewTestConfig.mockDataConfig = ifNull(mokDataConfig, {}); return viewTestConfig; }; var defaultFakeServerConfig = { options: { autoRespondAfter: 100 }, responses: [], getResponsesConfig: function () { return []; } }; this.getDefaultFakeServerConfig = function () { return defaultFakeServerConfig; }; this.createFakeServerResponse = function (respObj) { var defaultResponse = { method: 'GET', url: '/', statusCode: 200, headers: {"Content-Type": "application/json"}, body: '' }; return $.extend({}, defaultResponse, respObj); } var defaultPageConfig = { hashParams: { p: '' }, loadTimeout: cotc.PAGE_LOAD_TIMEOUT }; this.getDefaultPageConfig = function () { return defaultPageConfig; } var defaultPageTestConfig = { moduleId: 'Set moduleId for your Test in pageTestConfig', testType: 'Set type of the test', fakeServer: this.getDefaultFakeServerConfig(), page: this.getDefaultPageConfig(), getTestConfig: function () { return {}; }, testInitFn: function(defObj, event) { if (defObj) defObj.resolve(); if (event) event.notify(); return; } }; this.createPageTestConfig = function (moduleId, testType, fakeServerConfig, pageConfig, getTestConfigCB, testInitFn) { var pageTestConfig = $.extend(true, {}, defaultPageTestConfig); if (moduleId != null) { 
pageTestConfig.moduleId = moduleId; } if (testType != null) { pageTestConfig.testType = testType; } if (fakeServerConfig != null) { pageTestConfig.fakeServer = $.extend({}, pageTestConfig.fakeServer, fakeServerConfig); } if (pageConfig != null) { pageTestConfig.page = $.extend({}, pageTestConfig.page, pageConfig); } if (getTestConfigCB != null) { pageTestConfig.getTestConfig = getTestConfigCB; } if (testInitFn != null) { pageTestConfig.testInitFn = testInitFn; } return pageTestConfig; }; this.cTest = function (message, callback, severity) { severity = ifNull(severity, cotc.SEVERITY_LOW); return { severity: severity, test: function () { return test(message, callback); }, type: cotc.TYPE_CONTRAIL_TEST }; }; var testGroup = function (name) { this.name = ifNull(name, ''); this.type = cotc.TYPE_CONTRAIL_TEST_GROUP; //set constant type. this.tests = []; /** * test is an object of CTest. * @param test */ this.registerTest = function (testObj) { if (testObj.type == cotc.TYPE_CONTRAIL_TEST) { this.tests.push(testObj); } else { console.log("Test should be object of type CUnit.test"); } } this.run = function (severity) { _.each(this.tests, function (testObj) { if (severity == cotc.SEVERITY_HIGH) { if (testObj.severity == cotc.SEVERITY_HIGH) { testObj.test(); } } else if (severity == cotc.SEVERITY_MEDIUM) { if (testObj.severity == cotc.SEVERITY_HIGH || testObj.severity == cotc.SEVERITY_MEDIUM) { testObj.test(); } } else if (severity == cotc.SEVERITY_LOW) { testObj.test(); } else { } }); }; }; this.createTestGroup = function (name) { return new testGroup(name); }; var testSuite = function (name) { this.name = ifNull(name, ''); this.groups = []; this.type = cotc.TYPE_CONTRAIL_TEST_SUITE; //set constant type. 
this.createTestGroup = function (name) { var group = new testGroup(name); this.groups.push(group); return group; }; this.registerGroup = function (group) { if (group.type == cotc.TYPE_CONTRAIL_TEST_GROUP) { this.groups.push(group); } else { console.log("Group should be object of CUnit.testGroup.") } } this.run = function (groupNames, severity) { var self = this; _.each(groupNames, function (groupName) { if (groupName == 'all') { //run all the groups. _.each(self.groups, function (group) { group.run(severity); }) } else { //run only the group that matches name. _.each(self.groups, function (group) { if (group.name == groupName) { group.run(severity); } }); } }); } } this.createTestSuite = function (name) { return new testSuite(name); } this.executeCommonTests = function (testConfigObj) { _.each(testConfigObj, function (testConfig) { _.each(testConfig.suites, function (suiteConfig) { suiteConfig.severity = cotc.RUN_SEVERITY; if (contrail.checkIfExist(suiteConfig.class)) { var testObj; if (contrail.checkIfExist(testConfig.viewObj)) { testObj = testConfig.viewObj; } else if (contrail.checkIfExist(testConfig.modelObj)) { testObj = testConfig.modelObj; } else if (contrail.checkIfExist(testConfig.moduleObj)) { testObj = testConfig.moduleObj; } else { console.log("Missing test object. 
Check your page test config."); } suiteConfig.class(testObj, suiteConfig); } }); }); }; this.executeUnitTests = function (testConfigObj) { _.each(testConfigObj, function (testConfig) { _.each(testConfig.suites, function(suiteConfig) { if (cotc.RUN_SEVERITY == undefined) { console.error("check co.test.config and set the run_severity correctly."); } suiteConfig.severity = cotc.RUN_SEVERITY; if (contrail.checkIfExist(suiteConfig.class)) { suiteConfig.class(testConfig.moduleObj, suiteConfig); } }); }); } /** * moduleId * fakeServer.options {} * fakeServer.responses * page.hashParams * page.loadTimeout * rootView * testConfig.getTestConfig() * @param PageTestConfig */ this.startTestRunner = function (pageTestConfig) { var self = this, fakeServer = null, fakeServerConfig = ifNull(pageTestConfig.fakeServer, self.getDefaultFakeServerConfig()); module(pageTestConfig.moduleId, { setup: function () { fakeServer = cotu.getFakeServer(fakeServerConfig.options); _.each(fakeServerConfig.responses, function (response) { fakeServer.respondWith(response.method, response.url, [response.statusCode, response.headers, response.data]); }); $.ajaxSetup({ cache: true }); }, teardown: function () { fakeServer.restore(); delete fakeServer; } }); var menuHandlerDoneCB = function () { asyncTest("Load and Run Test Suite: ", function (assert) { expect(0); // commenting out for now. once UT lib update get the async working. var done = assert.async(); switch (pageTestConfig.testType) { case cotc.VIEW_TEST: self.startViewTestRunner(pageTestConfig, fakeServer, assert, done); break; case cotc.MODEL_TEST: self.startModelTestRunner(pageTestConfig, fakeServer, done); break; case cotc.UNIT_TEST: self.startUnitTestRunner(pageTestConfig, done); break; case cotc.LIB_API_TEST: self.startLibTestRunner(pageTestConfig, done); default: console.log("Specify test type in your page test config. 
eg: cotc.VIEW_TEST or cotc.MODEL_TEST"); } }); }; menuHandlerDoneCB(); }; this.startViewTestRunner = function(viewTestConfig, fakeServer, assert, done) { if (contrail.checkIfExist(viewTestConfig.page.hashParams)) { var loadingStartedDefObj = loadFeature(viewTestConfig.page.hashParams); loadingStartedDefObj.done(function () { //additional fake server response setup var responses = viewTestConfig.fakeServer.getResponsesConfig(); _.each(responses, function (response) { fakeServer.respondWith(response.method, response.url, [response.statusCode, response.headers, response.body]); }); var pageLoadTimeOut = viewTestConfig.page.loadTimeout, pageLoadSetTimeoutId, pageLoadStart = new Date(); //Safety timeout until the root view is created. will be fixed in next release. setTimeout(function () { var testConfig = viewTestConfig.getTestConfig(), testInitDefObj = $.Deferred(), testStarted = false, testStartTime, qunitStarted = false, qunitStartTime; console.log("Configured Page Load Timeout (Max): " + pageLoadTimeOut / 1000 + "s"); console.log("Page Load Started: " + pageLoadStart.toString()); //start timer and make sure the startTest is invoked before pageLoadTimeOut. //This is the max time page should wait for loading. Exit. clearTimeout(pageLoadSetTimeoutId); pageLoadSetTimeoutId = window.setTimeout(function () { if (!testStarted && !qunitStarted) { testConfig.rootView.onAllViewsRenderComplete.unsubscribe(startTest); testConfig.rootView.onAllViewsRenderComplete.unsubscribe(initQUnit); assert.ok(false, "Page should load completely within configured page load timeout"); if (done) done(); } }, pageLoadTimeOut); /** * Run this once testInitFn is executed and onAllViewsRenderComplete is notified. * before starting QUnit, wait for the promise passed in the testInitFn to be resolved. * waiting on promise adds additional control on test start if there is more user actions * needs to be done. (even after the all views render is complete.) 
*/ function initQUnit() { testConfig.rootView.onAllViewsRenderComplete.unsubscribe(initQUnit); /** * function to start the QUnit execution. * This will be invoked once the page loading is complete and test initialization is complete. */ function startQUnit() { qunitStarted = true; qunitStartTime = new Date(); console.log("Starting QUnit: " + qunitStartTime.toString()); console.log("Time taken to completely load the page: " + ((qunitStartTime.getTime() - pageLoadStart.getTime()) / 1000).toFixed(2) + "s"); if (pageLoadSetTimeoutId) { window.clearTimeout(pageLoadSetTimeoutId); pageLoadSetTimeoutId = undefined; } var mockDataDefObj = $.Deferred(); cotu.setViewObjAndViewConfig4All(testConfig.rootView, testConfig.tests); //create and update mock data in test config cotu.createMockData(testConfig.rootView, testConfig.tests, mockDataDefObj); $.when(mockDataDefObj).done(function () { self.executeCommonTests(testConfig.tests); QUnit.start(); if (done) done(); //uncomment following line to console all the fake server request/responses //console.log(fakeServer.requests); }); } if (testInitDefObj.state() == 'resolved') { startQUnit(); } else { $.when(testInitDefObj).done(startQUnit); } } /** * function to start the Test. * invoked once the page load is complete. Test initialization can also trigger more loading. * call initQUnit once render complete. */ function startTest() { testStarted = true; testStartTime = new Date(); console.log("Starting Test Execution: " + testStartTime.toString()); //Remove the startTest from firing again on views renderComplete. testConfig.rootView.onAllViewsRenderComplete.unsubscribe(startTest); /** * testInitFn can have async calls and multiple view rendering. * For pages that implement testInitFn; and all the views are already rendered, * manually call the notify on the onAllViewsRenderComplete event. * testInitDefObj promise should be resolved inside the testInitFn once the user actions are done. 
* subscribe to onAllViewsRenderComplete to start the QUnit init steps. */ testConfig.rootView.onAllViewsRenderComplete.subscribe(initQUnit); console.log("Starting Test Page init actions (User): " + new Date().toString()); viewTestConfig.testInitFn(testInitDefObj, testConfig.rootView.onAllViewsRenderComplete); } //Initial Page loading. //Check if render is active or any active ajax request. Subscribe to onAllViewsRenderComplete testConfig.rootView.onAllViewsRenderComplete.subscribe(startTest); }, 100); }); } else { console.log("Requires hash params to load the test page. Update your page test config."); } }; this.startModelTestRunner = function(pageTestConfig, fakeServer, done) { //additional fake server response setup var responses = pageTestConfig.fakeServer.getResponsesConfig(); _.each(responses, function (response) { fakeServer.respondWith(response.method, response.url, [response.statusCode, response.headers, response.body]); }); //TODO Remove the page timeout usage var pageLoadTimeOut = pageTestConfig.page.loadTimeout; setTimeout(function () { var modelTestConfig = pageTestConfig.getTestConfig(); var modelObjDefObj = $.Deferred(); cotu.setModelObj4All(modelTestConfig, modelObjDefObj); $.when(modelObjDefObj).done(function() { pageTestConfig.testInitFn(); self.executeCommonTests(modelTestConfig.tests); QUnit.start(); if (done) done(); }); }, pageLoadTimeOut); }; this.startUnitTestRunner = function(pageTestConfig, done) { var self = this, moduleDefObj = $.Deferred(), testInitDefObj = $.Deferred(), unitTestConfig = pageTestConfig.getTestConfig(); module(ifNull(pageTestConfig.moduleId, "Unit Test Module")); if (contrail.checkIfExist(pageTestConfig.testInitFn)) { pageTestConfig.testInitFn(testInitDefObj); } else { testInitDefObj.resolve(); } $.when(testInitDefObj).done(function() { cotu.setModuleObj4All(unitTestConfig, moduleDefObj); $.when(moduleDefObj).done(function() { expect(0); self.executeUnitTests(unitTestConfig.tests); QUnit.start(); if (done) done(); }); }); 
}; this.startLibTestRunner = function(libTestConfig, done) { var self = this; var testInitDefObj = $.Deferred(); module(ifNull(libTestConfig.moduleId, "Library API Test Module")); asyncTest("Start Library Tests - " + ifNull(libTestConfig.libName, ""), function (assert) { expect(0); libTestConfig.testInitFn(testInitDefObj); var libTests = libTestConfig.getTestConfig(); setTimeout(function() { self.executeLibTests(libTests); QUnit.start(); if (done) done(); }, 1000); }); }; return { getDefaultFakeServerConfig: getDefaultFakeServerConfig, createFakeServerResponse: createFakeServerResponse, getDefaultPageConfig: getDefaultPageConfig, createTestSuiteConfig: createTestSuiteConfig, createViewTestConfig: createViewTestConfig, createPageTestConfig: createPageTestConfig, executeCommonTests: executeCommonTests, executeUnitTests: executeUnitTests, executeLibTests: executeUnitTests, test: cTest, createTestGroup: createTestGroup, createTestSuite: createTestSuite, startTestRunner: startTestRunner, startViewTestRunner: startViewTestRunner, startModelTestRunner: startModelTestRunner, startLibTestRunner: startLibTestRunner, startUnitTestRunner: startUnitTestRunner }; });<|fim▁end|>
viewTestConfig.viewId = ifNull(viewId, ""); viewTestConfig.testSuites = []; if (testSuiteConfig != null) { _.each(testSuiteConfig, function (suiteConfig) {
<|file_name|>view.js<|end_file_name|><|fim▁begin|>/** * View abstract class * * @author Mautilus s.r.o. * @class View * @abstract * @mixins Events * @mixins Deferrable */ function View() { Events.call(this); Deferrable.call(this); this.construct.apply(this, arguments); }; View.prototype.__proto__ = Events.prototype; View.prototype.__proto__.__proto__ = Deferrable.prototype; /** * Construct object * * @constructor * @param {String} [parent=null] Another View instance this view belongs to * @param {Object} [attributes={}] Object attrs */ View.prototype.construct = function(parent, attributes) { if (typeof attributes === 'undefined' && parent && !parent.construct) { // parent is not provided, but attributes are attributes = $.extend(true, {}, parent); parent = null; } /** * @property {Object} parent Parent snippet or scene */ this.parent = parent; this.reset(attributes); this.$el = this.create(); if (this.id) { this.$el.attr('id', this.id); } if (this.cls) { this.$el.addClass(this.cls); } this.init.apply(this, arguments); this.bindEvents(); }; /** * Destruct object * * @private */ View.prototype.desctruct = function() { this.deinit.apply(this, arguments); this.destroy(); }; /** * Set focus to the scene * * @template */ View.prototype.focus = function() { }; /** * Reset properties * * @param {Object} [attributes] Object attrs */ View.prototype.reset = function(attributes) { this.isVisible = false; this.isActive = false; if (attributes) { this.setAttributes(attributes); } }; /** * Set object properties, functions and attributes that start with '_' are not allowed * * @param {Object} attributes */ View.prototype.setAttributes = function(attributes) { for (var i in attributes) { if (typeof attributes[i] !== 'undefined' && typeof attributes[i] !== 'function' && typeof this[i] !== 'fucntion' && i.substr(0, 1) !== '_') { this[i] = attributes[i]; } } }; /** * Bind listeners to the `key` event and some others */ View.prototype.bindEvents = function() { if (this.parent) { 
this.parent.on('key', this._onKey, this); this.parent.on('click', this._onClick, this); this.parent.on('scroll', this._onScroll, this); this.parent.on('focus', this._onFocus, this); } else { Control.on('key', this._onKey, this); Mouse.on('click', this._onClick, this); Mouse.on('scroll', this._onScroll, this); Focus.on('focus', this._onFocus, this); } I18n.on('langchange', this._onLangChange, this); }; /** * Un-bind all default listeners */ View.prototype.unbindEvents = function() { if (this.parent) { this.parent.off('key', this._onKey, this); this.parent.off('click', this._onClick, this); this.parent.off('scroll', this._onScroll, this); this.parent.off('focus', this._onFocus, this); } else { Control.off('key', this._onKey, this); Mouse.off('click', this._onClick, this); Mouse.off('scroll', this._onScroll, this); Focus.off('focus', this._onFocus, this); } I18n.off('langchange', this._onLangChange, this); }; /** * Create scene's element, is called when scene is being constructed * * @template * @returns {Object} Element, jQuery collection */ View.prototype.create = function() { return $('<div />'); }; /** * Remove scene's elements when scene is hiding * * @template */ View.prototype.remove = function() { }; /** * Remove or hide scene's element, is called when scene is being destructed * * @template * @return {Boolean/Promise} Return FALSE when you don't want to hide this scene, Promise may be also returned */ View.prototype.destroy = function() { }; /** * Initialise scene * * @template */ View.prototype.init = function() { }; /** * De-initialise scene * * @template */ View.prototype.deinit = function() { }; /** * Activate and focus scene when its shown * * @template * @return {Boolean/Promise} Return FALSE when you don't want to show this scene, Promise may be also returned */ View.prototype.activate = function() { }; /** * Deactivate scene when its hidden * * @template * @return {Boolean} Return FALSE when you don't want to destroy this scene when its hidden */ 
View.prototype.deactivate = function() { }; /** * This method is called when and 'activate' method fails * * @template * @return {Boolean} If TRUE is returned, router will call goBack (default action) */ View.prototype.revert = function() { return true; }; /** * Render snippet * * @template * @return {Promise} */ View.prototype.render = function() { }; /** * Render snippet into specified target element * * @param {Object} target jQuery collection or HTMLElement */ View.prototype.renderTo = function(target) { var p; this.$el.appendTo(target); p = this.render(); if (p instanceof Promise) { p.done(function() { this.show(); }, this); } else { this.show(); } return p; }; /** * Display scene's element and set `this.isVisible` to TRUE */ View.prototype.show = function() { var args = arguments; return this.when(function(promise) { var activated; if (this.onBeforeShow() === false) { promise.reject(); return false; } this.$el.show(); this.isVisible = true; this.isActive = false; this.onShow(); this.trigger('show'); promise.fail(function() { this.hide(); }, this); activated = this.activate.apply(this, args); if (activated instanceof Promise) { activated.then(function(status) { this.isActive = status; if (status) { promise.resolve(); } else { promise.reject(); } }, this); } else if (activated !== false) { this.isActive = true; promise.resolve(); } else { this.isActive = false; promise.reject(); } }, this); }; /** * Fired before the view is being shown and before `activate` method * * @template * @return {Boolean} */ View.prototype.onBeforeShow = function() { }; /** * Fired when this view is displayed * * @template */ View.prototype.onShow = function() { }; /** * Hide scene's element and set `this.isVisible` to FALSE */ View.prototype.hide = function() { return this.when(function(promise) { var deactivated; promise.done(function() { this.onBeforeHide(); this.$el.hide(); this.isVisible = false; this.onHide(); this.trigger('hide'); }, this); deactivated = this.deactivate(); if 
(deactivated instanceof Promise) { deactivated.then(function(status) { if (status) { this.isActive = false; promise.resolve(); } else { promise.reject(); } }, this); } else if (deactivated !== false) { this.isActive = false; promise.resolve(); } else { promise.reject(); } }, this); }; /** * Fired before the view is being hidden but after `deactivate` method (no return value) * * @template */ View.prototype.onBeforeHide = function() { }; /** * Fired when this view is hidden * * @template */ View.prototype.onHide = function() { }; /** * Test if this scene has focus (or any snippet inside this scene) * * @returns {Boolean} */ View.prototype.hasFocus = function() { return Focus.isIn(this.$el);<|fim▁hole|> */ View.prototype._onKey = function(keyCode, ev, stop) { if (!this.isVisible || !this.hasFocus()) { return; } if (this.trigger('beforekey', keyCode, ev) === false) { return false; } if (this.onKey(keyCode, ev, stop) === false) { return false; } if (Control.isArrow(keyCode) && this.navigate(Control.getArrow(keyCode), stop) === false) { return false; } if (keyCode === Control.key.ENTER && this.onEnter(Focus.focused, ev, stop) === false) { return false; } else if (keyCode === Control.key.RETURN && this.onReturn(Focus.focused, ev, stop) === false) { return false; } if (this.trigger('key', keyCode, ev) === false) { return false; } }; /** * Handles keyDown events * * @template * @param {Number} keyCode * @param {Event} event * @param {Function} stop * @returns {Boolean} */ View.prototype.onKey = function(keyCode, ev, stop) { }; /** * Handles ENTER event * * @template * @param {Object} $el Target element, jQuery collection * @param {Event} event * @returns {Boolean} */ View.prototype.onEnter = function($el, event) { }; /** * Handles RETURN event * * @template * @param {Object} $el Target element, jQuery collection * @param {Event} event * @returns {Boolean} */ View.prototype.onReturn = function($el, event) { }; /** * @private */ View.prototype._onClick = function($el, event) 
{ if (!$el.belongsTo(this.$el)) { return; } if (this.onClick.apply(this, arguments) === false) { return false; } return this.trigger('click', $el, event); }; /** * Handles Click event * * @param {Object} $el Target element, jQuery collection * @param {Event} event Mouse event * @returns {Boolean} */ View.prototype.onClick = function($el, event) { }; /** * @private */ View.prototype._onScroll = function($el, delta, event) { if (!$el.belongsTo(this.$el)) { return; } if (this.onScroll.apply(this, arguments) === false) { return false; } return this.trigger('scroll', $el, delta, event); }; /** * Handles Scroll event when this scene is visible * * @param {Object} $el Target element, jQuery collection * @param {Number} delta, 1 or -1 * @param {Event} event Mouse event * @returns {Boolean} */ View.prototype.onScroll = function($el, delta, event) { }; /** * @private */ View.prototype._onFocus = function($el) { if (!$el.belongsTo(this.$el)) { return; } if (this.onFocus.apply(this, arguments) === false) { return false; } return this.trigger('focus', $el); }; /** * Handles Focus event * * @template * @param {Object} $el Target element, jQuery collection * @returns {Boolean} */ View.prototype.onFocus = function($el) { }; /** * @private */ View.prototype._onLangChange = function() { if (this.onLangChange.apply(this, arguments) === false) { return false; } this.trigger('langchange'); }; /** * When app language is changed * * @template * @returns {Boolean} */ View.prototype.onLangChange = function() { }; /** * Navigate in 4-way direction * * @template * @param {String} direction Possible values: 'left', 'right', 'up', 'down' * @param {Function} stop * @return {Boolean} Return FALSE to prevent event from bubeling */ View.prototype.navigate = function(direction, stop) { }; /** * Get all focusable elements inside this snippet. This takes currentyl focused * element and calculates new one. 
If the new sibling is not exits, new focus * is getting from the start / end of collection - cyclic. * * Is the same like getFocusable, but you can specify parent and also you can * walkthrough all elements in cyclic. * * @param {Number} direction left is equal to -1, right to 1 * @param {Object} parent jquery object. All focusable elements belongs only to this parent. * @returns {Object} jQuery collection */ View.prototype.getCircleFocusable = function(direction, parent) { var els = $('.focusable', parent || this.$el).not('.disabled').filter(':visible'), focusedIndex = Focus.focused ? els.index(Focus.focused) : -1; if (focusedIndex !== -1) { focusedIndex += direction; if (focusedIndex === -1) return els.eq(els.length - 1); else if (focusedIndex > els.length - 1) return els.eq(0); else return els.eq(focusedIndex); } }; /** * Get all focusable elements inside this scene * * @param {Number} [index] If specified, then returns only one element at the specified position * @param {Boolean} [fromCurrentlyFocused=false] If TRUE, than elements before focused element are cut off * @param {Object} [$el=this.$el] Limit search for just this specified element, jQuery collection * @param {String} [selector=.focusable] * @returns {Object} jQuery collection */ View.prototype.getFocusable = function(index, fromCurrentlyFocused, $el, selector) { var els, focusedIndex, _index = index; if (!selector) { selector = '.focusable'; } els = $(selector, $el || this.$el).filter(':visible').not('.disabled'); if (fromCurrentlyFocused) { if(typeof fromCurrentlyFocused === 'boolean'){ focusedIndex = Focus.focused ? els.index(Focus.focused) : -1; } else { focusedIndex = els.index(fromCurrentlyFocused); } if (typeof index !== 'undefined' && _index < 0) { els = els.slice(0, (focusedIndex >= 0 ? focusedIndex : 1)); //_index += els.length; } else { els = els.slice(focusedIndex >= 0 ? 
focusedIndex : 0); } } if (typeof _index !== 'undefined') { return els.eq(_index >> 0); } return els; }; /** * Convert View into string * * @returns {String} */ View.prototype.toString = function() { this.render(); return this.$el[0].outerHTML; };<|fim▁end|>
}; /** * @private
<|file_name|>ue4_prereqs_stub.go<|end_file_name|><|fim▁begin|>// +build !windows<|fim▁hole|> import "github.com/itchio/butler/endpoints/launch" func handleUE4Prereqs(params launch.LauncherParams) error { // nothing to worry about on non-windows platforms return nil }<|fim▁end|>
package native
<|file_name|>handlebars_partial_params.js<|end_file_name|><|fim▁begin|>// Inspired by an answer found at http://stackoverflow.com/questions/11523331/passing-variables-through-handlebars-partial module.exports = function partialParams(partialName, options) { if (!partialName) { console.error('No partial name given.'); return ''; } var partial = Handlebars.partials[partialName]; if (!partial) {<|fim▁hole|> return ''; } // return new Handlebars.SafeString( partial(options.hash) ); return partial(options.hash); };<|fim▁end|>
console.error('Couldnt find the compiled partial: ' + partialName);
<|file_name|>abilities.js<|end_file_name|><|fim▁begin|>exports.BattleAbilities = { "cutecharm": { inherit: true, onAfterDamage: function(damage, target, source, move) { if (move && move.isContact) { if (this.random(3) < 1) { source.addVolatile('attract', target); } } } }, "effectspore": { inherit: true, onAfterDamage: function(damage, target, source, move) { if (move && move.isContact && !source.status) { var r = this.random(300); if (r < 10) source.setStatus('slp'); else if (r < 20) source.setStatus('par'); else if (r < 30) source.setStatus('psn'); } } }, "flamebody": { inherit: true, onAfterDamage: function(damage, target, source, move) { if (move && move.isContact) { if (this.random(3) < 1) { source.trySetStatus('brn', target, move); }<|fim▁hole|> } }, "flashfire": { inherit: true, onTryHit: function(target, source, move) { if (target !== source && move.type === 'Fire') { if (move.id === 'willowisp' && (target.hasType('Fire') || target.status || target.volatiles['substitute'])) { return; } if (!target.addVolatile('flashfire')) { this.add('-immune', target, '[msg]'); } return null; } } }, "lightningrod": { desc: "During double battles, this Pokemon draws any single-target Electric-type attack to itself. If an opponent uses an Electric-type attack that affects multiple Pokemon, those targets will be hit. This ability does not affect Electric Hidden Power or Judgment. 
The user is immune to Electric and its Special Attack is increased one stage when hit by one.", shortDesc: "This Pokemon draws opposing Electric moves to itself.", onFoeRedirectTargetPriority: 1, onFoeRedirectTarget: function(target, source, source2, move) { if (move.type !== 'Electric') return; if (this.validTarget(this.effectData.target, source, move.target)) { return this.effectData.target; } }, id: "lightningrod", name: "Lightningrod", rating: 3.5, num: 32 }, "pickup": { inherit: true, onResidualOrder: null, onResidualSubOrder: null, onResidual: null }, "poisonpoint": { inherit: true, onAfterDamage: function(damage, target, source, move) { if (move && move.isContact) { if (this.random(3) < 1) { source.trySetStatus('psn', target, move); } } } }, "pressure": { inherit: true, onStart: null }, "rockhead": { inherit: true, onModifyMove: function(move) { if (move.id !== 'struggle') delete move.recoil; } }, "roughskin": { inherit: true, onAfterDamage: function(damage, target, source, move) { if (source && source !== target && move && move.isContact) { this.damage(source.maxhp/16, source, target); } } }, "shadowtag": { inherit: true, onFoeModifyPokemon: function(pokemon) { pokemon.trapped = true; } }, "static": { inherit: true, onAfterDamage: function(damage, target, source, effect) { if (effect && effect.isContact) { if (this.random(3) < 1) { source.trySetStatus('par', target, effect); } } } }, "stench": { inherit: true, onModifyMove: function(){} }, "sturdy": { inherit: true, onDamage: function(damage, target, source, effect) { if (effect && effect.ohko) { this.add('-activate',target,'Sturdy'); return 0; } } }, "synchronize": { inherit: true, onAfterSetStatus: function(status, target, source) { if (!source || source === target) return; var status = status.id; if (status === 'slp' || status === 'frz') return; if (status === 'tox') status = 'psn'; source.trySetStatus(status); } }, "trace": { inherit: true, onUpdate: function(pokemon) { var target = 
pokemon.side.foe.randomActive(); if (!target || target.fainted) return; var ability = this.getAbility(target.ability); var bannedAbilities = {forecast:1, multitype:1, trace:1}; if (bannedAbilities[target.ability]) { return; } if (ability === 'Intimidate') { if (pokemon.setAbility('Illuminate')) { //Temporary fix so Intimidate doesn't activate in third gen when traced this.add('-ability',pokemon, ability,'[from] ability: Trace','[of] '+target); } } else if (pokemon.setAbility(ability)) { this.add('-ability',pokemon, ability,'[from] ability: Trace','[of] '+target); } } }, "voltabsorb": { inherit: true, onTryHit: function(target, source, move) { if (target !== source && move.type === 'Electric' && move.id !== 'thunderwave') { if (!this.heal(target.maxhp/4)) { this.add('-immune', target, '[msg]'); } return null; } } } };<|fim▁end|>
}
<|file_name|>fragment.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! The `Fragment` type, which represents the leaves of the layout tree. #![deny(unsafe_block)] use css::node_style::StyledNode; use construct::FlowConstructor; use context::LayoutContext; use floats::{ClearBoth, ClearLeft, ClearRight, ClearType}; use flow; use flow::Flow; use flow_ref::FlowRef; use incremental::RestyleDamage; use inline::{InlineFragmentContext, InlineMetrics}; use layout_debug; use model::{Auto, IntrinsicISizes, IntrinsicISizesContribution, MaybeAuto, Specified, specified}; use model; use text; use util::OpaqueNodeMethods; use wrapper::{TLayoutNode, ThreadSafeLayoutNode}; use geom::Size2D; use gfx::display_list::OpaqueNode; use gfx::text::glyph::CharIndex; use gfx::text::text_run::TextRun; use script_traits::UntrustedNodeAddress; use serialize::{Encodable, Encoder}; use servo_msg::constellation_msg::{PipelineId, SubpageId}; use servo_net::image::holder::ImageHolder; use servo_net::local_image_cache::LocalImageCache; use servo_util::geometry::Au; use servo_util::geometry; use servo_util::logical_geometry::{LogicalRect, LogicalSize, LogicalMargin}; use servo_util::range::*; use servo_util::smallvec::SmallVec; use servo_util::str::is_whitespace; use std::cmp::{max, min}; use std::fmt; use std::from_str::FromStr; use string_cache::Atom; use style::{ComputedValues, TElement, TNode, cascade_anonymous}; use style::computed_values::{LengthOrPercentage, LengthOrPercentageOrAuto}; use style::computed_values::{LengthOrPercentageOrNone}; use style::computed_values::{LPA_Auto, clear, position, text_align, text_decoration}; use style::computed_values::{vertical_align, white_space}; use sync::{Arc, Mutex}; use url::Url; /// Fragments (`struct Fragment`) are the leaves of the layout tree. 
They cannot position /// themselves. In general, fragments do not have a simple correspondence with CSS fragments in the /// specification: /// /// * Several fragments may correspond to the same CSS box or DOM node. For example, a CSS text box /// broken across two lines is represented by two fragments. /// /// * Some CSS fragments are not created at all, such as some anonymous block fragments induced by /// inline fragments with block-level sibling fragments. In that case, Servo uses an `InlineFlow` /// with `BlockFlow` siblings; the `InlineFlow` is block-level, but not a block container. It is /// positioned as if it were a block fragment, but its children are positioned according to /// inline flow. /// /// A `GenericFragment` is an empty fragment that contributes only borders, margins, padding, and /// backgrounds. It is analogous to a CSS nonreplaced content box. /// /// A fragment's type influences how its styles are interpreted during layout. For example, /// replaced content such as images are resized differently from tables, text, or other content. /// Different types of fragments may also contain custom data; for example, text fragments contain /// text. /// /// Do not add fields to this structure unless they're really really mega necessary! Fragments get /// moved around a lot and thus their size impacts performance of layout quite a bit. /// /// FIXME(#2260, pcwalton): This can be slimmed down some by (at least) moving `inline_context` /// to be on `InlineFlow` only. #[deriving(Clone)] pub struct Fragment { /// An opaque reference to the DOM node that this `Fragment` originates from. pub node: OpaqueNode, /// The CSS style of this fragment. pub style: Arc<ComputedValues>, /// The position of this fragment relative to its owning flow. /// The size includes padding and border, but not margin. pub border_box: LogicalRect<Au>, /// The sum of border and padding; i.e. the distance from the edge of the border box to the /// content edge of the fragment. 
pub border_padding: LogicalMargin<Au>, /// The margin of the content box. pub margin: LogicalMargin<Au>, /// Info specific to the kind of fragment. Keep this enum small. pub specific: SpecificFragmentInfo, /// Holds the style context information for fragments /// that are part of an inline formatting context. pub inline_context: Option<InlineFragmentContext>, /// A debug ID that is consistent for the life of /// this fragment (via transform etc). pub debug_id: u16, /// How damaged this fragment is since last reflow. pub restyle_damage: RestyleDamage, } impl<E, S: Encoder<E>> Encodable<S, E> for Fragment { fn encode(&self, e: &mut S) -> Result<(), E> { e.emit_struct("fragment", 0, |e| { try!(e.emit_struct_field("id", 0, |e| self.debug_id().encode(e))) try!(e.emit_struct_field("border_box", 1, |e| self.border_box.encode(e))) e.emit_struct_field("margin", 2, |e| self.margin.encode(e)) }) } } /// Info specific to the kind of fragment. /// /// Keep this enum small. As in, no more than one word. Or pcwalton will yell at you. #[deriving(Clone)] pub enum SpecificFragmentInfo { GenericFragment, IframeFragment(Box<IframeFragmentInfo>), ImageFragment(Box<ImageFragmentInfo>), /// A hypothetical box (see CSS 2.1 § 10.3.7) for an absolutely-positioned block that was /// declared with `display: inline;`. 
InlineAbsoluteHypotheticalFragment(InlineAbsoluteHypotheticalFragmentInfo), InlineBlockFragment(InlineBlockFragmentInfo), InputFragment, ScannedTextFragment(Box<ScannedTextFragmentInfo>), TableFragment, TableCellFragment, TableColumnFragment(TableColumnFragmentInfo), TableRowFragment, TableWrapperFragment, UnscannedTextFragment(UnscannedTextFragmentInfo), } impl SpecificFragmentInfo { fn restyle_damage(&self) -> RestyleDamage { let flow = match *self { IframeFragment(_) | ImageFragment(_) | InputFragment | ScannedTextFragment(_) | TableFragment | TableCellFragment | TableColumnFragment(_) | TableRowFragment | TableWrapperFragment | UnscannedTextFragment(_) | GenericFragment => return RestyleDamage::empty(), InlineAbsoluteHypotheticalFragment(ref info) => &info.flow_ref, InlineBlockFragment(ref info) => &info.flow_ref, }; flow::base(flow.deref()).restyle_damage } pub fn get_type(&self) -> &'static str { match *self { GenericFragment => "GenericFragment", IframeFragment(_) => "IframeFragment", ImageFragment(_) => "ImageFragment", InlineAbsoluteHypotheticalFragment(_) => "InlineAbsoluteHypotheticalFragment", InlineBlockFragment(_) => "InlineBlockFragment", InputFragment => "InputFragment", ScannedTextFragment(_) => "ScannedTextFragment", TableFragment => "TableFragment", TableCellFragment => "TableCellFragment", TableColumnFragment(_) => "TableColumnFragment", TableRowFragment => "TableRowFragment", TableWrapperFragment => "TableWrapperFragment", UnscannedTextFragment(_) => "UnscannedTextFragment", } } } /// A hypothetical box (see CSS 2.1 § 10.3.7) for an absolutely-positioned block that was declared /// with `display: inline;`. /// /// FIXME(pcwalton): Stop leaking this `FlowRef` to layout; that is not memory safe because layout /// can clone it. 
#[deriving(Clone)] pub struct InlineAbsoluteHypotheticalFragmentInfo { pub flow_ref: FlowRef, } impl InlineAbsoluteHypotheticalFragmentInfo { pub fn new(flow_ref: FlowRef) -> InlineAbsoluteHypotheticalFragmentInfo { InlineAbsoluteHypotheticalFragmentInfo { flow_ref: flow_ref, } } } /// A fragment that represents an inline-block element. /// /// FIXME(pcwalton): Stop leaking this `FlowRef` to layout; that is not memory safe because layout /// can clone it. #[deriving(Clone)] pub struct InlineBlockFragmentInfo { pub flow_ref: FlowRef, } impl InlineBlockFragmentInfo { pub fn new(flow_ref: FlowRef) -> InlineBlockFragmentInfo { InlineBlockFragmentInfo { flow_ref: flow_ref, } } } /// A fragment that represents a replaced content image and its accompanying borders, shadows, etc. #[deriving(Clone)] pub struct ImageFragmentInfo { /// The image held within this fragment. pub image: ImageHolder<UntrustedNodeAddress>, pub for_node: UntrustedNodeAddress, pub computed_inline_size: Option<Au>, pub computed_block_size: Option<Au>, pub dom_inline_size: Option<Au>, pub dom_block_size: Option<Au>, pub writing_mode_is_vertical: bool, } impl ImageFragmentInfo { /// Creates a new image fragment from the given URL and local image cache. /// /// FIXME(pcwalton): The fact that image fragments store the cache in the fragment makes little /// sense to me. 
pub fn new(node: &ThreadSafeLayoutNode, image_url: Url, local_image_cache: Arc<Mutex<LocalImageCache<UntrustedNodeAddress>>>) -> ImageFragmentInfo { fn convert_length(node: &ThreadSafeLayoutNode, name: &Atom) -> Option<Au> { let element = node.as_element(); element.get_attr(&ns!(""), name).and_then(|string| { let n: Option<int> = FromStr::from_str(string); n }).and_then(|pixels| Some(Au::from_px(pixels))) } let is_vertical = node.style().writing_mode.is_vertical(); let dom_width = convert_length(node, &atom!("width")); let dom_height = convert_length(node, &atom!("height")); let opaque_node: OpaqueNode = OpaqueNodeMethods::from_thread_safe_layout_node(node); let untrusted_node: UntrustedNodeAddress = opaque_node.to_untrusted_node_address(); ImageFragmentInfo { image: ImageHolder::new(image_url, local_image_cache), for_node: untrusted_node, computed_inline_size: None, computed_block_size: None, dom_inline_size: if is_vertical { dom_height } else { dom_width }, dom_block_size: if is_vertical { dom_width } else { dom_height }, writing_mode_is_vertical: is_vertical, } } /// Returns the calculated inline-size of the image, accounting for the inline-size attribute. pub fn computed_inline_size(&self) -> Au { self.computed_inline_size.expect("image inline_size is not computed yet!") } /// Returns the calculated block-size of the image, accounting for the block-size attribute. pub fn computed_block_size(&self) -> Au { self.computed_block_size.expect("image block_size is not computed yet!") } /// Returns the original inline-size of the image. pub fn image_inline_size(&mut self) -> Au { let size = self.image.get_size(self.for_node).unwrap_or(Size2D::zero()); Au::from_px(if self.writing_mode_is_vertical { size.height } else { size.width }) } /// Returns the original block-size of the image. 
pub fn image_block_size(&mut self) -> Au { let size = self.image.get_size(self.for_node).unwrap_or(Size2D::zero()); Au::from_px(if self.writing_mode_is_vertical { size.width } else { size.height }) } // Return used value for inline-size or block-size. // // `dom_length`: inline-size or block-size as specified in the `img` tag. // `style_length`: inline-size as given in the CSS pub fn style_length(style_length: LengthOrPercentageOrAuto, dom_length: Option<Au>, container_inline_size: Au) -> MaybeAuto { match (MaybeAuto::from_style(style_length,container_inline_size),dom_length) { (Specified(length),_) => { Specified(length) }, (Auto,Some(length)) => { Specified(length) }, (Auto,None) => { Auto } } } /// Clamp a value obtained from style_length, based on min / max lengths. pub fn clamp_size(size: Au, min_size: LengthOrPercentage, max_size: LengthOrPercentageOrNone, container_inline_size: Au) -> Au { let min_size = model::specified(min_size, container_inline_size); let max_size = model::specified_or_none(max_size, container_inline_size); Au::max(min_size, match max_size { None => size, Some(max_size) => Au::min(size, max_size), }) } /// Tile an image pub fn tile_image(position: &mut Au, size: &mut Au, virtual_position: Au, image_size: u32) { let image_size = image_size as int; let delta_pixels = geometry::to_px(virtual_position - *position); let tile_count = (delta_pixels + image_size - 1) / image_size; let offset = Au::from_px(image_size * tile_count); let new_position = virtual_position - offset; *size = *position - new_position + *size; *position = new_position; } } /// A fragment that represents an inline frame (iframe). This stores the pipeline ID so that the size /// of this iframe can be communicated via the constellation to the iframe's own layout task. #[deriving(Clone)] pub struct IframeFragmentInfo { /// The pipeline ID of this iframe. pub pipeline_id: PipelineId, /// The subpage ID of this iframe. 
pub subpage_id: SubpageId, } impl IframeFragmentInfo { /// Creates the information specific to an iframe fragment. pub fn new(node: &ThreadSafeLayoutNode) -> IframeFragmentInfo { let (pipeline_id, subpage_id) = node.iframe_pipeline_and_subpage_ids(); IframeFragmentInfo { pipeline_id: pipeline_id, subpage_id: subpage_id, } } } /// A scanned text fragment represents a single run of text with a distinct style. A `TextFragment` /// may be split into two or more fragments across line breaks. Several `TextFragment`s may /// correspond to a single DOM text node. Split text fragments are implemented by referring to /// subsets of a single `TextRun` object. #[deriving(Clone)] pub struct ScannedTextFragmentInfo { /// The text run that this represents. pub run: Arc<Box<TextRun>>, /// The range within the above text run that this represents. pub range: Range<CharIndex>, /// The positions of newlines within this scanned text fragment. /// /// FIXME(#2260, pcwalton): Can't this go somewhere else, like in the text run or something? /// Or can we just remove it? pub new_line_pos: Vec<CharIndex>, /// The new_line_pos is eaten during line breaking. If we need to re-merge /// fragments, it will have to be restored. pub original_new_line_pos: Option<Vec<CharIndex>>, /// The intrinsic size of the text fragment. pub content_size: LogicalSize<Au>, } impl ScannedTextFragmentInfo { /// Creates the information specific to a scanned text fragment from a range and a text run. pub fn new(run: Arc<Box<TextRun>>, range: Range<CharIndex>, new_line_positions: Vec<CharIndex>, content_size: LogicalSize<Au>) -> ScannedTextFragmentInfo { ScannedTextFragmentInfo { run: run, range: range, new_line_pos: new_line_positions, original_new_line_pos: None, content_size: content_size, } } } #[deriving(Show)] pub struct SplitInfo { // TODO(bjz): this should only need to be a single character index, but both values are // currently needed for splitting in the `inline::try_append_*` functions. 
pub range: Range<CharIndex>, pub inline_size: Au, } impl SplitInfo { fn new(range: Range<CharIndex>, info: &ScannedTextFragmentInfo) -> SplitInfo { SplitInfo { range: range, inline_size: info.run.advance_for_range(&range), } } } /// Data for an unscanned text fragment. Unscanned text fragments are the results of flow /// construction that have not yet had their inline-size determined. #[deriving(Clone)] pub struct UnscannedTextFragmentInfo { /// The text inside the fragment. /// /// FIXME(pcwalton): Is there something more clever we can do here that avoids the double /// indirection while not penalizing all fragments? pub text: Box<String>, } impl UnscannedTextFragmentInfo { /// Creates a new instance of `UnscannedTextFragmentInfo` from the given DOM node. pub fn new(node: &ThreadSafeLayoutNode) -> UnscannedTextFragmentInfo { // FIXME(pcwalton): Don't copy text; atomically reference count it instead. UnscannedTextFragmentInfo { text: box node.text(), } } /// Creates a new instance of `UnscannedTextFragmentInfo` from the given text. #[inline] pub fn from_text(text: String) -> UnscannedTextFragmentInfo { UnscannedTextFragmentInfo { text: box text, } } } /// A fragment that represents a table column. #[deriving(Clone)] pub struct TableColumnFragmentInfo { /// the number of columns a <col> element should span pub span: int, } impl TableColumnFragmentInfo { /// Create the information specific to an table column fragment. pub fn new(node: &ThreadSafeLayoutNode) -> TableColumnFragmentInfo { let span = { let element = node.as_element(); element.get_attr(&ns!(""), &atom!("span")).and_then(|string| { let n: Option<int> = FromStr::from_str(string); n }).unwrap_or(0) }; TableColumnFragmentInfo { span: span, } } } impl Fragment { /// Constructs a new `Fragment` instance for the given node. /// /// This does *not* construct the text for generated content. See comments in /// `FlowConstructor::build_specific_fragment_info_for_node()` for more details. 
/// /// Arguments: /// /// * `constructor`: The flow constructor. /// * `node`: The node to create a fragment for. pub fn new(constructor: &mut FlowConstructor, node: &ThreadSafeLayoutNode) -> Fragment { let style = node.style().clone(); let writing_mode = style.writing_mode; Fragment { node: OpaqueNodeMethods::from_thread_safe_layout_node(node), style: style, restyle_damage: node.restyle_damage(), border_box: LogicalRect::zero(writing_mode), border_padding: LogicalMargin::zero(writing_mode), margin: LogicalMargin::zero(writing_mode), specific: constructor.build_specific_fragment_info_for_node(node), inline_context: None, debug_id: layout_debug::generate_unique_debug_id(), } } /// Constructs a new `Fragment` instance from a specific info. pub fn new_from_specific_info(node: &ThreadSafeLayoutNode, specific: SpecificFragmentInfo) -> Fragment { let style = node.style().clone(); let writing_mode = style.writing_mode; Fragment { node: OpaqueNodeMethods::from_thread_safe_layout_node(node), style: style, restyle_damage: node.restyle_damage(), border_box: LogicalRect::zero(writing_mode), border_padding: LogicalMargin::zero(writing_mode), margin: LogicalMargin::zero(writing_mode), specific: specific, inline_context: None, debug_id: layout_debug::generate_unique_debug_id(), } } /// Constructs a new `Fragment` instance for an anonymous table object. pub fn new_anonymous_table_fragment(node: &ThreadSafeLayoutNode, specific: SpecificFragmentInfo) -> Fragment { // CSS 2.1 § 17.2.1 This is for non-inherited properties on anonymous table fragments // example: // // <div style="display: table"> // Foo // </div> // // Anonymous table fragments, TableRowFragment and TableCellFragment, are generated around // `Foo`, but they shouldn't inherit the border. 
        // Resolve an anonymous style for this fragment instead of inheriting the node's style.
        let node_style = cascade_anonymous(&**node.style());
        let writing_mode = node_style.writing_mode;
        Fragment {
            node: OpaqueNodeMethods::from_thread_safe_layout_node(node),
            style: Arc::new(node_style),
            restyle_damage: node.restyle_damage(),
            border_box: LogicalRect::zero(writing_mode),
            border_padding: LogicalMargin::zero(writing_mode),
            margin: LogicalMargin::zero(writing_mode),
            specific: specific,
            inline_context: None,
            debug_id: layout_debug::generate_unique_debug_id(),
        }
    }

    /// Constructs a new `Fragment` instance from an opaque node.
    pub fn from_opaque_node_and_style(node: OpaqueNode,
                                      style: Arc<ComputedValues>,
                                      restyle_damage: RestyleDamage,
                                      specific: SpecificFragmentInfo)
                                      -> Fragment {
        let writing_mode = style.writing_mode;
        Fragment {
            node: node,
            style: style,
            restyle_damage: restyle_damage,
            border_box: LogicalRect::zero(writing_mode),
            border_padding: LogicalMargin::zero(writing_mode),
            margin: LogicalMargin::zero(writing_mode),
            specific: specific,
            inline_context: None,
            debug_id: layout_debug::generate_unique_debug_id(),
        }
    }

    /// Zeroes out the border/padding and margin in the fragment's current writing mode.
    pub fn reset_inline_sizes(&mut self) {
        self.border_padding = LogicalMargin::zero(self.style.writing_mode);
        self.margin = LogicalMargin::zero(self.style.writing_mode);
    }

    /// Saves the `new_line_pos` vector of a `ScannedTextFragment` so it can be restored later.
    /// Fragments of any other type are left untouched (this is a no-op for them).
    pub fn save_new_line_pos(&mut self) {
        match &mut self.specific {
            &ScannedTextFragment(ref mut info) => {
                // Only stash a copy when there is something worth restoring.
                if !info.new_line_pos.is_empty() {
                    info.original_new_line_pos = Some(info.new_line_pos.clone());
                }
            }
            _ => {}
        }
    }

    /// Restores a previously saved new-line position vector, if one was saved. A no-op for
    /// non-text fragments or when nothing was saved.
    pub fn restore_new_line_pos(&mut self) {
        match &mut self.specific {
            &ScannedTextFragment(ref mut info) => {
                // `take()` consumes the saved copy so a second restore is a no-op.
                match info.original_new_line_pos.take() {
                    None => {}
                    Some(new_line_pos) => info.new_line_pos = new_line_pos,
                }
                return
            }
            _ => {}
        }
    }

    /// Returns a debug ID of this fragment. This ID should not be considered stable across
    /// multiple layouts or fragment manipulations.
    pub fn debug_id(&self) -> u16 {
        self.debug_id
    }

    /// Transforms this fragment into another fragment of the given type, with the given size,
    /// preserving all the other data.
    pub fn transform(&self, size: LogicalSize<Au>, mut info: Box<ScannedTextFragmentInfo>)
                     -> Fragment {
        let new_border_box = LogicalRect::from_point_size(self.style.writing_mode,
                                                          self.border_box.start,
                                                          size);
        info.content_size = size.clone();
        Fragment {
            node: self.node,
            style: self.style.clone(),
            // The transformed fragment is new as far as layout is concerned, so mark it fully
            // damaged.
            restyle_damage: RestyleDamage::all(),
            border_box: new_border_box,
            border_padding: self.border_padding,
            margin: self.margin,
            specific: ScannedTextFragment(info),
            inline_context: self.inline_context.clone(),
            debug_id: self.debug_id,
        }
    }

    /// Combines this fragment's own restyle damage with that of its specific info.
    pub fn restyle_damage(&self) -> RestyleDamage {
        self.restyle_damage | self.specific.restyle_damage()
    }

    /// Adds a style to the inline context for this fragment. If the inline
    /// context doesn't exist yet, it will be created.
    pub fn add_inline_context_style(&mut self, style: Arc<ComputedValues>) {
        if self.inline_context.is_none() {
            self.inline_context = Some(InlineFragmentContext::new());
        }
        self.inline_context.as_mut().unwrap().styles.push(style.clone());
    }

    /// Determines which quantities (border/padding/margin/specified) should be included in the
    /// intrinsic inline size of this fragment.
    fn quantities_included_in_intrinsic_inline_size(&self)
                                                    -> QuantitiesIncludedInIntrinsicInlineSizes {
        match self.specific {
            // Ordinary replaced/generic content counts everything.
            GenericFragment | IframeFragment(_) | ImageFragment(_) | InlineBlockFragment(_) |
            InputFragment => QuantitiesIncludedInIntrinsicInlineSizes::all(),
            TableFragment | TableCellFragment => {
                IntrinsicInlineSizeIncludesPadding |
                    IntrinsicInlineSizeIncludesBorder |
                    IntrinsicInlineSizeIncludesSpecified
            }
            TableWrapperFragment => {
                IntrinsicInlineSizeIncludesMargins |
                    IntrinsicInlineSizeIncludesBorder |
                    IntrinsicInlineSizeIncludesSpecified
            }
            TableRowFragment => {
                IntrinsicInlineSizeIncludesBorder |
                    IntrinsicInlineSizeIncludesSpecified
            }
            // Text and column fragments contribute no surrounding quantities at all.
            ScannedTextFragment(_) | TableColumnFragment(_) | UnscannedTextFragment(_) |
            InlineAbsoluteHypotheticalFragment(_) => {
                QuantitiesIncludedInIntrinsicInlineSizes::empty()
            }
        }
    }

    /// Returns the portion of the intrinsic inline-size that consists of borders, padding, and/or
    /// margins.
    ///
    /// FIXME(#2261, pcwalton): This won't work well for inlines: is this OK?
    pub fn surrounding_intrinsic_inline_size(&self) -> Au {
        let flags = self.quantities_included_in_intrinsic_inline_size();
        let style = self.style();

        // FIXME(pcwalton): Percentages should be relative to any definite size per CSS-SIZING.
        // This will likely need to be done by pushing down definite sizes during selector
        // cascading.
        let margin = if flags.contains(IntrinsicInlineSizeIncludesMargins) {
            let margin = style.logical_margin();
            (MaybeAuto::from_style(margin.inline_start, Au(0)).specified_or_zero() +
             MaybeAuto::from_style(margin.inline_end, Au(0)).specified_or_zero())
        } else {
            Au(0)
        };

        // FIXME(pcwalton): Percentages should be relative to any definite size per CSS-SIZING.
        // This will likely need to be done by pushing down definite sizes during selector
        // cascading.
        let padding = if flags.contains(IntrinsicInlineSizeIncludesPadding) {
            let padding = style.logical_padding();
            (model::specified(padding.inline_start, Au(0)) +
             model::specified(padding.inline_end, Au(0)))
        } else {
            Au(0)
        };

        let border = if flags.contains(IntrinsicInlineSizeIncludesBorder) {
            self.border_width().inline_start_end()
        } else {
            Au(0)
        };

        margin + padding + border
    }

    /// Uses the style only to estimate the intrinsic inline-sizes. These may be modified for text
    /// or replaced elements.
    fn style_specified_intrinsic_inline_size(&self) -> IntrinsicISizesContribution {
        let flags = self.quantities_included_in_intrinsic_inline_size();
        let style = self.style();
        let specified = if flags.contains(IntrinsicInlineSizeIncludesSpecified) {
            MaybeAuto::from_style(style.content_inline_size(), Au(0)).specified_or_zero()
        } else {
            Au(0)
        };

        // FIXME(#2261, pcwalton): This won't work well for inlines: is this OK?
        let surrounding_inline_size = self.surrounding_intrinsic_inline_size();

        IntrinsicISizesContribution {
            content_intrinsic_sizes: IntrinsicISizes {
                // The style-specified size seeds both the minimum and the preferred size.
                minimum_inline_size: specified,
                preferred_inline_size: specified,
            },
            surrounding_size: surrounding_inline_size,
        }
    }

    /// Computes the used line height from this fragment's style and its font's metrics.
    pub fn calculate_line_height(&self, layout_context: &LayoutContext) -> Au {
        let font_style = self.style.get_font_arc();
        let font_metrics = text::font_metrics_for_style(layout_context.font_context(), font_style);
        text::line_height_from_style(&*self.style, &font_metrics)
    }

    /// Returns the sum of the inline-sizes of all the borders of this fragment. Note that this
    /// can be expensive to compute, so if possible use the `border_padding` field instead.
    #[inline]
    pub fn border_width(&self) -> LogicalMargin<Au> {
        // Text fragments never draw their own borders.
        let style_border_width = match self.specific {
            ScannedTextFragment(_) => LogicalMargin::zero(self.style.writing_mode),
            _ => self.style().logical_border_width(),
        };

        match self.inline_context {
            None => style_border_width,
            Some(ref inline_fragment_context) => {
                // Accumulate border widths contributed by every enclosing inline style.
                inline_fragment_context.styles
                                       .iter()
                                       .fold(style_border_width,
                                             |acc, style| acc + style.logical_border_width())
            }
        }
    }

    /// Computes the margins in the inline direction from the containing block inline-size and the
    /// style. After this call, the inline direction of the `margin` field will be correct.
    ///
    /// Do not use this method if the inline direction margins are to be computed some other way
    /// (for example, via constraint solving for blocks).
    pub fn compute_inline_direction_margins(&mut self, containing_block_inline_size: Au) {
        match self.specific {
            // Internal table fragments carry no margins of their own.
            TableFragment | TableCellFragment | TableRowFragment | TableColumnFragment(_) => {
                self.margin.inline_start = Au(0);
                self.margin.inline_end = Au(0)
            }
            _ => {
                let margin = self.style().logical_margin();
                self.margin.inline_start =
                    MaybeAuto::from_style(margin.inline_start,
                                          containing_block_inline_size).specified_or_zero();
                self.margin.inline_end =
                    MaybeAuto::from_style(margin.inline_end,
                                          containing_block_inline_size).specified_or_zero();
            }
        }
    }

    /// Computes the margins in the block direction from the containing block inline-size and the
    /// style. After this call, the block direction of the `margin` field will be correct.
    ///
    /// Do not use this method if the block direction margins are to be computed some other way
    /// (for example, via constraint solving for absolutely-positioned flows).
    pub fn compute_block_direction_margins(&mut self, containing_block_inline_size: Au) {
        match self.specific {
            // Internal table fragments carry no margins of their own.
            TableFragment | TableCellFragment | TableRowFragment | TableColumnFragment(_) => {
                self.margin.block_start = Au(0);
                self.margin.block_end = Au(0)
            }
            _ => {
                // NB: Percentages are relative to containing block inline-size (not block-size)
                // per CSS 2.1.
                let margin = self.style().logical_margin();
                self.margin.block_start =
                    MaybeAuto::from_style(margin.block_start,
                                          containing_block_inline_size).specified_or_zero();
                self.margin.block_end =
                    MaybeAuto::from_style(margin.block_end,
                                          containing_block_inline_size).specified_or_zero();
            }
        }
    }

    /// Computes the border and padding in both inline and block directions from the containing
    /// block inline-size and the style. After this call, the `border_padding` field will be
    /// correct.
    pub fn compute_border_and_padding(&mut self, containing_block_inline_size: Au) {
        // Compute border.
        let border = self.border_width();

        // Compute padding.
        let padding = match self.specific {
            // These table parts never carry padding of their own.
            TableColumnFragment(_) | TableRowFragment | TableWrapperFragment =>
                LogicalMargin::zero(self.style.writing_mode),
            _ => {
                // Text fragments have no style padding either.
                let style_padding = match self.specific {
                    ScannedTextFragment(_) => LogicalMargin::zero(self.style.writing_mode),
                    _ => model::padding_from_style(self.style(), containing_block_inline_size),
                };
                match self.inline_context {
                    None => style_padding,
                    Some(ref inline_fragment_context) => {
                        // Fold in padding contributed by enclosing inline styles.
                        inline_fragment_context.styles
                                               .iter()
                                               .fold(style_padding, |acc, style| {
                                                   acc + model::padding_from_style(&**style, Au(0))
                                               })
                    }
                }
            }
        };
        self.border_padding = border + padding;
    }

    // Return offset from original position because of `position: relative`.
pub fn relative_position(&self, containing_block_size: &LogicalSize<Au>) -> LogicalSize<Au> { fn from_style(style: &ComputedValues, container_size: &LogicalSize<Au>) -> LogicalSize<Au> { let offsets = style.logical_position(); let offset_i = if offsets.inline_start != LPA_Auto { MaybeAuto::from_style(offsets.inline_start, container_size.inline).specified_or_zero() } else { -MaybeAuto::from_style(offsets.inline_end, container_size.inline).specified_or_zero() }; let offset_b = if offsets.block_start != LPA_Auto { MaybeAuto::from_style(offsets.block_start, container_size.inline).specified_or_zero() } else { -MaybeAuto::from_style(offsets.block_end, container_size.inline).specified_or_zero() }; LogicalSize::new(style.writing_mode, offset_i, offset_b) } // Go over the ancestor fragments and add all relative offsets (if any). let mut rel_pos = if self.style().get_box().position == position::relative { from_style(self.style(), containing_block_size) } else { LogicalSize::zero(self.style.writing_mode) }; match self.inline_context { None => {} Some(ref inline_fragment_context) => { for style in inline_fragment_context.styles.iter() { if style.get_box().position == position::relative { rel_pos = rel_pos + from_style(&**style, containing_block_size); } } }, } rel_pos } /// Always inline for SCCP. /// /// FIXME(pcwalton): Just replace with the clear type from the style module for speed? #[inline(always)] pub fn clear(&self) -> Option<ClearType> { let style = self.style(); match style.get_box().clear { clear::none => None,<|fim▁hole|> clear::both => Some(ClearBoth), } } #[inline(always)] pub fn style<'a>(&'a self) -> &'a ComputedValues { &*self.style } /// Returns the text alignment of the computed style of the nearest ancestor-or-self `Element` /// node. 
    pub fn text_align(&self) -> text_align::T {
        self.style().get_inheritedtext().text_align
    }

    /// Returns the `vertical-align` value from this fragment's style.
    pub fn vertical_align(&self) -> vertical_align::T {
        self.style().get_box().vertical_align
    }

    /// Returns the `white-space` value from this fragment's style.
    pub fn white_space(&self) -> white_space::T {
        self.style().get_inheritedtext().white_space
    }

    /// Returns the text decoration of this fragment, according to the style of the nearest ancestor
    /// element.
    ///
    /// NB: This may not be the actual text decoration, because of the override rules specified in
    /// CSS 2.1 § 16.3.1. Unfortunately, computing this properly doesn't really fit into Servo's
    /// model. Therefore, this is a best lower bound approximation, but the end result may actually
    /// have the various decoration flags turned on afterward.
    pub fn text_decoration(&self) -> text_decoration::T {
        self.style().get_text().text_decoration
    }

    /// Returns the inline-start offset from margin edge to content edge.
    ///
    /// FIXME(#2262, pcwalton): I think this method is pretty bogus, because it won't work for
    /// inlines.
    pub fn inline_start_offset(&self) -> Au {
        match self.specific {
            TableWrapperFragment => self.margin.inline_start,
            TableFragment | TableCellFragment | TableRowFragment =>
                self.border_padding.inline_start,
            TableColumnFragment(_) => Au(0),
            _ => self.margin.inline_start + self.border_padding.inline_start,
        }
    }

    /// Returns true if this element can be split. This is true for text fragments.
    pub fn can_split(&self) -> bool {
        self.is_scanned_text_fragment()
    }

    /// Returns the newline positions of this fragment, if it's a scanned text fragment.
    pub fn newline_positions(&self) -> Option<&Vec<CharIndex>> {
        match self.specific {
            ScannedTextFragment(ref info) => Some(&info.new_line_pos),
            _ => None,
        }
    }

    /// Returns a mutable view of the newline positions of this fragment, if it's a scanned text
    /// fragment.
    pub fn newline_positions_mut(&mut self) -> Option<&mut Vec<CharIndex>> {
        match self.specific {
            ScannedTextFragment(ref mut info) => Some(&mut info.new_line_pos),
            _ => None,
        }
    }

    /// Returns true if and only if this is a scanned text fragment.
    fn is_scanned_text_fragment(&self) -> bool {
        match self.specific {
            ScannedTextFragment(..) => true,
            _ => false,
        }
    }

    /// Computes the intrinsic inline-sizes of this fragment.
    pub fn compute_intrinsic_inline_sizes(&mut self) -> IntrinsicISizesContribution {
        // Start from whatever the style alone specifies, then refine per fragment kind.
        let mut result = self.style_specified_intrinsic_inline_size();
        match self.specific {
            GenericFragment | IframeFragment(_) | TableFragment | TableCellFragment |
            TableColumnFragment(_) | TableRowFragment | TableWrapperFragment |
            InlineAbsoluteHypotheticalFragment(_) | InputFragment => {}
            InlineBlockFragment(ref mut info) => {
                // Inline blocks contribute their inner block flow's intrinsic sizes.
                let block_flow = info.flow_ref.as_block();
                result.union_block(&block_flow.base.intrinsic_inline_sizes)
            }
            ImageFragment(ref mut image_fragment_info) => {
                let image_inline_size = image_fragment_info.image_inline_size();
                result.union_block(&IntrinsicISizes {
                    minimum_inline_size: image_inline_size,
                    preferred_inline_size: image_inline_size,
                })
            }
            ScannedTextFragment(ref text_fragment_info) => {
                let range = &text_fragment_info.range;
                let min_line_inline_size = text_fragment_info.run.min_width_for_range(range);

                // See http://dev.w3.org/csswg/css-sizing/#max-content-inline-size.
                // TODO: Account for soft wrap opportunities.
                let max_line_inline_size = text_fragment_info.run
                                                             .metrics_for_range(range)
                                                             .advance_width;

                result.union_block(&IntrinsicISizes {
                    minimum_inline_size: min_line_inline_size,
                    preferred_inline_size: max_line_inline_size,
                })
            }
            UnscannedTextFragment(..) => {
                fail!("Unscanned text fragments should have been scanned by now!")
            }
        };

        // Take borders and padding for parent inline fragments into account, if necessary.
        if self.is_primary_fragment() {
            match self.inline_context {
                None => {}
                Some(ref context) => {
                    // Each enclosing inline style adds its border and padding to the
                    // surrounding (non-content) size.
                    for style in context.styles.iter() {
                        let border_width = style.logical_border_width().inline_start_end();
                        let padding_inline_size =
                            model::padding_from_style(&**style, Au(0)).inline_start_end();
                        result.surrounding_size = result.surrounding_size + border_width +
                            padding_inline_size;
                    }
                }
            }
        }

        result
    }

    /// TODO: What exactly does this function return? Why is it Au(0) for GenericFragment?
    pub fn content_inline_size(&self) -> Au {
        match self.specific {
            GenericFragment | IframeFragment(_) | TableFragment | TableCellFragment |
            TableRowFragment | TableWrapperFragment | InlineBlockFragment(_) | InputFragment |
            InlineAbsoluteHypotheticalFragment(_) => Au(0),
            ImageFragment(ref image_fragment_info) => {
                image_fragment_info.computed_inline_size()
            }
            ScannedTextFragment(ref text_fragment_info) => {
                // Measure the text run over this fragment's character range.
                let (range, run) = (&text_fragment_info.range, &text_fragment_info.run);
                let text_bounds = run.metrics_for_range(range).bounding_box;
                text_bounds.size.width
            }
            TableColumnFragment(_) => fail!("Table column fragments do not have inline_size"),
            UnscannedTextFragment(_) =>
                fail!("Unscanned text fragments should have been scanned by now!"),
        }
    }

    /// Returns, and computes, the block-size of this fragment.
    pub fn content_block_size(&self, layout_context: &LayoutContext) -> Au {
        match self.specific {
            GenericFragment | IframeFragment(_) | TableFragment | TableCellFragment |
            TableRowFragment | TableWrapperFragment | InlineBlockFragment(_) | InputFragment |
            InlineAbsoluteHypotheticalFragment(_) => Au(0),
            ImageFragment(ref image_fragment_info) => {
                image_fragment_info.computed_block_size()
            }
            ScannedTextFragment(_) => {
                // Compute the block-size based on the line-block-size and font size.
                self.calculate_line_height(layout_context)
            }
            TableColumnFragment(_) => fail!("Table column fragments do not have block_size"),
            UnscannedTextFragment(_) =>
                fail!("Unscanned text fragments should have been scanned by now!"),
        }
    }

    /// Returns the dimensions of the content box.
    ///
    /// This is marked `#[inline]` because it is frequently called when only one or two of the
    /// values are needed and that will save computation.
    #[inline]
    pub fn content_box(&self) -> LogicalRect<Au> {
        self.border_box - self.border_padding
    }

    /// Find the split of a fragment that includes a new-line character.
    ///
    /// A return value of `None` indicates that the fragment is not splittable.
    /// Otherwise the split information is returned. The right information is
    /// optional due to the possibility of it being whitespace.
    //
    // TODO(bjz): The text run should be removed in the future, but it is currently needed for
    // the current method of fragment splitting in the `inline::try_append_*` functions.
    pub fn find_split_info_by_new_line(&self)
            -> Option<(SplitInfo, Option<SplitInfo>, Arc<Box<TextRun>> /* TODO(bjz): remove */)> {
        match self.specific {
            GenericFragment | IframeFragment(_) | ImageFragment(_) | TableFragment |
            TableCellFragment | TableRowFragment | TableWrapperFragment | InputFragment => None,
            TableColumnFragment(_) => fail!("Table column fragments do not need to split"),
            UnscannedTextFragment(_) =>
                fail!("Unscanned text fragments should have been scanned by now!"),
            InlineBlockFragment(_) | InlineAbsoluteHypotheticalFragment(_) => {
                fail!("Inline blocks or inline absolute hypothetical fragments do not get split")
            }
            ScannedTextFragment(ref text_fragment_info) => {
                // Split at the first recorded newline position.
                let mut new_line_pos = text_fragment_info.new_line_pos.clone();
                let cur_new_line_pos = new_line_pos.remove(0).unwrap();

                let inline_start_range = Range::new(text_fragment_info.range.begin(),
                                                    cur_new_line_pos);
                // The inline-end range starts one character past the newline itself.
                let inline_end_range = Range::new(
                    text_fragment_info.range.begin() + cur_new_line_pos + CharIndex(1),
                    text_fragment_info.range.length() - (cur_new_line_pos + CharIndex(1)));

                // Left fragment is for inline-start text of first founded new-line character.
                let inline_start_fragment = SplitInfo::new(inline_start_range,
                                                           &**text_fragment_info);

                // Right fragment is for inline-end text of first founded new-line character.
                let inline_end_fragment = if inline_end_range.length() > CharIndex(0) {
                    Some(SplitInfo::new(inline_end_range, &**text_fragment_info))
                } else {
                    None
                };

                Some((inline_start_fragment, inline_end_fragment,
                      text_fragment_info.run.clone()))
            }
        }
    }

    /// Attempts to find the split positions of a text fragment so that its inline-size is
    /// no more than `max_inline-size`.
    ///
    /// A return value of `None` indicates that the fragment could not be split.
    /// Otherwise the information pertaining to the split is returned. The inline-start
    /// and inline-end split information are both optional due to the possibility of
    /// them being whitespace.
    pub fn find_split_info_for_inline_size(&self, start: CharIndex, max_inline_size: Au,
                                           starts_line: bool)
                                           -> Option<(Option<SplitInfo>, Option<SplitInfo>,
                                                      Arc<Box<TextRun>>)> {
        match self.specific {
            GenericFragment | IframeFragment(_) | ImageFragment(_) | TableFragment |
            TableCellFragment | TableRowFragment | TableWrapperFragment |
            InlineBlockFragment(_) | InputFragment |
            InlineAbsoluteHypotheticalFragment(_) => None,
            TableColumnFragment(_) => fail!("Table column fragments do not have inline_size"),
            UnscannedTextFragment(_) => {
                fail!("Unscanned text fragments should have been scanned by now!")
            }
            ScannedTextFragment(ref text_fragment_info) => {
                // Greedily accumulate glyph slices into the inline-start range until the
                // available inline size is exhausted (see loop below).
                let mut pieces_processed_count: uint = 0;
                let mut remaining_inline_size: Au = max_inline_size;
                let mut inline_start_range = Range::new(text_fragment_info.range.begin() + start,
                                                        CharIndex(0));
                let mut inline_end_range: Option<Range<CharIndex>> = None;

                debug!("split_to_inline_size: splitting text fragment \
                        (strlen={}, range={}, avail_inline_size={})",
                       text_fragment_info.run.text.len(),
                       text_fragment_info.range,
                       max_inline_size);

                for (glyphs, offset, slice_range) in text_fragment_info.run.iter_slices_for_range(
                        &text_fragment_info.range) {
                    debug!("split_to_inline_size: considering slice (offset={}, range={}, \
                            remain_inline_size={})",
                           offset,
                           slice_range,
                           remaining_inline_size);

                    let metrics = text_fragment_info.run.metrics_for_slice(glyphs, &slice_range);
                    let advance = metrics.advance_width;

                    let should_continue;
                    // Whitespace slices are always accepted, even when they overflow.
                    if advance <= remaining_inline_size || glyphs.is_whitespace() {
                        should_continue = true;

                        if starts_line && pieces_processed_count == 0 && glyphs.is_whitespace() {
                            debug!("split_to_inline_size: case=skipping leading trimmable whitespace");
                            inline_start_range.shift_by(slice_range.length());
                        } else {
                            debug!("split_to_inline_size: case=enlarging span");
                            remaining_inline_size = remaining_inline_size - advance;
                            inline_start_range.extend_by(slice_range.length());
                        }
                    } else {
                        // The advance is more than the remaining inline-size.
                        should_continue = false;
                        let slice_begin = offset + slice_range.begin();

                        if slice_begin < text_fragment_info.range.end() {
                            // There are still some things inline-start over at the end of the line. Create
                            // the inline-end chunk.
                            let inline_end_range_end = text_fragment_info.range.end() -
                                slice_begin;
                            inline_end_range = Some(Range::new(slice_begin,
                                                               inline_end_range_end));
                            debug!("split_to_inline_size: case=splitting remainder with inline_end range={:?}",
                                   inline_end_range);
                        }
                    }

                    pieces_processed_count += 1;

                    if !should_continue {
                        break
                    }
                }

                let inline_start_is_some = inline_start_range.length() > CharIndex(0);

                // If nothing fit and this fragment does not start the line, refuse the split so
                // the fragment can move to the next line instead.
                if (pieces_processed_count == 1 || !inline_start_is_some) && !starts_line {
                    None
                } else {
                    let inline_start = if inline_start_is_some {
                        Some(SplitInfo::new(inline_start_range, &**text_fragment_info))
                    } else {
                        None
                    };
                    let inline_end = inline_end_range.map(|inline_end_range| {
                        SplitInfo::new(inline_end_range, &**text_fragment_info)
                    });

                    Some((inline_start, inline_end, text_fragment_info.run.clone()))
                }
            }
        }
    }

    /// Returns true if this fragment is an unscanned text fragment that consists entirely of
    /// whitespace that should be stripped.
    pub fn is_ignorable_whitespace(&self) -> bool {
        match self.white_space() {
            // `white-space: pre` preserves all whitespace, so it is never ignorable.
            white_space::pre => return false,
            white_space::normal | white_space::nowrap => {}
        }
        match self.specific {
            UnscannedTextFragment(ref text_fragment_info) => {
                is_whitespace(text_fragment_info.text.as_slice())
            }
            _ => false,
        }
    }

    /// Assigns replaced inline-size, padding, and margins for this fragment only if it is replaced
    /// content per CSS 2.1 § 10.3.2.
    pub fn assign_replaced_inline_size_if_necessary(&mut self, container_inline_size: Au) {
        match self.specific {
            // Non-replaced content: nothing to do here.
            GenericFragment | IframeFragment(_) | TableFragment | TableCellFragment |
            TableRowFragment | TableWrapperFragment | InputFragment => return,
            TableColumnFragment(_) => fail!("Table column fragments do not have inline_size"),
            UnscannedTextFragment(_) => {
                fail!("Unscanned text fragments should have been scanned by now!")
            }
            ImageFragment(_) | ScannedTextFragment(_) | InlineBlockFragment(_) |
            InlineAbsoluteHypotheticalFragment(_) => {}
        };

        // Gather the style-specified constraints up front.
        let style_inline_size = self.style().content_inline_size();
        let style_block_size = self.style().content_block_size();
        let style_min_inline_size = self.style().min_inline_size();
        let style_max_inline_size = self.style().max_inline_size();
        let style_min_block_size = self.style().min_block_size();
        let style_max_block_size = self.style().max_block_size();
        let noncontent_inline_size = self.border_padding.inline_start_end();

        match self.specific {
            InlineAbsoluteHypotheticalFragment(ref mut info) => {
                let block_flow = info.flow_ref.as_block();
                block_flow.base.position.size.inline =
                    block_flow.base.intrinsic_inline_sizes.preferred_inline_size;

                // This is a hypothetical box, so it takes up no space.
                self.border_box.size.inline = Au(0);
            }
            InlineBlockFragment(ref mut info) => {
                let block_flow = info.flow_ref.as_block();
                self.border_box.size.inline =
                    block_flow.base.intrinsic_inline_sizes.preferred_inline_size;
                block_flow.base.block_container_inline_size = self.border_box.size.inline;
            }
            ScannedTextFragment(ref info) => {
                // Scanned text fragments will have already had their content inline-sizes assigned
                // by this point.
                self.border_box.size.inline = info.content_size.inline + noncontent_inline_size
            }
            ImageFragment(ref mut image_fragment_info) => {
                // TODO(ksh8281): compute border,margin
                let inline_size = ImageFragmentInfo::style_length(
                    style_inline_size,
                    image_fragment_info.dom_inline_size,
                    container_inline_size);

                let inline_size = match inline_size {
                    Auto => {
                        // No specified inline size: derive one from the intrinsic dimensions,
                        // preserving the aspect ratio against any specified block size.
                        let intrinsic_width = image_fragment_info.image_inline_size();
                        let intrinsic_height = image_fragment_info.image_block_size();

                        if intrinsic_height == Au(0) {
                            intrinsic_width
                        } else {
                            let ratio = intrinsic_width.to_f32().unwrap() /
                                        intrinsic_height.to_f32().unwrap();

                            let specified_height = ImageFragmentInfo::style_length(
                                style_block_size,
                                image_fragment_info.dom_block_size,
                                Au(0));
                            let specified_height = match specified_height {
                                Auto => intrinsic_height,
                                Specified(h) => h,
                            };
                            let specified_height = ImageFragmentInfo::clamp_size(
                                specified_height,
                                style_min_block_size,
                                style_max_block_size,
                                Au(0));
                            Au((specified_height.to_f32().unwrap() * ratio) as i32)
                        }
                    },
                    Specified(w) => w,
                };

                // Clamp against min/max-inline-size before committing.
                let inline_size = ImageFragmentInfo::clamp_size(inline_size,
                                                                style_min_inline_size,
                                                                style_max_inline_size,
                                                                container_inline_size);

                self.border_box.size.inline = inline_size + noncontent_inline_size;
                image_fragment_info.computed_inline_size = Some(inline_size);
            }
            _ => fail!("this case should have been handled above"),
        }
    }

    /// Assign block-size for this fragment if it is replaced content. The inline-size must have
    /// been assigned first.
    ///
    /// Ideally, this should follow CSS 2.1 § 10.6.2.
    pub fn assign_replaced_block_size_if_necessary(&mut self, containing_block_block_size: Au) {
        match self.specific {
            // Non-replaced content: nothing to do here.
            GenericFragment | IframeFragment(_) | TableFragment | TableCellFragment |
            TableRowFragment | TableWrapperFragment | InputFragment => return,
            TableColumnFragment(_) => fail!("Table column fragments do not have block_size"),
            UnscannedTextFragment(_) => {
                fail!("Unscanned text fragments should have been scanned by now!")
            }
            ImageFragment(_) | ScannedTextFragment(_) | InlineBlockFragment(_) |
            InlineAbsoluteHypotheticalFragment(_) => {}
        }

        let style_block_size = self.style().content_block_size();
        let style_min_block_size = self.style().min_block_size();
        let style_max_block_size = self.style().max_block_size();
        let noncontent_block_size = self.border_padding.block_start_end();

        match self.specific {
            ImageFragment(ref mut image_fragment_info) => {
                // TODO(ksh8281): compute border,margin,padding
                let inline_size = image_fragment_info.computed_inline_size();
                let block_size = ImageFragmentInfo::style_length(
                    style_block_size,
                    image_fragment_info.dom_block_size,
                    containing_block_block_size);

                let block_size = match block_size {
                    Auto => {
                        // Scale the intrinsic block size by the same factor the inline size was
                        // scaled, preserving the image's aspect ratio.
                        let scale = image_fragment_info.image_inline_size().to_f32().unwrap() /
                                    inline_size.to_f32().unwrap();
                        Au((image_fragment_info.image_block_size().to_f32().unwrap() / scale)
                           as i32)
                    },
                    Specified(h) => {
                        h
                    }
                };

                let block_size = ImageFragmentInfo::clamp_size(block_size,
                                                               style_min_block_size,
                                                               style_max_block_size,
                                                               Au(0));

                image_fragment_info.computed_block_size = Some(block_size);
                self.border_box.size.block = block_size + noncontent_block_size
            }
            ScannedTextFragment(ref info) => {
                // Scanned text fragments' content block-sizes are calculated by the text run
                // scanner during flow construction.
                self.border_box.size.block = info.content_size.block + noncontent_block_size
            }
            InlineBlockFragment(ref mut info) => {
                // Not the primary fragment, so we do not take the noncontent size into account.
let block_flow = info.flow_ref.as_block(); self.border_box.size.block = block_flow.base.position.size.block + block_flow.fragment.margin.block_start_end() } InlineAbsoluteHypotheticalFragment(ref mut info) => { // Not the primary fragment, so we do not take the noncontent size into account. let block_flow = info.flow_ref.as_block(); self.border_box.size.block = block_flow.base.position.size.block; } _ => fail!("should have been handled above"), } } /// Calculates block-size above baseline, depth below baseline, and ascent for this fragment when /// used in an inline formatting context. See CSS 2.1 § 10.8.1. pub fn inline_metrics(&self, layout_context: &LayoutContext) -> InlineMetrics { match self.specific { ImageFragment(ref image_fragment_info) => { let computed_block_size = image_fragment_info.computed_block_size(); InlineMetrics { block_size_above_baseline: computed_block_size + self.border_padding.block_start_end(), depth_below_baseline: Au(0), ascent: computed_block_size + self.border_padding.block_end, } } ScannedTextFragment(ref text_fragment) => { // See CSS 2.1 § 10.8.1. let line_height = self.calculate_line_height(layout_context); InlineMetrics::from_font_metrics(&text_fragment.run.font_metrics, line_height) } InlineBlockFragment(ref info) => { // See CSS 2.1 § 10.8.1. let block_flow = info.flow_ref.deref().as_immutable_block(); let font_style = self.style.get_font_arc(); let font_metrics = text::font_metrics_for_style(layout_context.font_context(), font_style); InlineMetrics::from_block_height(&font_metrics, block_flow.base.position.size.block + block_flow.fragment.margin.block_start_end()) } InlineAbsoluteHypotheticalFragment(_) => { // Hypothetical boxes take up no space. 
InlineMetrics { block_size_above_baseline: Au(0), depth_below_baseline: Au(0), ascent: Au(0), } } _ => { InlineMetrics { block_size_above_baseline: self.border_box.size.block, depth_below_baseline: Au(0), ascent: self.border_box.size.block, } } } } /// Returns true if this fragment is a hypothetical box. See CSS 2.1 § 10.3.7. pub fn is_hypothetical(&self) -> bool { match self.specific { InlineAbsoluteHypotheticalFragment(_) => true, _ => false, } } /// Returns true if this fragment can merge with another adjacent fragment or false otherwise. pub fn can_merge_with_fragment(&self, other: &Fragment) -> bool { match (&self.specific, &other.specific) { (&UnscannedTextFragment(_), &UnscannedTextFragment(_)) => { // FIXME: Should probably use a whitelist of styles that can safely differ (#3165) self.style().get_font() == other.style().get_font() && self.text_decoration() == other.text_decoration() && self.white_space() == other.white_space() } _ => false, } } /// Returns true if and only if this is the *primary fragment* for the fragment's style object /// (conceptually, though style sharing makes this not really true, of course). The primary /// fragment is the one that draws backgrounds, borders, etc., and takes borders, padding and /// margins into account. Every style object has at most one primary fragment. /// /// At present, all fragments are primary fragments except for inline-block and table wrapper /// fragments. Inline-block fragments are not primary fragments because the corresponding block /// flow is the primary fragment, while table wrapper fragments are not primary fragments /// because the corresponding table flow is the primary fragment. 
pub fn is_primary_fragment(&self) -> bool { match self.specific { InlineBlockFragment(_) | InlineAbsoluteHypotheticalFragment(_) | TableWrapperFragment => false, GenericFragment | IframeFragment(_) | ImageFragment(_) | ScannedTextFragment(_) | TableFragment | TableCellFragment | TableColumnFragment(_) | TableRowFragment | UnscannedTextFragment(_) | InputFragment => true, } } pub fn update_late_computed_inline_position_if_necessary(&mut self) { match self.specific { InlineAbsoluteHypotheticalFragment(ref mut info) => { let position = self.border_box.start.i; info.flow_ref.update_late_computed_inline_position_if_necessary(position) } _ => {} } } pub fn update_late_computed_block_position_if_necessary(&mut self) { match self.specific { InlineAbsoluteHypotheticalFragment(ref mut info) => { let position = self.border_box.start.b; info.flow_ref.update_late_computed_block_position_if_necessary(position) } _ => {} } } } impl fmt::Show for Fragment { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { try!(write!(f, "({} {} ", self.debug_id(), self.specific.get_type())); try!(write!(f, "bp {}", self.border_padding)); try!(write!(f, " ")); try!(write!(f, "m {}", self.margin)); write!(f, ")") } } bitflags! { flags QuantitiesIncludedInIntrinsicInlineSizes: u8 { static IntrinsicInlineSizeIncludesMargins = 0x01, static IntrinsicInlineSizeIncludesPadding = 0x02, static IntrinsicInlineSizeIncludesBorder = 0x04, static IntrinsicInlineSizeIncludesSpecified = 0x08, } }<|fim▁end|>
clear::left => Some(ClearLeft), clear::right => Some(ClearRight),
<|file_name|>run-test-sort-selection.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python from algorithms.sorting.selection_sort import * from __prototype__ import * <|fim▁hole|>if __name__ == '__main__': test_all(selection_sort)<|fim▁end|>
<|file_name|>style.hpp<|end_file_name|><|fim▁begin|>/***************************************************************************** * * This file is part of Mapnik (c++ mapping toolkit) * * Copyright (C) 2006 Artem Pavlenko * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version.<|fim▁hole|> * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA * *****************************************************************************/ //$Id: style.hpp 39 2005-04-10 20:39:53Z pavlenko $ #ifndef STYLE_HPP #define STYLE_HPP // mapnik #include <mapnik/color.hpp> #include <mapnik/symbolizer.hpp> // boost #include <boost/shared_ptr.hpp> // stl #include <vector> #include <algorithm> #include <functional> namespace mapnik { } #endif //STYLE_HPP<|fim▁end|>
* * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of
<|file_name|>utils.js<|end_file_name|><|fim▁begin|>"use strict"; // copied from http://www.broofa.com/Tools/Math.uuid.js var CHARS = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'.split(''); exports.uuid = function () { var chars = CHARS, uuid = new Array(36), rnd=0, r; for (var i = 0; i < 36; i++) { if (i==8 || i==13 || i==18 || i==23) { uuid[i] = '-'; } else if (i==14) { uuid[i] = '4'; } else { if (rnd <= 0x02) rnd = 0x2000000 + (Math.random()*0x1000000)|0; r = rnd & 0xf; rnd = rnd >> 4; uuid[i] = chars[(i == 19) ? (r & 0x3) | 0x8 : r]; } } return uuid.join(''); }; exports.in_array = function (item, array) { return (array.indexOf(item) != -1); }; exports.sort_keys = function (obj) { return Object.keys(obj).sort(); }; exports.uniq = function (arr) { var out = []; var o = 0; for (var i=0,l=arr.length; i < l; i++) { if (out.length === 0) { out.push(arr[i]); } else if (out[o] != arr[i]) { out.push(arr[i]); o++; } } return out; } exports.ISODate = function (d) { function pad(n) {return n<10 ? '0'+n : n} return d.getUTCFullYear()+'-'<|fim▁hole|> + pad(d.getUTCMinutes())+':' + pad(d.getUTCSeconds())+'Z' } var _daynames = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat']; var _monnames = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']; function _pad (num, n, p) { var s = '' + num; p = p || '0'; while (s.length < n) s = p + s; return s; } exports.pad = _pad; exports.date_to_str = function (d) { return _daynames[d.getDay()] + ', ' + _pad(d.getDate(),2) + ' ' + _monnames[d.getMonth()] + ' ' + d.getFullYear() + ' ' + _pad(d.getHours(),2) + ':' + _pad(d.getMinutes(),2) + ':' + _pad(d.getSeconds(),2) + ' ' + d.toString().match(/\sGMT([+-]\d+)/)[1]; } exports.decode_qp = function (line) { line = line.replace(/\r\n/g,"\n").replace(/[ \t]+\r?\n/g,"\n"); if (! /=/.test(line)) { // this may be a pointless optimisation... 
return new Buffer(line); } line = line.replace(/=\n/mg, ''); var buf = new Buffer(line.length); var pos = 0; for (var i=0,l=line.length; i < l; i++) { if (line[i] === '=' && /=[0-9a-fA-F]{2}/.test(line[i] + line[i+1] + line[i+2])) { i++; buf[pos] = parseInt(line[i] + line[i+1], 16); i++; } else { buf[pos] = line.charCodeAt(i); } pos++; } return buf.slice(0, pos); } function _char_to_qp (ch) { return "=" + _pad(ch.charCodeAt(0).toString(16).toUpperCase(), 2); } // Shameless attempt to copy from Perl's MIME::QuotedPrint::Perl code. exports.encode_qp = function (str) { str = str.replace(/([^\ \t\n!"#\$%&'()*+,\-.\/0-9:;<>?\@A-Z\[\\\]^_`a-z{|}~])/g, function (orig, p1) { return _char_to_qp(p1); }).replace(/([ \t]+)$/gm, function (orig, p1) { return p1.split('').map(_char_to_qp).join(''); }); // Now shorten lines to 76 chars, but don't break =XX encodes. // Method: iterate over to char 73. // If char 74, 75 or 76 is = we need to break before the =. // Otherwise break at 76. var cur_length = 0; var out = ''; for (var i=0; i<str.length; i++) { if (str[i] === '\n') { out += '\n'; cur_length = 0; continue; } cur_length++; if (cur_length <= 73) { out += str[i]; } else if (cur_length > 73 && cur_length < 76) { if (str[i] === '=') { out += '=\n='; cur_length = 1; } else { out += str[i]; } } else { // Otherwise got to char 76 // Don't insert '=\n' if end of string or next char is already \n: if ((i === (str.length - 1)) || (str[i+1] === '\n')) { out += str[i]; } else { out += '=\n' + str[i]; cur_length = 1; } } } return out; } var versions = process.version.split('.'), version = Number(versions[0].substring(1)), subversion = Number(versions[1]); exports.existsSync = require((version > 0 || subversion >= 8) ? 'fs' : 'path').existsSync; exports.indexOfLF = function (buf, maxlength) { for (var i=0; i<buf.length; i++) { if (maxlength && (i === maxlength)) break; if (buf[i] === 0x0a) return i; } return -1; }<|fim▁end|>
+ pad(d.getUTCMonth()+1)+'-' + pad(d.getUTCDate())+'T' + pad(d.getUTCHours())+':'
<|file_name|>matchers.qunit.js<|end_file_name|><|fim▁begin|>/*global QUnit, sinon */ sap.ui.define([ 'sap/ui/test/Opa', 'sap/ui/test/Opa5', "sap/m/Button", "sap/m/Input", "sap/ui/test/matchers/PropertyStrictEquals", "sap/ui/test/matchers/Ancestor", "sap/ui/test/matchers/Descendant", "sap/ui/test/matchers/MatcherFactory", "sap/ui/layout/HorizontalLayout" ], function (Opa, Opa5, Button, Input, PropertyStrictEquals, Ancestor, Descendant, MatcherFactory, HorizontalLayout) { "use strict"; QUnit.test("Should not execute the test in debug mode", function (assert) { assert.ok(!window["sap-ui-debug"], "Starting the OPA tests in debug mode is not supported since it changes timeouts"); }); var iExecutionDelay = Opa.config.executionDelay; QUnit.module("matchers without fake time", { beforeEach: function () { this.oButton = new Button("testButton", {text : "foo"}); this.oButton.placeAt("qunit-fixture"); sap.ui.getCore().applyChanges(); }, afterEach: function () { this.oButton.destroy(); } }); QUnit.test("Should find a control by id without matchers", function(assert) { var done = assert.async();<|fim▁hole|> // System under Test var oOpa5 = new Opa5(); // Act oOpa5.waitFor({ id : "testButton", success : oSuccessSpy, timeout : 1, //second pollingInterval : 200 //millisecond }); oOpa5.emptyQueue().done(function() { var oSuccessButton = oSuccessSpy.args[0][0]; assert.strictEqual(oSuccessButton, this.oButton, "found a control"); done(); }.bind(this)); }); QUnit.test("Should not call check if no matcher is matching on a single control", function(assert) { var oCheckSpy = this.spy(); var done = assert.async(); // System under Test var oOpa5 = new Opa5(); var oMatcher = new PropertyStrictEquals({ name : "text", value : "bar" }); // Act oOpa5.waitFor({ id : "testButton", matchers : [ oMatcher ], check : oCheckSpy, timeout : 1, //second pollingInterval : 200 //millisecond }); oOpa5.emptyQueue().fail(function () { assert.strictEqual(oCheckSpy.callCount, 0, "did not call the check"); 
done(); }); }); QUnit.test("Should skip a check if matchers filtered out all controls", function(assert) { var oCheckSpy = this.spy(); var done = assert.async(); var oTextMatcher = new PropertyStrictEquals({ name : "text", value : "baz" }); // System under Test var oOpa5 = new Opa5(); // Act oOpa5.waitFor({ id : ["myButton", "myButton2"], matchers : oTextMatcher, check : oCheckSpy, timeout : 1 //second }); Opa5.emptyQueue().fail(function () { assert.strictEqual(oCheckSpy.callCount, 0, "did not call the check"); done(); }); }); QUnit.module("matchers in waitfor", { beforeEach : function () { sinon.config.useFakeTimers = true; this.oLayout1 = new HorizontalLayout({id: "layout1"}); this.oButton = new Button("myButton", {text : "foo"}); this.oButton2 = new Button("myButton2", {text : "bar"}); this.oButton.placeAt(this.oLayout1); this.oLayout1.placeAt("qunit-fixture"); this.oButton2.placeAt("qunit-fixture"); sap.ui.getCore().applyChanges(); }, afterEach : function () { sinon.config.useFakeTimers = false; this.oButton.destroy(); this.oButton2.destroy(); this.oLayout1.destroy(); } }); QUnit.test("Should execute a matcher and pass its value to success if no control is searched", function (assert) { var oOpa5 = new Opa5(), fnMatcherStub = this.stub().returns("foo"), fnActionSpy = this.spy(), done = assert.async(); // give some common defaults to see if they interfere and the plugin thinks we are looking for a control Opa5.extendConfig({ viewNamespace: "foo", visible: true }); oOpa5.waitFor({ matchers: fnMatcherStub, actions: fnActionSpy }); oOpa5.emptyQueue().done(function () { // Assert sinon.assert.calledOnce(fnMatcherStub); sinon.assert.calledWith(fnActionSpy, "foo"); done(); }); this.clock.tick(1000); }); QUnit.test("Should not call check if no matcher is matching", function(assert) { var oCheckSpy = this.spy(); // System under Test var oOpa5 = new Opa5(); var oMatcher = new PropertyStrictEquals({ name : "text", value : "bar" }); var oMatchSpy = this.spy(oMatcher, 
"isMatching"); // Act oOpa5.waitFor({ id : "myButton", matchers : [ oMatcher ], check : oCheckSpy, timeout : 1, //second pollingInterval : 200 //millisecond }); oOpa5.emptyQueue(); this.clock.tick(iExecutionDelay); assert.strictEqual(oMatchSpy.callCount, 1, "called the matcher for the first time"); this.clock.tick(200); assert.strictEqual(oMatchSpy.callCount, 2, "called the matcher for the second time"); // Assert assert.strictEqual(oCheckSpy.callCount, 0, "did not call the check"); // Cleanup this.clock.tick(1000); }); QUnit.test("Should call check when all matchers are matching", function(assert) { var oSuccessSpy = this.spy(); // System under Test var oOpa5 = new Opa5(); var oTextMatcher = new PropertyStrictEquals({ name : "text", value : "foo" }); var oEnabledMatcher = new PropertyStrictEquals({ name : "enabled", value : false }); var oTextMatcherSpy = this.spy(oTextMatcher, "isMatching"); var oEnabledMatcherSpy = this.spy(oEnabledMatcher, "isMatching"); this.oButton.setEnabled(true); // Act oOpa5.waitFor({ id : "myButton", matchers : [ oEnabledMatcher, oTextMatcher ], success : oSuccessSpy, timeout : 1, //second pollingInterval : 200 //millisecond }); Opa5.emptyQueue(); this.clock.tick(iExecutionDelay); // Assert assert.strictEqual(oTextMatcherSpy.callCount, 0, "did not call the oTextMatcher yet"); assert.strictEqual(oEnabledMatcherSpy.callCount, 1, "called the oEnabledMatcher"); this.oButton.setEnabled(false); this.clock.tick(200); assert.strictEqual(oTextMatcherSpy.callCount, 1, "did call the oTextMatcher"); assert.strictEqual(oEnabledMatcherSpy.callCount, 2, "did call the oEnabledMatcher again"); assert.strictEqual(oSuccessSpy.callCount, 1, "did call the success"); }); QUnit.test("Should use declarative matchers", function(assert) { var oSuccessSpy = this.spy(); var oOpa5 = new Opa5(); var fnIsMatching = PropertyStrictEquals.prototype.isMatching; var mCalls = {text: 0, enabled: 0, busy: 0}; PropertyStrictEquals.prototype.isMatching = function () { 
mCalls[this.getName()] += 1; return fnIsMatching.apply(this, arguments); }; this.oButton.setEnabled(true); oOpa5.waitFor({ id : "myButton", propertyStrictEquals: [{ name : "enabled", value : false }, { name : "text", value : "foo" }], matchers: { propertyStrictEquals: { name: "busy", value: false } }, success : oSuccessSpy, timeout : 1, //second pollingInterval : 200 //millisecond }); Opa5.emptyQueue(); this.clock.tick(iExecutionDelay); assert.strictEqual(mCalls.enabled, 1, "called the enabled (state) matcher"); assert.strictEqual(mCalls.text, 0, "did not call the text matcher yet (declared on root)"); assert.strictEqual(mCalls.busy, 0, "did not call the busy matcher yet (declared in matchers)"); this.oButton.setEnabled(false); this.clock.tick(200); assert.strictEqual(mCalls.enabled, 2, "did call the enabled (state) matcher again"); assert.strictEqual(mCalls.text, 1, "did call the text matcher (declared on root)"); assert.strictEqual(mCalls.busy, 1, "did call the busy matcher (declared in matchers)"); assert.strictEqual(oSuccessSpy.callCount, 1, "did call the success"); PropertyStrictEquals.prototype.isMatching = fnIsMatching; }); QUnit.test("Should use declarative matchers with expansions", function (assert) { var oSuccessSpy = this.spy(); var mCalls = { propertyStrictEquals: {text: 0}, ancestor: [], descendant: [] }; var fnPropertyMatch = PropertyStrictEquals.prototype.isMatching; PropertyStrictEquals.prototype.isMatching = function () { mCalls.propertyStrictEquals[this.getName()] += 1; return fnPropertyMatch.apply(this, arguments); }; var fnAncestor = Ancestor; Ancestor = function () { var ancestor = arguments[0]; return function () { mCalls.ancestor.push({ ancestor: ancestor, child: arguments[0] }); return fnAncestor.call(this, ancestor).apply(this, arguments); }; }; var fnDescendant = Descendant; Descendant = function () { var descendant = arguments[0]; return function () { mCalls.descendant.push({ descendant: descendant, parent: arguments[0] }); return 
fnDescendant.call(this, descendant).apply(this, arguments); }; }; var fnGetMatchers = sinon.stub(MatcherFactory.prototype, "_getSupportedMatchers").returns({ propertyStrictEquals: PropertyStrictEquals, ancestor: Ancestor, descendant: Descendant }); var oOpa5 = new Opa5(); oOpa5.waitFor({ id : "myButton", propertyStrictEquals: { name : "text", value : "foo" }, matchers: { ancestor: { id: "layout1", descendant: { id: "myButton" } } }, success : oSuccessSpy, timeout : 1, //second pollingInterval : 200 //millisecond }); Opa5.emptyQueue(); this.clock.tick(200); assert.strictEqual(mCalls.propertyStrictEquals.text, 1, "called the text matcher"); assert.strictEqual(mCalls.descendant.length, 1, "called the descendant matcher"); assert.strictEqual(mCalls.ancestor.length, 1, "called the ancestor matcher"); assert.strictEqual(oSuccessSpy.callCount, 1, "did call the success"); // restore PropertyStrictEquals.prototype.isMatching = fnPropertyMatch; Ancestor = fnAncestor; Descendant = fnDescendant; fnGetMatchers.restore(); }); QUnit.test("Should only pass matching controls to success", function(assert) { var oSuccessSpy = this.spy(); var oTextMatcher = new PropertyStrictEquals({ name : "text", value : "bar" }); // System under Test var oOpa5 = new Opa5(); // Act oOpa5.waitFor({ id : ["myButton", "myButton2"], matchers : [ oTextMatcher ], success : oSuccessSpy, timeout : 1, //second pollingInterval : 200 //millisecond }); Opa5.emptyQueue(); // Assert this.clock.tick(200); assert.strictEqual(oSuccessSpy.callCount, 1, "did call the success"); var aControls = oSuccessSpy.args[0][0]; assert.strictEqual(aControls.length, 1, "did pass only one button"); assert.strictEqual(aControls[0].sId, "myButton2", "did pass the correct button"); }); QUnit.test("Should only pass a single matching control to success", function(assert) { var oSuccessSpy = this.spy(); var oTextMatcher = new PropertyStrictEquals({ name : "text", value : "foo" }); // System under Test var oOpa5 = new Opa5(); // Act 
oOpa5.waitFor({ id : "myButton", matchers : [ oTextMatcher ], success : oSuccessSpy, timeout : 1, //second pollingInterval : 200 //millisecond }); Opa5.emptyQueue(); // Assert this.clock.tick(200); assert.strictEqual(oSuccessSpy.callCount, 1, "did call the success"); var oControl = oSuccessSpy.args[0][0]; assert.strictEqual(oControl.sId, "myButton", "did pass the correct button"); }); QUnit.test("Should call a matcher which is an inline function", function(assert) { // System under Test var oOpa5 = new Opa5(); oOpa5.extendConfig({pollingInterval : 200 /*millisecond*/}); var fnMatcher = this.spy(function(oControl) { return !!oControl; }); // Act var fnCheckSpy1 = this.spy(function(){ return true; }); var fnCheckSpy2 = this.spy(function(){ return true; }); oOpa5.waitFor({ id : "myButton", matchers : fnMatcher, check : fnCheckSpy1, timeout : 1 //second }); oOpa5.waitFor({ id : "myButton", matchers : [ fnMatcher ], check : fnCheckSpy2, timeout : 1 //second }); oOpa5.emptyQueue(); this.clock.tick(iExecutionDelay); this.clock.tick(iExecutionDelay); assert.strictEqual(fnMatcher.callCount, 2, "called the matcher twice"); // Assert assert.ok(fnCheckSpy1.calledBefore(fnCheckSpy2), "Checks executed in correct order"); assert.strictEqual(fnCheckSpy1.callCount, 1, "called first check"); assert.strictEqual(fnCheckSpy2.callCount, 1, "called last check"); // Cleanup this.clock.tick(1000); }); var waitForIdWithChangingMatchers = function(vId, oCheckSpy, oSuccessSpy) { var fnReturnTextMatcher = function(oControl) { return oControl.getText(); }; var fnStringChangeMathcer = function(sText) { return sText + "test"; }; // System under Test var oOpa5 = new Opa5(); // Act oOpa5.waitFor({ id : vId, matchers : [ fnReturnTextMatcher, fnStringChangeMathcer ], check : function() { oCheckSpy.call(this, arguments); return true; }, success : oSuccessSpy, timeout : 1, //second pollingInterval : 200 //millisecond }); Opa5.emptyQueue(); }; QUnit.test("Should pass multiple truthy results of matching 
to the next matchers and to success as array", function(assert) { var oSuccessSpy = this.spy(); var oCheckSpy = this.spy(); waitForIdWithChangingMatchers(["myButton", "myButton2"], oCheckSpy, oSuccessSpy); // Assert this.clock.tick(200); var aText = oSuccessSpy.args[0][0]; assert.strictEqual(aText.length, 2, "Matchers did pass two values"); assert.strictEqual(aText[0], "footest", "The first value is 'footest'"); assert.strictEqual(aText[1], "bartest", "The second value is 'bartest'"); var aCheckText = oCheckSpy.args[0][0][0]; assert.strictEqual(aCheckText.length, aText.length, "Check got same amout of values"); assert.strictEqual(aCheckText[0], aText[0], "The first value is same"); assert.strictEqual(aCheckText[1], aText[1], "The second value is same"); }); QUnit.test("Should pass only truthy result of matching to the next matchers and to success as value", function(assert) { var oSuccessSpy = this.spy(); var oCheckSpy = this.spy(); waitForIdWithChangingMatchers("myButton", oCheckSpy, oSuccessSpy); // Assert this.clock.tick(200); var aText = oSuccessSpy.args[0][0]; assert.strictEqual(aText, "footest", "The matched value is 'footest'"); var aCheckText = oCheckSpy.args[0][0][0]; assert.strictEqual(aCheckText, aText, "Check got same value as success"); }); QUnit.module("state matchers", { beforeEach : function () { this.oButton = new Button("enabledButton", {text : "foo"}); this.oButton2 = new Button("disabledButton", {text : "bar", enabled: false}); this.oInput = new Input("editableInput"); this.oInput2 = new Input("noneditableInput", {editable: false}); this.oButton.placeAt("qunit-fixture"); this.oButton2.placeAt("qunit-fixture"); this.oInput.placeAt("qunit-fixture"); this.oInput2.placeAt("qunit-fixture"); sap.ui.getCore().applyChanges(); }, afterEach : function () { this.oButton.destroy(); this.oButton2.destroy(); this.oInput.destroy(); this.oInput2.destroy(); } }); QUnit.test("Should filter by enabled state when autoWait is true", function (assert) { var done = 
assert.async(); var oOpa5 = new Opa5(); Opa5.extendConfig({ autoWait: true }); oOpa5.waitFor({ controlType: "sap.m.Button", success: function (aButtons) { assert.strictEqual(aButtons.length, 1, "Should include only enabled controls by default (enabled: undefined)"); } }); oOpa5.waitFor({ controlType: "sap.m.Button", enabled: false, success: function (aButtons) { assert.strictEqual(aButtons.length, 2, "Should include both enabled and disabled controls when enabled: false"); } }); Opa5.emptyQueue().done(function () { Opa5.resetConfig(); done(); }); }); QUnit.test("Should filter by enabled state when autoWait is false", function (assert) { var done = assert.async(); var oOpa5 = new Opa5(); oOpa5.waitFor({ controlType: "sap.m.Button", success: function (aButtons) { assert.strictEqual(aButtons.length, 2, "Should include both enabled and disabled controls by default (enabled: undefined)"); } }); oOpa5.waitFor({ controlType: "sap.m.Button", enabled: true, success: function (aButtons) { assert.strictEqual(aButtons.length, 1, "Should include only enabled controls when enabled: true"); } }); Opa5.emptyQueue().done(done); }); QUnit.test("Should apply interactable matcher when interactable is true and autoWait is false", function (assert) { var done = assert.async(); var oOpa5 = new Opa5(); this.oButton2.setEnabled(true); this.oButton2.setBusy(true); oOpa5.waitFor({ controlType: "sap.m.Button", interactable: true, success: function (aButtons) { assert.strictEqual(aButtons.length, 1, "Should include only interactable controls when interactable: true"); } }); Opa5.emptyQueue().done(function () { Opa5.resetConfig(); done(); }); }); QUnit.test("Should filter by enabled state when interactable is true", function (assert) { var done = assert.async(); var oOpa5 = new Opa5(); oOpa5.waitFor({ controlType: "sap.m.Button", interactable: true, success: function (aButtons) { assert.strictEqual(aButtons.length, 1, "Should include only enabled controls when enabled: undefined"); } }); 
oOpa5.waitFor({ controlType: "sap.m.Button", interactable: true, enabled: false, success: function (aButtons) { assert.strictEqual(aButtons.length, 2, "Should include both enabled and disabled controls when enabled: false"); } }); Opa5.emptyQueue().done(function () { Opa5.resetConfig(); done(); }); }); QUnit.test("Should filter by editable state", function (assert) { var done = assert.async(); var oOpa5 = new Opa5(); Opa5.extendConfig({ autoWait: true }); oOpa5.waitFor({ controlType: "sap.m.Input", editable: true, success: function (aInputs) { assert.strictEqual(aInputs.length, 1, "Should include only editable controls by default (editable: undefined)"); } }); oOpa5.waitFor({ controlType: "sap.m.Input", editable: false, success: function (aInputs) { assert.strictEqual(aInputs.length, 2, "Should include all controls when editable: false"); } }); Opa5.emptyQueue().done(function () { Opa5.resetConfig(); done(); }); }); });<|fim▁end|>
var oSuccessSpy = this.spy();
<|file_name|>executor_unix.go<|end_file_name|><|fim▁begin|>//go:build darwin || dragonfly || freebsd || linux || netbsd || openbsd || solaris // +build darwin dragonfly freebsd linux netbsd openbsd solaris package executor <|fim▁hole|>) // configure new process group for child process func (e *UniversalExecutor) setNewProcessGroup() error { if e.childCmd.SysProcAttr == nil { e.childCmd.SysProcAttr = &syscall.SysProcAttr{} } e.childCmd.SysProcAttr.Setpgid = true return nil } // Cleanup any still hanging user processes func (e *UniversalExecutor) cleanupChildProcesses(proc *os.Process) error { // If new process group was created upon command execution // we can kill the whole process group now to cleanup any leftovers. if e.childCmd.SysProcAttr != nil && e.childCmd.SysProcAttr.Setpgid { if err := syscall.Kill(-proc.Pid, syscall.SIGKILL); err != nil && err.Error() != noSuchProcessErr { return err } return nil } return proc.Kill() } // Only send the process a shutdown signal (default INT), doesn't // necessarily kill it. func (e *UniversalExecutor) shutdownProcess(sig os.Signal, proc *os.Process) error { if sig == nil { sig = os.Interrupt } if err := proc.Signal(sig); err != nil && err.Error() != finishedErr { return fmt.Errorf("executor shutdown error: %v", err) } return nil }<|fim▁end|>
import ( "fmt" "os" "syscall"
<|file_name|>test-string-decoder.js<|end_file_name|><|fim▁begin|>// Copyright Joyent, Inc. and other Node contributors. // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to permit // persons to whom the Software is furnished to do so, subject to the // following conditions: // // The above copyright notice and this permission notice shall be included // in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. 
'use strict'; const common = require('../common'); const assert = require('assert'); const inspect = require('util').inspect; const StringDecoder = require('string_decoder').StringDecoder; // Test default encoding let decoder = new StringDecoder(); assert.strictEqual(decoder.encoding, 'utf8'); // Should work without 'new' keyword const decoder2 = {}; StringDecoder.call(decoder2); assert.strictEqual(decoder2.encoding, 'utf8'); // UTF-8 test('utf-8', Buffer.from('$', 'utf-8'), '$'); test('utf-8', Buffer.from('¢', 'utf-8'), '¢'); test('utf-8', Buffer.from('€', 'utf-8'), '€'); test('utf-8', Buffer.from('𤭢', 'utf-8'), '𤭢'); // A mixed ascii and non-ascii string // Test stolen from deps/v8/test/cctest/test-strings.cc // U+02E4 -> CB A4 // U+0064 -> 64 // U+12E4 -> E1 8B A4 // U+0030 -> 30 // U+3045 -> E3 81 85 test( 'utf-8', Buffer.from([0xCB, 0xA4, 0x64, 0xE1, 0x8B, 0xA4, 0x30, 0xE3, 0x81, 0x85]), '\u02e4\u0064\u12e4\u0030\u3045' ); // Some invalid input, known to have caused trouble with chunking // in https://github.com/nodejs/node/pull/7310#issuecomment-226445923 // 00: |00000000 ASCII // 41: |01000001 ASCII // B8: 10|111000 continuation // CC: 110|01100 two-byte head // E2: 1110|0010 three-byte head // F0: 11110|000 four-byte head // F1: 11110|001'another four-byte head // FB: 111110|11 "five-byte head", not UTF-8 test('utf-8', Buffer.from('C9B5A941', 'hex'), '\u0275\ufffdA'); test('utf-8', Buffer.from('E2', 'hex'), '\ufffd'); test('utf-8', Buffer.from('E241', 'hex'), '\ufffdA'); test('utf-8', Buffer.from('CCCCB8', 'hex'), '\ufffd\u0338'); test('utf-8', Buffer.from('F0B841', 'hex'), '\ufffdA'); test('utf-8', Buffer.from('F1CCB8', 'hex'), '\ufffd\u0338'); test('utf-8', Buffer.from('F0FB00', 'hex'), '\ufffd\ufffd\0'); test('utf-8', Buffer.from('CCE2B8B8', 'hex'), '\ufffd\u2e38'); test('utf-8', Buffer.from('E2B8CCB8', 'hex'), '\ufffd\u0338'); test('utf-8', Buffer.from('E2FBCC01', 'hex'), '\ufffd\ufffd\ufffd\u0001'); test('utf-8', Buffer.from('CCB8CDB9', 'hex'), 
'\u0338\u0379'); // CESU-8 of U+1D40D // V8 has changed their invalid UTF-8 handling, see // https://chromium-review.googlesource.com/c/v8/v8/+/671020 for more info. test('utf-8', Buffer.from('EDA0B5EDB08D', 'hex'), '\ufffd\ufffd\ufffd\ufffd\ufffd\ufffd'); // UCS-2 test('ucs2', Buffer.from('ababc', 'ucs2'), 'ababc'); // UTF-16LE test('utf16le', Buffer.from('3DD84DDC', 'hex'), '\ud83d\udc4d'); // thumbs up // Additional UTF-8 tests decoder = new StringDecoder('utf8'); assert.strictEqual(decoder.write(Buffer.from('E1', 'hex')), ''); // A quick test for lastNeed & lastTotal which are undocumented. assert.strictEqual(decoder.lastNeed, 2); assert.strictEqual(decoder.lastTotal, 3); assert.strictEqual(decoder.end(), '\ufffd'); decoder = new StringDecoder('utf8'); assert.strictEqual(decoder.write(Buffer.from('E18B', 'hex')), ''); assert.strictEqual(decoder.end(), '\ufffd'); decoder = new StringDecoder('utf8'); assert.strictEqual(decoder.write(Buffer.from('\ufffd')), '\ufffd'); assert.strictEqual(decoder.end(), ''); decoder = new StringDecoder('utf8'); assert.strictEqual(decoder.write(Buffer.from('\ufffd\ufffd\ufffd')), '\ufffd\ufffd\ufffd'); assert.strictEqual(decoder.end(), ''); decoder = new StringDecoder('utf8'); assert.strictEqual(decoder.write(Buffer.from('EFBFBDE2', 'hex')), '\ufffd'); assert.strictEqual(decoder.end(), '\ufffd'); decoder = new StringDecoder('utf8'); assert.strictEqual(decoder.write(Buffer.from('F1', 'hex')), ''); assert.strictEqual(decoder.write(Buffer.from('41F2', 'hex')), '\ufffdA'); assert.strictEqual(decoder.end(), '\ufffd'); // Additional utf8Text test decoder = new StringDecoder('utf8'); assert.strictEqual(decoder.text(Buffer.from([0x41]), 2), ''); // Additional UTF-16LE surrogate pair tests decoder = new StringDecoder('utf16le'); assert.strictEqual(decoder.write(Buffer.from('3DD8', 'hex')), ''); assert.strictEqual(decoder.write(Buffer.from('4D', 'hex')), ''); assert.strictEqual(decoder.write(Buffer.from('DC', 'hex')), '\ud83d\udc4d'); 
assert.strictEqual(decoder.end(), ''); decoder = new StringDecoder('utf16le'); assert.strictEqual(decoder.write(Buffer.from('3DD8', 'hex')), ''); assert.strictEqual(decoder.end(), '\ud83d'); decoder = new StringDecoder('utf16le'); assert.strictEqual(decoder.write(Buffer.from('3DD8', 'hex')), ''); assert.strictEqual(decoder.write(Buffer.from('4D', 'hex')), ''); assert.strictEqual(decoder.end(), '\ud83d'); decoder = new StringDecoder('utf16le'); assert.strictEqual(decoder.write(Buffer.from('3DD84D', 'hex')), '\ud83d'); assert.strictEqual(decoder.end(), ''); common.expectsError( () => new StringDecoder(1), { code: 'ERR_UNKNOWN_ENCODING', type: TypeError, message: 'Unknown encoding: 1' } ); common.expectsError( () => new StringDecoder('test'), { code: 'ERR_UNKNOWN_ENCODING', type: TypeError, message: 'Unknown encoding: test' } ); // test verifies that StringDecoder will correctly decode the given input // buffer with the given encoding to the expected output. It will attempt all // possible ways to write() the input buffer, see writeSequences(). The // singleSequence allows for easy debugging of a specific sequence which is // useful in case of test failures. function test(encoding, input, expected, singleSequence) {<|fim▁hole|> } else { sequences = [singleSequence]; } const hexNumberRE = /.{2}/g; sequences.forEach((sequence) => { const decoder = new StringDecoder(encoding); let output = ''; sequence.forEach((write) => { output += decoder.write(input.slice(write[0], write[1])); }); output += decoder.end(); if (output !== expected) { const message = `Expected "${unicodeEscape(expected)}", ` + `but got "${unicodeEscape(output)}"\n` + `input: ${input.toString('hex').match(hexNumberRE)}\n` + `Write sequence: ${JSON.stringify(sequence)}\n` + `Full Decoder State: ${inspect(decoder)}`; assert.fail(output, expected, message); } }); } // unicodeEscape prints the str contents as unicode escape codes. 
function unicodeEscape(str) { let r = ''; for (let i = 0; i < str.length; i++) { r += `\\u${str.charCodeAt(i).toString(16)}`; } return r; } // writeSequences returns an array of arrays that describes all possible ways a // buffer of the given length could be split up and passed to sequential write // calls. // // e.G. writeSequences(3) will return: [ // [ [ 0, 3 ] ], // [ [ 0, 2 ], [ 2, 3 ] ], // [ [ 0, 1 ], [ 1, 3 ] ], // [ [ 0, 1 ], [ 1, 2 ], [ 2, 3 ] ] // ] function writeSequences(length, start, sequence) { if (start === undefined) { start = 0; sequence = []; } else if (start === length) { return [sequence]; } let sequences = []; for (let end = length; end > start; end--) { const subSequence = sequence.concat([[start, end]]); const subSequences = writeSequences(length, end, subSequence, sequences); sequences = sequences.concat(subSequences); } return sequences; }<|fim▁end|>
let sequences; if (!singleSequence) { sequences = writeSequences(input.length);
<|file_name|>SyntheticWheelEvent.js<|end_file_name|><|fim▁begin|>/** * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ import SyntheticMouseEvent from './SyntheticMouseEvent'; /** * @interface WheelEvent * @see http://www.w3.org/TR/DOM-Level-3-Events/ */ const SyntheticWheelEvent = SyntheticMouseEvent.extend({ deltaX(event) { return 'deltaX' in event ? event.deltaX : // Fallback to `wheelDeltaX` for Webkit and normalize (right is positive). 'wheelDeltaX' in event ? -event.wheelDeltaX : 0; },<|fim▁hole|> return 'deltaY' in event ? event.deltaY : // Fallback to `wheelDeltaY` for Webkit and normalize (down is positive). 'wheelDeltaY' in event ? -event.wheelDeltaY : // Fallback to `wheelDelta` for IE<9 and normalize (down is positive). 'wheelDelta' in event ? -event.wheelDelta : 0; }, deltaZ: null, // Browsers without "deltaMode" is reporting in raw wheel delta where one // notch on the scroll is always +/- 120, roughly equivalent to pixels. // A good approximation of DOM_DELTA_LINE (1) is 5% of viewport size or // ~40 pixels, for DOM_DELTA_SCREEN (2) it is 87.5% of viewport size. deltaMode: null, }); export default SyntheticWheelEvent;<|fim▁end|>
deltaY(event) {
<|file_name|>njsPool.cpp<|end_file_name|><|fim▁begin|>/* Copyright (c) 2015, 2018, Oracle and/or its affiliates. All rights reserved. */ /****************************************************************************** * * You may not use the identified files except in compliance with the Apache * License, Version 2.0 (the "License.") * * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0. * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * limitations under the License. * * This file uses NAN: * * Copyright (c) 2015 NAN contributors * * NAN contributors listed at https://github.com/rvagg/nan#contributors * * Permission is hereby granted, free of charge, to any person obtaining * a copy of this software and associated documentation files (the * "Software"), to deal in the Software without restriction, including * without limitation the rights to use, copy, modify, merge, publish, * distribute, sublicense, and/or sell copies of the Software, and to * permit persons to whom the Software is furnished to do so, subject to * the following conditions: * * The above copyright notice and this permission notice shall be * included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
* * NAME * njsPool.cpp * * DESCRIPTION * Pool class implementation. * *****************************************************************************/ #include "node.h" #include <string> #include "njsOracle.h" #include "njsPool.h" #include "njsConnection.h" using namespace std; using namespace node; using namespace v8; // peristent Pool class handle Nan::Persistent<FunctionTemplate> njsPool::poolTemplate_s; //----------------------------------------------------------------------------- // njsPool::Init() // Initialization function of Pool class. Maps functions and properties // from JS to C++. //----------------------------------------------------------------------------- void njsPool::Init(Local<Object> target) { Nan::HandleScope scope; Local<FunctionTemplate> temp = Nan::New<FunctionTemplate>(New); temp->InstanceTemplate()->SetInternalFieldCount(1); temp->SetClassName(Nan::New<v8::String>("Pool").ToLocalChecked()); Nan::SetPrototypeMethod(temp, "close", Close); Nan::SetPrototypeMethod(temp, "getConnection", GetConnection); Nan::SetAccessor(temp->InstanceTemplate(), Nan::New<v8::String>("poolMax").ToLocalChecked(), njsPool::GetPoolMax, njsPool::SetPoolMax); Nan::SetAccessor(temp->InstanceTemplate(), Nan::New<v8::String>("poolMin").ToLocalChecked(), njsPool::GetPoolMin, njsPool::SetPoolMin); Nan::SetAccessor(temp->InstanceTemplate(), Nan::New<v8::String>("poolIncrement").ToLocalChecked(), njsPool::GetPoolIncrement, njsPool::SetPoolIncrement); Nan::SetAccessor(temp->InstanceTemplate(), Nan::New<v8::String>("poolTimeout").ToLocalChecked(), njsPool::GetPoolTimeout, njsPool::SetPoolTimeout); Nan::SetAccessor(temp->InstanceTemplate(), Nan::New<v8::String>("connectionsOpen").ToLocalChecked(), njsPool::GetConnectionsOpen, njsPool::SetConnectionsOpen); Nan::SetAccessor(temp->InstanceTemplate(), Nan::New<v8::String>("connectionsInUse").ToLocalChecked(), njsPool::GetConnectionsInUse, njsPool::SetConnectionsInUse); Nan::SetAccessor(temp->InstanceTemplate(), 
Nan::New<v8::String>("stmtCacheSize").ToLocalChecked(), njsPool::GetStmtCacheSize, njsPool::SetStmtCacheSize); Nan::SetAccessor(temp->InstanceTemplate(), Nan::New<v8::String>("poolPingInterval").ToLocalChecked(), njsPool::GetPoolPingInterval, njsPool::SetPoolPingInterval); poolTemplate_s.Reset(temp); Nan::Set(target, Nan::New<v8::String>("Pool").ToLocalChecked(), temp->GetFunction()); } //----------------------------------------------------------------------------- // njsPool::CreateFromBaton() // Create a new pool from the baton. //----------------------------------------------------------------------------- Local<Object> njsPool::CreateFromBaton(njsBaton *baton) { Nan::EscapableHandleScope scope; Local<Function> func; Local<Object> obj; njsPool *pool; func = Nan::GetFunction( Nan::New<FunctionTemplate>(poolTemplate_s)).ToLocalChecked(); obj = Nan::NewInstance(func).ToLocalChecked(); pool = Nan::ObjectWrap::Unwrap<njsPool>(obj); pool->dpiPoolHandle = baton->dpiPoolHandle; baton->dpiPoolHandle = NULL; pool->jsOracledb.Reset(baton->jsOracledb); pool->poolMax = baton->poolMax; pool->poolMin = baton->poolMin; pool->poolIncrement = baton->poolIncrement; pool->poolTimeout = baton->poolTimeout; pool->poolPingInterval = baton->poolPingInterval; pool->stmtCacheSize = baton->stmtCacheSize; pool->lobPrefetchSize = baton->lobPrefetchSize; return scope.Escape(obj); } //----------------------------------------------------------------------------- // njsPool::New() // Create new object accesible from JS. This is always called from within // njsPool::CreateFromBaton() and never from any external JS. //----------------------------------------------------------------------------- NAN_METHOD(njsPool::New) { njsPool *pool = new njsPool(); pool->Wrap(info.Holder()); info.GetReturnValue().Set(info.Holder()); } //----------------------------------------------------------------------------- // njsPool::GetPoolMin() // Get accessor of "poolMin" property. 
//----------------------------------------------------------------------------- NAN_GETTER(njsPool::GetPoolMin) { njsPool *pool = (njsPool*) ValidateGetter(info); if (pool) info.GetReturnValue().Set(pool->poolMin); } //----------------------------------------------------------------------------- // njsPool::GetPoolMax() // Get accessor of "poolMax" property. //----------------------------------------------------------------------------- NAN_GETTER(njsPool::GetPoolMax) { njsPool *pool = (njsPool*) ValidateGetter(info); if (pool) info.GetReturnValue().Set(pool->poolMax); } //----------------------------------------------------------------------------- // njsPool::GetPoolIncrement() // Get accessor of "poolIncrement" property. //----------------------------------------------------------------------------- NAN_GETTER(njsPool::GetPoolIncrement) { njsPool *pool = (njsPool*) ValidateGetter(info); if (pool) info.GetReturnValue().Set(pool->poolIncrement); } //----------------------------------------------------------------------------- // njsPool::GetPoolTimeout() // Get accessor of "poolTimeout" property. //----------------------------------------------------------------------------- NAN_GETTER(njsPool::GetPoolTimeout) { njsPool *pool = (njsPool*) ValidateGetter(info); if (pool) info.GetReturnValue().Set(pool->poolTimeout); } //----------------------------------------------------------------------------- // njsPool::GetConnectionsOpen() // Get accessor of "connectionsOpen" property. 
//----------------------------------------------------------------------------- NAN_GETTER(njsPool::GetConnectionsOpen) { njsPool *pool = (njsPool*) ValidateGetter(info); if (!pool) return; if (!pool->IsValid()) { info.GetReturnValue().Set(Nan::Undefined()); return; } uint32_t value; if (dpiPool_getOpenCount(pool->dpiPoolHandle, &value) < 0) { njsOracledb::ThrowDPIError(); return; } info.GetReturnValue().Set(value); } //----------------------------------------------------------------------------- // njsPool::GetConnectionsInUse() // Get accessor of "connectionsInUse" property. //----------------------------------------------------------------------------- NAN_GETTER(njsPool::GetConnectionsInUse) { njsPool *pool = (njsPool*) ValidateGetter(info); if (!pool) return; if (!pool->IsValid()) { info.GetReturnValue().Set(Nan::Undefined()); return; } uint32_t value; if (dpiPool_getBusyCount(pool->dpiPoolHandle, &value) < 0) { njsOracledb::ThrowDPIError(); return; } info.GetReturnValue().Set(value); } //-----------------------------------------------------------------------------<|fim▁hole|>// Get accessor of "poolPingInterval" property. //----------------------------------------------------------------------------- NAN_GETTER(njsPool::GetPoolPingInterval) { njsPool *pool = (njsPool*) ValidateGetter(info); if (pool) info.GetReturnValue().Set(pool->poolPingInterval); } //----------------------------------------------------------------------------- // njsPool::GetStmtCacheSize() // Get accessor of "stmtCacheSize" property. //----------------------------------------------------------------------------- NAN_GETTER(njsPool::GetStmtCacheSize) { njsPool *pool = (njsPool*) ValidateGetter(info); if (pool) info.GetReturnValue().Set(pool->stmtCacheSize); } //----------------------------------------------------------------------------- // njsPool::SetPoolMin() // Set accessor of "poolMin" property. 
//----------------------------------------------------------------------------- NAN_SETTER(njsPool::SetPoolMin) { PropertyIsReadOnly("poolMin"); } //----------------------------------------------------------------------------- // njsPool::SetPoolMax() // Set accessor of "poolMax" property. //----------------------------------------------------------------------------- NAN_SETTER(njsPool::SetPoolMax) { PropertyIsReadOnly("poolMax"); } //----------------------------------------------------------------------------- // njsPool::SetPoolIncrement() // Set accessor of "poolIncrement" property. //----------------------------------------------------------------------------- NAN_SETTER(njsPool::SetPoolIncrement) { PropertyIsReadOnly("poolIncrement"); } //----------------------------------------------------------------------------- // njsPool::SetPoolTimeout() // Set accessor of "poolTimeout" property. //----------------------------------------------------------------------------- NAN_SETTER(njsPool::SetPoolTimeout) { PropertyIsReadOnly("poolTimeout"); } //----------------------------------------------------------------------------- // njsPool::SetConnectionsOpen() // Set accessor of "connectionsOpen" property. //----------------------------------------------------------------------------- NAN_SETTER(njsPool::SetConnectionsOpen) { PropertyIsReadOnly("connectionsOpen"); } //----------------------------------------------------------------------------- // njsPool::SetConnectionsInUse() // Set accessor of "connectionsInUse" property. //----------------------------------------------------------------------------- NAN_SETTER(njsPool::SetConnectionsInUse) { PropertyIsReadOnly("connectionsInUse"); } //----------------------------------------------------------------------------- // njsPool::SetPoolPingInterval() // Set accessor of "stmtCacheSize" property. 
//----------------------------------------------------------------------------- NAN_SETTER(njsPool::SetPoolPingInterval) { PropertyIsReadOnly("poolPingInterval"); } //----------------------------------------------------------------------------- // njsPool::SetStmtCacheSize() // Set accessor of "stmtCacheSize" property. //----------------------------------------------------------------------------- NAN_SETTER(njsPool::SetStmtCacheSize) { PropertyIsReadOnly("stmtCacheSize"); } //----------------------------------------------------------------------------- // njsPool::GetConnection() // Get a connection from the pool and return it. // // PARAMETERS // - JS callback which will receive (error, connection) //----------------------------------------------------------------------------- NAN_METHOD(njsPool::GetConnection) { njsBaton *baton; njsPool *pool; Local<Object> connProps; pool = (njsPool*) ValidateArgs(info, 2, 2); if (!pool) return; /* Get optional connection properties: argument may have empty json */ if (!pool->GetObjectArg(info, 0, connProps)) return; baton = pool->CreateBaton(info); if (!baton) return; if (baton->error.empty()) { /* Connection Properties: If empty json, values will be empty */ baton->GetStringFromJSON(connProps, "user", 0, baton->user); baton->GetStringFromJSON(connProps, "password", 0, baton->password); baton->jsOracledb.Reset(pool->jsOracledb); njsOracledb *oracledb = baton->GetOracledb(); baton->connClass = oracledb->getConnectionClass(); baton->lobPrefetchSize = pool->lobPrefetchSize; baton->SetDPIPoolHandle(pool->dpiPoolHandle); } baton->QueueWork("GetConnection", Async_GetConnection, Async_AfterGetConnection, 2); } //----------------------------------------------------------------------------- // njsPool::Async_GetConnection() // Worker function for njsPool::GetConnection() method. 
//----------------------------------------------------------------------------- void njsPool::Async_GetConnection(njsBaton *baton) { dpiConnCreateParams params; dpiContext *context; context = njsOracledb::GetDPIContext(); if (dpiContext_initConnCreateParams(context, &params) < 0) { baton->GetDPIError(); return; } if (!baton->connClass.empty()) { params.connectionClass = baton->connClass.c_str(); params.connectionClassLength = baton->connClass.length(); } if (dpiPool_acquireConnection(baton->dpiPoolHandle, baton->user.empty() ? NULL : baton->user.c_str(), baton->user.empty() ? 0 : baton->user.length(), baton->password.empty () ? NULL : baton->password.c_str(), baton->password.empty () ? 0 : baton->password.length(), &params, &baton->dpiConnHandle) < 0) baton->GetDPIError(); } //----------------------------------------------------------------------------- // njsPool::Async_AfterGetConnection() // Sets up the arguments for the callback to JS. The connection object is // created and passed as the second argument. The first argument is the error // and at this point it is known that no error has taken place. //----------------------------------------------------------------------------- void njsPool::Async_AfterGetConnection(njsBaton *baton, Local<Value> argv[]) { argv[1] = njsConnection::CreateFromBaton(baton); } //----------------------------------------------------------------------------- // njsPool::Close() // Close the pool. The reference to the DPI handle is transferred to the // baton so that it will cleared automatically upon success and so that the // pool is marked as invalid immediately. 
// // PARAMETERS // - JS callback which will receive (error) //----------------------------------------------------------------------------- NAN_METHOD(njsPool::Close) { njsBaton *baton; njsPool *pool; pool = (njsPool*) ValidateArgs(info, 2, 2); if (!pool) return; baton = pool->CreateBaton(info); if (!baton) return; baton->GetBoolFromJSON(info[0].As<Object>(), "forceClose", 0, &baton->force); baton->dpiPoolHandle = pool->dpiPoolHandle; pool->dpiPoolHandle = NULL; baton->QueueWork("Close", Async_Close, NULL, 1); } //----------------------------------------------------------------------------- // njsPool::Async_Close() // Worker function for njsPool::Close() method. If the attempt to // close the pool fails, the reference to the DPI handle is transferred back // from the baton to the pool. //----------------------------------------------------------------------------- void njsPool::Async_Close(njsBaton *baton) { dpiPoolCloseMode mode = (baton->force) ? DPI_MODE_POOL_CLOSE_FORCE : DPI_MODE_POOL_CLOSE_DEFAULT; if (dpiPool_close(baton->dpiPoolHandle, mode) < 0) { njsPool *pool = (njsPool*) baton->callingObj; pool->dpiPoolHandle = baton->dpiPoolHandle; baton->dpiPoolHandle = NULL; baton->GetDPIError(); } }<|fim▁end|>
// njsPool::GetPoolPingInterval()
<|file_name|>consts.rs<|end_file_name|><|fim▁begin|>pub static PCRE_CASELESS: int = 0x00000001; // Compile pub static PCRE_MULTILINE: int = 0x00000002; // Compile pub static PCRE_DOTALL: int = 0x00000004; // Compile pub static PCRE_EXTENDED: int = 0x00000008; // Compile pub static PCRE_ANCHORED: int = 0x00000010; // Compile, exec, DFA exec pub static PCRE_DOLLAR_ENDONLY: int = 0x00000020; // Compile pub static PCRE_EXTRA: int = 0x00000040; // Compile pub static PCRE_NOTBOL: int = 0x00000080; // Exec, DFA exec pub static PCRE_NOTEOL: int = 0x00000100; // Exec, DFA exec pub static PCRE_UNGREEDY: int = 0x00000200; // Compile pub static PCRE_NOTEMPTY: int = 0x00000400; // Exec, DFA exec pub static PCRE_UTF8: int = 0x00000800; // Compile pub static PCRE_NO_AUTO_CAPTURE: int = 0x00001000; // Compile pub static PCRE_NO_UTF8_CHECK: int = 0x00002000; // Compile, exec, DFA exec pub static PCRE_AUTO_CALLOUT: int = 0x00004000; // Compile pub static PCRE_PARTIAL_SOFT: int = 0x00008000; // Exec, DFA exec pub static PCRE_PARTIAL: int = 0x00008000; // Backwards compatible synonym pub static PCRE_DFA_SHORTEST: int = 0x00010000; // DFA exec pub static PCRE_DFA_RESTART: int = 0x00020000; // DFA exec pub static PCRE_FIRSTLINE: int = 0x00040000; // Compile pub static PCRE_DUPNAMES: int = 0x00080000; // Compile pub static PCRE_NEWLINE_CR: int = 0x00100000; // Compile, exec, DFA exec pub static PCRE_NEWLINE_LF: int = 0x00200000; // Compile, exec, DFA exec pub static PCRE_NEWLINE_CRLF: int = 0x00300000; // Compile, exec, DFA exec pub static PCRE_NEWLINE_ANY: int = 0x00400000; // Compile, exec, DFA exec pub static PCRE_NEWLINE_ANYCRLF: int = 0x00500000; // Compile, exec, DFA exec pub static PCRE_BSR_ANYCRLF: int = 0x00800000; // Compile, exec, DFA exec pub static PCRE_BSR_UNICODE: int = 0x01000000; // Compile, exec, DFA exec pub static PCRE_JAVASCRIPT_COMPAT: int = 0x02000000; // Compile pub static PCRE_NO_START_OPTIMIZE: int = 0x04000000; // Compile, exec, DFA exec pub static 
PCRE_NO_START_OPTIMISE: int = 0x04000000; // Synonym pub static PCRE_PARTIAL_HARD: int = 0x08000000; // Exec, DFA exec pub static PCRE_NOTEMPTY_ATSTART: int = 0x10000000; // Exec, DFA exec pub static PCRE_UCP: int = 0x20000000; // Compile pub static COMPILE_OPTIONS: int = 0x27fc7a7f; pub static EXEC_OPTIONS: int = 0x1df0a590; //static COMPILE_OPTIONS: int = // PCRE_CASELESS // | PCRE_MULTILINE // | PCRE_DOTALL // | PCRE_EXTENDED // | PCRE_ANCHORED // | PCRE_DOLLAR_ENDONLY // | PCRE_EXTRA // | PCRE_UNGREEDY // | PCRE_UTF8 // | PCRE_NO_AUTO_CAPTURE // | PCRE_NO_UTF8_CHECK // | PCRE_AUTO_CALLOUT // | PCRE_FIRSTLINE // | PCRE_DUPNAMES // | PCRE_NEWLINE_CR // | PCRE_NEWLINE_LF // | PCRE_NEWLINE_CRLF // | PCRE_NEWLINE_ANY // | PCRE_NEWLINE_ANYCRLF // | PCRE_BSR_ANYCRLF // | PCRE_BSR_UNICODE // | PCRE_JAVASCRIPT_COMPAT // | PCRE_NO_START_OPTIMIZE // | PCRE_NO_START_OPTIMISE // | PCRE_UCP; //static EXEC_OPTIONS: int = // PCRE_ANCHORED // | PCRE_NOTBOL // | PCRE_NOTEOL // | PCRE_NOTEMPTY // | PCRE_NO_UTF8_CHECK // | PCRE_PARTIAL_SOFT // | PCRE_PARTIAL // | PCRE_NEWLINE_CR // | PCRE_NEWLINE_LF // | PCRE_NEWLINE_CRLF // | PCRE_NEWLINE_ANY // | PCRE_NEWLINE_ANYCRLF // | PCRE_BSR_ANYCRLF // | PCRE_BSR_UNICODE // | PCRE_NO_START_OPTIMIZE // | PCRE_NO_START_OPTIMISE // | PCRE_PARTIAL_HARD // | PCRE_NOTEMPTY_ATSTART; pub static PCRE_ERROR_NOMATCH: int = -1; pub static PCRE_ERROR_NULL: int = -2;<|fim▁hole|>pub static PCRE_ERROR_UNKNOWN_NODE: int = -5; // For backward compatibility pub static PCRE_ERROR_NOMEMORY: int = -6; pub static PCRE_ERROR_NOSUBSTRING: int = -7; pub static PCRE_ERROR_MATCHLIMIT: int = -8; pub static PCRE_ERROR_CALLOUT: int = -9; // Never used by PCRE itself pub static PCRE_ERROR_BADUTF8: int = -10; pub static PCRE_ERROR_BADUTF8_OFFSET: int = -11; pub static PCRE_ERROR_PARTIAL: int = -12; pub static PCRE_ERROR_BADPARTIAL: int = -13; pub static PCRE_ERROR_INTERNAL: int = -14; pub static PCRE_ERROR_BADCOUNT: int = -15; pub static PCRE_ERROR_DFA_UITEM: int = -16; 
pub static PCRE_ERROR_DFA_UCOND: int = -17; pub static PCRE_ERROR_DFA_UMLIMIT: int = -18; pub static PCRE_ERROR_DFA_WSSIZE: int = -19; pub static PCRE_ERROR_DFA_RECURSE: int = -20; pub static PCRE_ERROR_RECURSIONLIMIT: int = -21; pub static PCRE_ERROR_NULLWSLIMIT: int = -22; // No longer actually used pub static PCRE_ERROR_BADNEWLINE: int = -23; pub static PCRE_ERROR_BADOFFSET: int = -24; pub static PCRE_ERROR_SHORTUTF8: int = -25; pub static PCRE_INFO_OPTIONS: int = 0; pub static PCRE_INFO_SIZE: int = 1; pub static PCRE_INFO_CAPTURECOUNT: int = 2; pub static PCRE_INFO_BACKREFMAX: int = 3; pub static PCRE_INFO_FIRSTBYTE: int = 4; pub static PCRE_INFO_FIRSTCHAR: int = 4; // For backwards compatibility pub static PCRE_INFO_FIRSTTABLE: int = 5; pub static PCRE_INFO_LASTLITERAL: int = 6; pub static PCRE_INFO_NAMEENTRYSIZE: int = 7; pub static PCRE_INFO_NAMECOUNT: int = 8; pub static PCRE_INFO_NAMETABLE: int = 9; pub static PCRE_INFO_STUDYSIZE: int = 10; pub static PCRE_INFO_DEFAULT_TABLES: int = 11; pub static PCRE_INFO_OKPARTIAL: int = 12; pub static PCRE_INFO_JCHANGED: int = 13; pub static PCRE_INFO_HASCRORLF: int = 14; pub static PCRE_INFO_MINLENGTH: int = 15;<|fim▁end|>
pub static PCRE_ERROR_BADOPTION: int = -3; pub static PCRE_ERROR_BADMAGIC: int = -4; pub static PCRE_ERROR_UNKNOWN_OPCODE: int = -5;
<|file_name|>RenderBase.cpp<|end_file_name|><|fim▁begin|>// Copyright 2010 Dolphin Emulator Project // Licensed under GPLv2+ // Refer to the license.txt file included. // --------------------------------------------------------------------------------------------- // GC graphics pipeline // --------------------------------------------------------------------------------------------- // 3d commands are issued through the fifo. The GPU draws to the 2MB EFB. // The efb can be copied back into ram in two forms: as textures or as XFB. // The XFB is the region in RAM that the VI chip scans out to the television. // So, after all rendering to EFB is done, the image is copied into one of two XFBs in RAM. // Next frame, that one is scanned out and the other one gets the copy. = double buffering. // --------------------------------------------------------------------------------------------- #include <cinttypes> #include <cmath> #include <string> #include "Common/Atomic.h" #include "Common/Event.h" #include "Common/Profiler.h" #include "Common/StringUtil.h" #include "Common/Timer.h" #include "Core/ConfigManager.h" #include "Core/Core.h" #include "Core/Host.h" #include "Core/Movie.h" #include "Core/FifoPlayer/FifoRecorder.h" #include "Core/HW/VideoInterface.h" #include "VideoCommon/AVIDump.h" #include "VideoCommon/BPMemory.h" #include "VideoCommon/CommandProcessor.h" #include "VideoCommon/CPMemory.h" #include "VideoCommon/Debugger.h" #include "VideoCommon/Fifo.h" #include "VideoCommon/FPSCounter.h" #include "VideoCommon/FramebufferManagerBase.h" #include "VideoCommon/MainBase.h" #include "VideoCommon/OpcodeDecoding.h" #include "VideoCommon/RenderBase.h" #include "VideoCommon/Statistics.h" #include "VideoCommon/TextureCacheBase.h" #include "VideoCommon/VideoConfig.h" #include "VideoCommon/XFMemory.h" // TODO: Move these out of here. 
int frameCount; int OSDChoice; static int OSDTime; Renderer *g_renderer = nullptr; std::mutex Renderer::s_criticalScreenshot; std::string Renderer::s_sScreenshotName; Common::Event Renderer::s_screenshotCompleted; volatile bool Renderer::s_bScreenshot; // The framebuffer size int Renderer::s_target_width; int Renderer::s_target_height; // TODO: Add functionality to reinit all the render targets when the window is resized. int Renderer::s_backbuffer_width; int Renderer::s_backbuffer_height; PostProcessingShaderImplementation* Renderer::m_post_processor; TargetRectangle Renderer::target_rc; int Renderer::s_last_efb_scale; bool Renderer::XFBWrited; PEControl::PixelFormat Renderer::prev_efb_format = PEControl::INVALID_FMT; unsigned int Renderer::efb_scale_numeratorX = 1; unsigned int Renderer::efb_scale_numeratorY = 1; unsigned int Renderer::efb_scale_denominatorX = 1; unsigned int Renderer::efb_scale_denominatorY = 1; Renderer::Renderer() : frame_data() , bLastFrameDumped(false) { UpdateActiveConfig(); TextureCache::OnConfigChanged(g_ActiveConfig); #if defined _WIN32 || defined HAVE_LIBAV bAVIDumping = false; #endif OSDChoice = 0; OSDTime = 0; } Renderer::~Renderer() { // invalidate previous efb format prev_efb_format = PEControl::INVALID_FMT; efb_scale_numeratorX = efb_scale_numeratorY = efb_scale_denominatorX = efb_scale_denominatorY = 1; #if defined _WIN32 || defined HAVE_LIBAV if (SConfig::GetInstance().m_DumpFrames && bLastFrameDumped && bAVIDumping) AVIDump::Stop(); #else if (pFrameDump.IsOpen()) pFrameDump.Close(); #endif } void Renderer::RenderToXFB(u32 xfbAddr, const EFBRectangle& sourceRc, u32 fbStride, u32 fbHeight, float Gamma) { CheckFifoRecording(); if (!fbStride || !fbHeight) return; XFBWrited = true; if (g_ActiveConfig.bUseXFB) { FramebufferManagerBase::CopyToXFB(xfbAddr, fbStride, fbHeight, sourceRc, Gamma); } else { // below div two to convert from bytes to pixels - it expects width, not stride Swap(xfbAddr, fbStride/2, fbStride/2, fbHeight, 
sourceRc, Gamma); } } int Renderer::EFBToScaledX(int x) { switch (g_ActiveConfig.iEFBScale) { case SCALE_AUTO: // fractional return FramebufferManagerBase::ScaleToVirtualXfbWidth(x); default: return x * (int)efb_scale_numeratorX / (int)efb_scale_denominatorX; }; } int Renderer::EFBToScaledY(int y) { switch (g_ActiveConfig.iEFBScale) { case SCALE_AUTO: // fractional return FramebufferManagerBase::ScaleToVirtualXfbHeight(y); default: return y * (int)efb_scale_numeratorY / (int)efb_scale_denominatorY; }; } <|fim▁hole|> if (g_ActiveConfig.iEFBScale == SCALE_AUTO || g_ActiveConfig.iEFBScale == SCALE_AUTO_INTEGRAL) { *scaledX = x; *scaledY = y; } else { *scaledX = x * (int)efb_scale_numeratorX / (int)efb_scale_denominatorX; *scaledY = y * (int)efb_scale_numeratorY / (int)efb_scale_denominatorY; } } // return true if target size changed bool Renderer::CalculateTargetSize(unsigned int framebuffer_width, unsigned int framebuffer_height) { int newEFBWidth, newEFBHeight; newEFBWidth = newEFBHeight = 0; // TODO: Ugly. 
Clean up switch (s_last_efb_scale) { case SCALE_AUTO: case SCALE_AUTO_INTEGRAL: newEFBWidth = FramebufferManagerBase::ScaleToVirtualXfbWidth(EFB_WIDTH); newEFBHeight = FramebufferManagerBase::ScaleToVirtualXfbHeight(EFB_HEIGHT); if (s_last_efb_scale == SCALE_AUTO_INTEGRAL) { newEFBWidth = ((newEFBWidth-1) / EFB_WIDTH + 1) * EFB_WIDTH; newEFBHeight = ((newEFBHeight-1) / EFB_HEIGHT + 1) * EFB_HEIGHT; } efb_scale_numeratorX = newEFBWidth; efb_scale_denominatorX = EFB_WIDTH; efb_scale_numeratorY = newEFBHeight; efb_scale_denominatorY = EFB_HEIGHT; break; case SCALE_1X: efb_scale_numeratorX = efb_scale_numeratorY = 1; efb_scale_denominatorX = efb_scale_denominatorY = 1; break; case SCALE_1_5X: efb_scale_numeratorX = efb_scale_numeratorY = 3; efb_scale_denominatorX = efb_scale_denominatorY = 2; break; case SCALE_2X: efb_scale_numeratorX = efb_scale_numeratorY = 2; efb_scale_denominatorX = efb_scale_denominatorY = 1; break; case SCALE_2_5X: efb_scale_numeratorX = efb_scale_numeratorY = 5; efb_scale_denominatorX = efb_scale_denominatorY = 2; break; default: efb_scale_numeratorX = efb_scale_numeratorY = s_last_efb_scale - 3; efb_scale_denominatorX = efb_scale_denominatorY = 1; int maxSize; maxSize = GetMaxTextureSize(); if ((unsigned)maxSize < EFB_WIDTH * efb_scale_numeratorX / efb_scale_denominatorX) { efb_scale_numeratorX = efb_scale_numeratorY = (maxSize / EFB_WIDTH); efb_scale_denominatorX = efb_scale_denominatorY = 1; } break; } if (s_last_efb_scale > SCALE_AUTO_INTEGRAL) CalculateTargetScale(EFB_WIDTH, EFB_HEIGHT, &newEFBWidth, &newEFBHeight); if (newEFBWidth != s_target_width || newEFBHeight != s_target_height) { s_target_width = newEFBWidth; s_target_height = newEFBHeight; return true; } return false; } void Renderer::ConvertStereoRectangle(const TargetRectangle& rc, TargetRectangle& leftRc, TargetRectangle& rightRc) { // Resize target to half its original size TargetRectangle drawRc = rc; if (g_ActiveConfig.iStereoMode == STEREO_TAB) { // The height may be negative 
due to flipped rectangles int height = rc.bottom - rc.top; drawRc.top += height / 4; drawRc.bottom -= height / 4; } else { int width = rc.right - rc.left; drawRc.left += width / 4; drawRc.right -= width / 4; } // Create two target rectangle offset to the sides of the backbuffer leftRc = drawRc, rightRc = drawRc; if (g_ActiveConfig.iStereoMode == STEREO_TAB) { leftRc.top -= s_backbuffer_height / 4; leftRc.bottom -= s_backbuffer_height / 4; rightRc.top += s_backbuffer_height / 4; rightRc.bottom += s_backbuffer_height / 4; } else { leftRc.left -= s_backbuffer_width / 4; leftRc.right -= s_backbuffer_width / 4; rightRc.left += s_backbuffer_width / 4; rightRc.right += s_backbuffer_width / 4; } } void Renderer::SetScreenshot(const std::string& filename) { std::lock_guard<std::mutex> lk(s_criticalScreenshot); s_sScreenshotName = filename; s_bScreenshot = true; } // Create On-Screen-Messages void Renderer::DrawDebugText() { std::string final_yellow, final_cyan; if (g_ActiveConfig.bShowFPS || SConfig::GetInstance().m_ShowFrameCount) { if (g_ActiveConfig.bShowFPS) final_cyan += StringFromFormat("FPS: %d", g_renderer->m_fps_counter.m_fps); if (g_ActiveConfig.bShowFPS && SConfig::GetInstance().m_ShowFrameCount) final_cyan += " - "; if (SConfig::GetInstance().m_ShowFrameCount) { final_cyan += StringFromFormat("Frame: %llu", (unsigned long long) Movie::g_currentFrame); if (Movie::IsPlayingInput()) final_cyan += StringFromFormat(" / %llu", (unsigned long long) Movie::g_totalFrames); } final_cyan += "\n"; final_yellow += "\n"; } if (SConfig::GetInstance().m_ShowLag) { final_cyan += StringFromFormat("Lag: %" PRIu64 "\n", Movie::g_currentLagCount); final_yellow += "\n"; } if (SConfig::GetInstance().m_ShowInputDisplay) { final_cyan += Movie::GetInputDisplay(); final_yellow += "\n"; } // OSD Menu messages if (OSDChoice > 0) { OSDTime = Common::Timer::GetTimeMs() + 3000; OSDChoice = -OSDChoice; } if ((u32)OSDTime > Common::Timer::GetTimeMs()) { std::string res_text; switch 
(g_ActiveConfig.iEFBScale) { case SCALE_AUTO: res_text = "Auto (fractional)"; break; case SCALE_AUTO_INTEGRAL: res_text = "Auto (integral)"; break; case SCALE_1X: res_text = "Native"; break; case SCALE_1_5X: res_text = "1.5x"; break; case SCALE_2X: res_text = "2x"; break; case SCALE_2_5X: res_text = "2.5x"; break; default: res_text = StringFromFormat("%dx", g_ActiveConfig.iEFBScale - 3); break; } const char* ar_text = ""; switch (g_ActiveConfig.iAspectRatio) { case ASPECT_AUTO: ar_text = "Auto"; break; case ASPECT_STRETCH: ar_text = "Stretch"; break; case ASPECT_ANALOG: ar_text = "Force 4:3"; break; case ASPECT_ANALOG_WIDE: ar_text = "Force 16:9"; } const char* const efbcopy_text = g_ActiveConfig.bSkipEFBCopyToRam ? "to Texture" : "to RAM"; // The rows const std::string lines[] = { std::string("Internal Resolution: ") + res_text, std::string("Aspect Ratio: ") + ar_text + (g_ActiveConfig.bCrop ? " (crop)" : ""), std::string("Copy EFB: ") + efbcopy_text, std::string("Fog: ") + (g_ActiveConfig.bDisableFog ? 
"Disabled" : "Enabled"), }; enum { lines_count = sizeof(lines) / sizeof(*lines) }; // The latest changed setting in yellow for (int i = 0; i != lines_count; ++i) { if (OSDChoice == -i - 1) final_yellow += lines[i]; final_yellow += '\n'; } // The other settings in cyan for (int i = 0; i != lines_count; ++i) { if (OSDChoice != -i - 1) final_cyan += lines[i]; final_cyan += '\n'; } } final_cyan += Common::Profiler::ToString(); if (g_ActiveConfig.bOverlayStats) final_cyan += Statistics::ToString(); if (g_ActiveConfig.bOverlayProjStats) final_cyan += Statistics::ToStringProj(); //and then the text g_renderer->RenderText(final_cyan, 20, 20, 0xFF00FFFF); g_renderer->RenderText(final_yellow, 20, 20, 0xFFFFFF00); } void Renderer::UpdateDrawRectangle(int backbuffer_width, int backbuffer_height) { float FloatGLWidth = (float)backbuffer_width; float FloatGLHeight = (float)backbuffer_height; float FloatXOffset = 0; float FloatYOffset = 0; // The rendering window size const float WinWidth = FloatGLWidth; const float WinHeight = FloatGLHeight; // Update aspect ratio hack values // Won't take effect until next frame // Don't know if there is a better place for this code so there isn't a 1 frame delay if (g_ActiveConfig.bWidescreenHack) { float source_aspect = VideoInterface::GetAspectRatio(g_aspect_wide); float target_aspect; switch (g_ActiveConfig.iAspectRatio) { case ASPECT_STRETCH: target_aspect = WinWidth / WinHeight; break; case ASPECT_ANALOG: target_aspect = VideoInterface::GetAspectRatio(false); break; case ASPECT_ANALOG_WIDE: target_aspect = VideoInterface::GetAspectRatio(true); break; default: // ASPECT_AUTO target_aspect = source_aspect; break; } float adjust = source_aspect / target_aspect; if (adjust > 1) { // Vert+ g_Config.fAspectRatioHackW = 1; g_Config.fAspectRatioHackH = 1 / adjust; } else { // Hor+ g_Config.fAspectRatioHackW = adjust; g_Config.fAspectRatioHackH = 1; } } else { // Hack is disabled g_Config.fAspectRatioHackW = 1; g_Config.fAspectRatioHackH = 1; } // 
Check for force-settings and override. // The rendering window aspect ratio as a proportion of the 4:3 or 16:9 ratio float Ratio; switch (g_ActiveConfig.iAspectRatio) { case ASPECT_ANALOG_WIDE: Ratio = (WinWidth / WinHeight) / VideoInterface::GetAspectRatio(true); break; case ASPECT_ANALOG: Ratio = (WinWidth / WinHeight) / VideoInterface::GetAspectRatio(false); break; default: Ratio = (WinWidth / WinHeight) / VideoInterface::GetAspectRatio(g_aspect_wide); break; } if (g_ActiveConfig.iAspectRatio != ASPECT_STRETCH) { if (Ratio > 1.0f) { // Scale down and center in the X direction. FloatGLWidth /= Ratio; FloatXOffset = (WinWidth - FloatGLWidth) / 2.0f; } // The window is too high, we have to limit the height else { // Scale down and center in the Y direction. FloatGLHeight *= Ratio; FloatYOffset = FloatYOffset + (WinHeight - FloatGLHeight) / 2.0f; } } // ----------------------------------------------------------------------- // Crop the picture from Analog to 4:3 or from Analog (Wide) to 16:9. // Output: FloatGLWidth, FloatGLHeight, FloatXOffset, FloatYOffset // ------------------ if (g_ActiveConfig.iAspectRatio != ASPECT_STRETCH && g_ActiveConfig.bCrop) { switch (g_ActiveConfig.iAspectRatio) { case ASPECT_ANALOG_WIDE: Ratio = (16.0f / 9.0f) / VideoInterface::GetAspectRatio(true); break; case ASPECT_ANALOG: Ratio = (4.0f / 3.0f) / VideoInterface::GetAspectRatio(false); break; default: Ratio = (!g_aspect_wide ? 
(4.0f / 3.0f) : (16.0f / 9.0f)) / VideoInterface::GetAspectRatio(g_aspect_wide); break; } if (Ratio <= 1.0f) { Ratio = 1.0f / Ratio; } // The width and height we will add (calculate this before FloatGLWidth and FloatGLHeight is adjusted) float IncreasedWidth = (Ratio - 1.0f) * FloatGLWidth; float IncreasedHeight = (Ratio - 1.0f) * FloatGLHeight; // The new width and height FloatGLWidth = FloatGLWidth * Ratio; FloatGLHeight = FloatGLHeight * Ratio; // Adjust the X and Y offset FloatXOffset = FloatXOffset - (IncreasedWidth * 0.5f); FloatYOffset = FloatYOffset - (IncreasedHeight * 0.5f); } int XOffset = (int)(FloatXOffset + 0.5f); int YOffset = (int)(FloatYOffset + 0.5f); int iWhidth = (int)ceil(FloatGLWidth); int iHeight = (int)ceil(FloatGLHeight); iWhidth -= iWhidth % 4; // ensure divisibility by 4 to make it compatible with all the video encoders iHeight -= iHeight % 4; target_rc.left = XOffset; target_rc.top = YOffset; target_rc.right = XOffset + iWhidth; target_rc.bottom = YOffset + iHeight; } void Renderer::SetWindowSize(int width, int height) { if (width < 1) width = 1; if (height < 1) height = 1; // Scale the window size by the EFB scale. CalculateTargetScale(width, height, &width, &height); Host_RequestRenderWindowSize(width, height); } void Renderer::CheckFifoRecording() { bool wasRecording = g_bRecordFifoData; g_bRecordFifoData = FifoRecorder::GetInstance().IsRecording(); if (g_bRecordFifoData) { if (!wasRecording) { RecordVideoMemory(); } FifoRecorder::GetInstance().EndFrame(CommandProcessor::fifo.CPBase, CommandProcessor::fifo.CPEnd); } } void Renderer::RecordVideoMemory() { u32 *bpmem_ptr = (u32*)&bpmem; u32 cpmem[256]; // The FIFO recording format splits XF memory into xfmem and xfregs; follow // that split here. 
u32 *xfmem_ptr = (u32*)&xfmem; u32 *xfregs_ptr = (u32*)&xfmem + FifoDataFile::XF_MEM_SIZE; u32 xfregs_size = sizeof(XFMemory) / 4 - FifoDataFile::XF_MEM_SIZE; memset(cpmem, 0, 256 * 4); FillCPMemoryArray(cpmem); FifoRecorder::GetInstance().SetVideoMemory(bpmem_ptr, cpmem, xfmem_ptr, xfregs_ptr, xfregs_size); } void Renderer::Swap(u32 xfbAddr, u32 fbWidth, u32 fbStride, u32 fbHeight, const EFBRectangle& rc, float Gamma) { // TODO: merge more generic parts into VideoCommon g_renderer->SwapImpl(xfbAddr, fbWidth, fbStride, fbHeight, rc, Gamma); if (XFBWrited) g_renderer->m_fps_counter.Update(); frameCount++; GFX_DEBUGGER_PAUSE_AT(NEXT_FRAME, true); // Begin new frame // Set default viewport and scissor, for the clear to work correctly // New frame stats.ResetFrame(); Core::Callback_VideoCopiedToXFB(XFBWrited || (g_ActiveConfig.bUseXFB && g_ActiveConfig.bUseRealXFB)); XFBWrited = false; } void Renderer::PokeEFB(EFBAccessType type, const std::vector<EfbPokeData>& data) { for (EfbPokeData poke : data) { AccessEFB(type, poke.x, poke.y, poke.data); } }<|fim▁end|>
void Renderer::CalculateTargetScale(int x, int y, int* scaledX, int* scaledY) {
<|file_name|>seq2seq.py<|end_file_name|><|fim▁begin|># Copyright (c) 2017-present, Facebook, Inc. # All rights reserved. # This source code is licensed under the BSD-style license found in the # LICENSE file in the root directory of this source tree. An additional grant # of patent rights can be found in the PATENTS file in the same directory. from parlai.core.agents import Agent from torch.autograd import Variable from torch import optim import torch.nn as nn import torch import copy import random class Seq2seqAgent(Agent): """Simple agent which uses an LSTM to process incoming text observations.""" @staticmethod def add_cmdline_args(argparser): argparser.add_arg('-hs', '--hiddensize', type=int, default=64, help='size of the hidden layers and embeddings') argparser.add_arg('-nl', '--numlayers', type=int, default=2, help='number of hidden layers') argparser.add_arg('-lr', '--learningrate', type=float, default=0.5, help='learning rate') argparser.add_arg('-dr', '--dropout', type=float, default=0.1, help='dropout rate') argparser.add_arg('--no-cuda', action='store_true', default=False, help='disable GPUs even if available') argparser.add_arg('--gpu', type=int, default=-1, help='which GPU device to use') def __init__(self, opt, shared=None): super().__init__(opt, shared) if shared and 'dictionary' in shared: # only set up everything for the main instance self.dict = shared['dictionary'] self.EOS = self.dict.eos_token self.EOS_TENSOR = torch.LongTensor(self.dict.parse(self.EOS)) self.id = 'Seq2Seq' hsz = opt['hiddensize'] self.hidden_size = hsz self.num_layers = opt['numlayers'] self.learning_rate = opt['learningrate'] self.use_cuda = opt.get('cuda', False) self.longest_label = 2 # TODO: 1 if 'babi' in opt['task']: self.babi_mode = True self.dirs = set(['n', 's', 'e', 'w']) self.criterion = nn.NLLLoss() self.lt = nn.Embedding(len(self.dict), hsz, padding_idx=0, scale_grad_by_freq=True) self.encoder = nn.GRU(hsz, hsz, opt['numlayers']) self.decoder = nn.GRU(hsz, hsz, 
opt['numlayers']) self.d2o = nn.Linear(hsz, len(self.dict)) self.dropout = nn.Dropout(opt['dropout']) self.softmax = nn.LogSoftmax() lr = opt['learningrate'] self.optims = { 'lt': optim.SGD(self.lt.parameters(), lr=lr), 'encoder': optim.SGD(self.encoder.parameters(), lr=lr), 'decoder': optim.SGD(self.decoder.parameters(), lr=lr), 'd2o': optim.SGD(self.d2o.parameters(), lr=lr), } if self.use_cuda: self.cuda() self.episode_done = True def parse(self, text): return torch.LongTensor(self.dict.txt2vec(text)) def v2t(self, vec): return self.dict.vec2txt(vec) def cuda(self): self.criterion.cuda() self.lt.cuda() self.encoder.cuda() self.decoder.cuda() self.d2o.cuda() self.dropout.cuda() self.softmax.cuda() def hidden_to_idx(self, hidden, drop=False): if hidden.size(0) > 1: raise RuntimeError('bad dimensions of tensor:', hidden) hidden = hidden.squeeze(0) scores = self.d2o(hidden) if drop: scores = self.dropout(scores) scores = self.softmax(scores) _max_score, idx = scores.max(1) return idx, scores def zero_grad(self): for optimizer in self.optims.values(): optimizer.zero_grad() def update_params(self): for optimizer in self.optims.values(): optimizer.step() def init_zeros(self, bsz=1): t = torch.zeros(self.num_layers, bsz, self.hidden_size) if self.use_cuda: t = t.cuda(async=True) return Variable(t) def init_rand(self, bsz=1): t = torch.FloatTensor(self.num_layers, bsz, self.hidden_size) t.uniform_(0.05) if self.use_cuda: t = t.cuda(async=True) return Variable(t) def observe(self, observation): observation = copy.deepcopy(observation) if not self.episode_done: # if the last example wasn't the end of an episode, then we need to # recall what was said in that example prev_dialogue = self.observation['text'] observation['text'] = prev_dialogue + '\n' + observation['text'] self.observation = observation self.episode_done = observation['episode_done'] return observation def update(self, xs, ys): batchsize = len(xs) # first encode context xes = self.lt(xs).t() h0 = 
self.init_zeros(batchsize) _output, hn = self.encoder(xes, h0) # start with EOS tensor for all x = self.EOS_TENSOR if self.use_cuda: x = x.cuda(async=True) x = Variable(x) xe = self.lt(x).unsqueeze(1) xes = xe.expand(xe.size(0), batchsize, xe.size(2)) output_lines = [[] for _ in range(batchsize)] self.zero_grad() # update model loss = 0 self.longest_label = max(self.longest_label, ys.size(1)) for i in range(ys.size(1)): output, hn = self.decoder(xes, hn) preds, scores = self.hidden_to_idx(output, drop=True) y = ys.select(1, i) loss += self.criterion(scores, y) # use the true token as the next input xes = self.lt(y).unsqueeze(0) # hn = self.dropout(hn) for j in range(preds.size(0)): token = self.v2t([preds.data[j][0]]) output_lines[j].append(token) loss.backward() self.update_params() if random.random() < 0.1: true = self.v2t(ys.data[0]) print('loss:', round(loss.data[0], 2), ' '.join(output_lines[0]), '(true: {})'.format(true)) return output_lines def predict(self, xs): batchsize = len(xs) # first encode context xes = self.lt(xs).t() h0 = self.init_zeros(batchsize) _output, hn = self.encoder(xes, h0) # start with EOS tensor for all x = self.EOS_TENSOR if self.use_cuda: x = x.cuda(async=True) x = Variable(x) xe = self.lt(x).unsqueeze(1) xes = xe.expand(xe.size(0), batchsize, xe.size(2)) done = [False for _ in range(batchsize)] total_done = 0 max_len = 0 output_lines = [[] for _ in range(batchsize)] while(total_done < batchsize) and max_len < self.longest_label: output, hn = self.decoder(xes, hn) preds, scores = self.hidden_to_idx(output, drop=False) xes = self.lt(preds.t()) max_len += 1 for i in range(preds.size(0)): if not done[i]: token = self.v2t(preds.data[i]) if token == self.EOS: done[i] = True total_done += 1 else: output_lines[i].append(token) if self.babi_mode and token not in self.dirs: # for babi, only output one token except when # giving directions done[i] = True total_done += 1 if random.random() < 0.1: print('prediction:', ' '.join(output_lines[0])) 
return output_lines def batchify(self, obs): exs = [ex for ex in obs if 'text' in ex] valid_inds = [i for i, ex in enumerate(obs) if 'text' in ex] batchsize = len(exs) parsed = [self.parse(ex['text']) for ex in exs] max_x_len = max([len(x) for x in parsed]) xs = torch.LongTensor(batchsize, max_x_len).fill_(0) for i, x in enumerate(parsed): offset = max_x_len - len(x) for j, idx in enumerate(x): xs[i][j + offset] = idx if self.use_cuda: xs = xs.cuda(async=True) xs = Variable(xs) ys = None if 'labels' in exs[0]: labels = [random.choice(ex['labels']) + ' ' + self.EOS for ex in exs] parsed = [self.parse(y) for y in labels] max_y_len = max(len(y) for y in parsed) ys = torch.LongTensor(batchsize, max_y_len).fill_(0) for i, y in enumerate(parsed): for j, idx in enumerate(y): ys[i][j] = idx if self.use_cuda: ys = ys.cuda(async=True) ys = Variable(ys) return xs, ys, valid_inds def batch_act(self, observations): batchsize = len(observations) batch_reply = [{'id': self.getID()} for _ in range(batchsize)] xs, ys, valid_inds = self.batchify(observations) if len(xs) == 0: return batch_reply # Either train or predict if ys is not None: predictions = self.update(xs, ys) else: predictions = self.predict(xs) <|fim▁hole|> batch_reply[valid_inds[i]]['text'] = ' '.join( c for c in predictions[i] if c != self.EOS) return batch_reply def act(self): return self.batch_act([self.observation])[0] def save(self, path): model = {} model['lt'] = self.lt.state_dict() model['encoder'] = self.encoder.state_dict() model['decoder'] = self.decoder.state_dict() model['d2o'] = self.d2o.state_dict() model['longest_label'] = self.longest_label with open(path, 'wb') as write: torch.save(model, write) def load(self, path): with open(path, 'rb') as read: model = torch.load(read) self.lt.load_state_dict(model['lt']) self.encoder.load_state_dict(model['encoder']) self.decoder.load_state_dict(model['decoder']) self.d2o.load_state_dict(model['d2o']) self.longest_label = model['longest_label']<|fim▁end|>
for i in range(len(predictions)):
<|file_name|>migration.py<|end_file_name|><|fim▁begin|>import logging import time import types from autotest.client.shared import error from virttest import utils_misc, utils_test, aexpect def run(test, params, env): """ KVM migration test: 1) Get a live VM and clone it. 2) Verify that the source VM supports migration. If it does, proceed with the test. 3) Send a migration command to the source VM and wait until it's finished. 4) Kill off the source VM. 3) Log into the destination VM after the migration is finished. 4) Compare the output of a reference command executed on the source with the output of the same command on the destination machine. :param test: QEMU test object. :param params: Dictionary with test parameters. :param env: Dictionary with the test environment. """ def guest_stress_start(guest_stress_test): """ Start a stress test in guest, Could be 'iozone', 'dd', 'stress' :param type: type of stress test. """ from tests import autotest_control timeout = 0 if guest_stress_test == "autotest": test_type = params.get("test_type") func = autotest_control.run_autotest_control new_params = params.copy() new_params["test_control_file"] = "%s.control" % test_type args = (test, new_params, env) timeout = 60 elif guest_stress_test == "dd": vm = env.get_vm(env, params.get("main_vm"))<|fim▁hole|> args = ("for((;;)) do dd if=/dev/zero of=/tmp/test bs=5M " "count=100; rm -f /tmp/test; done", login_timeout, logging.info) logging.info("Start %s test in guest", guest_stress_test) bg = utils_test.BackgroundTest(func, args) params["guest_stress_test_pid"] = bg bg.start() if timeout: logging.info("sleep %ds waiting guest test start.", timeout) time.sleep(timeout) if not bg.is_alive(): raise error.TestFail("Failed to start guest test!") def guest_stress_deamon(): """ This deamon will keep watch the status of stress in guest. If the stress program is finished before migration this will restart it. 
""" while True: bg = params.get("guest_stress_test_pid") action = params.get("action") if action == "run": logging.debug("Check if guest stress is still running") guest_stress_test = params.get("guest_stress_test") if bg and not bg.is_alive(): logging.debug("Stress process finished, restart it") guest_stress_start(guest_stress_test) time.sleep(30) else: logging.debug("Stress still on") else: if bg and bg.is_alive(): try: stress_stop_cmd = params.get("stress_stop_cmd") vm = env.get_vm(env, params.get("main_vm")) vm.verify_alive() session = vm.wait_for_login() if stress_stop_cmd: logging.warn("Killing background stress process " "with cmd '%s', you would see some " "error message in client test result," "it's harmless.", stress_stop_cmd) session.cmd(stress_stop_cmd) bg.join(10) except Exception: pass break time.sleep(10) def get_functions(func_names, locals_dict): """ Find sub function(s) in this function with the given name(s). """ if not func_names: return [] funcs = [] for f in func_names.split(): f = locals_dict.get(f) if isinstance(f, types.FunctionType): funcs.append(f) return funcs def mig_set_speed(): mig_speed = params.get("mig_speed", "1G") return vm.monitor.migrate_set_speed(mig_speed) login_timeout = int(params.get("login_timeout", 360)) mig_timeout = float(params.get("mig_timeout", "3600")) mig_protocol = params.get("migration_protocol", "tcp") mig_cancel_delay = int(params.get("mig_cancel") == "yes") * 2 mig_exec_cmd_src = params.get("migration_exec_cmd_src") mig_exec_cmd_dst = params.get("migration_exec_cmd_dst") if mig_exec_cmd_src and "gzip" in mig_exec_cmd_src: mig_exec_file = params.get("migration_exec_file", "/var/tmp/exec") mig_exec_file += "-%s" % utils_misc.generate_random_string(8) mig_exec_cmd_src = mig_exec_cmd_src % mig_exec_file mig_exec_cmd_dst = mig_exec_cmd_dst % mig_exec_file offline = params.get("offline", "no") == "yes" check = params.get("vmstate_check", "no") == "yes" living_guest_os = params.get("migration_living_guest", "yes") == 
"yes" deamon_thread = None vm = env.get_vm(params["main_vm"]) vm.verify_alive() if living_guest_os: session = vm.wait_for_login(timeout=login_timeout) # Get the output of migration_test_command test_command = params.get("migration_test_command") reference_output = session.cmd_output(test_command) # Start some process in the background (and leave the session open) background_command = params.get("migration_bg_command", "") session.sendline(background_command) time.sleep(5) # Start another session with the guest and make sure the background # process is running session2 = vm.wait_for_login(timeout=login_timeout) try: check_command = params.get("migration_bg_check_command", "") session2.cmd(check_command, timeout=30) session2.close() # run some functions before migrate start. pre_migrate = get_functions(params.get("pre_migrate"), locals()) for func in pre_migrate: func() # Start stress test in guest. guest_stress_test = params.get("guest_stress_test") if guest_stress_test: guest_stress_start(guest_stress_test) params["action"] = "run" deamon_thread = utils_test.BackgroundTest( guest_stress_deamon, ()) deamon_thread.start() # Migrate the VM ping_pong = params.get("ping_pong", 1) for i in xrange(int(ping_pong)): if i % 2 == 0: logging.info("Round %s ping..." % str(i / 2)) else: logging.info("Round %s pong..." % str(i / 2)) vm.migrate(mig_timeout, mig_protocol, mig_cancel_delay, offline, check, migration_exec_cmd_src=mig_exec_cmd_src, migration_exec_cmd_dst=mig_exec_cmd_dst) # Set deamon thread action to stop after migrate params["action"] = "stop" # run some functions after migrate finish. 
post_migrate = get_functions(params.get("post_migrate"), locals()) for func in post_migrate: func() # Log into the guest again logging.info("Logging into guest after migration...") session2 = vm.wait_for_login(timeout=30) logging.info("Logged in after migration") # Make sure the background process is still running session2.cmd(check_command, timeout=30) # Get the output of migration_test_command output = session2.cmd_output(test_command) # Compare output to reference output if output != reference_output: logging.info("Command output before migration differs from " "command output after migration") logging.info("Command: %s", test_command) logging.info("Output before:" + utils_misc.format_str_for_message(reference_output)) logging.info("Output after:" + utils_misc.format_str_for_message(output)) raise error.TestFail("Command '%s' produced different output " "before and after migration" % test_command) finally: # Kill the background process if session2 and session2.is_alive(): bg_kill_cmd = params.get("migration_bg_kill_command", None) if bg_kill_cmd is not None: try: session2.cmd(bg_kill_cmd) except aexpect.ShellTimeoutError: logging.debug("Remote session not responsive, " "shutting down VM %s", vm.name) vm.destroy(gracefully=True) if deamon_thread is not None: # Set deamon thread action to stop after migrate params["action"] = "stop" deamon_thread.join() else: # Just migrate without depending on a living guest OS vm.migrate(mig_timeout, mig_protocol, mig_cancel_delay, offline, check, migration_exec_cmd_src=mig_exec_cmd_src, migration_exec_cmd_dst=mig_exec_cmd_dst)<|fim▁end|>
vm.verify_alive() session = vm.wait_for_login(timeout=login_timeout) func = session.cmd_output
<|file_name|>config.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python #-*- coding:utf-8 -*- # vim:ai:sta:et:ts=4:sw=4:sts=4 """kernelng 0.x Tool for maintaining customized overlays of kernel-ng.eclass-based ebuilds Copyright 2005-2014 Gentoo Foundation Copyright (C) 2014 Gregory M. Turner <[email protected]> Distributed under the terms of the GNU General Public License v2 This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 2 of the License. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA. 
""" import os import sys import re from collections import OrderedDict from itertools import chain, islice, count, repeat import click from click._compat import iteritems from .output import has_verbose_level, echov, sechov, trace, suppress_tracing import portage try: portage.proxy.lazyimport.lazyimport(globals(), 'portage.data:portage_uid,portage_gid') except ImportError: portage_uid = 250 portage_gid = 250 # eprefixifiable dummy value EPREFIX = "@GENTOO_PORTAGE_EPREFIX@" # non-eprefixified fallback behavior: ask portage or assume empty if EPREFIX == "@GENTOO_%s_EPREFIX@" % "PORTAGE": try: from portage.const import EPREFIX as _EPREFIX except ImportError: _EPREFIX = '' EPREFIX = _EPREFIX PROGNAME = sys.argv[0].split(os.path.sep)[-1] if len(sys.argv) >= 1 else 'kernelng' PROGDESC = 'kernel-ng-util' FRAMEWORK = 'kernel-ng' PORTAGE_CONF_DIR = '/etc/portage' REPOS_CONF = 'repos.conf' REPOS_CONF_FILE = ''.join(( EPREFIX, PORTAGE_CONF_DIR, os.path.sep, REPOS_CONF )) KERNELNG_CONF = '%s.conf' % FRAMEWORK KERNELNG_CONF_DIR = '/etc/%s' % FRAMEWORK EKERNELNG_CONF_DIR = '%s%s' % (EPREFIX, KERNELNG_CONF_DIR) KERNELNG_CONF_FILE = ''.join(( EKERNELNG_CONF_DIR, os.path.sep, KERNELNG_CONF, )) CONST_RE = re.compile('%\([^)]*\)[^\W\d_]', re.UNICODE) SUBCONSTS = { 'prog': PROGNAME, 'progdesc': PROGDESC, 'framework': FRAMEWORK, 'kngconf': KERNELNG_CONF, 'kngconffile': KERNELNG_CONF_FILE, 'eprefix': EPREFIX, 'lc': '%s%s' % ( click.style('LOADCONFIG', fg='blue', bold=True), click.style(':', fg='white', bold=True) ) } CONFIG_COMMENT_RE = re.compile('\s*#|\s*$', re.UNICODE) CONFIG_SECTION_RE = re.compile('\s*\[\s*([^][]*[^][\s]+)\s*\]\s*$', re.UNICODE) CONFIG_SETTING_RE = re.compile('\s*([^\d\W][\w-]*)\s*=\s*($|.*\S+)\s*$', re.UNICODE) def subconsts(text, subconsts=SUBCONSTS): """Utility function to make substitutions from a dictionary of constants.""" try: return text % subconsts if re.search(CONST_RE, text) else text except ValueError as e: echov('subconsts: error substituting in "%s": 
%s.' % (text, str(e)), err=True) raise # convenience alias _sc = subconsts class KNGConfigItemUnknownReason(Exception): def __init__(self, key, value, reason): super(KNGConfigItemUnknownReason, self).__init__( 'Unknown KNGConfigItem reason "%s", assigning "%s" to "%s"' % ( reason, value, key)) VALID_KNGCONFIGITEMREASONS=['stored', 'default', 'override'] def ValidateKNGConfigItemReason(key, value, reason): if reason not in VALID_KNGCONFIGITEMREASONS: raise KNGConfigItemUnknownReason(key, value, reason) # KNGConfigItem, KNGConfigItems, KNGConfig, and fetal-ness/daddy # ============================================================== # The interface here is tolerable but the plumbing is ugly and inelegant # due to code evolution by incremental hacking. The whole thing should probably be # scrapped and re-coded from scratch, truth be told, now that I've figued out # what it is I'm trying to accomplish. # # The basic data-structure we are building could be thought of as a dict of {<str>: <list>} # items; the lists could be thought of as containing (<str>, <str>) tuples. In fact, # that's an oversimplification. The dict is actually a KNGConfig, which is an OrderedDict # subclass that logically represents the entire contents of a kernel-ng.conf file, with # each dictionary key representing a section. The list is actually a KNGConfigItems instance # and the list-items are KNGConfigItem instances (the analogue of the (<str>, <str>) tuples). # Each KNGConfigItem either represents a configuration-file comment or a standard configuration-file # line-item (i.e.: key=value). # # We use the OrderedDict so that we can round-trip the configuration file without re-ordering # the sections. Initially this will be fairly broken, but the enhancements to achieve full # .conf => OO => .conf round-trip capabilities are simply to saving off some formatting metadata # at the KNGConfigItem level during "deserialization" -- aka parsing, what-have-you. 
First, # .conf-file deserialization of /any/ sort will need to be implemented :S. # # The motivation for much of the crazyness below is that I wanted consumers to be able to say: # "kngconfig['foo']['bar'] = 'baz'", and have the bar setting in the foo section recieve a value of # 'baz'. Even so, thereafter, kngconfig['foo']['bar'] would not be 'baz', but a KNGConfigItem # with value 'baz' and key 'bar', but that's fine, kngconfig['foo']['bar'].value would be our 'baz'. # # To achieve this, I used the __missing__ feature at the top dict level, added hybrid-dict features # to KNGConfigItems (so that KNGConfigItems.__getattr__ will search the KNGConfigItem instances # it contains for the provided index, or otherwise call a "_missing" API which works just like # "__missing__" but, obviously is not a built-in magic name thingy so-preferably-not-to-speak. # BUT, crap, I thought, this would mean that as soon as the API consumer simply looks at # kngconfig['foo'], the 'foo' section must come into being. Which wouldn't be a problem except # that a 'kernelng_foo' package would fail to be generated during "kernelng overlay update" due # to (amazingly!) there being no */foo package in all of portage. Clearly this would not be what # most API consumers meant by kngconfig['foo']. # # To solve this dilemma, I created the concept of "fetal" KNGConfigItem and KNGConfigItems # instances. In this scheme, two new properties are created: "daddy" and "fetal". Daddy maps back # to the container that contains the instance (nb: implications wrt. i.e., deepclone() are not # dealt with yet); meanwhile, fetal tells us: # # KNGConfigItem: if the instance has never had a non-None "value" property set # KNGConfigItems: if the instance has ever had any non-fetal KNGConfigItem instances in it. # # Once these are "born", there is back-propogation through the "daddy"s so that the KNGConfigItems # get born themselves, the instant they become grandparents, if necessary. 
# # The purpose of all these acrobatics is to censor the fetuses during deserialization, ensuring # that no gross side effects occur due to the objects generated by __missing__ and _missing. # # Yes, I know this is all kinds of ugly but the interface is almost reasonable (eliminating the # requirement to pass a "daddy" keyword argument to constructors would be nice and will eventually # get done; the ability for multiple containers to be pregnant with the same fetus is not # needed but my implementation also sort-of breaks the ability for multiple containers to contain # the same non-fetal containee, which clearly sucks and should also be fixed). # # Each KNGConfigItem has a "reason" property which explains its semantic purpose. Three "reasons" # are supported: "stored" is the standard reason and simply means the KNGConfigItem represents # a setting which should persist when the KNGConfig containing it is deserialized. The "default" # reason signifies that the key=>value mapping is not stored in the configuration file, and serves # only as an in-memory means of tracking the default value (a default property also stores the # default value if applicable; in this case, del(conf['foo']['bar']) will not delete the # conf['foo']['bar'] KNGConfigItem from conf['foo'] -- instead it will set its reason to "default" # which will cause the KNGConfigItem to disappear in the deserialized .conf file). The third # "reason" is as-yet unused and probably broken: "override" is intended to represent a temporary # in-memory # change to the configuration that will not persist. The problem is that there is no # provisions yet in place to track the persistent value being overriden. Perhaps the "override" # reason is not needed and can be removed. class KNGConfigItem(object): @trace def __init__(self, key, value='__comment__', default=None, reason=None, daddy=None): ''' This constructor has two forms: KNGConfigItem(<comment-str>) and KNGConfigItem(<key>, <value>). 
default and reason apply only to the second form -- for comments, the default is always None and the reason is always 'stored' ''' if reason is not None: ValidateKNGConfigItemReason(key, value, reason) if value == '__comment__': key, value = value, key default=None reason='stored' elif reason is None and default is None: reason = 'stored' elif reason is None: # and default is set if value == default: # note: value is not None because default is not None reason = 'default' elif value is not None: reason = 'stored' # else, None is the right thing to have in reason for now, we'll have # to figure it out when we are born. self._key = key self._value = value if reason == 'default' and default is None: self._default = value else: self._default = default self._reason = reason self._daddy = daddy @suppress_tracing def __repr__(self): if self.iscomment: return 'KNGConfigItem(%r, reason=%r)' % (self.comment, self.reason) else: return 'KNGConfigItem(%r, %r, default=%r, reason=%r)' % ( self.key, self.value, self.default, self.reason) @property def key(self): return self._key # note: "value" as a name for a property makes for confusing reading here but # foo.key/foo.value is imo a nice self-evident naming scheme for our consumers @property def value(self): return self._value @value.setter @trace def value(self, newvalue): if newvalue is None: # We need to know if we have left "fetal mode" during an assignment; # we track "fetal mode" using a convention that value == None <==> is_fetal # Values should always be strings anyhow (nb: I've deliberately opted not # to enforce that for pythonicicity reasons). raise ValueError('None is not an allowed value for KNGConfigItems.') if self._value == newvalue: # avoid any side-effects as no change is required. 
return if self._value is None: if self._daddy is None: raise ValueError('fetal-mode state-machine thinko') else: # it is possible that determining reason has been deferred 'till now if self._reason is None: if self._default is None: self._reason = 'stored' elif newvalue == self._default: self._reason = 'default' else: self._reason = 'stored' self._daddy.christen() if self.reason == 'default': # if the value has changed to a non-default value, then # reason will need to change to 'stored'. Pretty sure the # newvalue != self._default is a noop but relying on that # here seems obscure and future-fragile. if self._default is not None and newvalue != self._default: self.reason = 'stored' # else: nothing to do: once stored, always stored. self._value = newvalue @value.deleter @trace def value(self): if self._default is not None: self._value = self._default self._reason = 'default' elif self._daddy is not None: del self._daddy[self.key] else: raise ValueError('Unanticipated wierd corner case. This is a bug.') @property def default(self): return self._default @property def reason(self): return self._reason @reason.setter @trace def reason(self, value): ValidateKNGConfigItemReason(self.key, self.value, value) self._reason = value @property def fetal(self): return self._value is None @property @trace def isexplicit(self): if self.reason == 'default': return False elif self.reason == 'override': # FIXME: This result suggests "isexplicit" is the wrong name for this. 
return False elif self.value is None: # fetal mode return False else: return True @property def iscomment(self): return (self.key == '__comment__') @property def comment(self): return self.value @property def daddy(self): return self._daddy @trace def __eq__(self, other): if isinstance(other, KNGConfigItem): if other.key != self.key: return False if other.value != self.value: return False if other.reason != self.reason: return False return True else: # fuck it return NotImplemented @trace def __ne__(self, other): return not (self == other) @trace def __gt__(self, other): if isinstance(other, KNGConfigItem): return self.key > other.key or (self.key == other.key and self.value > other.value) \ or (self.key == other.key and self.value == other.value and self.reason > other.reason) else: return NotImplemented @trace def __le__(self, other): return not self.__gt__(other) @trace def __lt__(self, other): return (not self.__eq__(other)) and self.__le__(other) @trace def __ge__(self, other): return self.__eq__(other) or self.__gt__(other) kng_example_config_data = None @trace def KNGExampleConfigData(): global kng_example_config_data if kng_example_config_data: return kng_example_config_data.copy() result = OrderedDict() # format of the innermost key=>val tuples: # ( key, val, [force_stored=False, [no_default=False]] ) result['implicit_global'] = ( '# %(framework)s.conf', '', '# This file is designed to contain sensible default values for', '# a plurality of real-world systems; however, it can and often should', '# be modified to match your system\'s needs.', '#', '# %(framework)s.conf has a "Windows .ini"-style syntax, consisting of', '# name => value mappings, i.e.:', '#', '# <name> = <value>', '#', '# and section headings enclosed in brackets, i.e.:', '#', '# [<section>]', '#', '# Each section (with one exception, described below) corresponds to', '# a portage package atom. 
For example, the header:', '#', '# [=sys-kernel/gentoo-sources-3.15*]', '#', '# would contain specifics about how to map from portage packages', '# matching the "=sys-kernel/gentoo-sources-3.15*" portage "atom"', '# to %(framework)s packages in the site-specific %(framework)s', '# overlay (n.b.: the %(prog)s utility contains all the secret sauce to', '# create and maintain these site-specific overlays. Run "%(prog)s -h",', '# or "man %(prog)s" if that\'s Greek to you, and you\'re not Greek).', '#', '# Lines beginning with a "#" are treated as comments. Empty lines', '# are ignored. Quotation marks are not needed and will not be', '# preserved by the %(prog)s utility -- their use is discouraged.', '#', '# A "[global]" section is also supported. Any "<name> = <value>"', '# pairs appearing before any section header are considered', '# implicitly to be in the global section, so the "[global]" header', '# may be omitted, so long as all global settings come first.', '', ) result['global'] = ( '', '# overlay', '# -------', '# default value: site-%(framework)s', '# scope: global only', '#', '# Name of the site-wide %(framework)s portage overlay.', '# The overlay need not exist to be named here. If it does', '# not exist it will be created automatically as required or', '# when the "%(prog)s overlay create" command is executed.', '', ( 'overlay', 'site-%(framework)s', True ), '', '# name_prefix', '# ==========', '# default value: %(prog)s_', '# scope: any', '#', '# Prefix applied to ng-sources package names in the overlay. 
For', '# example, if name_prefix is "foo", then the %(framework)s package', '# mirroring portage kernel package sys-kernel/bar-sources in the', '# %(framework)s overlay would be named sys-kernel/foobar-sources.', '# Making this empty would result in identically named packages and', '# is therefore strongly discouraged, although not technocratically', '# prohibited by %(progdesc)s.', '', ( 'name_prefix', '%(prog)s_' ), '', '# no_name_prefix', '# ==============', '# default value: no_', '# scope: any', '#', '# Prefix applied to no-sources package names in the overlay. For', '# example, if no_name_prefix is "no_", then the no-sources package', '# mirroring the portage kernel package sys-kernel/shit-sources in', '# the %(framework)s overlay would be named sys-kernel/no_shit-sources.', '# Making this empty would result in identically named packages and', '# is therefore strongly discouraged, although not technocratically', '# prohibited by %(progdesc)s.', '', ( 'no_name_prefix', 'no_' ), '', '# repos_conf', '# ==========', '# default value: %(eprefix)s/etc/portage/repos.conf', '# scope: global only', '#', '# Location of portage\'s repos.conf file. If empty, i.e.:', '#', '# repos_conf =', '#', '# %(framework)s will not automatically maintain the repos.conf file;', '# otherwise, when the overlay is created, this file will be', '# automatically modified to activate the %(framework)s overlay in', '# portage if and when the overlay is created.', '', ( 'repos_conf', '%(eprefix)s/etc/portage/repos.conf' ), '', ) result['sys-kernel/gentoo-sources'] = ( '', '# name_override', '# =============', '# No default value', '# scope: sectional only', '#', '# Instead of the name_prefix scheme, it is possible to specify a', '# name explicitly for the overlay packages generated by %(progdesc)s', '# to mirror the portage package in a given section. 
For example,', '# if we put name_override = %(prog)s in the [sys-kernel/gentoo-sources]', '# section, then the overlay package mirroring sys-kernel/gentoo-sources', '# generated by %(progdesc)s would be named sys-kernel/%(prog)s.', '', ( 'name_override', '%(prog)s-sources', True, True ), '', '# no_name_override', '# ================', '# No default value', '# scope: sectional only', '#', '# Instead of the no_name_prefix scheme, it is possible to specify a', '# name explicitly for the no-sources overlay packages generated by', '# %(progdesc)s to mirror the portage package in a given section. For', '# example if we put no_name_override = nope in the', '# [sys-kernel/gentoo-sources] section, then the no-sources package', '# mirroring sys-kernel/gentoo-sources in the overlay generated by', '# %(progdesc)s would be named sys-kernel/nope.', '', ( 'no_name_override', 'no-sources', True, True ), '', ) for key in result.keys(): val = result[key] result[key] = tuple( tuple( valsubitem if isinstance(valsubitem, bool) else subconsts(valsubitem) for valsubitem in valitem ) if isinstance(valitem, tuple) else subconsts(valitem) for valitem in val ) kng_example_config_data = result.copy() return result kng_global_defaults = None @trace def KNGGlobalDefaults(): global kng_global_defaults if kng_global_defaults: return kng_global_defaults.copy() ecd = KNGExampleConfigData() implicit = ecd['implicit_global'] if 'implicit_global' in ecd else () explicit = ecd['global'] if 'global' in ecd else () result = { valitem[0]: valitem[1] for valitem in chain(implicit, explicit) if isinstance(valitem, tuple) and (len(valitem) < 4 or not valitem[3]) } kng_global_defaults = result.copy() return result class KNGConfigItems(list): ''' Implements a list of KNGConfigItem instances with some dict-like interfaces for, i.e., determining whether a particular configuration key is already in the list, or setting the key in-place via __getitem__. For dict-like behaviors, the comments are ignored. 
''' @trace def __init__(self, *args, **kwargs): if 'fetal' in kwargs: self._fetal = kwargs.pop('fetal') else: self._fetal = False if 'daddy' in kwargs: self._daddy = kwargs.pop('daddy') else: self._daddy = None if self._fetal and self._daddy is None: raise TypeError('KNGConfigItems.__init__: fetal requires daddy.') super(KNGConfigItems, self).__init__(*args, **kwargs) @property def fetal(self): return self.is_fetal() def is_fetal(self): return self._fetal @trace def __contains__(self, key): for item in self: if item.key == key: return True return super(KNGConfigItems, self).__contains__(key) @suppress_tracing def __repr__(self): return 'KNGConfigItems(%s)' % super(KNGConfigItems, self).__repr__() @trace def iterkeypairs(self): return ( (item.key, item.value) for item in self if (not item.fetal) and (not item.iscomment) ) @trace def iterkeys(self): return ( item[0] for item in self.iterkeypairs() ) @trace def itervalues(self): return ( item[1] for item in self.iterkeypairs() ) @trace def iterexplicit(self): return ( item for item in self if item.isexplicit ) @trace def find_default(self, key): ''' Returns any default that would be associated with the provided key in the current section or None, if none can be found, using the global defaults dict. Raises TypeError if we have no daddy. ''' if self._daddy is None: raise TypeError('find_default requires daddy') if self._daddy.section_of(self) in ['global', 'implicit_global']: if key in KNGGlobalDefaults(): return KNGGlobalDefaults()[key] return None @trace def __getitem__(self, index): if isinstance(index, slice) or isinstance(index, int): return super(KNGConfigItems, self).__getitem__(index) for item in self: if (not item.iscomment) and item.key == index: # note: this will return any existing "fetus" with the requested key. 
return item return self._missing(index) @trace def _missing(self, key): # add a "fetal" KNGConfigItem for the provided key, analogous to __missing__ in dict rv = KNGConfigItem(key, None, default=self.find_default(key), daddy=self) self.append(rv) return rv @trace def __setitem__(self, index, value): if value is None: raise ValueError('KNGConfigItems.__setitem__: use del instead? assigning None is prohibited.') elif index == '__comment__': # always treat this as a request to append a new comment self._fetal = False self.append(KNGConfigItem(value, daddy=self)) return elif isinstance(index, slice) or isinstance(index, int): if self._fetal and isinstance(value, KNGConfigItem) and not value.fetal: self._fetal = False super(KNGConfigItems, self).__setitem__(index, value) return for itemindex, item in enumerate(self): if (not item.iscomment) and item.key == index: if isinstance(value, KNGConfigItem): if not value.fetal: self._fetal = False self[itemindex] = value return else: item.value = value return if isinstance(value, KNGConfigItem): self.append(value) else: self.append(KNGConfigItem(index, value, daddy=self)) @trace def __delitem__(self, index): if isinstance(index, slice) or isinstance(index, int): super(KNGConfigItems, self).__delitem__(index) else: for itemindex, item in enumerate(self): if (not item.iscomment) and item.key == index: super(KNGConfigItems, self).__delitem__(itemindex) return raise IndexError('Could not find item matching index "%s" in %s to delete' % (index, self)) @trace def insert(self, index, value): if isinstance(index, int): super(KNGConfigItems, self).insert(index, value) else: for itemindex, item in enumerate(self): if (not item.iscomment) and item.key == index: super(KNGConfigItems, self).insert(itemindex, value) return raise IndexError('Could not find item matching insertion index "%s" in %s' % (index, self)) @trace def append(self, value): for itemindex, item in enumerate(self): if (not item.iscomment) and item.key == value.key: 
del(self[itemindex]) super(KNGConfigItems, self).append(value) if isinstance(value, KNGConfigItem): if not value.fetal: self._fetal = False @trace def appendnew(self, *args, **kwargs): ''' Constructs a new KNGConfigItem using the provided arguments. If no daddy keyword argument is provided, then daddy=<this KNGConfigItems> will be added to the provided KNGConfigItem constructor arguments. The constructed item is then appended to this KNGConfigItems and returned. ''' kwargs['daddy'] = kwargs.pop('daddy', self) rv = KNGConfigItem(*args, **kwargs) self.append(rv) return rv @trace def extend(self, values): for v in values: self.append(v) @trace def pop(self, index=-1): v = self[index] del self[index] return v @trace def christen(self): # item is not used ATM, this is just a notification that we now have at least # one nonfetal item, which is enough. self._fetal = False @trace def __iadd__(self, values): self.extend(values) return self def __imul__(self, value): raise NotImplementedError('KNGConfigItems.__imul__') def __mul__ (self, other): raise NotImplementedError('KNGConfigItems.__mul__') def __rmul__ (self, other): raise NotImplementedError('KNGConfigItems.__rmul__') class KNGGlobalConfigItemsProxy(KNGConfigItems): @trace def __init__(self, daddy): self._implicit = daddy['implicit_global'] self._explicit = daddy['global'] super(KNGGlobalConfigItemsProxy, self).__init__(daddy=daddy, fetal=self.fetal) @trace def __contains__(self, key): return self._implicit.__contains__(key) or self._explicit.__contains__(key) @trace def __len__(self): return len(self._implicit) + len(self._explicit) def _fake_self_for_query(self): return list(self._implicit) + list(self._explicit) def append_destination_guess(self): if not self._explicit.fetal: return self._explicit elif not self._implicit.fetal: return self._implicit else: return self._explicit @suppress_tracing def __repr__(self): return 'KNGConfigItems(%s)' % self._fake_self_for_query() def is_fetal(self): return 
self._implicit.fetal and self._explicit.fetal @trace def iterkeypairs(self): return ( (item.key, item.value) for item in self._fake_self_for_query() if (not item.fetal) and (not item.iscomment) ) @trace def iterkeys(self): return ( item[0] for item in self.iterkeypairs() ) @trace def itervalues(self): return ( item[1] for item in self.iterkeypairs() ) @trace def iterexplicit(self): return ( item for item in self._fake_self_for_query() if item.isexplicit ) @trace def find_default(self, key): ''' Returns any default that would be associated with the provided key in the current section or None, if none can be found, using the global defaults dict. Raises TypeError if we have no daddy. ''' # section_of won't work but thankfully we don't need it! if key in KNGGlobalDefaults(): return KNGGlobalDefaults()[key] return None @trace def __getitem__(self, index): if isinstance(index, slice) or isinstance(index, int): return self._fake_self_for_query().__getitem__(index) for item in self._fake_self_for_query(): if (not item.iscomment) and item.key == index: # note: this will return any existing "fetus" with the requested key. return item return self._missing(index) @trace def _missing(self, key): # add a "fetal" KNGConfigItem for the provided key, analogous to __missing__ in dict real_daddy=self.append_destination_guess() rv = KNGConfigItem(key, None, default=self.find_default(key), daddy=real_daddy) real_daddy.append(rv) return rv @trace def __setitem__(self, index, value): if value is None: raise ValueError('KNGGlobalConfigItemsProxy.__setitem__: use del instead? 
assigning None is prohibited.') elif index == '__comment__': # always treat this as a request to append a new comment real_daddy = self.append_destination_guess() real_daddy._fetal = False real_daddy.append(KNGConfigItem(value, daddy=real_daddy)) return elif isinstance(index, int): if index >= len(self._implicit): self._explicit[index - len(self._implicit)] = value else: self._implicit[index] = value return elif isinstance(index, slice): start, stop, step = index.indices(len(self)) if step != 1: raise NotImplementedError('Fancy stepping behavior not supported here.') if start < len(self._implicit) and stop > len(self._implicit): raise NotImplementedError('No soap, honky-lips: %s, %s.' % (slice(start,stop,step), len(self._implicit))) if start < len(self._implicit): self._implicit[slice(start,stop,step)] = value else: start -= len(self._implicit) stop -= len(self._implicit) self._explicit[slice(start, stop, step)] = value return # done! for (itemindex, item), realdeal in chain(zip(enumerate(self._implicit), repeat(self._implicit)), zip(enumerate(self._explicit), repeat(self._explicit))): if (not item.iscomment) and item.key == index: if isinstance(value, KNGConfigItem): # this is fucked, what if daddy didn't match up? just copy the value i guess... # FIXME realdeal[itemindex].value = value.value return else: item.value = value return if isinstance(value, KNGConfigItem): self.append_destination_guess().append(value) else: self.append_destination_guess().append(KNGConfigItem(index, value, daddy=self)) @trace def __delitem__(self, index): if isinstance(index, slice): start, stop, step = index.indices(len(self)) if step != 1: raise NotImplementedError('Fancy stepping behavior not supported here.') if start < len(self._implicit) and stop > len(self._implicit): raise NotImplementedError('No soap, honky-lips: %s, %s.' 
% (slice(start,stop,step), len(self._implicit))) if start < len(self._implicit): del(self._implicit[slice(start,stop,step)]) else: start -= len(self._implicit) stop -= len(self._implicit) del(self._explicit[slice(start, stop, step)]) return elif isinstance(index, int): if index >= len(self._implicit): del(self._explicit[index - len(self._implicit)]) else: del(self._implicit[index]) return for (itemindex, item), realdeal in chain(zip(enumerate(self._implicit), repeat(self._implicit)), zip(enumerate(self._explicit), repeat(self._explicit))): if (not item.iscomment) and item.key == index: del(realdeal[itemindex]) return raise IndexError('Could not find item matching index "%s" in %s to delete' % (index, self)) @trace def insert(self, index, value): if isinstance(index, int): if index < len(self._implicit): self._implicit.insert(index, value) else: self._explicit.insert(index - len(self._implicit), value) return for (itemindex, item), realdeal in chain(zip(enumerate(self._implicit), repeat(self._implicit)), zip(enumerate(self._explicit), repeat(self._explicit))): if (not item.iscomment) and item.key == index: realdeal.insert(itemindex, value) return raise IndexError('Could not find item matching insertion index "%s" in %s' % (index, self)) @trace def append(self, value): for (itemindex, item), realdeal in chain(zip(enumerate(self._implicit), repeat(self._implicit)), zip(enumerate(self._explicit), repeat(self._explicit))): if (not item.iscomment) and item.key == value.key: del(realdeal[itemindex]) realdeal.append(value) return self.append_destination_guess().append(value) @trace def appendnew(self, *args, **kwargs): self.append_destination_guess().appendnew(*args, **kwargs) @trace def clear(self): self._implicit.clear() self._explicit.clear() @trace def index(self, *args): return self._fake_self_for_query().index(*args) @trace def pop(self, index=None): if index is None: index = len(self) - 1 if index >= len(self._implicit): return self._explicit.pop(index - 
len(self._implicit)) else: return self._implicit.pop(index) @trace def remove(self, value): if value in self._implicit: self._implicit.remove(value) else: self._explicit.remove(value) def reverse(self): raise NotImplementedError('KNGGlobalCojnfigItemsProxy.reverse') def __eq__(self, other): return self._fake_self_for_query().__eq__(other) def __ge__(self, other): return self._fake_self_for_query().__ge__(other) def __gt__(self, other): return self._fake_self_for_query().__gt__(other) def __hash__(self): return self._fake_self_for_query().__hash__() @trace def __iter__(self, *args, **kwargs): return self._fake_self_for_query().__iter__(*args, **kwargs) def __le__(self, other): return self._fake_self_for_query().__le__(other) def __lt__(self, other): return self._fake_self_for_query().__lt__(other) def __ne__(self, other): return self._fake_self_for_query().__ne__(other) def sort(self): raise NotImplementedError('KNGGlobalConfigItemsProxy.sort') def __reversed__(self): raise NotImplementedError('KNGGlobalConfigItemsProxy.__reversed__') def __sizeof__(self): return self._implicit.__sizeof__() + self._explicit.__sizeof__() @trace def christen(self, item): # should never happen since the KNGConfigItems should have the "real" daddys raise NotImplementedError('KNGGlobalConfigItemsProxy.christen!?') class KNGConfig(OrderedDict): @trace def __init__(self, kernelng_conf_file=KERNELNG_CONF_FILE, repos_conf_file=REPOS_CONF_FILE): self._kernelng_conf_file = kernelng_conf_file self._repos_conf_file = repos_conf_file self._globals = None super(KNGConfig, self).__init__() @trace def section_of(self, configitems): for section, cfgitems in list(self.items()): if cfgitems is configitems: return section raise ValueError(configitems) @trace def loadExampleConfig(self): self.clear() ecd = KNGExampleConfigData() for key in ecd.keys(): self[key] = KNGConfigItems(daddy=self) val = ecd[key] for item in val: if isinstance(item, tuple): if len(item) > 3 and item[3]: # when item[3] is true (no 
default), then this config. parameter will # not appear in KNGGlobalDefaults and therefore stored, no default is the only # sensible interpretation regardless of item[2] (force-stored). self[key].append(KNGConfigItem(item[0], item[1], reason='stored', daddy=self[key])) elif len(item) > 2 and item[2]: # When item[3] is False (meaning, the config. parameter item[0] does have # a default value and it's item[1]), but item[2] is true, this amounts to # saying "item[0] is set to item[1], which happens to be the default value, # but despite this, please force the config. parameter to appear in the .conf # file anyhow. We achieve this miracle like so: self[key].append(KNGConfigItem(item[0], item[1], default=item[1], reason='stored', daddy=self[key])) else: # add a comment item "illustrating" the default value in "pseudo-prose", as, otherwise, # the KNGConfigItem for the item[0] => item[1] setting would not appear anywhere in the # example configuration file (because its reason will be 'default', not 'stored') self[key].append(KNGConfigItem('# %(confkey)s = %(confval)s' % { 'confkey': item[0], 'confval': item[1] }))<|fim▁hole|> self[key].append(KNGConfigItem(item[0], item[1], default=item[1], reason='default', daddy=self[key])) else: self[key].append(KNGConfigItem(item, daddy=self[key])) @property def globals(self): ''' Returns a virtualized KNGConfigItems proxy which treats the 'global' and 'implicit_global' sections as a unified section. This helps prevent accidental mistakes like adding the same configuration key to both sections, and simplifies various usages. When both global and implicit_global sections exist, new items go into the explicit global section; when only one of these sections exist, new items go into it; when neither section exists, new items go into an explicit global section which will be created on demand. 
''' if self._globals is None: self._globals = KNGGlobalConfigItemsProxy(self) return self._globals @trace def writeConfigText(self, file=None, no_comments=False): ''' Write the currently loaded configuration to a given file. :param file: If provided, the output will be written into the provided click.File object. If not provided, output will go to standard output. ''' keys = self.keys() for key in keys: vlist = self[key] if vlist and not vlist.fetal: if key != 'implicit_global': click.echo('[%s]' % key, file=file) for item in vlist.iterexplicit(): if item.iscomment: if not no_comments: click.echo(item.comment, file=file) else: click.echo('%(itemkey)s = %(itemvalue)s' % { 'itemkey': item.key, 'itemvalue': item.value }, file=file) @trace def loadConfigText(self, file=None, dirty=False): ''' Loads the active configuration from a configuration file. If the file cannot be parsed, then raises a SyntaxError. :param file: If provided, this file will be used. It can be a python stream, a filename, or omitted entirely, in which case loadConfigText will look the default filename of kernelng.config.KERNELNG_CONF_FILE. :param dirty: If True, the active configuration object will not be cleaned before loading from the specified file. This will overwrite any settings which conflict and append any new settings values to the end of their corresponding sections. 
''' if file is None: file = click.open_file(KERNELNG_CONF_FILE, mode='r') with file: self.clear() section = 'implicit_global' for lineindex, line in enumerate((line.rstrip('\n') for line in file)): if CONFIG_COMMENT_RE.match(line): self[section].appendnew(line) continue m = CONFIG_SECTION_RE.match(line) if m: section = m.group(1) self[section].christen() echov(_sc('%s read section header: "%s"' % ('%(lc)s', click.style(section, fg='yellow', bold=True))), 2) continue m = CONFIG_SETTING_RE.match(line) if m: key, val = m.groups() if key in self[section]: raise KeyError('%s (line %s): [%s].%s first assigned as ' '"%s", then re-assigned as "%s".' % (click.format_filename(file.name), lineindex, section, key, self[section][key].value, val)) self[section][key] = val echov(_sc('%s loaded configuration setting: %s%s%s%s%s %s %s%s%s' % ( '%(lc)s', click.style('[', fg='white', bold=True), click.style(section, fg='yellow', bold=True), click.style(']', fg='white', bold=True), click.style('.', fg='white', bold=True), click.style(key, fg='blue', bold=True), click.style('=', fg='white', bold=True), click.style('"', fg='white', bold=True), click.style(val, fg='blue', bold=True), click.style('"', fg='white', bold=True) )), 2) continue raise SyntaxError('%s (line %s): Syntax error: "%s" unrecognized.' % (click.format_filename(file.name), lineindex, line)) # ATM we need these dummy default settings around... maybe later they should be # virtualized or something, this is pretty gross....? gd = KNGGlobalDefaults() for key in gd.keys(): if not key in self.globals: self.globals.appendnew(key=key, value=gd[key], reason='default') @trace def createOverlay(self, uid, gid, perm): pass @trace def __missing__(self, index): rv=KNGConfigItems(fetal=True, daddy=self) self[index] = rv return rv<|fim▁end|>
# add the KNGConfigItem mapping the config. parameter to its default value
<|file_name|>exceptions.py<|end_file_name|><|fim▁begin|>class InvalidAPIUsage(Exception):<|fim▁hole|> Exception.__init__(self) self.message = message if status_code is not None: self.status_code = status_code self.payload = payload def to_dict(self): rv = dict(self.payload or ()) rv['message'] = self.message return rv<|fim▁end|>
status_code = 400 def __init__(self, message, status_code=None, payload=None):
<|file_name|>ConMapPersister.java<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2010 Yahoo! Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. See the License for the specific * language governing permissions and limitations under the * License. See accompanying LICENSE file. */ package io.s4.persist; import io.s4.util.clock.Clock; import java.util.Enumeration; import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicInteger; import org.apache.log4j.Logger; public class ConMapPersister implements Persister { private AtomicInteger persistCount = new AtomicInteger(0); private boolean selfClean = false; private int cleanWaitTime = 40; // 20 seconds by default private String loggerName = "s4"; ConcurrentHashMap<String, CacheEntry> cache; Clock s4Clock; private int startCapacity = 5000; public void setStartCapacity(int startCapacity) { this.startCapacity = startCapacity; } public int getStartCapacity() { return startCapacity; } public void setSelfClean(boolean selfClean) { this.selfClean = selfClean; } public void setCleanWaitTime(int cleanWaitTime) { this.cleanWaitTime = cleanWaitTime; } public void setLoggerName(String loggerName) { this.loggerName = loggerName; } public ConMapPersister(Clock s4Clock) { this.s4Clock = s4Clock; } public void setS4Clock(Clock s4Clock) { this.s4Clock = s4Clock; } public ConMapPersister() { } public void init() { cache = new ConcurrentHashMap<String, CacheEntry>(this.getStartCapacity()); if (selfClean) { Runnable r = new Runnable() 
{ public void run() { while (!Thread.interrupted()) { int cleanCount = ConMapPersister.this.cleanOutGarbage(); Logger.getLogger(loggerName).info("Cleaned out " + cleanCount + " entries; Persister has " + cache.size() + " entries"); try { Thread.sleep(cleanWaitTime * 1000); } catch (InterruptedException ie) { Thread.currentThread().interrupt(); } <|fim▁hole|> } }; Thread t = new Thread(r); t.start(); t.setPriority(Thread.MIN_PRIORITY); } } public int getQueueSize() { return 0; } public int getPersistCount() { return persistCount.get(); } public int getCacheEntryCount() { return cache.size(); } public void setAsynch(String key, Object value, int period) { // there really is no asynch for the local cache set(key, value, period); } public void set(String key, Object value, int period) { persistCount.getAndIncrement(); CacheEntry ce = new CacheEntry(); ce.value = value; ce.period = period; ce.addTime = s4Clock.getCurrentTime(); cache.put(key, ce); } public Object get(String key) { CacheEntry ce = cache.get(key); if (ce == null) { return null; } if (ce.isExpired()) { return null; } return ce.value; } public Map<String, Object> getBulk(String[] keys) { HashMap map = new HashMap<String, Object>(); for (String key : keys) { Object value = get(key); if (value != null) { map.put(key, value); } } return map; } public Object getObject(String key) { return get(key); } public Map<String, Object> getBulkObjects(String[] keys) { return getBulk(keys); } public void remove(String key) { cache.remove(key); } public int cleanOutGarbage() { int count = 0; for (Enumeration en = cache.keys(); en.hasMoreElements();) { String key = (String) en.nextElement(); CacheEntry ce = cache.get(key); if (ce != null && ce.isExpired()) { count++; cache.remove(key); } } return count; } public Set<String> keySet() { return cache.keySet(); } public class CacheEntry { Object value; long addTime; int period; public boolean isExpired() { if (period > 0) { if ((addTime + (1000 * (long) period)) <= 
s4Clock.getCurrentTime()) { return true; } } return false; } } }<|fim▁end|>
}
<|file_name|>index.js<|end_file_name|><|fim▁begin|>/** * @typedef {object} SalesAPI * @property {Activities} Activities **/ function SalesAPI(options) { const _Activities = require('./Activities'); return { Activities: new _Activities(options), }; } <|fim▁hole|><|fim▁end|>
module.exports = SalesAPI;
<|file_name|>error.rs<|end_file_name|><|fim▁begin|>use std::path::PathBuf; <|fim▁hole|>error_chain!{ foreign_links { Id3(::id3::Error); Flac(::metaflac::Error); Toml(::toml::de::Error); Io(::std::io::Error) #[cfg(unix)]; Clap(::clap::Error); } errors { PathExists(path: PathBuf) { description("The path to be written to exists already") display("The path at {:?} already exists", path) } BadWorkers { description("Invalid workers value") display("Invalid workers value") } } }<|fim▁end|>
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>pub mod display; pub mod device; pub mod state; pub mod termios; mod err; use std::os::unix::io::AsRawFd; use std::io::{self, Write}; use std::mem; use std::fmt; use ::libc; use ::child::exec; use ::pty::prelude as pty; use self::device::Device; use self::termios::Termios; pub use self::state::ShellState; pub use self::err::ShellError; use self::display::Display; pub use self::display::winsz::Winszed;<|fim▁hole|> #[derive(Debug)] pub struct Shell { pid: libc::pid_t, #[allow(dead_code)] config: Termios, speudo: pty::Master, device: Device, state: ShellState, screen: Display, } impl Shell { /// The constructor method `new` returns a shell interface according to /// the command's option and a configured mode Line by Line. pub fn new ( repeat: Option<i64>, interval: Option<i64>, command: Option<&str>, windows: Option<Winszed>, ) -> Result<Self, ShellError> { unsafe { let winsz: Winszed = windows.and_then(|winsz| { let _ = Winszed::from_winsized(libc::STDIN_FILENO, &winsz); Some(winsz) }) .or_else(|| Winszed::new(libc::STDIN_FILENO).ok()) .unwrap_or_default(); match pty::Fork::from_ptmx() { Err(cause) => Err(ShellError::ForkFail(cause)), Ok(fork) => match fork { pty::Fork::Child(_) => { libc::ioctl(libc::STDIN_FILENO, libc::TIOCSWINSZ, &winsz); exec(command.unwrap_or("/bin/bash")) }, pty::Fork::Parent(pid, master) => { mem::forget(fork); Ok(Shell { pid: pid, config: Termios::default(), speudo: master, device: Device::from_speudo(master, libc::getpid()), state: ShellState::new(repeat, interval), screen: Display::from_winszed(winsz), }) }, } } } } } impl Parent for Shell { /// The accessor method `get_pid` returns the pid from the master. fn get_pid(&self) -> libc::pid_t { self.pid } /// The accessor method `get_speudo` returns the master interface. fn get_speudo(&self) -> &pty::Master { &self.speudo } /// The accessor method `get_screen` returns a reference on the Display interface. 
fn get_screen(&self) -> &Display { &self.screen } /// The accessor method `get_window_size` returns the window size. fn get_window_size(&self) -> &Winszed { self.screen.get_window_size() } /// The mutator method `set_window_size` redimentionnes the window /// with a default size. fn set_window_size(&mut self) { if let Ok(size) = Winszed::new(libc::STDOUT_FILENO) { self.set_window_size_with(&size); } } /// The mutator method `set_window_size` redimentionnes the window /// with a argument size. fn set_window_size_with(&mut self, size: &Winszed) { self.screen.set_window_size(size); unsafe { libc::ioctl(self.speudo.as_raw_fd(), libc::TIOCSWINSZ, size); libc::kill(self.pid, libc::SIGWINCH); } } /// The mutator method `write` set a buffer to the display /// without needing to print it fn write(&mut self, buf: &[u8]) -> io::Result<usize> { self.screen.write(buf) } /// The mutator method `next` updates the event and returns /// the new state. fn next(&mut self, event: state::DeviceState) -> ShellState { match () { #[cfg(feature = "auto-resize")] () => { self.state.update_from(&mut self.screen, event); if let Some(size) = self.state.is_resized() { self.set_window_size_with(&size); } self.state }, #[cfg(not(feature = "auto-resize"))] () => { self.state.update_from(&mut self.screen, event); self.state }, } } } impl Iterator for Shell { type Item = ShellState; fn next(&mut self) -> Option<ShellState> { match self.device.next() { None => None, #[cfg(feature = "auto-resize")] Some(event) => { self.state.update_from(&mut self.screen, event); if let Some(size) = self.state.is_resized() { self.set_window_size_with(&size); } Some(self.state) }, #[cfg(not(feature = "auto-resize"))] Some(event) => { self.state.update_from(&mut self.screen, event); Some(self.state) }, } } } impl Write for Shell { fn write(&mut self, buf: &[u8]) -> io::Result<usize> { self.speudo.write(buf) } fn flush(&mut self) -> io::Result<()> { self.speudo.flush() } } impl fmt::Display for Shell { fn fmt(&self, f: 
&mut fmt::Formatter) -> fmt::Result { write!(f, "{}", self.screen) } } impl Drop for Shell { fn drop(&mut self) { unsafe { assert_ne!(libc::close(self.speudo.as_raw_fd()), -1); libc::kill(self.pid, libc::SIGKILL); } } } impl Default for Shell { fn default() -> Shell { unsafe { let master: pty::Master = mem::zeroed(); Shell { pid: 0, config: mem::zeroed(), speudo: master, device: Device::from_speudo(master, 0), state: ShellState::default(), screen: Display::default(), } } } }<|fim▁end|>
pub use super::parent::Parent; /// The struct `Shell` is the speudo terminal interface.
<|file_name|>models.py<|end_file_name|><|fim▁begin|># This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. from datetime import datetime from django.conf import settings from django.db import models from django_extensions.db.fields import CreationDateTimeField class TimeStampedModel(models.Model): """ Replacement for django_extensions.db.models.TimeStampedModel that updates the modified timestamp by default, but allows that behavior to be overridden by passing a modified=False parameter to the save method """ created = CreationDateTimeField() modified = models.DateTimeField(editable=False, blank=True, db_index=True) class Meta: abstract = True def save(self, *args, **kwargs): if kwargs.pop('modified', True): self.modified = datetime.now() super(TimeStampedModel, self).save(*args, **kwargs) class Release(TimeStampedModel): CHANNELS = ('Nightly', 'Aurora', 'Beta', 'Release', 'ESR') PRODUCTS = ('Firefox', 'Firefox for Android', 'Firefox Extended Support Release', 'Firefox OS', 'Thunderbird') product = models.CharField(max_length=255, choices=[(p, p) for p in PRODUCTS]) channel = models.CharField(max_length=255, choices=[(c, c) for c in CHANNELS]) version = models.CharField(max_length=255) release_date = models.DateTimeField() text = models.TextField(blank=True) is_public = models.BooleanField(default=False) bug_list = models.TextField(blank=True) bug_search_url = models.CharField(max_length=2000, blank=True) system_requirements = models.TextField(blank=True) def major_version(self): return self.version.split('.', 1)[0] def get_bug_search_url(self): if self.bug_search_url: return self.bug_search_url if self.product == 'Thunderbird': return ( 'https://bugzilla.mozilla.org/buglist.cgi?' 
'classification=Client%20Software&query_format=advanced&' 'bug_status=RESOLVED&bug_status=VERIFIED&bug_status=CLOSED&' 'target_milestone=Thunderbird%20{version}.0&product=Thunderbird' '&resolution=FIXED' ).format(version=self.major_version()) return ( 'https://bugzilla.mozilla.org/buglist.cgi?' 'j_top=OR&f1=target_milestone&o3=equals&v3=Firefox%20{version}&' 'o1=equals&resolution=FIXED&o2=anyexact&query_format=advanced&' 'f3=target_milestone&f2=cf_status_firefox{version}&' 'bug_status=RESOLVED&bug_status=VERIFIED&bug_status=CLOSED&' 'v1=mozilla{version}&v2=fixed%2Cverified&limit=0' ).format(version=self.major_version()) def equivalent_release_for_product(self, product): """ Returns the release for a specified product with the same channel and major version with the highest minor version, or None if no such releases exist """ releases = self._default_manager.filter( version__startswith=self.major_version() + '.', channel=self.channel, product=product).order_by('-version') if not getattr(settings, 'DEV', False): releases = releases.filter(is_public=True) if releases: return sorted( sorted(releases, reverse=True, key=lambda r: len(r.version.split('.'))), reverse=True, key=lambda r: r.version.split('.')[1])[0] def equivalent_android_release(self): if self.product == 'Firefox': return self.equivalent_release_for_product('Firefox for Android') def equivalent_desktop_release(self): if self.product == 'Firefox for Android': return self.equivalent_release_for_product('Firefox') def notes(self, public_only=False): """ Retrieve a list of Note instances that should be shown for this release, grouped as either new features or known issues, and sorted first by sort_num highest to lowest, which is applied to both groups, and then for new features we also sort by tag in the order specified by Note.TAGS, with untagged notes coming first, then finally moving any note with the fixed tag that starts with the release version to the top, for what we call "dot fixes". 
""" tag_index = dict((tag, i) for i, tag in enumerate(Note.TAGS))<|fim▁hole|> known_issues = [n for n in notes if n.is_known_issue_for(self)] new_features = sorted( sorted( (n for n in notes if not n.is_known_issue_for(self)), key=lambda note: tag_index.get(note.tag, 0)), key=lambda n: n.tag == 'Fixed' and n.note.startswith(self.version), reverse=True) return new_features, known_issues def __unicode__(self): return '{product} {version} {channel}'.format( product=self.product, version=self.version, channel=self.channel) class Meta: # TODO: see if this has a significant performance impact ordering = ('product', '-version', 'channel') unique_together = (('product', 'version'),) class Note(TimeStampedModel): TAGS = ('New', 'Changed', 'HTML5', 'Feature', 'Language', 'Developer', 'Fixed') bug = models.IntegerField(null=True, blank=True) note = models.TextField(blank=True) releases = models.ManyToManyField(Release, blank=True) is_known_issue = models.BooleanField(default=False) fixed_in_release = models.ForeignKey(Release, null=True, blank=True, related_name='fixed_note_set') tag = models.CharField(max_length=255, blank=True, choices=[(t, t) for t in TAGS]) sort_num = models.IntegerField(default=0) is_public = models.BooleanField(default=True) image = models.ImageField(upload_to=lambda instance, filename: '/'.join(['screenshot', str(instance.pk), filename])) def is_known_issue_for(self, release): return self.is_known_issue and self.fixed_in_release != release def __unicode__(self): return self.note<|fim▁end|>
notes = self.note_set.order_by('-sort_num') if public_only: notes = notes.filter(is_public=True)
<|file_name|>testSQL.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # # Perform the following tests: # 1. Generate a POT file from a set of marked SQL statements # 2. Generate an SQL file from a translated PO file import filecmp import os import subprocess import testhelper import unittest class TestSQLFramework(unittest.TestCase): basedir = os.path.dirname(__file__) script = os.path.join(basedir, '../scripts/db-seed-i18n.py') tmpdirs = [(os.path.join(basedir, 'tmp/'))] sqlsource = os.path.join(basedir, 'data/sqlsource.sql') canonpot = os.path.join(basedir, 'data/sql2pot.pot') canonpo = os.path.join(basedir, 'data/sqlsource.po') testpot = os.path.join(basedir, 'tmp/sql2pot.pot') canonsql = os.path.join(basedir, 'data/po2sql.sql') testsql = os.path.join(basedir, 'tmp/testi18n.sql') def setUp(self): testhelper.setUp(self) def tearDown(self): testhelper.tearDown(self) def testgenpot(self): """ Create a POT file from our test SQL statements. """ subprocess.Popen( ('python', self.script, '--pot', self.sqlsource, '--output', self.testpot), 0, None, None).wait() # avoid basic timestamp conflicts<|fim▁hole|> def testgensql(self): """ Create a SQL file from a translated PO file. """ devnull = open('/dev/null', 'w') subprocess.Popen( ('python', self.script, '--sql', self.canonpo, '--locale', 'zz-ZZ', '--output', self.testsql), 0, None, None, devnull, devnull).wait() self.assertEqual(filecmp.cmp(self.canonsql, self.testsql), 1) if __name__ == '__main__': unittest.main()<|fim▁end|>
testhelper.mungepothead(self.testpot) testhelper.mungepothead(self.canonpot) self.assertEqual(filecmp.cmp(self.canonpot, self.testpot), 1)
<|file_name|>vi.js<|end_file_name|><|fim▁begin|>/* <|fim▁hole|> block: 'Canh đều', center: 'Toggle Toolbars', left: 'Canh trái', right: 'Canh phải' });<|fim▁end|>
Copyright (c) 2003-2013, CKSource - Frederico Knabben. All rights reserved. For licensing, see LICENSE.md or http://ckeditor.com/license */ CKEDITOR.plugins.setLang( 'kitchensink', 'vi', {
<|file_name|>sns.rs<|end_file_name|><|fim▁begin|>#![cfg(feature = "sns")] extern crate rusoto; use rusoto::sns::{SnsClient, ListTopicsInput}; use rusoto::{DefaultCredentialsProvider, Region}; use rusoto::default_tls_client;<|fim▁hole|>#[test] fn should_list_topics() { let credentials = DefaultCredentialsProvider::new().unwrap(); let client = SnsClient::new(default_tls_client().unwrap(), credentials, Region::UsEast1); let request = ListTopicsInput::default(); let result = client.list_topics(&request).unwrap(); println!("{:#?}", result); }<|fim▁end|>
<|file_name|>class-implement-traits.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. trait noisy { fn speak(&mut self); } #[deriving(Clone)] struct cat { meows : uint, how_hungry : int, name : String, } impl cat { fn meow(&mut self) { println!("Meow"); self.meows += 1u; if self.meows % 5u == 0u { self.how_hungry += 1; } } } impl cat { pub fn eat(&mut self) -> bool { if self.how_hungry > 0 { println!("OM NOM NOM"); self.how_hungry -= 2; return true; } else { println!("Not hungry!"); return false; } } } impl noisy for cat { fn speak(&mut self) { self.meow(); } } fn cat(in_x : uint, in_y : int, in_name: String) -> cat { cat { meows: in_x, how_hungry: in_y, name: in_name.clone()<|fim▁hole|>} fn make_speak<C:noisy>(mut c: C) { c.speak(); } pub fn main() { let mut nyan = cat(0u, 2, "nyan".to_string()); nyan.eat(); assert!((!nyan.eat())); for _ in range(1u, 10u) { make_speak(nyan.clone()); } }<|fim▁end|>
}
<|file_name|>Config.js<|end_file_name|><|fim▁begin|>/** * @author mrdoob / http://mrdoob.com/ */ var Config = function () { var namespace = 'threejs-inspector'; var storage = { 'selectionBoxEnabled': false, 'rafEnabled' : false, 'rafFps' : 30, } if ( window.localStorage[ namespace ] === undefined ) { window.localStorage[ namespace ] = JSON.stringify( storage ); } else { var data = JSON.parse( window.localStorage[ namespace ] ); for ( var key in data ) { storage[ key ] = data[ key ]; } } return { getKey: function ( key ) { return storage[ key ]; }, setKey: function () { // key, value, key, value ... for ( var i = 0, l = arguments.length; i < l; i += 2 ) { storage[ arguments[ i ] ] = arguments[ i + 1 ]; } window.localStorage[ namespace ] = JSON.stringify( storage ); console.log( '[' + /\d\d\:\d\d\:\d\d/.exec( new Date() )[ 0 ] + ']', 'Saved config to LocalStorage.' ); }, <|fim▁hole|> } } };<|fim▁end|>
clear: function () { delete window.localStorage[ namespace ];
<|file_name|>export_fragments.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from django.core.management.base import BaseCommand, CommandError from annotations.models import Corpus from annotations.exports import export_fragments from core.utils import CSV, XLSX class Command(BaseCommand): help = 'Exports existing Fragments for the given Corpus and Languages' def add_arguments(self, parser): parser.add_argument('corpus', type=str) parser.add_argument('languages', nargs='+', type=str) parser.add_argument('--add_lemmata', action='store_true', dest='add_lemmata', default=False) parser.add_argument('--add_indices', action='store_true', dest='add_indices', default=False) parser.add_argument('--xlsx', action='store_true', dest='format_xlsx', default=False) parser.add_argument('--doc', dest='document') parser.add_argument('--formal_structure') def handle(self, *args, **options): # Retrieve the Corpus from the database try: corpus = Corpus.objects.get(title=options['corpus']) except Corpus.DoesNotExist: raise CommandError('Corpus with title {} does not exist'.format(options['corpus'])) format_ = XLSX if options['format_xlsx'] else CSV for language in options['languages']: if not corpus.languages.filter(iso=language):<|fim▁hole|> raise CommandError('Language {} does not exist'.format(language)) filename = 'fragments_{lang}.{ext}'.format(lang=language, ext=format_) export_fragments(filename, format_, corpus, language, document=options['document'], add_lemmata=options['add_lemmata'], add_indices=options['add_indices'], formal_structure=options['formal_structure'])<|fim▁end|>
<|file_name|>deriving-in-fn.rs<|end_file_name|><|fim▁begin|>// run-pass #![allow(dead_code)] pub fn main() { #[derive(Debug)] struct Foo {<|fim▁hole|> let f = Foo { foo: 10 }; format!("{:?}", f); }<|fim▁end|>
foo: isize, }
<|file_name|>memory.rs<|end_file_name|><|fim▁begin|>/* Precached - A Linux process monitor and pre-caching daemon Copyright (C) 2017-2020 the precached developers This file is part of precached. Precached is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Precached is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with Precached. If not, see <http://www.gnu.org/licenses/>. */ use std; use std::ffi::CString; use std::fs::File; use std::io::{Error, ErrorKind, Result}; use std::os::unix::io::IntoRawFd; use std::path::{Path, PathBuf}; use std::ptr; use serde_derive::{Serialize, Deserialize}; use log::{trace, debug, info, warn, error, log, LevelFilter}; use crate::constants; /// Represents a file backed memory mapping #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] pub struct MemoryMapping { pub filename: PathBuf, pub addr: usize, pub len: usize, } impl MemoryMapping { pub fn new(filename: &Path, addr: usize, len: usize) -> MemoryMapping { MemoryMapping { filename: PathBuf::from(filename), addr, len, } } } #[cfg(target_pointer_width = "64")] type StatSize = i64; #[cfg(target_pointer_width = "32")] type StatSize = i32; /// Cache the file `filename` into the systems page cache /// This currently performs the following actions: /// * Open file `filename` and query it's size. /// Return an Err if file exceeds the max. 
prefetch size /// * Give the system's kernel a readahead hint via readahead(2) syscall /// * Additionally mmap(2) the file /// * Call posix_fadvise(2) with `POSIX_FADV_WILLNEED` | `POSIX_FADV_SEQUENTIAL` /// to give the kernel a hint on how we are about to use that file /// * Call madvise(2) with `MADV_WILLNEED` | `MADV_SEQUENTIAL` | `MADV_MERGEABLE` /// to give the kernel a hint on how we are about to use that memory mapping /// * Call mlock(2) if `with_mlock` is set to `true` to prevent /// eviction of the files pages from the page cache /// /// Returns a `MemoryMapping` representing the newly created file backed mapping /// or an Err if the requested actions could not be performed pub fn cache_file(filename: &Path, with_mlock: bool) -> Result<MemoryMapping> { trace!("Caching file: {:?}", filename); let file = File::open(filename)?; let fd = file.into_raw_fd(); // We are interested in file size let mut stat: libc::stat = unsafe { std::mem::zeroed() }; unsafe { libc::fstat(fd, &mut stat); }; if stat.st_mode & libc::S_ISUID == libc::S_ISUID || stat.st_mode & libc::S_ISGID == libc::S_ISGID { // Try to close the file descriptor unsafe { libc::close(fd) }; let custom_error = Error::new(ErrorKind::Other, "Not prefetching SUID/SGID files!"); Err(custom_error) } else if stat.st_size > constants::MAX_ALLOWED_PREFETCH_SIZE as StatSize { // Try to close the file descriptor unsafe { libc::close(fd) }; let custom_error = Error::new(ErrorKind::Other, "Maximum allowed file size for prefetching exceeded!"); Err(custom_error) } else { // Manually fault in all pages let result = unsafe { libc::readahead(fd, 0, stat.st_size as usize) }; if result < 0 { // Try to close the file descriptor unsafe { libc::close(fd) }; Err(std::io::Error::last_os_error()) } else { trace!("Successfully called readahead() for: {:?}", filename); // Call to readahead succeeded, now mmap() and mlock() if requested let addr = unsafe { libc::mmap( ptr::null_mut(), stat.st_size as usize, libc::PROT_READ, 
libc::MAP_SHARED, fd, 0, ) }; if addr < ptr::null_mut() { // Try to close the file descriptor unsafe { libc::close(fd) }; Err(std::io::Error::last_os_error()) } else { trace!("Successfully called mmap() for: {:?}", filename); // If we are on a 64 bit architecture #[cfg(target_pointer_width = "64")] let result = unsafe { libc::posix_fadvise( fd, 0, stat.st_size as i64, libc::POSIX_FADV_WILLNEED | libc::POSIX_FADV_SEQUENTIAL, ) }; <|fim▁hole|> libc::posix_fadvise( fd, 0, stat.st_size as i32, libc::POSIX_FADV_WILLNEED | libc::POSIX_FADV_SEQUENTIAL, ) }; if result < 0 { // Try to close the file descriptor unsafe { libc::close(fd) }; Err(std::io::Error::last_os_error()) } else { trace!("Successfully called posix_fadvise() for: {:?}", filename); let result = unsafe { libc::madvise( addr as *mut libc::c_void, stat.st_size as usize, libc::MADV_WILLNEED | libc::MADV_SEQUENTIAL | libc::MADV_MERGEABLE, ) }; if result < 0 as libc::c_int { // Try to close the file descriptor unsafe { libc::close(fd) }; Err(std::io::Error::last_os_error()) } else { trace!("Successfully called madvise() for: {:?}", filename); if with_mlock { let result = unsafe { libc::mlock(addr as *mut libc::c_void, stat.st_size as usize) }; if result < 0 as libc::c_int { // Try to close the file descriptor unsafe { libc::close(fd) }; Err(std::io::Error::last_os_error()) } else { trace!("Successfully called mlock() for: {:?}", filename); let result = unsafe { libc::close(fd) }; if result < 0 as libc::c_int { Err(std::io::Error::last_os_error()) } else { trace!("Successfully called close() for: {:?}", filename); let mapping = MemoryMapping::new(filename, addr as usize, stat.st_size as usize); Ok(mapping) } } } else { // We don't perform a call to mlock() // Try to close the file descriptor unsafe { libc::close(fd) }; let mapping = MemoryMapping::new(filename, addr as usize, stat.st_size as usize); Ok(mapping) } } } } } } } /// Unmaps a memory mapping that was previously created by `cache_file(...)` pub fn 
free_mapping(mapping: &MemoryMapping) -> bool { let result = unsafe { libc::munmap(mapping.addr as *mut libc::c_void, mapping.len) }; result == 0 } /// Prime the kernel's dentry caches by reading the metadata of the file `filename` pub fn prime_metadata_cache(filename: &Path) -> Result<()> { trace!("Caching metadata of file: {:?}", filename); let mut stat: libc::stat = unsafe { std::mem::zeroed() }; let f = unsafe { CString::from_vec_unchecked(filename.to_string_lossy().into_owned().into()) }; let result = unsafe { libc::stat(f.as_ptr(), &mut stat) }; if result < 0 as libc::c_int { Err(std::io::Error::last_os_error()) } else { trace!("Successfully called stat() for: {:?}", filename); Ok(()) } }<|fim▁end|>
// If we are on a 32 bit architecture #[cfg(target_pointer_width = "32")] let result = unsafe {
<|file_name|>media.dev.js<|end_file_name|><|fim▁begin|>var findPosts; (function($){ findPosts = { open : function(af_name, af_val) { var st = document.documentElement.scrollTop || $(document).scrollTop(); if ( af_name && af_val ) { $('#affected').attr('name', af_name).val(af_val); } $('#find-posts').show().draggable({ handle: '#find-posts-head' }).css({'top':st + 50 + 'px','left':'50%','marginLeft':'-250px'}); $('#find-posts-input').focus().keyup(function(e){ if (e.which == 27) { findPosts.close(); } // close on Escape }); return false; }, close : function() { $('#find-posts-response').html(''); $('#find-posts').draggable('destroy').hide(); }, send : function() { var post = { ps: $('#find-posts-input').val(), action: 'find_posts', _ajax_nonce: $('#_ajax_nonce').val(), post_type: $('input[name="find-posts-what"]:checked').val() }; $.ajax({ type : 'POST', url : ajaxurl, data : post, success : function(x) { findPosts.show(x); }, error : function(r) { findPosts.error(r); } }); }, show : function(x) { if ( typeof(x) == 'string' ) { this.error({'responseText': x}); return; <|fim▁hole|> if ( r.errors ) { this.error({'responseText': wpAjax.broken}); } r = r.responses[0]; $('#find-posts-response').html(r.data); }, error : function(r) { var er = r.statusText; if ( r.responseText ) { er = r.responseText.replace( /<.[^<>]*?>/g, '' ); } if ( er ) { $('#find-posts-response').html(er); } } }; $(document).ready(function() { $('#find-posts-submit').click(function(e) { if ( '' == $('#find-posts-response').html() ) e.preventDefault(); }); $( '#find-posts .find-box-search :input' ).keypress( function( event ) { if ( 13 == event.which ) { findPosts.send(); return false; } } ); $( '#find-posts-search' ).click( findPosts.send ); $( '#find-posts-close' ).click( findPosts.close ); $('#doaction, #doaction2').click(function(e){ $('select[name^="action"]').each(function(){ if ( $(this).val() == 'attach' ) { e.preventDefault(); findPosts.open(); } }); }); }); })(jQuery);<|fim▁end|>
} var r = wpAjax.parseAjaxResponse(x);
<|file_name|>main.js<|end_file_name|><|fim▁begin|>ace.require("ace/ext/language_tools"); var editor = ace.edit("editor"); editor.setOptions({ enableBasicAutocompletion: true }); editor.setTheme("ace/theme/eclipse"); editor.getSession().setMode("ace/mode/java"); document.getElementById('editor').style.fontSize = '18px'; editor.setAutoScrollEditorIntoView(true); var codeTemplates = {}; var viewConfig = { showOutput: false, showInput: false }; var prevLang; function showOutput(output) { var stdout = ''; var stderr = ''; if (output.status === 0) { stdout = output.output; } else if (output.status === 1) { stderr = '<p class="error">Compiler error !</p>'; stderr += output.error; } else if (output.status === 2) { stderr = '<p class="error">Runtime error !</p>'; stderr += output.error; } else if (output.status === 3) { stderr = '<p class="error">Timeout !</p>'; stderr += output.error; } var out = ''; if (stderr) { out += '<b>Stderror</b>\n' + stderr; } if (stdout) { out += stdout; } if (!viewConfig.showOutput) { $('#btn-show-output').click(); } $('#output-p').html(out); // $('#output-data').show(); // if (!$('#bottom-pane').hasClass('opened')) { // $('#bottom-pane').addClass('opened'); // } windowResized(); } function loadLangs() { $('#btn-run').prop('disabled', true); $.ajax({ type: "GET", url: '/apis/langs' }).done(function (data) { showLangs(data); $('#btn-run').prop('disabled', false); }).fail(function (data) { alert("error"); $('#btn-run').prop('disabled', false); }); } function showLangs(langs) { for (var i = 0; i < langs.supportedLangs.length; i++) { var supportedLang = langs.supportedLangs[i]; $('#lang').append($('<option>', { value: supportedLang.id, text: supportedLang.name })); codeTemplates[supportedLang.id] = supportedLang.template; } $('#lang').val(langs.supportedLangs[0].id); onLangChanged(); } $(document).ready(function () { $('#btn-output').click(function () { $('#output-data').toggle(); $('#bottom-pane').toggleClass('opened'); windowResized(); }); 
$('#btn-run').click(function () { onRunning(); var codeRunRequest = { lang: $('#lang').find(":selected").val(), code: editor.getValue(), input: $('#text-input').val() }; runCode(codeRunRequest); // $.ajax({ // type: "POST", // url: '/apis/run',<|fim▁hole|>// showOutput(data); // $('#btn-run').prop('disabled', false); // }).fail(function (data) { // alert("error"); // $('#btn-run').prop('disabled', false); // }); }); $('#btn-hide-output').click(function () { $('#btn-hide-output').hide(); $('#btn-show-output').show(); viewConfig.showOutput = false; windowResized(); }); $('#btn-show-output').click(function () { $('#btn-hide-output').show(); $('#btn-show-output').hide(); viewConfig.showOutput = true; windowResized(); }); $('#btn-hide-input').click(function () { $('#btn-hide-input').hide(); $('#btn-show-input').show(); viewConfig.showInput = false; windowResized(); }); $('#btn-show-input').click(function () { $('#btn-hide-input').show(); $('#btn-show-input').hide(); viewConfig.showInput = true; windowResized(); }); loadLangs(); $(window).resize(function () { windowResized(); }); windowResized(); }); $('#lang').change(function () { onLangChanged(); }); function onLangChanged() { var lang = $('#lang').find(":selected").val(); if (prevLang) { codeTemplates[prevLang] = editor.getValue(); } editor.setValue(codeTemplates[lang], -1); prevLang = lang; if (lang === 'java7') { editor.getSession().setMode("ace/mode/java"); } else if (lang === 'python3') { editor.getSession().setMode("ace/mode/python"); } else if (lang === 'c' || lang === 'c++') { editor.getSession().setMode("ace/mode/c_cpp"); } else if (lang === 'c#') { editor.getSession().setMode("ace/mode/csharp"); } } function windowResized() { var aceWrapper = $('#ace_wrapper'); var aceEditor = $('#editor'); var outputWrapper = $('#output-wrapper'); var outputData = $('#output-data'); var inputWrapper = $('#input-wrapper'); var textInput = $('#text-input'); var rightDiff; var inputHeight; var outputHeight; var textInputWidth; 
var textInputHeight; if (viewConfig.showInput || viewConfig.showOutput) { rightDiff = outputWrapper.width() + 4; } else { rightDiff = 0; } var aceHeight = window.innerHeight - aceWrapper.position().top;// - bottomPane.height(); var aceWidth = window.innerWidth - rightDiff; if (viewConfig.showOutput) { outputWrapper.show(); outputHeight = aceHeight; } else { outputWrapper.hide(); outputHeight = 0; } if (viewConfig.showInput) { inputWrapper.show(); inputHeight = 225; if (viewConfig.showOutput) { outputHeight -= inputHeight; } else { inputHeight = aceHeight; } textInputHeight = inputHeight - 62; textInputWidth = rightDiff - 23; } else { inputWrapper.hide(); inputHeight = 0; } // var bottomPane = $('#bottom-pane'); // var outputWrapperHeight = aceHeight - inputWrapper.height() - 2; var outputTextHeight = outputHeight - 52; aceWrapper.css('height', aceHeight + 'px'); aceWrapper.css('width', aceWidth + 'px'); aceEditor.css('height', aceHeight + 'px'); aceEditor.css('width', aceWidth + 'px'); editor.resize(); if (viewConfig.showOutput) { outputWrapper.css('height', outputHeight + 'px'); outputData.css('max-height', outputTextHeight + 'px'); } if (viewConfig.showInput) { inputWrapper.css('height', inputHeight + 'px'); textInput.css('width', textInputWidth + 'px'); textInput.css('height', textInputHeight + 'px'); // outputData.css('max-height', outputTextHeight + 'px'); } } var stompClient; function connect() { var socket = new SockJS('/coderun'); stompClient = Stomp.over(socket); stompClient.connect({}, function (frame) { // window.alert("connected !"); stompClient.subscribe('/user/queue/reply', function (reply) { showOutput(JSON.parse(reply.body)); onFinished(); }); }); } function runCode(codeRunRequest) { stompClient.send('/iapis/run', {}, JSON.stringify(codeRunRequest)); } function onRunning() { $('#btn-run').prop('disabled', true); $('#icon-running').show(); $('#icon-run').hide(); } function onFinished() { $('#btn-run').prop('disabled', false); 
$('#icon-running').hide(); $('#icon-run').show(); } connect();<|fim▁end|>
// data: JSON.stringify(codeRunRequest), // contentType: 'application/json' // }).done(function (data) {
<|file_name|>index.js<|end_file_name|><|fim▁begin|>var test = require("tape") var add = require("../index.js") <|fim▁hole|> test("can add numbers", function (assert) { assert.equal(add(3, 9), 12) assert.end() })<|fim▁end|>
test("add is a function", function (assert) { assert.equal(typeof add, "function") assert.end() })
<|file_name|>book.py<|end_file_name|><|fim▁begin|># Copyright 2015 Rackspace US, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Fastfood Chef Cookbook manager.""" from __future__ import print_function import os from fastfood import utils class CookBook(object): """Chef Cookbook object. Understands metadata.rb, Berksfile and how to parse them. """ def __init__(self, path): """Initialize CookBook wrapper at 'path'.""" self.path = utils.normalize_path(path) self._metadata = None if not os.path.isdir(path): raise ValueError("Cookbook dir %s does not exist." % self.path) self._berksfile = None @property def name(self): """Cookbook name property.""" try: return self.metadata.to_dict()['name'] except KeyError: raise LookupError("%s is missing 'name' attribute'." 
% self.metadata) @property def metadata(self): """Return dict representation of this cookbook's metadata.rb .""" self.metadata_path = os.path.join(self.path, 'metadata.rb') if not os.path.isfile(self.metadata_path): raise ValueError("Cookbook needs metadata.rb, %s" % self.metadata_path) if not self._metadata: self._metadata = MetadataRb(open(self.metadata_path, 'r+')) return self._metadata @property def berksfile(self): """Return this cookbook's Berksfile instance.""" self.berks_path = os.path.join(self.path, 'Berksfile') if not self._berksfile: if not os.path.isfile(self.berks_path): raise ValueError("No Berksfile found at %s" % self.berks_path) self._berksfile = Berksfile(open(self.berks_path, 'r+')) return self._berksfile class MetadataRb(utils.FileWrapper): """Wrapper for a metadata.rb file.""" @classmethod def from_dict(cls, dictionary): """Create a MetadataRb instance from a dict.""" cookbooks = set() # put these in order groups = [cookbooks] for key, val in dictionary.items(): if key == 'depends': cookbooks.update({cls.depends_statement(cbn, meta) for cbn, meta in val.items()}) body = '' for group in groups: if group: body += '\n' body += '\n'.join(group) return cls.from_string(body) @staticmethod def depends_statement(cookbook_name, metadata=None): """Return a valid Ruby 'depends' statement for the metadata.rb file.""" line = "depends '%s'" % cookbook_name if metadata: if not isinstance(metadata, dict): raise TypeError("Stencil dependency options for %s " "should be a dict of options, not %s." 
% (cookbook_name, metadata)) if metadata: line = "%s '%s'" % (line, "', '".join(metadata)) return line def to_dict(self): """Return a dictionary representation of this metadata.rb file.""" return self.parse() def parse(self): """Parse the metadata.rb into a dict.""" data = utils.ruby_lines(self.readlines()) data = [tuple(j.strip() for j in line.split(None, 1)) for line in data] depends = {} for line in data: if not len(line) == 2: continue key, value = line if key == 'depends': value = value.split(',') lib = utils.ruby_strip(value[0]) detail = [utils.ruby_strip(j) for j in value[1:]] depends[lib] = detail datamap = {key: utils.ruby_strip(val) for key, val in data} if depends: datamap['depends'] = depends self.seek(0) return datamap def merge(self, other): """Add requirements from 'other' metadata.rb into this one.""" if not isinstance(other, MetadataRb): raise TypeError("MetadataRb to merge should be a 'MetadataRb' " "instance, not %s.", type(other)) current = self.to_dict() new = other.to_dict() # compare and gather cookbook dependencies meta_writelines = ['%s\n' % self.depends_statement(cbn, meta) for cbn, meta in new.get('depends', {}).items() if cbn not in current.get('depends', {})] self.write_statements(meta_writelines) return self.to_dict() class Berksfile(utils.FileWrapper): """Wrapper for a Berksfile.""" berks_options = [ 'branch', 'git', 'path', 'ref', 'revision', 'tag', ] def to_dict(self): """Return a dictionary representation of this Berksfile.""" return self.parse() def parse(self): """Parse this Berksfile into a dict.""" self.flush() self.seek(0) data = utils.ruby_lines(self.readlines()) data = [tuple(j.strip() for j in line.split(None, 1)) for line in data] datamap = {} for line in data: if len(line) == 1: datamap[line[0]] = True elif len(line) == 2: key, value = line if key == 'cookbook': datamap.setdefault('cookbook', {}) value = [utils.ruby_strip(v) for v in value.split(',')] lib, detail = value[0], value[1:] datamap['cookbook'].setdefault(lib, 
{}) # if there is additional dependency data but its # not the ruby hash, its the version constraint if detail and not any("".join(detail).startswith(o) for o in self.berks_options): constraint, detail = detail[0], detail[1:] datamap['cookbook'][lib]['constraint'] = constraint if detail: for deet in detail: opt, val = [ utils.ruby_strip(i) for i in deet.split(':', 1) ] if not any(opt == o for o in self.berks_options):<|fim▁hole|> raise ValueError( "Cookbook detail '%s' does not specify " "one of '%s'" % (opt, self.berks_options)) else: datamap['cookbook'][lib][opt.strip(':')] = ( utils.ruby_strip(val)) elif key == 'source': datamap.setdefault(key, []) datamap[key].append(utils.ruby_strip(value)) elif key: datamap[key] = utils.ruby_strip(value) self.seek(0) return datamap @classmethod def from_dict(cls, dictionary): """Create a Berksfile instance from a dict.""" cookbooks = set() sources = set() other = set() # put these in order groups = [sources, cookbooks, other] for key, val in dictionary.items(): if key == 'cookbook': cookbooks.update({cls.cookbook_statement(cbn, meta) for cbn, meta in val.items()}) elif key == 'source': sources.update({"source '%s'" % src for src in val}) elif key == 'metadata': other.add('metadata') body = '' for group in groups: if group: body += '\n' body += '\n'.join(group) return cls.from_string(body) @staticmethod def cookbook_statement(cookbook_name, metadata=None): """Return a valid Ruby 'cookbook' statement for the Berksfile.""" line = "cookbook '%s'" % cookbook_name if metadata: if not isinstance(metadata, dict): raise TypeError("Berksfile dependency hash for %s " "should be a dict of options, not %s." % (cookbook_name, metadata)) # not like the others... 
if 'constraint' in metadata: line += ", '%s'" % metadata.pop('constraint') for opt, spec in metadata.items(): line += ", %s: '%s'" % (opt, spec) return line def merge(self, other): """Add requirements from 'other' Berksfile into this one.""" if not isinstance(other, Berksfile): raise TypeError("Berksfile to merge should be a 'Berksfile' " "instance, not %s.", type(other)) current = self.to_dict() new = other.to_dict() # compare and gather cookbook dependencies berks_writelines = ['%s\n' % self.cookbook_statement(cbn, meta) for cbn, meta in new.get('cookbook', {}).items() if cbn not in current.get('cookbook', {})] # compare and gather 'source' requirements berks_writelines.extend(["source '%s'\n" % src for src in new.get('source', []) if src not in current.get('source', [])]) self.write_statements(berks_writelines) return self.to_dict()<|fim▁end|>
<|file_name|>search.js<|end_file_name|><|fim▁begin|>/* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 1.1/GPL 2.0/LGPL 2.1 * * The contents of this file are subject to the Mozilla Public License Version * 1.1 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * http://www.mozilla.org/MPL/ * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is Ajax.org Code Editor (ACE). * * The Initial Developer of the Original Code is * Ajax.org B.V. * Portions created by the Initial Developer are Copyright (C) 2010 * the Initial Developer. All Rights Reserved. * * Contributor(s): * Fabian Jakobs <fabian AT ajax DOT org> * * Alternatively, the contents of this file may be used under the terms of * either the GNU General Public License Version 2 or later (the "GPL"), or * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), * in which case the provisions of the GPL or the LGPL are applicable instead * of those above. If you wish to allow use of your version of this file only * under the terms of either the GPL or the LGPL, and not to allow others to * use your version of this file under the terms of the MPL, indicate your * decision by deleting the provisions above and replace them with the notice * and other provisions required by the GPL or the LGPL. If you do not delete * the provisions above, a recipient may use your version of this file under * the terms of any one of the MPL, the GPL or the LGPL. 
* * ***** END LICENSE BLOCK ***** */ define(function(require, exports, module) { var lang = require("pilot/lang"); var oop = require("pilot/oop"); var Range = require("ace/range").Range; var Search = function() { this.$options = { needle: "", backwards: false, wrap: false, caseSensitive: false, wholeWord: false, scope: Search.ALL, regExp: false }; }; Search.ALL = 1; Search.SELECTION = 2; (function() { this.set = function(options) { oop.mixin(this.$options, options); return this; }; this.getOptions = function() { return lang.copyObject(this.$options); }; this.find = function(session) { if (!this.$options.needle) return null; if (this.$options.backwards) { var iterator = this.$backwardMatchIterator(session); } else { iterator = this.$forwardMatchIterator(session); } var firstRange = null; iterator.forEach(function(range) { firstRange = range; return true; }); return firstRange; }; this.findAll = function(session) { if (!this.$options.needle) return []; if (this.$options.backwards) { var iterator = this.$backwardMatchIterator(session); } else { iterator = this.$forwardMatchIterator(session); } var ranges = []; iterator.forEach(function(range) { ranges.push(range); }); return ranges; }; this.replace = function(input, replacement) { var re = this.$assembleRegExp(); var match = re.exec(input); if (match && match[0].length == input.length) { if (this.$options.regExp) { return input.replace(re, replacement); } else { return replacement; } } else { return null; } }; this.$forwardMatchIterator = function(session) { var re = this.$assembleRegExp(); var self = this; return { forEach: function(callback) { self.$forwardLineIterator(session).forEach(function(line, startIndex, row) { if (startIndex) { line = line.substring(startIndex); } var matches = []; line.replace(re, function(str) { var offset = arguments[arguments.length-2]; matches.push({ str: str, offset: startIndex + offset }); return str; });<|fim▁hole|> var match = matches[i]; var range = self.$rangeFromMatch(row, 
match.offset, match.str.length); if (callback(range)) return true; } }); } }; }; this.$backwardMatchIterator = function(session) { var re = this.$assembleRegExp(); var self = this; return { forEach: function(callback) { self.$backwardLineIterator(session).forEach(function(line, startIndex, row) { if (startIndex) { line = line.substring(startIndex); } var matches = []; line.replace(re, function(str, offset) { matches.push({ str: str, offset: startIndex + offset }); return str; }); for (var i=matches.length-1; i>= 0; i--) { var match = matches[i]; var range = self.$rangeFromMatch(row, match.offset, match.str.length); if (callback(range)) return true; } }); } }; }; this.$rangeFromMatch = function(row, column, length) { return new Range(row, column, row, column+length); }; this.$assembleRegExp = function() { if (this.$options.regExp) { var needle = this.$options.needle; } else { needle = lang.escapeRegExp(this.$options.needle); } if (this.$options.wholeWord) { needle = "\\b" + needle + "\\b"; } var modifier = "g"; if (!this.$options.caseSensitive) { modifier += "i"; } var re = new RegExp(needle, modifier); return re; }; this.$forwardLineIterator = function(session) { var searchSelection = this.$options.scope == Search.SELECTION; var range = session.getSelection().getRange(); var start = session.getSelection().getCursor(); var firstRow = searchSelection ? range.start.row : 0; var firstColumn = searchSelection ? range.start.column : 0; var lastRow = searchSelection ? 
range.end.row : session.getLength() - 1; var wrap = this.$options.wrap; function getLine(row) { var line = session.getLine(row); if (searchSelection && row == range.end.row) { line = line.substring(0, range.end.column); } return line; } return { forEach: function(callback) { var row = start.row; var line = getLine(row); var startIndex = start.column; var stop = false; while (!callback(line, startIndex, row)) { if (stop) { return; } row++; startIndex = 0; if (row > lastRow) { if (wrap) { row = firstRow; startIndex = firstColumn; } else { return; } } if (row == start.row) stop = true; line = getLine(row); } } }; }; this.$backwardLineIterator = function(session) { var searchSelection = this.$options.scope == Search.SELECTION; var range = session.getSelection().getRange(); var start = searchSelection ? range.end : range.start; var firstRow = searchSelection ? range.start.row : 0; var firstColumn = searchSelection ? range.start.column : 0; var lastRow = searchSelection ? range.end.row : session.getLength() - 1; var wrap = this.$options.wrap; return { forEach : function(callback) { var row = start.row; var line = session.getLine(row).substring(0, start.column); var startIndex = 0; var stop = false; while (!callback(line, startIndex, row)) { if (stop) return; row--; startIndex = 0; if (row < firstRow) { if (wrap) { row = lastRow; } else { return; } } if (row == start.row) stop = true; line = session.getLine(row); if (searchSelection) { if (row == firstRow) startIndex = firstColumn; else if (row == lastRow) line = line.substring(0, range.end.column); } } } }; }; }).call(Search.prototype); exports.Search = Search; });<|fim▁end|>
for (var i=0; i<matches.length; i++) {
<|file_name|>configure.js<|end_file_name|><|fim▁begin|>import 'babel-polyfill'; import EdgeGrid from 'edgegrid'; import dotenv from 'dotenv'; import inquirer from 'inquirer'; import formatJson from 'format-json'; import fs from 'fs'; (async function() { // load .env vars dotenv.config(); let papiResponses = new Map(); const edgegrid = new EdgeGrid({ path: process.env.AKA_EDGERC, section: 'default' }); let contractId = await papiChoice( 'Select Akamai contract:', '/papi/v1/contracts', 'contracts', 'contractId', 'contractTypeName' ); let groupId = await papiChoice( 'Select Akamai property group:', '/papi/v1/groups/?contractId=' + contractId, 'groups', 'groupId', 'groupName' ); let propertyId = await papiChoice( 'Select Akamai property:', '/papi/v1/properties/?contractId=' + contractId + '&groupId=' + groupId, 'properties', 'propertyId', 'propertyName' ); let latestVersion = papiResponses.get('properties').properties.items.filter((property) => { return property.propertyId === propertyId; })[0].latestVersion; // request property version let version = await inquirer.prompt([ { type: 'input', name: 'version', message: 'The latest property verions is ' + latestVersion + ', which would you like?', default: latestVersion, validate: (version) => { if (parseInt(version) > 0 && parseInt(version) <= latestVersion) { return true; } else { return 'Please enter a valid version number.'; } } } ]).then(function (answers) { console.log('selected version = ' + answers.version); return answers.version; }); let propertyJson = await callPapi('property', '/papi/v1/properties/' + propertyId + '/versions/' + version + '/rules?contractId=' + contractId + '&groupId=' + groupId).then((data) => { return data; }); let propertyName = papiResponses.get('properties').properties.items.filter((property) => { return property.propertyId === propertyId; })[0].propertyName; await inquirer.prompt([ { type: 'confirm',<|fim▁hole|> default: true, } ]).then(function (answers) { console.log('selected 
outputToFile = ' + answers.outputToFile); if (answers.outputToFile) { let outputDir = __dirname + '/../papiJson'; if (!fs.existsSync(outputDir)) { fs.mkdirSync(outputDir); } fs.writeFileSync( outputDir + '/' + propertyName + '-v' + version + '.papi.json', formatJson.plain(propertyJson), 'utf8' ); console.log('\npapi json written to: ./papiJson/' + propertyName + '-v' + version + '.papi.json'); } }); console.log( '\n# ---------------------------------------------------------\n' + '# place the following in .env or set as shell/node env vars\n' + '# if you would like to use these parameters to configure nginx directly\n' + '# from api calls - otherwise point at the generated papi json.\n' + '# refer to start.js and start-local.js\n' + 'AKA_CONTRACT_ID=' + contractId + '\n' + 'AKA_GROUP_ID=' + groupId + '\n' + 'AKA_PROPERTY_ID=' + propertyId + '\n' + 'AKA_PROPERTY_VERSION=' + version + '\n' ); async function papiChoice(message, papiUrl, containerField, valueField, nameField) { let choices = await callPapi(containerField, papiUrl).then((data) => { return data[containerField].items.map((item) => { let choice = {}; choice.name = item[valueField] + ' ' + item[nameField]; choice.value = item[valueField]; return choice; }); }); return await inquirer.prompt([ { type: 'list', name: valueField, message: message, paginated: true, choices: choices } ]).then(function (answers) { console.log('selected ' + valueField + ' = ' + answers[valueField]); return answers[valueField]; }); } async function callPapi(type, papiUrl) { return new Promise( (resolve, reject) => { console.log('calling papi url: ' + papiUrl + '\n'); edgegrid.auth({ path: papiUrl, method: 'GET' }).send((error, response, body) => { if (error) { return reject(error); } let jsonResult = JSON.parse(body); papiResponses.set(type, jsonResult); return resolve(jsonResult); }); }); } })();<|fim▁end|>
name: 'outputToFile', message: 'Output property ' + propertyName + ' v' + version + ' json to file now?',
<|file_name|>most.ts<|end_file_name|><|fim▁begin|>import { Stream, switchLatest, map } from 'most';<|fim▁hole|>export function switchMap<A, B>(f: (a: A) => Stream<B>, stream: Stream<A>): Stream<B> { return switchLatest(map(f, stream)); }<|fim▁end|>
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2 from distutils.core import setup setup(name='python-hivemindrpc', version='0.1', description='Enhanced version of python-jsonrpc for use with Hivemind', long_description=open('README').read(), author='Jeff Garzik', author_email='<[email protected]>', maintainer='Jeff Garzik',<|fim▁hole|> url='http://www.github.com/jgarzik/python-hivemindrpc', packages=['hivemindrpc'], classifiers=['License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)', 'Operating System :: OS Independent'])<|fim▁end|>
maintainer_email='<[email protected]>',
<|file_name|>dns_domain.py<|end_file_name|><|fim▁begin|># Copyright (C) 2014, Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from nova import db from nova import objects from nova.objects import base from nova.objects import fields # TODO(berrange): Remove NovaObjectDictCompat class DNSDomain(base.NovaPersistentObject, base.NovaObject, base.NovaObjectDictCompat): # Version 1.0: Initial version VERSION = '1.0' fields = { 'domain': fields.StringField(), 'scope': fields.StringField(nullable=True), 'availability_zone': fields.StringField(nullable=True), 'project_id': fields.StringField(nullable=True), } @staticmethod def _from_db_object(context, vif, db_vif): for field in vif.fields: vif[field] = db_vif[field] vif._context = context vif.obj_reset_changes() return vif @base.remotable_classmethod def get_by_domain(cls, context, domain): db_dnsd = db.dnsdomain_get(context, domain) if db_dnsd: return cls._from_db_object(context, cls(), db_dnsd) @base.remotable_classmethod def register_for_zone(cls, context, domain, zone): db.dnsdomain_register_for_zone(context, domain, zone) @base.remotable_classmethod def register_for_project(cls, context, domain, project): db.dnsdomain_register_for_project(context, domain, project) @base.remotable_classmethod def delete_by_domain(cls, context, domain): db.dnsdomain_unregister(context, domain) class DNSDomainList(base.ObjectListBase, base.NovaObject): # Version 1.0: Initial version VERSION = '1.0' fields = { 'objects': 
fields.ListOfObjectsField('DNSDomain'), } child_versions = {<|fim▁hole|> @base.remotable_classmethod def get_all(cls, context): db_domains = db.dnsdomain_get_all(context) return base.obj_make_list(context, cls(context), objects.DNSDomain, db_domains)<|fim▁end|>
'1.0': '1.0', }
<|file_name|>terminal_info.py<|end_file_name|><|fim▁begin|>import wmi import requests import pythoncom def get_iip(): """""" f = requests.get("http://myip.dnsomatic.com") iip = f.text return iip def get_lip(): """""" c = wmi.WMI() lip = "" for interface in c.Win32_NetworkAdapterConfiguration(IPEnabled=1): lip = interface.IPAddress[0] return lip def get_mac(): """""" c = wmi.WMI() mac = "" for interface in c.Win32_NetworkAdapterConfiguration(IPEnabled=1): mac = interface.MACAddress return mac def get_hd(): """""" c = wmi.WMI() hd = "disk01" # for disk in c.Win32_DiskDrive(): # hd = disk.SerialNumber.strip() return hd def get_terminal_info(): """""" # Initialize COM object in this thread. pythoncom.CoInitialize() iip = "" iport = "" lip = get_lip() mac = get_mac()<|fim▁hole|> hd = get_hd() terminal_info = ";".join([ "PC", f"IIP={iip}", f"IPORT={iport}", f"LIP={lip}", f"MAC={mac}", f"HD={hd}", "PCN=NA;CPU=NA;PI=NA;VOL=NA@NA" ]) return terminal_info<|fim▁end|>
<|file_name|>qbittorrent_client.py<|end_file_name|><|fim▁begin|># Author: Mr_Orange <[email protected]> # URL: http://code.google.com/p/sickbeard/ # # This file is part of SickRage. # # SickRage is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # SickRage is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with SickRage. If not, see <http://www.gnu.org/licenses/>. import sickbeard from .generic import GenericClient from requests.auth import HTTPDigestAuth class qbittorrentAPI(GenericClient): def __init__(self, host=None, username=None, password=None): super(qbittorrentAPI, self).__init__('qbittorrent', host, username, password) self.url = self.host self.session.auth = HTTPDigestAuth(self.username, self.password); def _get_auth(self): try: self.response = self.session.get(self.host, verify=False) self.auth = self.response.content except: return None return self.auth if not self.response.status_code == 404 else None def _add_torrent_uri(self, result): self.url = self.host+'command/download' data = {'urls': result.url}<|fim▁hole|> def _add_torrent_file(self, result): self.url = self.host+'command/upload' files = {'torrents': (result.name + '.torrent', result.content)} return self._request(method='post', files=files) def _set_torrent_priority(self, result): self.url = self.host+'command/decreasePrio ' if result.priority == 1: self.url = self.host+'command/increasePrio' data = {'hashes': result.hash} return self._request(method='post', data=data) def _set_torrent_pause(self, result): self.url = self.host+'command/resume' if 
sickbeard.TORRENT_PAUSED: self.url = self.host+'command/pause' data = {'hash': result.hash} return self._request(method='post', data=data) api = qbittorrentAPI()<|fim▁end|>
return self._request(method='post', data=data)
<|file_name|>cpg_gene.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python ''' Purpose: This script, using default values, determines and plots the CpG islands in relation to a given feature "type" (e.g. "gene" or "mRNA") from a GFF file which corresponds to the user-provided fasta file. Note: CpG Islands are determined by ObEx = (Observed CpG) / (Expected CpG) , default threshold > 1. Where Expected CpG = (count(C) * count(G)) / WindowSize Usage: python cpg_gene.py FastaFile Gff_File OutFile.png Default optional parameters: -s, Step Size, default = 50 -w, Window Size, default = 200 -oe, Minimum Observed Expected CpG, default = 1 -gc, Minimum GC, default = .5 -r Range from ATG, or provided feature, default = 5000 -f, GFF Feature, default = "gene" -i, Gene ID from GFF, default = "" ''' import sys import os import argparse from collections import Counter from Bio import SeqIO import cpgmod import gffutils import pandas as pd import numpy as np from ggplot import * # Capture command line args, with or without defaults if __name__ == '__main__': # Parse the arguments LineArgs = cpgmod.parseArguments() # Populate vars with args FastaFile = LineArgs.FastaFile GffFile = LineArgs.GffFile OutFile = LineArgs.FileOut Step = LineArgs.s WinSize = LineArgs.w ObExthresh = LineArgs.oe GCthresh = LineArgs.gc StartRange = LineArgs.r FeatGFF = LineArgs.f ID_Feat = LineArgs.i<|fim▁hole|>MergedRecs = [] print "Parsing sequences...\n" for SeqRecord in SeqIO.parse(FastaFile, "fasta"): print SeqRecord.id # Determine if sequences and args are acceptable cpgmod.arg_seqcheck(SeqRecord, WinSize, Step) # Pre-determine number of islands NumOfChunks = cpgmod.chunks(SeqRecord, WinSize, Step) # Return array of SeqRec class (potential CpG island) instances SeqRecList = cpgmod.compute(SeqRecord, Step, NumOfChunks, WinSize) MergedRecs = MergedRecs + SeqRecList # Create GFF DB GffDb = gffutils.create_db(GffFile, dbfn='GFF.db', force=True, keep_order=True, merge_strategy='merge', 
sort_attribute_values=True, disable_infer_transcripts=True, disable_infer_genes=True) print "\nGFF Database Created...\n" # Filter out SeqRec below threshold DistArr = [] for Rec in MergedRecs: Cond1 = Rec.expect() > 0 if Cond1 == True: ObEx = (Rec.observ() / Rec.expect()) Cond2 = ObEx > ObExthresh Cond3 = Rec.gc_cont() > GCthresh if Cond2 and Cond3: # Query GFF DB for closest gene feature *or provided feature* Arr = cpgmod.get_closest(Rec, GffDb, StartRange, FeatGFF, ID_Feat) if Arr <> False: Arr.append(ObEx) DistArr.append(Arr) print "CpG Islands predicted...\n" print "Generating Figure...\n" # Releasing SeqRecs MergedRecs = None SeqRecList = None # Pre-check DistArr Results if len(DistArr) < 2: print "WARNING, "+ str(len(DistArr)) + " sites were found." print "Consider changing parameters.\n" # Generate Figure: ObExRes = pd.DataFrame({ 'gene' : [], 'xval': [], 'yval': []}) try: Cnt = 0 for Dist in DistArr: Cnt += 1 print "PROGRESS: "+str(Cnt) +" of "+ str(len(DistArr)) ObExdf = pd.DataFrame({ 'gene': [Dist[2]], 'xval': [Dist[1]], 'yval': [Dist[3]]}) ObExFram = [ObExRes, ObExdf] ObExRes = pd.concat(ObExFram, ignore_index=True) p = ggplot(aes(x='xval', y='yval'), data=ObExRes) \ + geom_point() \ + ylab("Observed/Expected CpG") \ + xlab("Position (bp) Relative to (ATG = 0)") \ + ggtitle("Predicted CpG Island Position Relative to ATG") p.save(OutFile) except IndexError as e: print 'Error: '+ str(e) sys.exit('Exiting script...') print p # Remove GFF DB os.remove('GFF.db')<|fim▁end|>
# Gather all possible CpG islands
<|file_name|>test_utils.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- ## ## This file is part of Invenio. ## Copyright (C) 2014 CERN. ## ## Invenio is free software; you can redistribute it and/or ## modify it under the terms of the GNU General Public License as ## published by the Free Software Foundation; either version 2 of the ## License, or (at your option) any later version. ## ## Invenio is distributed in the hope that it will be useful, but ## WITHOUT ANY WARRANTY; without even the implied warranty of ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ## General Public License for more details. ## ## You should have received a copy of the GNU General Public License ## along with Invenio; if not, write to the Free Software Foundation, Inc., ## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. """Unit tests for utility functions.""" from __future__ import absolute_import from invenio.testsuite import InvenioTestCase, make_test_suite, run_test_suite class HoldingPenUtilsTest(InvenioTestCase): """Test basic utility functions for Holding Pen.""" def test_get_previous_next_objects_empty(self): """Test the getting of prev, next object ids from the list.""" from invenio.modules.workflows.utils import get_previous_next_objects objects = [] self.assertEqual(get_previous_next_objects(objects, 1), (None, None)) def test_get_previous_next_objects_not_there(self): """Test the getting of prev, next object ids from the list.""" from invenio.modules.workflows.utils import get_previous_next_objects objects = [3, 4] self.assertEqual(get_previous_next_objects(objects, 42), (None, None)) def test_get_previous_next_objects_previous(self): """Test the getting of prev, next object ids from the list.""" from invenio.modules.workflows.utils import get_previous_next_objects objects = [3, 4] self.assertEqual(get_previous_next_objects(objects, 4), (3, None)) def test_get_previous_next_objects_next(self): """Test the getting of prev, next object ids from 
the list.""" from invenio.modules.workflows.utils import get_previous_next_objects objects = [3, 4] self.assertEqual(get_previous_next_objects(objects, 3), (None, 4))<|fim▁hole|> def test_get_previous_next_objects_previous_next(self): """Test the getting of prev, next object ids from the list.""" from invenio.modules.workflows.utils import get_previous_next_objects objects = [3, 4, 5] self.assertEqual(get_previous_next_objects(objects, 4), (3, 5)) TEST_SUITE = make_test_suite(HoldingPenUtilsTest) if __name__ == "__main__": run_test_suite(TEST_SUITE)<|fim▁end|>
<|file_name|>BattleExperienceAI.py<|end_file_name|><|fim▁begin|>from direct.directnotify import DirectNotifyGlobal from toontown.toonbase import ToontownBattleGlobals from toontown.suit import SuitDNA BattleExperienceAINotify = DirectNotifyGlobal.directNotify.newCategory('BattleExprienceAI') def getSkillGained(toonSkillPtsGained, toonId, track): exp = 0 expList = toonSkillPtsGained.get(toonId, None) if expList != None: exp = expList[track] return int(exp + 0.5) def getBattleExperience(numToons, activeToons, toonExp, toonSkillPtsGained, toonOrigQuests, toonItems, toonOrigMerits, toonMerits, toonParts, suitsKilled, helpfulToonsList = None): if helpfulToonsList == None: BattleExperienceAINotify.warning('=============\nERROR ERROR helpfulToons=None in assignRewards , tell Red') p = [] for k in xrange(numToons): toon = None if k < len(activeToons): toonId = activeToons[k] toon = simbase.air.doId2do.get(toonId) if toon == None: p.append(-1) p.append([0, 0, 0, 0, 0, 0, 0]) p.append([0, 0, 0, 0, 0, 0, 0]) p.append([]) p.append([]) p.append([]) p.append([0, 0, 0, 0]) p.append([0, 0, 0, 0]) p.append([0, 0, 0, 0]) else: p.append(toonId) origExp = toonExp[toonId] earnedExp = [] for i in xrange(len(ToontownBattleGlobals.Tracks)): earnedExp.append(getSkillGained(toonSkillPtsGained, toonId, i)) p.append(origExp) p.append(earnedExp) origQuests = toonOrigQuests.get(toonId, []) p.append(origQuests) items = toonItems.get(toonId, ([], [])) p.append(items[0]) p.append(items[1]) origMerits = toonOrigMerits.get(toonId, []) p.append(origMerits) merits = toonMerits.get(toonId, [0, 0, 0, 0]) p.append(merits) parts = toonParts.get(toonId, [0, 0, 0, 0]) p.append(parts) deathList = [] toonIndices = {} for i in xrange(len(activeToons)): toonIndices[activeToons[i]] = i for deathRecord in suitsKilled: level = deathRecord['level'] type = deathRecord['type'] if deathRecord['isVP'] or deathRecord['isCFO']: level = 0 typeNum = SuitDNA.suitDepts.index(deathRecord['track']) else: typeNum = 
SuitDNA.suitHeadTypes.index(type) involvedToonIds = deathRecord['activeToons'] toonBits = 0 for toonId in involvedToonIds: if toonId in toonIndices: toonBits |= 1 << toonIndices[toonId] flags = 0 if deathRecord['isSkelecog']: flags |= ToontownBattleGlobals.DLF_SKELECOG if deathRecord['isForeman']: flags |= ToontownBattleGlobals.DLF_FOREMAN if deathRecord['isVP']: flags |= ToontownBattleGlobals.DLF_VP if deathRecord['isCFO']: flags |= ToontownBattleGlobals.DLF_CFO if deathRecord['isSupervisor']: flags |= ToontownBattleGlobals.DLF_SUPERVISOR if deathRecord['isVirtual']: flags |= ToontownBattleGlobals.DLF_VIRTUAL if 'hasRevies' in deathRecord and deathRecord['hasRevives']: flags |= ToontownBattleGlobals.DLF_REVIVES deathList.extend([typeNum, level, toonBits, flags]) p.append(deathList) uberStats = getToonUberStatus(activeToons, numToons) p.append(uberStats) if helpfulToonsList == None: helpfulToonsList = [] p.append(helpfulToonsList) return p def getToonUberStatus(toons, numToons): fieldList = [] uberIndex = ToontownBattleGlobals.LAST_REGULAR_GAG_LEVEL + 1 for toonId in toons: toonList = [] toon = simbase.air.doId2do.get(toonId) if toon == None: fieldList.append(-1) else: for trackIndex in xrange(ToontownBattleGlobals.MAX_TRACK_INDEX + 1): toonList.append(toon.inventory.numItem(trackIndex, uberIndex)) fieldList.append(ToontownBattleGlobals.encodeUber(toonList)) lenDif = numToons - len(toons) if lenDif > 0: for index in xrange(lenDif): fieldList.append(-1) return fieldList <|fim▁hole|> BattleExperienceAINotify.warning('=============\nERROR ERROR helpfulToons=None in assignRewards , tell Red') activeToonList = [] for t in activeToons: toon = simbase.air.doId2do.get(t) if toon != None: activeToonList.append(toon) for toon in activeToonList: for i in xrange(len(ToontownBattleGlobals.Tracks)): uberIndex = ToontownBattleGlobals.LAST_REGULAR_GAG_LEVEL + 1 exp = getSkillGained(toonSkillPtsGained, toon.doId, i) needed = 
ToontownBattleGlobals.Levels[i][ToontownBattleGlobals.LAST_REGULAR_GAG_LEVEL + 1] + ToontownBattleGlobals.UberSkill hasUber = 0 totalExp = exp + toon.experience.getExp(i) if toon.inventory.numItem(i, uberIndex) > 0: hasUber = 1 if totalExp >= needed or totalExp >= ToontownBattleGlobals.MaxSkill: if toon.inventory.totalProps < toon.getMaxCarry() and not hasUber: uberLevel = ToontownBattleGlobals.LAST_REGULAR_GAG_LEVEL + 1 toon.inventory.addItem(i, uberLevel) toon.experience.setExp(i, ToontownBattleGlobals.Levels[i][ToontownBattleGlobals.LAST_REGULAR_GAG_LEVEL + 1]) else: toon.experience.setExp(i, ToontownBattleGlobals.MaxSkill) else: if exp > 0: newGagList = toon.experience.getNewGagIndexList(i, exp) toon.experience.addExp(i, amount=exp) toon.inventory.addItemWithList(i, newGagList) toon.b_setExperience(toon.experience.makeNetString()) toon.d_setInventory(toon.inventory.makeNetString()) toon.b_setAnimState('victory', 1) if simbase.air.config.GetBool('battle-passing-no-credit', True): # Check if the toon was a helpful toon if helpfulToons and toon.doId in helpfulToons: # Notify the AI that the toon killed cogs simbase.air.questManager.toonKilledCogs(toon, suitsKilled, zoneId, activeToonList) simbase.air.cogPageManager.toonKilledCogs(toon, suitsKilled, zoneId) # Looks like the toon wasnt too helpful... else: BattleExperienceAINotify.debug('toon=%d unhelpful not getting killed cog quest credit' % toon.doId)<|fim▁end|>
def assignRewards(activeToons, toonSkillPtsGained, suitsKilled, zoneId, helpfulToons = None): if helpfulToons == None:
<|file_name|>Domicilio.js<|end_file_name|><|fim▁begin|>import React, { Component } from 'react'; import { PanResponder, StyleSheet, TextInput, ToastAndroid, View } from 'react-native'; import { Body, Button, Card, CardItem, Container, Content, Footer, FooterTab, Header, Icon, Left, Right, Text, Title } from 'native-base'; import SideMenu from 'react-native-side-menu'; import SideMenuDomicilio from './SideMenuDomicilio'; import ReplyInputCurrency from './reply/ReplyInputCurrency'; import ReplyInputNumeric from './reply/ReplyInputNumeric'; import ReplyMultiSelect from './reply/ReplyMultiSelect'; import ReplyRadio from './reply/ReplyRadio'; import ReplyText from './reply/ReplyText'; import ReplyTime from './reply/ReplyTime'; import { passQuestion } from './business/PassQuestionDomicilio'; import FileStore from './../../FileStore'; import DomicilioData from './../../data/DomicilioData'; import { questoes } from './../../data/QuestoesDomicilio'; import { styles } from './../../Styles'; var type = (function(global) { var cache = {}; return function(obj) { var key; return obj === null ? 'null' : obj === global ? 'global' : (key = typeof obj) !== 'object' ? key : obj.nodeType ? 
'object' : cache[key = ({}).toString.call(obj)] || (cache[key] = key.slice(8, -1).toLowerCase()); }; }(this)); export default class Domicilio extends Component { constructor(props) { super(props); this.state = { admin: this.props.admin, quiz: this.props.quiz }; } componentWillMount(){ if(this.state.quiz.domicilio === null){ this.state.quiz.domicilio = new DomicilioData(this.state.admin.id); } FileStore.saveFileDomicilio(this.state.quiz.domicilio); let questao = questoes[this.state.admin.indexPage].id.replace(/\D/g,''); for(key in passQuestion){ if(questao == passQuestion[key].questao){ //this.state.admin.maxQuestion = passQuestion[key].passe; for (i = questao + 1; i < passQuestion[key].passe; i++) { for(key in this.state.quiz){ if(key.replace(/\D/g,'') == i){ this.state.quiz[key] = -1; } } } break; } } idQuestao = 'questao_' + questoes[this.state.admin.indexPage].id; numeroQuestao = questoes[this.state.admin.indexPage].id.replace(/\D/g,''); } popQuizScreen(){ if(this.state.admin.indexPage === 0 && this.state.quiz['questao_1'] === null){<|fim▁hole|> this.props.navigator.replacePreviousAndPop({ name: 'quiz', admin: this.state.admin, quiz: this.state.quiz, isOpen: false }); } popScreen(){ if(this.state.admin.indexPage > 0){ this.state.admin.indexPage = Number(this.state.admin.indexPage) - 1; this.props.navigator.replacePreviousAndPop({ name: 'domicilio', admin: this.state.admin, quiz: this.state.quiz }); }else{ ToastAndroid.showWithGravity('Não há como voltar mais', ToastAndroid.SHORT, ToastAndroid.CENTER); } } pushScreen(){ let flagResponse = true; if(type(this.state.quiz.domicilio[idQuestao]) == 'array'){ if(this.state.quiz.domicilio[idQuestao].length == 0){ flagResponse = false; } }else{ if(this.state.quiz.domicilio[idQuestao] == null){ flagResponse = false; } } if(flagResponse || Number(numeroQuestao) + 1 <= this.state.admin.maxQuestion){ if(Number(numeroQuestao) + 1 <= this.state.admin.maxQuestion){ this.state.admin.indexPage = Number(this.state.admin.indexPage) 
+ 1; FileStore.saveFileDomicilio(this.state.quiz.domicilio); if(this.state.admin.indexPage >= questoes.length){ ToastAndroid.showWithGravity('Questionário Finalizado\nNão há como avançar mais', ToastAndroid.SHORT, ToastAndroid.CENTER); }else{ this.props.navigator.push({ name: 'domicilio', admin: this.state.admin, quiz: this.state.quiz }); } }else{ ToastAndroid.showWithGravity('Responda a questão ' + numeroQuestao, ToastAndroid.SHORT, ToastAndroid.CENTER); } }else{ ToastAndroid.showWithGravity('Responda a questão', ToastAndroid.SHORT, ToastAndroid.CENTER); } } updateMenuState() { this.setState({ isOpen: !this.state.isOpen, }); } setMenuState(isOpen) { this.setState({ isOpen: isOpen }); } render() { let isOpen = this.state.isOpen; let admin = this.state.admin; let quiz = this.state.quiz; let questao = questoes[admin.indexPage]; let pergunta_extensao = questao.pergunta_extensao; const menu = <SideMenuDomicilio admin={admin} quiz={quiz} navigator={this.props.navigator} />; function renderIf(condition, contentIf, contentElse = null) { if (condition) { return contentIf; } else { return contentElse; } } return ( <Container style={styles.container}> <Header style={styles.header}> <Left> <Button transparent onPress={() => {this.popQuizScreen()}}> <Icon name='ios-arrow-back' /> </Button> </Left> <Body style={styles.bodyHeader}> <Title>{questao.titulo}</Title> </Body> <Right> <Button transparent onPress={() => {this.updateMenuState()}}> <Text></Text> </Button> </Right> </Header> <Content> <Card> <CardItem style={styles.cardItemQuestao}> <Text style={styles.questao}>{questao.id.replace(/\D/g,'') + '. 
' + questao.pergunta}</Text> <Text style={styles.observacaoQuestao}>{questao.observacao_pergunta}</Text> </CardItem> {renderIf(questao.pergunta_secundaria !== '', <CardItem style={styles.pergunta_secundaria}> <Text style={styles.questao_secundaria}>{questao.id.replace(/[0-9]/g, '').toUpperCase() + ') ' + questao.pergunta_secundaria.pergunta}</Text> <Text note>{questao.pergunta_secundaria.observacao_pergunta}</Text> </CardItem> )} <CardItem cardBody style={{justifyContent: 'center'}}> {renderIf(questao.tipo === 'input_currency', <ReplyInputCurrency admin={admin} quiz={quiz.domicilio} questao={questao} passQuestion={passQuestion} /> )} {renderIf(questao.tipo === 'input_numeric', <ReplyInputNumeric admin={admin} quiz={quiz.domicilio} questao={questao} passQuestion={passQuestion} /> )} {renderIf(questao.tipo === 'multiple', <ReplyMultiSelect admin={admin} quiz={quiz.domicilio} questao={questao} passQuestion={passQuestion} /> )} {renderIf(questao.tipo === 'radio', <ReplyRadio admin={admin} quiz={quiz.domicilio} questao={questao} passQuestion={passQuestion} /> )} {renderIf(questao.tipo === 'text', <ReplyText admin={admin} quiz={quiz.domicilio} questao={questao} passQuestion={passQuestion} /> )} {renderIf(questao.tipo === 'input_time', <ReplyTime admin={admin} quiz={quiz.domicilio} questao={questao} passQuestion={passQuestion} /> )} </CardItem> {renderIf(pergunta_extensao != '', <CardItem> <Text style={styles.observacaoQuestao}>{pergunta_extensao.pergunta}</Text> <TextInput style={{width: 500, fontSize: 20}} keyboardType = 'default' onChangeText = {(value) => { if(quiz.domicilio[idQuestao] != null){ if(quiz.domicilio[idQuestao] == pergunta_extensao.referencia){ quiz.domicilio[idQuestao + '_secundaria'] = value; }else if(quiz.domicilio[idQuestao].indexOf(Number(pergunta_extensao.referencia)) > -1){ quiz.domicilio[idQuestao + '_secundaria'] = value; } } }} defaultValue = {quiz.domicilio[idQuestao + '_secundaria']} maxLength = {500} /> </CardItem> )} </Card> </Content> 
<Footer> <FooterTab> <Button style={{backgroundColor: '#005376'}} onPress={() => {this.popScreen()}}> <Icon name='ios-arrow-back' /> </Button> <Button style={{backgroundColor: '#005376'}} onPress={() => {this.pushScreen()}}> <Icon name='ios-arrow-forward' /> </Button> </FooterTab> </Footer> </Container> ); } }<|fim▁end|>
this.state.quiz.domicilio = null; FileStore.deleteDomicilio(this.state.admin.id); };
<|file_name|>llviewerdisplayname.cpp<|end_file_name|><|fim▁begin|>/** * @file llviewerdisplayname.cpp * @brief Wrapper for display name functionality * * $LicenseInfo:firstyear=2010&license=viewerlgpl$ * Second Life Viewer Source Code * Copyright (C) 2010, Linden Research, Inc. * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; * version 2.1 of the License only. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA * * Linden Research, Inc., 945 Battery Street, San Francisco, CA 94111 USA * $/LicenseInfo$ */ #include "llviewerprecompiledheaders.h" #include "llviewerdisplayname.h" // viewer includes #include "llagent.h" #include "llviewerregion.h" #include "llvoavatar.h" // library includes #include "llavatarnamecache.h"<|fim▁hole|>#include "llhttpnode.h" #include "llnotificationsutil.h" #include "llui.h" // getLanguage() #include "fsradar.h" #include "lggcontactsets.h" #include "llviewercontrol.h" namespace LLViewerDisplayName { // Fired when viewer receives server response to display name change set_name_signal_t sSetDisplayNameSignal; // Fired when there is a change in the agent's name name_changed_signal_t sNameChangedSignal; void addNameChangedCallback(const name_changed_signal_t::slot_type& cb) { sNameChangedSignal.connect(cb); } void doNothing() { } } class LLSetDisplayNameResponder : public LLHTTPClient::Responder { public: // only care about errors /*virtual*/ void errorWithContent(U32 status, const std::string& reason, 
const LLSD& content) { LL_WARNS() << "LLSetDisplayNameResponder error [status:" << status << "]: " << content << LL_ENDL; LLViewerDisplayName::sSetDisplayNameSignal(false, "", LLSD()); LLViewerDisplayName::sSetDisplayNameSignal.disconnect_all_slots(); } }; void LLViewerDisplayName::set(const std::string& display_name, const set_name_slot_t& slot) { // TODO: simple validation here LLViewerRegion* region = gAgent.getRegion(); llassert(region); std::string cap_url = region->getCapability("SetDisplayName"); if (cap_url.empty()) { // this server does not support display names, report error slot(false, "unsupported", LLSD()); return; } // People API can return localized error messages. Indicate our // language preference via header. LLSD headers; headers["Accept-Language"] = LLUI::getLanguage(); // People API requires both the old and new value to change a variable. // Our display name will be in cache before the viewer's UI is available // to request a change, so we can use direct lookup without callback. LLAvatarName av_name; if (!LLAvatarNameCache::get( gAgent.getID(), &av_name)) { slot(false, "name unavailable", LLSD()); return; } // People API expects array of [ "old value", "new value" ] LLSD change_array = LLSD::emptyArray(); change_array.append(av_name.getDisplayName()); change_array.append(display_name); LL_INFOS() << "Set name POST to " << cap_url << LL_ENDL; // Record our caller for when the server sends back a reply sSetDisplayNameSignal.connect(slot); // POST the requested change. The sim will not send a response back to // this request directly, rather it will send a separate message after it // communicates with the back-end. 
LLSD body; body["display_name"] = change_array; LLHTTPClient::post(cap_url, body, new LLSetDisplayNameResponder, headers); } class LLSetDisplayNameReply : public LLHTTPNode { LOG_CLASS(LLSetDisplayNameReply); public: /*virtual*/ void post( LLHTTPNode::ResponsePtr response, const LLSD& context, const LLSD& input) const { LLSD body = input["body"]; S32 status = body["status"].asInteger(); bool success = (status == 200); std::string reason = body["reason"].asString(); LLSD content = body["content"]; LL_INFOS() << "status " << status << " reason " << reason << LL_ENDL; // If viewer's concept of display name is out-of-date, the set request // will fail with 409 Conflict. If that happens, fetch up-to-date // name information. if (status == 409) { LLUUID agent_id = gAgent.getID(); // Flush stale data LLAvatarNameCache::erase( agent_id ); // Queue request for new data: nothing to do on callback though... // Note: no need to disconnect the callback as it never gets out of scope LLAvatarNameCache::get(agent_id, boost::bind(&LLViewerDisplayName::doNothing)); // Kill name tag, as it is wrong LLVOAvatar::invalidateNameTag( agent_id ); } // inform caller of result LLViewerDisplayName::sSetDisplayNameSignal(success, reason, content); LLViewerDisplayName::sSetDisplayNameSignal.disconnect_all_slots(); } }; class LLDisplayNameUpdate : public LLHTTPNode { /*virtual*/ void post( LLHTTPNode::ResponsePtr response, const LLSD& context, const LLSD& input) const { LLSD body = input["body"]; LLUUID agent_id = body["agent_id"]; std::string old_display_name = body["old_display_name"]; // By convention this record is called "agent" in the People API LLSD name_data = body["agent"]; // Inject the new name data into cache LLAvatarName av_name; av_name.fromLLSD( name_data ); LL_INFOS() << "name-update now " << LLDate::now() << " next_update " << LLDate(av_name.mNextUpdate) << LL_ENDL; // Name expiration time may be provided in headers, or we may use a // default value // *TODO: get actual headers 
out of ResponsePtr //LLSD headers = response->mHeaders; LLSD headers; av_name.mExpires = LLAvatarNameCache::nameExpirationFromHeaders(headers); LLAvatarNameCache::insert(agent_id, av_name); // force name tag to update LLVOAvatar::invalidateNameTag(agent_id); LLSD args; args["OLD_NAME"] = old_display_name; args["SLID"] = av_name.getUserName(); args["NEW_NAME"] = av_name.getDisplayName(); if (LGGContactSets::getInstance()->hasPseudonym(agent_id)) { LLSD payload; payload["agent_id"] = agent_id; LLNotificationsUtil::add("DisplayNameUpdateRemoveAlias", args, payload, boost::bind(&LGGContactSets::callbackAliasReset, LGGContactSets::getInstance(), _1, _2)); } else { // <FS:Ansariel> Optional hiding of display name update notification if (gSavedSettings.getBOOL("FSShowDisplayNameUpdateNotification")) { LLNotificationsUtil::add("DisplayNameUpdate", args); } // </FS:Ansariel> Optional hiding of display name update notification } if (agent_id == gAgent.getID()) { LLViewerDisplayName::sNameChangedSignal(); } // <FS:Ansariel> Update name in radar else { FSRadar* radar = FSRadar::getInstance(); if (radar) { radar->updateName(agent_id); } } // </FS:Ansariel> } }; LLHTTPRegistration<LLSetDisplayNameReply> gHTTPRegistrationMessageSetDisplayNameReply( "/message/SetDisplayNameReply"); LLHTTPRegistration<LLDisplayNameUpdate> gHTTPRegistrationMessageDisplayNameUpdate( "/message/DisplayNameUpdate");<|fim▁end|>
#include "llhttpclient.h"
<|file_name|>AbstractActionGraphAndResolver.java<|end_file_name|><|fim▁begin|>/* * Copyright 2016-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. You may obtain * a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.facebook.buck.core.model.actiongraph; import com.facebook.buck.core.rules.BuildRuleResolver; import com.facebook.buck.core.util.immutables.BuckStyleImmutable; import org.immutables.value.Value;<|fim▁hole|>/** Holds an ActionGraph with the BuildRuleResolver that created it. */ @Value.Immutable @BuckStyleImmutable interface AbstractActionGraphAndResolver { @Value.Parameter ActionGraph getActionGraph(); @Value.Parameter BuildRuleResolver getResolver(); }<|fim▁end|>
<|file_name|>responses.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals import json import xmltodict from jinja2 import Template from six import iteritems from moto.core.responses import BaseResponse from .models import redshift_backends def convert_json_error_to_xml(json_error): error = json.loads(json_error) code = error["Error"]["Code"] message = error["Error"]["Message"] template = Template( """ <RedshiftClientError> <Error> <Code>{{ code }}</Code> <Message>{{ message }}</Message> <Type>Sender</Type> </Error> <RequestId>6876f774-7273-11e4-85dc-39e55ca848d1</RequestId> </RedshiftClientError>""" ) return template.render(code=code, message=message) def itemize(data): """ The xmltodict.unparse requires we modify the shape of the input dictionary slightly. Instead of a dict of the form: {'key': ['value1', 'value2']} We must provide: {'key': {'item': ['value1', 'value2']}} """ if isinstance(data, dict): ret = {} for key in data: ret[key] = itemize(data[key]) return ret elif isinstance(data, list): return {"item": [itemize(value) for value in data]} else: return data class RedshiftResponse(BaseResponse): @property def redshift_backend(self): return redshift_backends[self.region] def get_response(self, response): if self.request_json: return json.dumps(response) else: xml = xmltodict.unparse(itemize(response), full_document=False) if hasattr(xml, "decode"): xml = xml.decode("utf-8") return xml def call_action(self): status, headers, body = super(RedshiftResponse, self).call_action() if status >= 400 and not self.request_json: body = convert_json_error_to_xml(body) return status, headers, body def unpack_complex_list_params(self, label, names): unpacked_list = list() count = 1 while self._get_param("{0}.{1}.{2}".format(label, count, names[0])): param = dict() for i in range(len(names)): param[names[i]] = self._get_param( "{0}.{1}.{2}".format(label, count, names[i]) ) unpacked_list.append(param) count += 1 return unpacked_list def 
unpack_list_params(self, label): unpacked_list = list() count = 1 while self._get_param("{0}.{1}".format(label, count)): unpacked_list.append(self._get_param("{0}.{1}".format(label, count))) count += 1 return unpacked_list def _get_cluster_security_groups(self): cluster_security_groups = self._get_multi_param("ClusterSecurityGroups.member") if not cluster_security_groups: cluster_security_groups = self._get_multi_param( "ClusterSecurityGroups.ClusterSecurityGroupName" ) return cluster_security_groups def _get_vpc_security_group_ids(self): vpc_security_group_ids = self._get_multi_param("VpcSecurityGroupIds.member") if not vpc_security_group_ids: vpc_security_group_ids = self._get_multi_param( "VpcSecurityGroupIds.VpcSecurityGroupId" ) return vpc_security_group_ids def _get_iam_roles(self): iam_roles = self._get_multi_param("IamRoles.member") if not iam_roles: iam_roles = self._get_multi_param("IamRoles.IamRoleArn") return iam_roles def _get_subnet_ids(self): subnet_ids = self._get_multi_param("SubnetIds.member") if not subnet_ids: subnet_ids = self._get_multi_param("SubnetIds.SubnetIdentifier") return subnet_ids def create_cluster(self): cluster_kwargs = { "cluster_identifier": self._get_param("ClusterIdentifier"), "node_type": self._get_param("NodeType"), "master_username": self._get_param("MasterUsername"), "master_user_password": self._get_param("MasterUserPassword"), "db_name": self._get_param("DBName"), "cluster_type": self._get_param("ClusterType"), "cluster_security_groups": self._get_cluster_security_groups(), "vpc_security_group_ids": self._get_vpc_security_group_ids(), "cluster_subnet_group_name": self._get_param("ClusterSubnetGroupName"), "availability_zone": self._get_param("AvailabilityZone"), "preferred_maintenance_window": self._get_param( "PreferredMaintenanceWindow" ), "cluster_parameter_group_name": self._get_param( "ClusterParameterGroupName" ), "automated_snapshot_retention_period": self._get_int_param( "AutomatedSnapshotRetentionPeriod" ), 
"port": self._get_int_param("Port"), "cluster_version": self._get_param("ClusterVersion"), "allow_version_upgrade": self._get_bool_param("AllowVersionUpgrade"), "number_of_nodes": self._get_int_param("NumberOfNodes"), "publicly_accessible": self._get_param("PubliclyAccessible"), "encrypted": self._get_param("Encrypted"), "region_name": self.region, "tags": self.unpack_complex_list_params("Tags.Tag", ("Key", "Value")), "iam_roles_arn": self._get_iam_roles(), "enhanced_vpc_routing": self._get_param("EnhancedVpcRouting"), "kms_key_id": self._get_param("KmsKeyId"), } cluster = self.redshift_backend.create_cluster(**cluster_kwargs).to_json()<|fim▁hole|> return self.get_response( { "CreateClusterResponse": { "CreateClusterResult": {"Cluster": cluster}, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937a" }, } } ) def restore_from_cluster_snapshot(self): enhanced_vpc_routing = self._get_bool_param("EnhancedVpcRouting") restore_kwargs = { "snapshot_identifier": self._get_param("SnapshotIdentifier"), "cluster_identifier": self._get_param("ClusterIdentifier"), "port": self._get_int_param("Port"), "availability_zone": self._get_param("AvailabilityZone"), "allow_version_upgrade": self._get_bool_param("AllowVersionUpgrade"), "cluster_subnet_group_name": self._get_param("ClusterSubnetGroupName"), "publicly_accessible": self._get_param("PubliclyAccessible"), "cluster_parameter_group_name": self._get_param( "ClusterParameterGroupName" ), "cluster_security_groups": self._get_cluster_security_groups(), "vpc_security_group_ids": self._get_vpc_security_group_ids(), "preferred_maintenance_window": self._get_param( "PreferredMaintenanceWindow" ), "automated_snapshot_retention_period": self._get_int_param( "AutomatedSnapshotRetentionPeriod" ), "region_name": self.region, "iam_roles_arn": self._get_iam_roles(), } if enhanced_vpc_routing is not None: restore_kwargs["enhanced_vpc_routing"] = enhanced_vpc_routing cluster = 
self.redshift_backend.restore_from_cluster_snapshot( **restore_kwargs ).to_json() cluster["ClusterStatus"] = "creating" return self.get_response( { "RestoreFromClusterSnapshotResponse": { "RestoreFromClusterSnapshotResult": {"Cluster": cluster}, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937a" }, } } ) def describe_clusters(self): cluster_identifier = self._get_param("ClusterIdentifier") clusters = self.redshift_backend.describe_clusters(cluster_identifier) return self.get_response( { "DescribeClustersResponse": { "DescribeClustersResult": { "Clusters": [cluster.to_json() for cluster in clusters] }, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937a" }, } } ) def modify_cluster(self): request_kwargs = { "cluster_identifier": self._get_param("ClusterIdentifier"), "new_cluster_identifier": self._get_param("NewClusterIdentifier"), "node_type": self._get_param("NodeType"), "master_user_password": self._get_param("MasterUserPassword"), "cluster_type": self._get_param("ClusterType"), "cluster_security_groups": self._get_cluster_security_groups(), "vpc_security_group_ids": self._get_vpc_security_group_ids(), "cluster_subnet_group_name": self._get_param("ClusterSubnetGroupName"), "preferred_maintenance_window": self._get_param( "PreferredMaintenanceWindow" ), "cluster_parameter_group_name": self._get_param( "ClusterParameterGroupName" ), "automated_snapshot_retention_period": self._get_int_param( "AutomatedSnapshotRetentionPeriod" ), "cluster_version": self._get_param("ClusterVersion"), "allow_version_upgrade": self._get_bool_param("AllowVersionUpgrade"), "number_of_nodes": self._get_int_param("NumberOfNodes"), "publicly_accessible": self._get_param("PubliclyAccessible"), "encrypted": self._get_param("Encrypted"), "iam_roles_arn": self._get_iam_roles(), "enhanced_vpc_routing": self._get_param("EnhancedVpcRouting"), } cluster_kwargs = {} # We only want parameters that were actually passed in, otherwise # we'll stomp all over 
our cluster metadata with None values. for (key, value) in iteritems(request_kwargs): if value is not None and value != []: cluster_kwargs[key] = value cluster = self.redshift_backend.modify_cluster(**cluster_kwargs) return self.get_response( { "ModifyClusterResponse": { "ModifyClusterResult": {"Cluster": cluster.to_json()}, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937a" }, } } ) def delete_cluster(self): request_kwargs = { "cluster_identifier": self._get_param("ClusterIdentifier"), "final_cluster_snapshot_identifier": self._get_param( "FinalClusterSnapshotIdentifier" ), "skip_final_snapshot": self._get_bool_param("SkipFinalClusterSnapshot"), } cluster = self.redshift_backend.delete_cluster(**request_kwargs) return self.get_response( { "DeleteClusterResponse": { "DeleteClusterResult": {"Cluster": cluster.to_json()}, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937a" }, } } ) def create_cluster_subnet_group(self): cluster_subnet_group_name = self._get_param("ClusterSubnetGroupName") description = self._get_param("Description") subnet_ids = self._get_subnet_ids() tags = self.unpack_complex_list_params("Tags.Tag", ("Key", "Value")) subnet_group = self.redshift_backend.create_cluster_subnet_group( cluster_subnet_group_name=cluster_subnet_group_name, description=description, subnet_ids=subnet_ids, region_name=self.region, tags=tags, ) return self.get_response( { "CreateClusterSubnetGroupResponse": { "CreateClusterSubnetGroupResult": { "ClusterSubnetGroup": subnet_group.to_json() }, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937a" }, } } ) def describe_cluster_subnet_groups(self): subnet_identifier = self._get_param("ClusterSubnetGroupName") subnet_groups = self.redshift_backend.describe_cluster_subnet_groups( subnet_identifier ) return self.get_response( { "DescribeClusterSubnetGroupsResponse": { "DescribeClusterSubnetGroupsResult": { "ClusterSubnetGroups": [ subnet_group.to_json() for 
subnet_group in subnet_groups ] }, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937a" }, } } ) def delete_cluster_subnet_group(self): subnet_identifier = self._get_param("ClusterSubnetGroupName") self.redshift_backend.delete_cluster_subnet_group(subnet_identifier) return self.get_response( { "DeleteClusterSubnetGroupResponse": { "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937a" } } } ) def create_cluster_security_group(self): cluster_security_group_name = self._get_param("ClusterSecurityGroupName") description = self._get_param("Description") tags = self.unpack_complex_list_params("Tags.Tag", ("Key", "Value")) security_group = self.redshift_backend.create_cluster_security_group( cluster_security_group_name=cluster_security_group_name, description=description, region_name=self.region, tags=tags, ) return self.get_response( { "CreateClusterSecurityGroupResponse": { "CreateClusterSecurityGroupResult": { "ClusterSecurityGroup": security_group.to_json() }, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937a" }, } } ) def describe_cluster_security_groups(self): cluster_security_group_name = self._get_param("ClusterSecurityGroupName") security_groups = self.redshift_backend.describe_cluster_security_groups( cluster_security_group_name ) return self.get_response( { "DescribeClusterSecurityGroupsResponse": { "DescribeClusterSecurityGroupsResult": { "ClusterSecurityGroups": [ security_group.to_json() for security_group in security_groups ] }, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937a" }, } } ) def delete_cluster_security_group(self): security_group_identifier = self._get_param("ClusterSecurityGroupName") self.redshift_backend.delete_cluster_security_group(security_group_identifier) return self.get_response( { "DeleteClusterSecurityGroupResponse": { "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937a" } } } ) def 
authorize_cluster_security_group_ingress(self): cluster_security_group_name = self._get_param("ClusterSecurityGroupName") cidr_ip = self._get_param("CIDRIP") security_group = self.redshift_backend.authorize_cluster_security_group_ingress( cluster_security_group_name, cidr_ip ) return self.get_response( { "AuthorizeClusterSecurityGroupIngressResponse": { "AuthorizeClusterSecurityGroupIngressResult": { "ClusterSecurityGroup": { "ClusterSecurityGroupName": cluster_security_group_name, "Description": security_group.description, "IPRanges": [ { "Status": "authorized", "CIDRIP": cidr_ip, "Tags": security_group.tags, }, ], } } } } ) def create_cluster_parameter_group(self): cluster_parameter_group_name = self._get_param("ParameterGroupName") group_family = self._get_param("ParameterGroupFamily") description = self._get_param("Description") tags = self.unpack_complex_list_params("Tags.Tag", ("Key", "Value")) parameter_group = self.redshift_backend.create_cluster_parameter_group( cluster_parameter_group_name, group_family, description, self.region, tags ) return self.get_response( { "CreateClusterParameterGroupResponse": { "CreateClusterParameterGroupResult": { "ClusterParameterGroup": parameter_group.to_json() }, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937a" }, } } ) def describe_cluster_parameter_groups(self): cluster_parameter_group_name = self._get_param("ParameterGroupName") parameter_groups = self.redshift_backend.describe_cluster_parameter_groups( cluster_parameter_group_name ) return self.get_response( { "DescribeClusterParameterGroupsResponse": { "DescribeClusterParameterGroupsResult": { "ParameterGroups": [ parameter_group.to_json() for parameter_group in parameter_groups ] }, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937a" }, } } ) def delete_cluster_parameter_group(self): cluster_parameter_group_name = self._get_param("ParameterGroupName") self.redshift_backend.delete_cluster_parameter_group( 
cluster_parameter_group_name ) return self.get_response( { "DeleteClusterParameterGroupResponse": { "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937a" } } } ) def create_cluster_snapshot(self): cluster_identifier = self._get_param("ClusterIdentifier") snapshot_identifier = self._get_param("SnapshotIdentifier") tags = self.unpack_complex_list_params("Tags.Tag", ("Key", "Value")) snapshot = self.redshift_backend.create_cluster_snapshot( cluster_identifier, snapshot_identifier, self.region, tags ) return self.get_response( { "CreateClusterSnapshotResponse": { "CreateClusterSnapshotResult": {"Snapshot": snapshot.to_json()}, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937a" }, } } ) def describe_cluster_snapshots(self): cluster_identifier = self._get_param("ClusterIdentifier") snapshot_identifier = self._get_param("SnapshotIdentifier") snapshots = self.redshift_backend.describe_cluster_snapshots( cluster_identifier, snapshot_identifier ) return self.get_response( { "DescribeClusterSnapshotsResponse": { "DescribeClusterSnapshotsResult": { "Snapshots": [snapshot.to_json() for snapshot in snapshots] }, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937a" }, } } ) def delete_cluster_snapshot(self): snapshot_identifier = self._get_param("SnapshotIdentifier") snapshot = self.redshift_backend.delete_cluster_snapshot(snapshot_identifier) return self.get_response( { "DeleteClusterSnapshotResponse": { "DeleteClusterSnapshotResult": {"Snapshot": snapshot.to_json()}, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937a" }, } } ) def create_snapshot_copy_grant(self): copy_grant_kwargs = { "snapshot_copy_grant_name": self._get_param("SnapshotCopyGrantName"), "kms_key_id": self._get_param("KmsKeyId"), "region_name": self._get_param("Region"), } copy_grant = self.redshift_backend.create_snapshot_copy_grant( **copy_grant_kwargs ) return self.get_response( { "CreateSnapshotCopyGrantResponse": { 
"CreateSnapshotCopyGrantResult": { "SnapshotCopyGrant": copy_grant.to_json() }, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937a" }, } } ) def delete_snapshot_copy_grant(self): copy_grant_kwargs = { "snapshot_copy_grant_name": self._get_param("SnapshotCopyGrantName") } self.redshift_backend.delete_snapshot_copy_grant(**copy_grant_kwargs) return self.get_response( { "DeleteSnapshotCopyGrantResponse": { "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937a" } } } ) def describe_snapshot_copy_grants(self): copy_grant_kwargs = { "snapshot_copy_grant_name": self._get_param("SnapshotCopyGrantName") } copy_grants = self.redshift_backend.describe_snapshot_copy_grants( **copy_grant_kwargs ) return self.get_response( { "DescribeSnapshotCopyGrantsResponse": { "DescribeSnapshotCopyGrantsResult": { "SnapshotCopyGrants": [ copy_grant.to_json() for copy_grant in copy_grants ] }, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937a" }, } } ) def create_tags(self): resource_name = self._get_param("ResourceName") tags = self.unpack_complex_list_params("Tags.Tag", ("Key", "Value")) self.redshift_backend.create_tags(resource_name, tags) return self.get_response( { "CreateTagsResponse": { "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937a" } } } ) def describe_tags(self): resource_name = self._get_param("ResourceName") resource_type = self._get_param("ResourceType") tagged_resources = self.redshift_backend.describe_tags( resource_name, resource_type ) return self.get_response( { "DescribeTagsResponse": { "DescribeTagsResult": {"TaggedResources": tagged_resources}, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937a" }, } } ) def delete_tags(self): resource_name = self._get_param("ResourceName") tag_keys = self.unpack_list_params("TagKeys.TagKey") self.redshift_backend.delete_tags(resource_name, tag_keys) return self.get_response( { "DeleteTagsResponse": { "ResponseMetadata": { 
"RequestId": "384ac68d-3775-11df-8963-01868b7c937a" } } } ) def enable_snapshot_copy(self): snapshot_copy_kwargs = { "cluster_identifier": self._get_param("ClusterIdentifier"), "destination_region": self._get_param("DestinationRegion"), "retention_period": self._get_param("RetentionPeriod", 7), "snapshot_copy_grant_name": self._get_param("SnapshotCopyGrantName"), } cluster = self.redshift_backend.enable_snapshot_copy(**snapshot_copy_kwargs) return self.get_response( { "EnableSnapshotCopyResponse": { "EnableSnapshotCopyResult": {"Cluster": cluster.to_json()}, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937a" }, } } ) def disable_snapshot_copy(self): snapshot_copy_kwargs = { "cluster_identifier": self._get_param("ClusterIdentifier") } cluster = self.redshift_backend.disable_snapshot_copy(**snapshot_copy_kwargs) return self.get_response( { "DisableSnapshotCopyResponse": { "DisableSnapshotCopyResult": {"Cluster": cluster.to_json()}, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937a" }, } } ) def modify_snapshot_copy_retention_period(self): snapshot_copy_kwargs = { "cluster_identifier": self._get_param("ClusterIdentifier"), "retention_period": self._get_param("RetentionPeriod"), } cluster = self.redshift_backend.modify_snapshot_copy_retention_period( **snapshot_copy_kwargs ) return self.get_response( { "ModifySnapshotCopyRetentionPeriodResponse": { "ModifySnapshotCopyRetentionPeriodResult": { "Clusters": [cluster.to_json()] }, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937a" }, } } ) def get_cluster_credentials(self): cluster_identifier = self._get_param("ClusterIdentifier") db_user = self._get_param("DbUser") auto_create = self._get_bool_param("AutoCreate", False) duration_seconds = self._get_int_param("DurationSeconds", 900) cluster_credentials = self.redshift_backend.get_cluster_credentials( cluster_identifier, db_user, auto_create, duration_seconds ) return self.get_response( { 
"GetClusterCredentialsResponse": { "GetClusterCredentialsResult": cluster_credentials, "ResponseMetadata": { "RequestId": "384ac68d-3775-11df-8963-01868b7c937a" }, } } )<|fim▁end|>
cluster["ClusterStatus"] = "creating"
<|file_name|>Sketcher_pt-BR.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?> <!DOCTYPE TS> <TS version="2.1" language="pt-BR" sourcelanguage="en"> <context> <name>CmdSketcherBSplineComb</name> <message> <location filename="../../CommandSketcherBSpline.cpp" line="162"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="163"/> <source>Show/hide B-spline curvature comb</source> <translation>Mostrar/ocultar o pente de curvatura de B-spline</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="164"/> <source>Switches between showing and hiding the curvature comb for all B-splines</source> <translation>Alterna entre mostrar e ocultar o pente de curvatura para todas as B-splines</translation> </message> </context> <context> <name>CmdSketcherBSplineDegree</name> <message> <location filename="../../CommandSketcherBSpline.cpp" line="100"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="101"/> <source>Show/hide B-spline degree</source> <translation>Mostrar/ocultar grau de B-spline</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="102"/> <source>Switches between showing and hiding the degree for all B-splines</source> <translation>Alterna entre mostrar e ocultar o grau para todas os B-splines</translation> </message> </context> <context> <name>CmdSketcherBSplineKnotMultiplicity</name> <message> <location filename="../../CommandSketcherBSpline.cpp" line="193"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="194"/> <source>Show/hide B-spline knot multiplicity</source> <translation>Mostrar/ocultar multiplicidade de nós de B-spline</translation> </message> <message> <location 
filename="../../CommandSketcherBSpline.cpp" line="195"/> <source>Switches between showing and hiding the knot multiplicity for all B-splines</source> <translation>Alterna entre mostrar e ocultar a multiplicidade de nós para todas as B-splines</translation> </message> </context> <context> <name>CmdSketcherBSplinePoleWeight</name> <message> <location filename="../../CommandSketcherBSpline.cpp" line="224"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="225"/> <source>Show/hide B-spline control point weight</source> <translation>Mostrar/ocultar o peso dos pontos de controle da B-spline</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="226"/> <source>Switches between showing and hiding the control point weight for all B-splines</source> <translation>Alterna entre mostrar e ocultar o peso dos pontos de controle para todas as B-splines</translation> </message> </context> <context> <name>CmdSketcherBSplinePolygon</name> <message> <location filename="../../CommandSketcherBSpline.cpp" line="131"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="132"/> <source>Show/hide B-spline control polygon</source> <translation>Mostrar/ocultar polígono de controle de B-spline</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="133"/> <source>Switches between showing and hiding the control polygons for all B-splines</source> <translation>Alterna entre mostrar e ocultar os polígonos de controle para todas as B-splines</translation> </message> </context> <context> <name>CmdSketcherCarbonCopy</name> <message> <location filename="../../CommandCreateGeo.cpp" line="6204"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6205"/> 
<source>Carbon copy</source> <translation>Com cópia</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6206"/> <source>Copies the geometry of another sketch</source> <translation>Copia a geometria de outro esboço</translation> </message> </context> <context> <name>CmdSketcherClone</name> <message> <location filename="../../CommandSketcherTools.cpp" line="1558"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="1559"/> <source>Clone</source> <translation>Clonar</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="1560"/> <source>Creates a clone of the geometry taking as reference the last selected point</source> <translation>Cria um clone da geometria tomando como referência o último ponto selecionado</translation> </message> </context> <context> <name>CmdSketcherCloseShape</name> <message> <location filename="../../CommandSketcherTools.cpp" line="100"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="101"/> <source>Close shape</source> <translation>Fechar forma</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="102"/> <source>Produce a closed shape by tying the end point of one element with the next element's starting point</source> <translation>Produzir uma forma fechada ligando o ponto de extremidade de um elemento com o ponto de partida do próximo elemento</translation> </message> </context> <context> <name>CmdSketcherCompBSplineShowHideGeometryInformation</name> <message> <location filename="../../CommandSketcherBSpline.cpp" line="255"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="256"/> <source>Show/hide B-spline information layer</source> <translation>Mostrar/ocultar a 
camada de informações da B-spline</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="331"/> <source>Show/hide B-spline degree</source> <translation>Mostrar/ocultar grau de B-spline</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="338"/> <source>Show/hide B-spline control polygon</source> <translation>Mostrar/ocultar polígono de controle de B-spline</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="345"/> <source>Show/hide B-spline curvature comb</source> <translation>Mostrar/ocultar o pente de curvatura de B-spline</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="352"/> <source>Show/hide B-spline knot multiplicity</source> <translation>Mostrar/ocultar multiplicidade de nós de B-spline</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="360"/> <source>Show/hide B-spline control point weight</source> <translation>Mostrar/ocultar o peso dos pontos de controle da B-spline</translation> </message> </context> <context> <name>CmdSketcherCompConstrainRadDia</name> <message> <location filename="../../CommandConstraints.cpp" line="5730"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="5731"/> <source>Constrain arc or circle</source> <translation>Restringir arco ou círculo</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="5732"/> <source>Constrain an arc or a circle</source> <translation>Restringir um arco ou um círculo</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="5815"/> <source>Constrain radius</source> <translation>Restrição de raio</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="5819"/> <source>Constrain diameter</source> 
<translation>Restringir o diâmetro</translation> </message> </context> <context> <name>CmdSketcherCompCopy</name> <message> <location filename="../../CommandSketcherTools.cpp" line="1633"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="1634"/> <source>Copy</source> <translation>Copiar</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="1635"/> <source>Creates a clone of the geometry taking as reference the last selected point</source> <translation>Cria um clone da geometria tomando como referência o último ponto selecionado</translation> </message> </context> <context> <name>CmdSketcherCompCreateArc</name> <message> <location filename="../../CommandCreateGeo.cpp" line="1809"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="1810"/> <source>Create arc</source> <translation>Criar arco</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="1811"/> <source>Create an arc in the sketcher</source> <translation>Criar um arco na bancada de esboços</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="1888"/> <source>Center and end points</source> <translation>Pontos de centro e extremidades</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="1892"/> <source>End points and rim point</source> <translation>Pontos de extremidade e ponto de borda</translation> </message> </context> <context> <name>CmdSketcherCompCreateBSpline</name> <message> <location filename="../../CommandCreateGeo.cpp" line="4445"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="4446"/> <source>Create a B-spline</source> <translation>Criar uma B-spline</translation> </message> <message> <location 
filename="../../CommandCreateGeo.cpp" line="4447"/> <source>Create a B-spline in the sketch</source> <translation>Criar uma B-spline no esboço</translation> </message> </context> <context> <name>CmdSketcherCompCreateCircle</name> <message> <location filename="../../CommandCreateGeo.cpp" line="4763"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="4764"/> <source>Create circle</source> <translation>Criar círculo</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="4765"/> <source>Create a circle in the sketcher</source> <translation>Criar um círculo no esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="4842"/> <source>Center and rim point</source> <translation>Ponto de centro e borda</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="4846"/> <source>3 rim points</source> <translation>3 pontos de borda</translation> </message> </context> <context> <name>CmdSketcherCompCreateConic</name> <message> <location filename="../../CommandCreateGeo.cpp" line="3868"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="3869"/> <source>Create a conic</source> <translation>Criar uma cônica</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="3870"/> <source>Create a conic in the sketch</source> <translation>Criar uma cônica no esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="3975"/> <source>Ellipse by center, major radius, point</source> <translation>Elipse pelo centro, raio maior, ponto</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="3979"/> <source>Ellipse by periapsis, apoapsis, minor radius</source> <translation>Elipse pelo periélio, apoapsis, menor raio</translation> 
</message> <message> <location filename="../../CommandCreateGeo.cpp" line="3983"/> <source>Arc of ellipse by center, major radius, endpoints</source> <translation>Arco de elipse pelo centro, raio principal, pontos de extremidade</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="3987"/> <source>Arc of hyperbola by center, major radius, endpoints</source> <translation>Arco de hipérbole pelo centro, raio maior, pontos de extremidade</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="3991"/> <source>Arc of parabola by focus, vertex, endpoints</source> <translation>Arco da parábola por foco, vértice, pontos de extremidade</translation> </message> </context> <context> <name>CmdSketcherCompCreateFillets</name> <message> <location filename="../../CommandCreateGeo.cpp" line="5340"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="5341"/> <source>Fillets</source> <translation>Filetes</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="5342"/> <source>Create a fillet between two lines</source> <translation>Crie um filete entre duas linhas</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="5417"/> <source>Sketch fillet</source> <translation>Filete de esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="5421"/> <source>Constraint-preserving sketch fillet</source> <translation>Filete de esboço com preservação de restrições</translation> </message> </context> <context> <name>CmdSketcherCompCreateRegularPolygon</name> <message> <location filename="../../CommandCreateGeo.cpp" line="6833"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6834"/> <source>Create regular polygon</source> <translation>Criar polígono 
regular</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6835"/> <source>Create a regular polygon in the sketcher</source> <translation>Criar um polígono regular no esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6950"/> <source>Triangle</source> <translation>Triângulo</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6954"/> <source>Square</source> <translation>Quadrado</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6958"/> <source>Pentagon</source> <translation>Pentágono</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6962"/> <source>Hexagon</source> <translation>Hexágono</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6966"/> <source>Heptagon</source> <translation>Heptágono</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6970"/> <source>Octagon</source> <translation>Octógono</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6974"/> <source>Regular Polygon</source> <translation>Polígono regular</translation> </message> </context> <context> <name>CmdSketcherCompModifyKnotMultiplicity</name> <message> <location filename="../../CommandSketcherBSpline.cpp" line="898"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="899"/> <source>Modify knot multiplicity</source> <translation>Modificar a multiplicidade de nós</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="900"/> <source>Modifies the multiplicity of the selected knot of a B-spline</source> <translation>Modifica a multiplicidade do nós selecionados de uma B-spline</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" 
line="961"/> <source>Increase knot multiplicity</source> <translation>Aumentar a multiplicidade de nós</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="968"/> <source>Decrease knot multiplicity</source> <translation>Diminuir a multiplicidade de nós</translation> </message> </context> <context> <name>CmdSketcherConnect</name> <message> <location filename="../../CommandSketcherTools.cpp" line="211"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="212"/> <source>Connect edges</source> <translation>Conectar nós</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="213"/> <source>Tie the end point of the element with next element's starting point</source> <translation>Amarrar o ponto final do elemento com o ponto de partida do próximo elemento</translation> </message> </context> <context> <name>CmdSketcherConstrainAngle</name> <message> <location filename="../../CommandConstraints.cpp" line="5848"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="5849"/> <source>Constrain angle</source> <translation>Ângulo de restrição</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="5850"/> <source>Fix the angle of a line or the angle between two lines</source> <translation>Fixar o ângulo de uma linha ou o ângulo entre duas linhas</translation> </message> </context> <context> <name>CmdSketcherConstrainBlock</name> <message> <location filename="../../CommandConstraints.cpp" line="1753"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="1754"/> <source>Constrain block</source> <translation>Restrição de bloqueio</translation> </message> <message> <location filename="../../CommandConstraints.cpp" 
line="1755"/> <source>Block constraint: block the selected edge from moving</source> <translation>Restrição de bloqueio: impede o deslocamento da aresta selecionada</translation> </message> </context> <context> <name>CmdSketcherConstrainCoincident</name> <message> <location filename="../../CommandConstraints.cpp" line="2063"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="2064"/> <source>Constrain coincident</source> <translation>Restrição de coincidência</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="2065"/> <source>Create a coincident constraint on the selected item</source> <translation>Criar uma restrição de coincidência sobre o item selecionado</translation> </message> </context> <context> <name>CmdSketcherConstrainDiameter</name> <message> <location filename="../../CommandConstraints.cpp" line="5275"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="5276"/> <source>Constrain diameter</source> <translation>Restringir o diâmetro</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="5277"/> <source>Fix the diameter of a circle or an arc</source> <translation>Corrigir o diâmetro de um círculo ou arco</translation> </message> </context> <context> <name>CmdSketcherConstrainDistance</name> <message> <location filename="../../CommandConstraints.cpp" line="2251"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="2252"/> <source>Constrain distance</source> <translation>Restrição de distância</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="2253"/> <source>Fix a length of a line or the distance between a line and a vertex</source> <translation>Trancar o comprimento de uma linha ou a 
distância entre uma linha e um vértice</translation> </message> </context> <context> <name>CmdSketcherConstrainDistanceX</name> <message> <location filename="../../CommandConstraints.cpp" line="2806"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="2807"/> <source>Constrain horizontal distance</source> <translation>Restrição de distância horizontal</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="2808"/> <source>Fix the horizontal distance between two points or line ends</source> <translation>Fixar a distância horizontal entre dois pontos ou extremidades de linha</translation> </message> </context> <context> <name>CmdSketcherConstrainDistanceY</name> <message> <location filename="../../CommandConstraints.cpp" line="3060"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="3061"/> <source>Constrain vertical distance</source> <translation>Restringir a distância vertical</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="3062"/> <source>Fix the vertical distance between two points or line ends</source> <translation>Fixar a distância vertical entre dois pontos ou extremidades de linha</translation> </message> </context> <context> <name>CmdSketcherConstrainEqual</name> <message> <location filename="../../CommandConstraints.cpp" line="6375"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="6376"/> <source>Constrain equal</source> <translation>Restrição igual</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="6377"/> <source>Create an equality constraint between two lines or between circles and arcs</source> <translation>Criar uma restrição de igualdade entre duas linhas ou círculos e 
arcos</translation> </message> </context> <context> <name>CmdSketcherConstrainHorizontal</name> <message> <location filename="../../CommandConstraints.cpp" line="1056"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="1057"/> <source>Constrain horizontally</source> <translation>Restringir horizontalmente</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="1058"/> <source>Create a horizontal constraint on the selected item</source> <translation>Criar uma restrição horizontal sobre o item selecionado</translation> </message> </context> <context> <name>CmdSketcherConstrainInternalAlignment</name> <message> <location filename="../../CommandConstraints.cpp" line="7056"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="7057"/> <source>Constrain internal alignment</source> <translation>Restrição de alinhamento interno</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="7058"/> <source>Constrains an element to be aligned with the internal geometry of another element</source> <translation>Restringe um elemento para ser alinhado com a geometria interna de um outro elemento</translation> </message> </context> <context> <name>CmdSketcherConstrainLock</name> <message> <location filename="../../CommandConstraints.cpp" line="1528"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="1529"/> <source>Constrain lock</source> <translation>Restrição de bloqueio</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="1530"/> <source>Lock constraint: create both a horizontal and a vertical distance constraint on the selected vertex</source> <translation>Restrição de bloqueio: adiciona uma restrição de distância 
horizontal e vertical ao vértice selecionado</translation> </message> </context> <context> <name>CmdSketcherConstrainParallel</name> <message> <location filename="../../CommandConstraints.cpp" line="3306"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="3307"/> <source>Constrain parallel</source> <translation>Restrição paralela</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="3308"/> <source>Create a parallel constraint between two lines</source> <translation>Criar uma restrição paralela entre duas linhas</translation> </message> </context> <context> <name>CmdSketcherConstrainPerpendicular</name> <message> <location filename="../../CommandConstraints.cpp" line="3456"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="3457"/> <source>Constrain perpendicular</source> <translation>Restrição perpendicular</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="3458"/> <source>Create a perpendicular constraint between two lines</source> <translation>Criar uma restrição perpendicular entre duas linhas</translation> </message> </context> <context> <name>CmdSketcherConstrainPointOnObject</name> <message> <location filename="../../CommandConstraints.cpp" line="2599"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="2600"/> <source>Constrain point onto object</source> <translation>Restringir um ponto sobre um objeto</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="2601"/> <source>Fix a point onto an object</source> <translation>Fixar um ponto sobre um objeto</translation> </message> </context> <context> <name>CmdSketcherConstrainRadius</name> <message> <location 
filename="../../CommandConstraints.cpp" line="4764"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="4765"/> <source>Constrain radius or weight</source> <translation>Restrição de raio ou peso</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="4766"/> <source>Fix the radius of a circle or an arc or fix the weight of a pole of a B-Spline</source> <translation>Bloqueia o raio de um círculo ou arco ou o peso de um polo de uma B-Spline</translation> </message> </context> <context> <name>CmdSketcherConstrainSnellsLaw</name> <message> <location filename="../../CommandConstraints.cpp" line="6892"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="6893"/> <source>Constrain refraction (Snell's law')</source> <translation>Restrição de refração (lei de Snell)</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="6894"/> <source>Create a refraction law (Snell's law) constraint between two endpoints of rays and an edge as an interface.</source> <translation>Cria uma restrição de refração (lei de Snell) entre dois pontos de extremidade de raios e uma aresta usada como interface.</translation> </message> </context> <context> <name>CmdSketcherConstrainSymmetric</name> <message> <location filename="../../CommandConstraints.cpp" line="6592"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="6593"/> <source>Constrain symmetrical</source> <translation>Restrição simétrica</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="6594"/> <source>Create a symmetry constraint between two points with respect to a line or a third point</source> <translation>Criar uma restrição de simetria entre dois pontos em relação a uma 
linha ou um terceiro ponto</translation> </message> </context> <context> <name>CmdSketcherConstrainTangent</name> <message> <location filename="../../CommandConstraints.cpp" line="4092"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="4093"/> <source>Constrain tangent</source> <translation>Restrição tangente</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="4094"/> <source>Create a tangent constraint between two entities</source> <translation>Criar uma restrição tangente entre duas entidades</translation> </message> </context> <context> <name>CmdSketcherConstrainVertical</name> <message> <location filename="../../CommandConstraints.cpp" line="1294"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="1295"/> <source>Constrain vertically</source> <translation>Restringir verticalmente</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="1296"/> <source>Create a vertical constraint on the selected item</source> <translation>Criar uma restrição vertical sobre o item selecionado</translation> </message> </context> <context> <name>CmdSketcherConvertToNURB</name> <message> <location filename="../../CommandSketcherBSpline.cpp" line="384"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="385"/> <source>Convert geometry to B-spline</source> <translation>Converter geometria para B-spline</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="386"/> <source>Converts the selected geometry to a B-spline</source> <translation>Converte a geometria selecionada em uma B-spline</translation> </message> </context> <context> <name>CmdSketcherCopy</name> <message> <location 
filename="../../CommandSketcherTools.cpp" line="1514"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="1515"/> <source>Copy</source> <translation>Copiar</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="1516"/> <source>Creates a simple copy of the geometry taking as reference the last selected point</source> <translation>Cria uma cópia simples da geometria tomando como referência o último ponto selecionado</translation> </message> </context> <context> <name>CmdSketcherCreate3PointArc</name> <message> <location filename="../../CommandCreateGeo.cpp" line="1782"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="1783"/> <source>Create arc by three points</source> <translation>Criar um arco a partir de três pontos</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="1784"/> <source>Create an arc by its end points and a point along the arc</source> <translation>Criar um arco a partir de seus pontos de extremidade e um ponto ao longo do arco</translation> </message> </context> <context> <name>CmdSketcherCreate3PointCircle</name> <message> <location filename="../../CommandCreateGeo.cpp" line="4736"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="4737"/> <source>Create circle by three points</source> <translation>Criar um círculo a partir de três pontos</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="4738"/> <source>Create a circle by 3 perimeter points</source> <translation>Criar um círculo a partir de 3 pontos do perímetro</translation> </message> </context> <context> <name>CmdSketcherCreateArc</name> <message> <location filename="../../CommandCreateGeo.cpp" line="1513"/> 
<source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="1514"/> <source>Create arc by center</source> <translation>Criar um arco pelo centro</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="1515"/> <source>Create an arc by its center and by its end points</source> <translation>Criar um arco a partir do seu centro e por seus pontos de extremidade</translation> </message> </context> <context> <name>CmdSketcherCreateArcOfEllipse</name> <message> <location filename="../../CommandCreateGeo.cpp" line="3195"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="3196"/> <source>Create an arc of ellipse</source> <translation>Criar um arco de elipse</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="3197"/> <source>Create an arc of ellipse in the sketch</source> <translation>Criar um arco de elipse no esboço</translation> </message> </context> <context> <name>CmdSketcherCreateArcOfHyperbola</name> <message> <location filename="../../CommandCreateGeo.cpp" line="3538"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="3539"/> <source>Create an arc of hyperbola</source> <translation>Cria um arco de hipérbole</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="3540"/> <source>Create an arc of hyperbola in the sketch</source> <translation>Cria um arco de hipérbole no esboço</translation> </message> </context> <context> <name>CmdSketcherCreateArcOfParabola</name> <message> <location filename="../../CommandCreateGeo.cpp" line="3836"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="3837"/> <source>Create an arc of parabola</source> 
<translation>Criar um arco de parábola</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="3838"/> <source>Create an arc of parabola in the sketch</source> <translation>Criar um arco de parábola no Esboço</translation> </message> </context> <context> <name>CmdSketcherCreateBSpline</name> <message> <location filename="../../CommandCreateGeo.cpp" line="4370"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="4371"/> <source>Create B-spline</source> <translation>Criar B-spline</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="4372"/> <source>Create a B-spline via control points in the sketch.</source> <translation>Criar uma B-spline através de pontos de controle no esboço.</translation> </message> </context> <context> <name>CmdSketcherCreateCircle</name> <message> <location filename="../../CommandCreateGeo.cpp" line="2043"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="2044"/> <source>Create circle</source> <translation>Criar círculo</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="2045"/> <source>Create a circle in the sketch</source> <translation>Criar um círculo no esboço</translation> </message> </context> <context> <name>CmdSketcherCreateDraftLine</name> <message> <location filename="../../CommandCreateGeo.cpp" line="5000"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="5001"/> <source>Create draft line</source> <translation>Criar linha de rascunho</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="5002"/> <source>Create a draft line in the sketch</source> <translation>Criar uma linha de rascunho no esboço</translation> </message> </context> <context> 
<name>CmdSketcherCreateEllipseBy3Points</name> <message> <location filename="../../CommandCreateGeo.cpp" line="2868"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="2869"/> <source>Create ellipse by 3 points</source> <translation>Criar elipse por 3 pontos</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="2870"/> <source>Create an ellipse by 3 points in the sketch</source> <translation>Criar uma elipse por 3 pontos no esboço</translation> </message> </context> <context> <name>CmdSketcherCreateEllipseByCenter</name> <message> <location filename="../../CommandCreateGeo.cpp" line="2838"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="2839"/> <source>Create ellipse by center</source> <translation>Criar elipse pelo centro</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="2840"/> <source>Create an ellipse by center in the sketch</source> <translation>Criar uma elipse pelo centro no esboço</translation> </message> </context> <context> <name>CmdSketcherCreateFillet</name> <message> <location filename="../../CommandCreateGeo.cpp" line="5280"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="5281"/> <source>Create fillet</source> <translation>Criar filete</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="5282"/> <source>Create a fillet between two lines or at a coincident point</source> <translation>Criar um arredondamento entre duas linhas ou em um ponto de coincidência</translation> </message> </context> <context> <name>CmdSketcherCreateHeptagon</name> <message> <location filename="../../CommandCreateGeo.cpp" line="6748"/> <source>Sketcher</source> <translation>Esboço</translation> </message> 
<message> <location filename="../../CommandCreateGeo.cpp" line="6749"/> <source>Create heptagon</source> <translation>Criar heptágono</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6750"/> <source>Create a heptagon in the sketch</source> <translation>Criar um heptágono no esboço</translation> </message> </context> <context> <name>CmdSketcherCreateHexagon</name> <message> <location filename="../../CommandCreateGeo.cpp" line="6721"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6722"/> <source>Create hexagon</source> <translation>Criar hexágono</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6723"/> <source>Create a hexagon in the sketch</source> <translation>Criar um hexágono no esboço</translation> </message> </context> <context> <name>CmdSketcherCreateLine</name> <message> <location filename="../../CommandCreateGeo.cpp" line="383"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="384"/> <source>Create line</source> <translation>Criar linha</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="385"/> <source>Create a line in the sketch</source> <translation>Criar uma linha no esboço</translation> </message> </context> <context> <name>CmdSketcherCreateOctagon</name> <message> <location filename="../../CommandCreateGeo.cpp" line="6775"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6776"/> <source>Create octagon</source> <translation>Criar octógono</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6777"/> <source>Create an octagon in the sketch</source> <translation>Criar um octógono no esboço</translation> </message> </context> <context> 
<name>CmdSketcherCreatePentagon</name> <message> <location filename="../../CommandCreateGeo.cpp" line="6693"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6694"/> <source>Create pentagon</source> <translation>Criar pentágono</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6695"/> <source>Create a pentagon in the sketch</source> <translation>Criar um pentágono no esboço</translation> </message> </context> <context> <name>CmdSketcherCreatePeriodicBSpline</name> <message> <location filename="../../CommandCreateGeo.cpp" line="4414"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="4415"/> <source>Create periodic B-spline</source> <translation>Criar B-spline periódica</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="4416"/> <source>Create a periodic B-spline via control points in the sketch.</source> <translation>Crie uma B-spline periódica através de pontos de controle no esboço.</translation> </message> </context> <context> <name>CmdSketcherCreatePoint</name> <message> <location filename="../../CommandCreateGeo.cpp" line="4943"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="4944"/> <source>Create point</source> <translation>Criar ponto</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="4945"/> <source>Create a point in the sketch</source> <translation>Criar um ponto no esboço</translation> </message> </context> <context> <name>CmdSketcherCreatePointFillet</name> <message> <location filename="../../CommandCreateGeo.cpp" line="5309"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="5310"/> <source>Create 
corner-preserving fillet</source> <translation>Criar filete de preservação de canto</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="5311"/> <source>Fillet that preserves intersection point and most constraints</source> <translation>Filete que preserva o ponto de interseção e a maioria das restrições</translation> </message> </context> <context> <name>CmdSketcherCreatePolyline</name> <message> <location filename="../../CommandCreateGeo.cpp" line="1270"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="1271"/> <source>Create polyline</source> <translation>Criar polilinha</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="1272"/> <source>Create a polyline in the sketch. 'M' Key cycles behaviour</source> <translation>Criar uma polilinha no esboço. A tecla 'M' alterna os modos de desenho</translation> </message> </context> <context> <name>CmdSketcherCreateRectangle</name> <message> <location filename="../../CommandCreateGeo.cpp" line="582"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="583"/> <source>Create rectangle</source> <translation>Criar retângulo</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="584"/> <source>Create a rectangle in the sketch</source> <translation>Criar um retângulo no esboço</translation> </message> </context> <context> <name>CmdSketcherCreateRegularPolygon</name> <message> <location filename="../../CommandCreateGeo.cpp" line="6802"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6803"/> <source>Create regular polygon</source> <translation>Criar polígono regular</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6804"/> 
<source>Create a regular polygon in the sketch</source> <translation>Criar um polígono regular no esboço</translation> </message> </context> <context> <name>CmdSketcherCreateSlot</name> <message> <location filename="../../CommandCreateGeo.cpp" line="6445"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6446"/> <source>Create slot</source> <translation>Criar uma fresta</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6447"/> <source>Create a slot in the sketch</source> <translation>Criar uma fresta no esboço</translation> </message> </context> <context> <name>CmdSketcherCreateSquare</name> <message> <location filename="../../CommandCreateGeo.cpp" line="6666"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6667"/> <source>Create square</source> <translation>Criar quadrado</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6668"/> <source>Create a square in the sketch</source> <translation>Criar um quadrado no esboço</translation> </message> </context> <context> <name>CmdSketcherCreateText</name> <message> <location filename="../../CommandCreateGeo.cpp" line="4972"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="4973"/> <source>Create text</source> <translation>Criar texto</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="4974"/> <source>Create text in the sketch</source> <translation>Criar um texto no esboço</translation> </message> </context> <context> <name>CmdSketcherCreateTriangle</name> <message> <location filename="../../CommandCreateGeo.cpp" line="6639"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" 
line="6640"/> <source>Create equilateral triangle</source> <translation>Criar triângulo equilátero</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6641"/> <source>Create an equilateral triangle in the sketch</source> <translation>Criar um triângulo equilátero no esboço</translation> </message> </context> <context> <name>CmdSketcherDecreaseDegree</name> <message> <location filename="../../CommandSketcherBSpline.cpp" line="525"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="526"/> <source>Decrease B-spline degree</source> <translation>Diminuir grau B-spline</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="527"/> <source>Decreases the degree of the B-spline</source> <translation>Diminui o grau de uma B-spline</translation> </message> </context> <context> <name>CmdSketcherDecreaseKnotMultiplicity</name> <message> <location filename="../../CommandSketcherBSpline.cpp" line="756"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="757"/> <source>Decrease knot multiplicity</source> <translation>Diminuir a multiplicidade de nós</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="758"/> <source>Decreases the multiplicity of the selected knot of a B-spline</source> <translation>Diminui a multiplicidade do nó selecionado de uma B-spline</translation> </message> </context> <context> <name>CmdSketcherDeleteAllConstraints</name> <message> <location filename="../../CommandSketcherTools.cpp" line="2094"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="2095"/> <source>Delete all constraints</source> <translation>Excluir todas as restrições</translation> </message> <message> 
<location filename="../../CommandSketcherTools.cpp" line="2096"/> <source>Delete all constraints in the sketch</source> <translation>Excluir todas as restrições do esboço</translation> </message> </context> <context> <name>CmdSketcherDeleteAllGeometry</name> <message> <location filename="../../CommandSketcherTools.cpp" line="2033"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="2034"/> <source>Delete all geometry</source> <translation>Excluir toda a geometria</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="2035"/> <source>Delete all geometry and constraints in the current sketch, with the exception of external geometry</source> <translation>Excluir todas as restrições e geometria do esboço atual, com exceção da geometria externa</translation> </message> </context> <context> <name>CmdSketcherEditSketch</name> <message> <location filename="../../Command.cpp" line="265"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../Command.cpp" line="266"/> <source>Edit sketch</source> <translation>Editar esboço</translation> </message> <message> <location filename="../../Command.cpp" line="267"/> <source>Edit the selected sketch.</source> <translation>Editar o esboço selecionado.</translation> </message> </context> <context> <name>CmdSketcherExtend</name> <message> <location filename="../../CommandCreateGeo.cpp" line="5844"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="5845"/> <source>Extend edge</source> <translation>Prolongar aresta</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="5846"/> <source>Extend an edge with respect to the picked position</source> <translation>Estende uma aresta em relação à posição escolhida</translation> </message> </context> 
<context> <name>CmdSketcherExternal</name> <message> <location filename="../../CommandCreateGeo.cpp" line="6028"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6029"/> <source>External geometry</source> <translation>Geometria externa</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6030"/> <source>Create an edge linked to an external geometry</source> <translation>Criar uma aresta ligada a uma geometria externa</translation> </message> </context> <context> <name>CmdSketcherIncreaseDegree</name> <message> <location filename="../../CommandSketcherBSpline.cpp" line="452"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="453"/> <source>Increase B-spline degree</source> <translation>Aumentar grau B-spline</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="454"/> <source>Increases the degree of the B-spline</source> <translation>Aumenta o grau da B-spline</translation> </message> </context> <context> <name>CmdSketcherIncreaseKnotMultiplicity</name> <message> <location filename="../../CommandSketcherBSpline.cpp" line="602"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="603"/> <source>Increase knot multiplicity</source> <translation>Aumentar a multiplicidade de nós</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="604"/> <source>Increases the multiplicity of the selected knot of a B-spline</source> <translation>Aumenta a multiplicidade do nó selecionado de uma B-spline</translation> </message> </context> <context> <name>CmdSketcherLeaveSketch</name> <message> <location filename="../../Command.cpp" line="295"/> <source>Sketcher</source> <translation>Esboço</translation> 
</message> <message> <location filename="../../Command.cpp" line="296"/> <source>Leave sketch</source> <translation>Sair do esboço</translation> </message> <message> <location filename="../../Command.cpp" line="297"/> <source>Finish editing the active sketch.</source> <translation>Finaliza a edição do esboço ativo.</translation> </message> </context> <context> <name>CmdSketcherMapSketch</name> <message> <location filename="../../Command.cpp" line="507"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../Command.cpp" line="508"/> <source>Map sketch to face...</source> <translation>Mapear esboço para face...</translation> </message> <message> <location filename="../../Command.cpp" line="509"/> <source>Set the 'Support' of a sketch. First select the supporting geometry, for example, a face or an edge of a solid object, then call this command, then choose the desired sketch.</source> <translation>Defina o suporte de um esboço. Selecione primeiro a geometria de suporte, por exemplo uma face ou uma aresta de um objeto sólido, em seguida, execute este comando e escolha o esboço desejado.</translation> </message> <message> <location filename="../../Command.cpp" line="561"/> <source>Some of the selected objects depend on the sketch to be mapped. Circular dependencies are not allowed.</source> <translation>Alguns dos objetos selecionados dependem do esboço a ser mapeado. 
Dependências circulares não são permitidas.</translation> </message> </context> <context> <name>CmdSketcherMergeSketches</name> <message> <location filename="../../Command.cpp" line="863"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../Command.cpp" line="864"/> <source>Merge sketches</source> <translation>Mesclar esboços</translation> </message> <message> <location filename="../../Command.cpp" line="865"/> <source>Create a new sketch from merging two or more selected sketches.</source> <translation>Criar um novo esboço ao mesclar dois ou mais esboços selecionados.</translation> </message> <message> <location filename="../../Command.cpp" line="878"/> <source>Wrong selection</source> <translation>Seleção errada</translation> </message> <message> <location filename="../../Command.cpp" line="879"/> <source>Select at least two sketches.</source> <translation>Selecione pelo menos dois esboços.</translation> </message> </context> <context> <name>CmdSketcherMirrorSketch</name> <message> <location filename="../../Command.cpp" line="756"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../Command.cpp" line="757"/> <source>Mirror sketch</source> <translation>Espelhar o esboço</translation> </message> <message> <location filename="../../Command.cpp" line="758"/> <source>Create a new mirrored sketch for each selected sketch by using the X or Y axes, or the origin point, as mirroring reference.</source> <translation>Criar um novo esboço espelhado para cada esboço selecionado usando os eixos X ou Y ou o ponto de origem como referência espelhada.</translation> </message> <message> <location filename="../../Command.cpp" line="773"/> <source>Wrong selection</source> <translation>Seleção errada</translation> </message> <message> <location filename="../../Command.cpp" line="774"/> <source>Select one or more sketches.</source> <translation>Selecione um ou mais 
esboços.</translation> </message> </context> <context> <name>CmdSketcherMove</name> <message> <location filename="../../CommandSketcherTools.cpp" line="1601"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="1602"/> <source>Move</source> <translation>Mover</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="1603"/> <source>Moves the geometry taking as reference the last selected point</source> <translation>Move a geometria usando como referência o último ponto selecionado</translation> </message> </context> <context> <name>CmdSketcherNewSketch</name> <message> <location filename="../../Command.cpp" line="142"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../Command.cpp" line="143"/> <source>Create sketch</source> <translation>Criar um esboço</translation> </message> <message> <location filename="../../Command.cpp" line="144"/> <source>Create a new sketch.</source> <translation>Criar um novo esboço.</translation> </message> </context> <context> <name>CmdSketcherRectangularArray</name> <message> <location filename="../../CommandSketcherTools.cpp" line="1895"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="1896"/> <source>Rectangular array</source> <translation>Rede retangular</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="1897"/> <source>Creates a rectangular array pattern of the geometry taking as reference the last selected point</source> <translation>Cria um padrão retangular da geometria com referência ao último ponto selecionado</translation> </message> </context> <context> <name>CmdSketcherReorientSketch</name> <message> <location filename="../../Command.cpp" line="380"/> <source>Sketcher</source> <translation>Esboço</translation> 
</message> <message> <location filename="../../Command.cpp" line="381"/> <source>Reorient sketch...</source> <translation>Reorientar um esboço...</translation> </message> <message> <location filename="../../Command.cpp" line="382"/> <source>Place the selected sketch on one of the global coordinate planes. This will clear the 'Support' property, if any.</source> <translation>Coloque o esboço selecionado em um dos planos de coordenadas globais. Isto irá limpar a propriedade 'Suporte', se houver.</translation> </message> </context> <context> <name>CmdSketcherRestoreInternalAlignmentGeometry</name> <message> <location filename="../../CommandSketcherTools.cpp" line="906"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="907"/> <source>Show/hide internal geometry</source> <translation>Mostrar/ocultar geometria interna</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="908"/> <source>Show all internal geometry or hide unused internal geometry</source> <translation>Mostrar toda a geometria interna ou ocultar a geometria interna não utilizada</translation> </message> </context> <context> <name>CmdSketcherSelectConflictingConstraints</name> <message> <location filename="../../CommandSketcherTools.cpp" line="652"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="653"/> <location filename="../../CommandSketcherTools.cpp" line="654"/> <source>Select conflicting constraints</source> <translation>Selecionar restrições conflitantes</translation> </message> </context> <context> <name>CmdSketcherSelectConstraints</name> <message> <location filename="../../CommandSketcherTools.cpp" line="296"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="297"/> <source>Select associated 
constraints</source> <translation>Selecionar restrições associadas</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="298"/> <source>Select the constraints associated with the selected geometrical elements</source> <translation>Selecionar as restrições associadas aos elementos geométricos selecionados</translation> </message> </context> <context> <name>CmdSketcherSelectElementsAssociatedWithConstraints</name> <message> <location filename="../../CommandSketcherTools.cpp" line="703"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="704"/> <source>Select associated geometry</source> <translation>Selecionar geometria associada</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="705"/> <source>Select the geometrical elements associated with the selected constraints</source> <translation>Selecionar os elementos geométricos associados às restrições selecionadas</translation> </message> </context> <context> <name>CmdSketcherSelectElementsWithDoFs</name> <message> <location filename="../../CommandSketcherTools.cpp" line="823"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="824"/> <source>Select unconstrained DoF</source> <translation>Selecionar grau de liberdade não restrito</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="825"/> <source>Select geometrical elements where the solver still detects unconstrained degrees of freedom.</source> <translation>Selecionar os elementos geométricos onde o calculador ainda detecta graus de liberdade sem restrições.</translation> </message> </context> <context> <name>CmdSketcherSelectHorizontalAxis</name> <message> <location filename="../../CommandSketcherTools.cpp" line="454"/> <source>Sketcher</source> 
<translation>Esboço</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="455"/> <source>Select horizontal axis</source> <translation>Selecionar eixo horizontal</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="456"/> <source>Select the local horizontal axis of the sketch</source> <translation>Selecionar o eixo horizontal local do esboço</translation> </message> </context> <context> <name>CmdSketcherSelectMalformedConstraints</name> <message> <location filename="../../CommandSketcherTools.cpp" line="547"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="548"/> <location filename="../../CommandSketcherTools.cpp" line="549"/> <source>Select malformed constraints</source> <translation>Selecione restrições malformadas</translation> </message> </context> <context> <name>CmdSketcherSelectOrigin</name> <message> <location filename="../../CommandSketcherTools.cpp" line="368"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="369"/> <source>Select origin</source> <translation>Selecionar a origem</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="370"/> <source>Select the local origin point of the sketch</source> <translation>Selecionar o ponto de origem local do esboço</translation> </message> </context> <context> <name>CmdSketcherSelectPartiallyRedundantConstraints</name> <message> <location filename="../../CommandSketcherTools.cpp" line="599"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="600"/> <location filename="../../CommandSketcherTools.cpp" line="601"/> <source>Select partially redundant constraints</source> <translation>Selecionar restrições parcialmente 
redundantes</translation> </message> </context> <context> <name>CmdSketcherSelectRedundantConstraints</name> <message> <location filename="../../CommandSketcherTools.cpp" line="495"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="496"/> <location filename="../../CommandSketcherTools.cpp" line="497"/> <source>Select redundant constraints</source> <translation>Selecionar restrições redundantes</translation> </message> </context> <context> <name>CmdSketcherSelectVerticalAxis</name> <message> <location filename="../../CommandSketcherTools.cpp" line="412"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="413"/> <source>Select vertical axis</source> <translation>Selecionar o eixo vertical</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="414"/> <source>Select the local vertical axis of the sketch</source> <translation>Selecionar o eixo vertical local do esboço</translation> </message> </context> <context> <name>CmdSketcherStopOperation</name> <message> <location filename="../../Command.cpp" line="339"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../Command.cpp" line="340"/> <source>Stop operation</source> <translation>Parar a operação</translation> </message> <message> <location filename="../../Command.cpp" line="341"/> <source>When in edit mode, stop the active operation (drawing, constraining, etc.).</source> <translation>Quando estiver no modo de edição, para a operação ativa (desenho, restrição, etc.).</translation> </message> </context> <context> <name>CmdSketcherSwitchVirtualSpace</name> <message> <location filename="../../CommandSketcherVirtualSpace.cpp" line="92"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location 
filename="../../CommandSketcherVirtualSpace.cpp" line="93"/> <source>Switch virtual space</source> <translation>Alternar espaço virtual</translation> </message> <message> <location filename="../../CommandSketcherVirtualSpace.cpp" line="94"/> <source>Switches the selected constraints or the view to the other virtual space</source> <translation>Alterna as restrições selecionadas para um outro espaço virtual</translation> </message> </context> <context> <name>CmdSketcherSymmetry</name> <message> <location filename="../../CommandSketcherTools.cpp" line="1000"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="1001"/> <source>Symmetry</source> <translation>Simetria</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="1002"/> <source>Creates symmetric geometry with respect to the last selected line or point</source> <translation>Cria uma geometria simétrica em relação ao último ponto ou linha selecionada</translation> </message> </context> <context> <name>CmdSketcherToggleActiveConstraint</name> <message> <location filename="../../CommandConstraints.cpp" line="7643"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="7644"/> <source>Activate/deactivate constraint</source> <translation>Ativar/desativar restrição</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="7645"/> <source>Activates or deactivates the selected constraints</source> <translation>Ativa ou desativa as restrições selecionadas</translation> </message> </context> <context> <name>CmdSketcherToggleConstruction</name> <message> <location filename="../../CommandAlterGeometry.cpp" line="73"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandAlterGeometry.cpp" line="74"/> <source>Toggle 
construction geometry</source> <translation>Ativa/desativa a geometria de construção</translation> </message> <message> <location filename="../../CommandAlterGeometry.cpp" line="75"/> <source>Toggles the toolbar or selected geometry to/from construction mode</source> <translation>Ativa/desativa a barra de ferramentas ou geometria selecionada de/para o modo de construção</translation> </message> </context> <context> <name>CmdSketcherToggleDrivingConstraint</name> <message> <location filename="../../CommandConstraints.cpp" line="7517"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="7518"/> <source>Toggle driving/reference constraint</source> <translation>Ativar/desativar restrição atuante ou de referência</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="7519"/> <source>Set the toolbar, or the selected constraints, into driving or reference mode</source> <translation>Colocar a barra de ferramentas, ou as restrições selecionadas, no modo atuante ou de referência</translation> </message> </context> <context> <name>CmdSketcherTrimming</name> <message> <location filename="../../CommandCreateGeo.cpp" line="5530"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="5531"/> <source>Trim edge</source> <translation>Recortar aresta</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="5532"/> <source>Trim an edge with respect to the picked position</source> <translation>Aparar uma aresta em relação a posição escolhida</translation> </message> </context> <context> <name>CmdSketcherValidateSketch</name> <message> <location filename="../../Command.cpp" line="718"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../Command.cpp" line="719"/> <source>Validate sketch...</source> 
<translation>Validar um esboço...</translation> </message> <message> <location filename="../../Command.cpp" line="720"/> <source>Validate a sketch by looking at missing coincidences, invalid constraints, degenerated geometry, etc.</source> <translation>Validar um esboço olhando para coincidências faltando, restrições inválidas, geometria corrompida, etc.</translation> </message> <message> <location filename="../../Command.cpp" line="735"/> <source>Select only one sketch.</source> <translation>Selecione apenas um esboço.</translation> </message> <message> <location filename="../../Command.cpp" line="734"/> <source>Wrong selection</source> <translation>Seleção errada</translation> </message> </context> <context> <name>CmdSketcherViewSection</name> <message> <location filename="../../Command.cpp" line="947"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../Command.cpp" line="948"/> <source>View section</source> <translation>Ver seção</translation> </message> <message> <location filename="../../Command.cpp" line="949"/> <source>When in edit mode, switch between section view and full view.</source> <translation>Quando em modo de edição, alterna entre vista da seção e vista completa.</translation> </message> </context> <context> <name>CmdSketcherViewSketch</name> <message> <location filename="../../Command.cpp" line="679"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../Command.cpp" line="680"/> <source>View sketch</source> <translation>Ver esboço</translation> </message> <message> <location filename="../../Command.cpp" line="681"/> <source>When in edit mode, set the camera orientation perpendicular to the sketch plane.</source> <translation>Quando estiver em modo de edição, coloca a orientação da câmera perpendicular ao plano do esboço.</translation> </message> </context> <context> <name>Command</name> <message> <location 
filename="../../CommandConstraints.cpp" line="1158"/> <location filename="../../CommandConstraints.cpp" line="1233"/> <source>Add horizontal constraint</source> <translation>Adicionar restrição horizontal</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="1167"/> <location filename="../../CommandConstraints.cpp" line="1259"/> <location filename="../../CommandConstraints.cpp" line="1494"/> <source>Add horizontal alignment</source> <translation>Adicionar alinhamento horizontal</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="1395"/> <location filename="../../CommandConstraints.cpp" line="1469"/> <source>Add vertical constraint</source> <translation>Adicionar restrição vertical</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="1403"/> <source>Add vertical alignment</source> <translation>Adicionar alinhamento vertical</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="1608"/> <source>Add 'Lock' constraint</source> <translation>Adicionar restrição 'Travar'</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="1649"/> <source>Add relative 'Lock' constraint</source> <translation>Adicionar restrição 'Travar' relativa</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="1694"/> <source>Add fixed constraint</source> <translation>Adicionar restrição fixa</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="1836"/> <source>Add 'Block' constraint</source> <translation>Adicionar restrição 'Bloquear'</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="1880"/> <source>Add block constraint</source> <translation>Adicionar restrição 'Bloquear'</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="2026"/> <location 
filename="../../CommandConstraints.cpp" line="2126"/> <location filename="../../CommandConstraints.cpp" line="2215"/> <location filename="../../CommandSketcherTools.cpp" line="144"/> <location filename="../../CommandSketcherTools.cpp" line="250"/> <source>Add coincident constraint</source> <translation>Adicionar restrição coincidente</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="2150"/> <source>Swap edge tangency with ptp tangency</source> <translation>Trocar tangência de aresta por tangência ponto-a-ponto</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="2321"/> <location filename="../../CommandConstraints.cpp" line="2453"/> <source>Add distance from horizontal axis constraint</source> <translation>Adiciona restrição na distância ao eixo horizontal</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="2329"/> <location filename="../../CommandConstraints.cpp" line="2460"/> <source>Add distance from vertical axis constraint</source> <translation>Adiciona restrição na distância ao eixo vertical</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="2337"/> <location filename="../../CommandConstraints.cpp" line="2467"/> <source>Add point to point distance constraint</source> <translation>Adiciona restrição na distância ponto a ponto</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="2372"/> <location filename="../../CommandConstraints.cpp" line="2542"/> <source>Add point to line Distance constraint</source> <translation>Adicionar restrição na distância entre ponto e linha</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="2406"/> <location filename="../../CommandConstraints.cpp" line="2499"/> <source>Add length constraint</source> <translation>Adiciona restrição de comprimento</translation> </message> <message> <location 
filename="../../CommandConstraints.cpp" line="2657"/> <location filename="../../CommandConstraints.cpp" line="2740"/> <source>Add point on object constraint</source> <translation>Adiciona restrição tipo 'ponto-no-objeto'</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="2908"/> <location filename="../../CommandConstraints.cpp" line="3010"/> <source>Add point to point horizontal distance constraint</source> <translation>Adicionar restrição de distância horizontal ponto a ponto</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="2939"/> <source>Add fixed x-coordinate constraint</source> <translation>Adiciona restrição de coordenada x fixa</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="3157"/> <location filename="../../CommandConstraints.cpp" line="3259"/> <source>Add point to point vertical distance constraint</source> <translation>Adiciona restrição de distância vertical ponto a ponto</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="3187"/> <source>Add fixed y-coordinate constraint</source> <translation>Adiciona restrição de coordenada y fixa</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="3388"/> <location filename="../../CommandConstraints.cpp" line="3429"/> <source>Add parallel constraint</source> <translation>Adiciona restrição paralela</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="3553"/> <location filename="../../CommandConstraints.cpp" line="3621"/> <location filename="../../CommandConstraints.cpp" line="3775"/> <location filename="../../CommandConstraints.cpp" line="3811"/> <location filename="../../CommandConstraints.cpp" line="3956"/> <location filename="../../CommandConstraints.cpp" line="3990"/> <location filename="../../CommandConstraints.cpp" line="4032"/> <source>Add perpendicular constraint</source> 
<translation>Adiciona restrição perpendicular</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="3658"/> <source>Add perpendicularity constraint</source> <translation>Adicionar restrição de perpendicularidade</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="4187"/> <location filename="../../CommandConstraints.cpp" line="4239"/> <location filename="../../CommandConstraints.cpp" line="4275"/> <location filename="../../CommandConstraints.cpp" line="4468"/> <location filename="../../CommandConstraints.cpp" line="4625"/> <location filename="../../CommandConstraints.cpp" line="4683"/> <location filename="../../CommandConstraints.cpp" line="4704"/> <source>Add tangent constraint</source> <translation>Adiciona restrição tangente</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="4317"/> <source>Swap coincident+tangency with ptp tangency</source> <translation>Trocar coincidência+tangência por tangência ponto-a-ponto</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="4354"/> <location filename="../../CommandConstraints.cpp" line="4361"/> <location filename="../../CommandConstraints.cpp" line="4368"/> <location filename="../../CommandConstraints.cpp" line="4392"/> <location filename="../../CommandConstraints.cpp" line="4400"/> <location filename="../../CommandConstraints.cpp" line="4425"/> <location filename="../../CommandConstraints.cpp" line="4433"/> <location filename="../../CommandConstraints.cpp" line="4460"/> <location filename="../../CommandConstraints.cpp" line="4546"/> <location filename="../../CommandConstraints.cpp" line="4553"/> <location filename="../../CommandConstraints.cpp" line="4560"/> <location filename="../../CommandConstraints.cpp" line="4584"/> <location filename="../../CommandConstraints.cpp" line="4591"/> <location filename="../../CommandConstraints.cpp" line="4617"/> <source>Add tangent 
constraint point</source> <translation>Adiciona ponto de tangência</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="4881"/> <location filename="../../CommandConstraints.cpp" line="4946"/> <location filename="../../CommandConstraints.cpp" line="4965"/> <location filename="../../CommandConstraints.cpp" line="5123"/> <source>Add radius constraint</source> <translation>Adicionar restrição de raio</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="5382"/> <location filename="../../CommandConstraints.cpp" line="5441"/> <location filename="../../CommandConstraints.cpp" line="5453"/> <location filename="../../CommandConstraints.cpp" line="5608"/> <source>Add diameter constraint</source> <translation>Adicionar restrição de diâmetro</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="5940"/> <location filename="../../CommandConstraints.cpp" line="6073"/> <location filename="../../CommandConstraints.cpp" line="6103"/> <location filename="../../CommandConstraints.cpp" line="6127"/> <location filename="../../CommandConstraints.cpp" line="6240"/> <location filename="../../CommandConstraints.cpp" line="6294"/> <source>Add angle constraint</source> <translation>Adicionar restrição de ângulo</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="6508"/> <location filename="../../CommandConstraints.cpp" line="6560"/> <source>Add equality constraint</source> <translation>Adicionar restrição de igualdade</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="6681"/> <location filename="../../CommandConstraints.cpp" line="6733"/> <location filename="../../CommandConstraints.cpp" line="6749"/> <location filename="../../CommandConstraints.cpp" line="6835"/> <location filename="../../CommandConstraints.cpp" line="6870"/> <source>Add symmetric constraint</source> <translation>Adicionar restrição 
simétrica</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="7009"/> <source>Add Snell's law constraint</source> <translation>Adicionar restrição lei de Snell</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="7218"/> <location filename="../../CommandConstraints.cpp" line="7396"/> <source>Add internal alignment constraint</source> <translation>Adicionar restrição de alinhamento interno</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="7602"/> <source>Toggle constraint to driving/reference</source> <translation>Alternar o tipo da restrição entre motriz ou referência</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="7686"/> <source>Activate/Deactivate constraint</source> <translation>Ativar/desativar restrição</translation> </message> <message> <location filename="../../Command.cpp" line="207"/> <source>Create a new sketch on a face</source> <translation>Criar um novo esboço em uma face</translation> </message> <message> <location filename="../../Command.cpp" line="239"/> <source>Create a new sketch</source> <translation>Criar um novo esboço</translation> </message> <message> <location filename="../../Command.cpp" line="489"/> <source>Reorient sketch</source> <translation>Reorientar um esboço</translation> </message> <message> <location filename="../../Command.cpp" line="644"/> <source>Attach sketch</source> <translation>Anexar esboço</translation> </message> <message> <location filename="../../Command.cpp" line="649"/> <source>Detach sketch</source> <translation>Desanexar esboço</translation> </message> <message> <location filename="../../Command.cpp" line="789"/> <source>Create a mirrored sketch for each selected sketch</source> <translation>Criar um esboço espelhado para cada esboço selecionado</translation> </message> <message> <location filename="../../Command.cpp" line="888"/> <source>Merge 
sketches</source> <translation>Mesclar esboços</translation> </message> <message> <location filename="../../CommandAlterGeometry.cpp" line="137"/> <source>Toggle draft from/to draft</source> <translation>Alternar modo rascunho</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="319"/> <source>Add sketch line</source> <translation>Adicionar linha do esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="493"/> <source>Add sketch box</source> <translation>Adicionar caixa de esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="981"/> <source>Add line to sketch wire</source> <translation>Adicionar linha ao arame do esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="1001"/> <source>Add arc to sketch wire</source> <translation>Adicionar arco ao arame do esboço</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="1444"/> <location filename="../../CommandCreateGeo.cpp" line="1713"/> <source>Add sketch arc</source> <translation>Adicionar esboço de arco</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="1982"/> <location filename="../../CommandCreateGeo.cpp" line="4668"/> <source>Add sketch circle</source> <translation>Adicionar esboço de círculo</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="2741"/> <source>Add sketch ellipse</source> <translation>Adicionar esboço de elipse</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="3110"/> <source>Add sketch arc of ellipse</source> <translation>Adicionar esboço de arco de elipse</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="3448"/> <source>Add sketch arc of hyperbola</source> <translation>Adicionar esboço de arco de hipérbole</translation> </message> <message> <location 
filename="../../CommandCreateGeo.cpp" line="3750"/> <source>Add sketch arc of Parabola</source> <translation>Adicionar arco de parábola</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="4073"/> <source>Add Pole circle</source> <translation>Adicionar círculo de polo</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="4895"/> <source>Add sketch point</source> <translation>Adicionar ponto</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="5144"/> <location filename="../../CommandCreateGeo.cpp" line="5222"/> <source>Create fillet</source> <translation>Criar filete</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="5505"/> <source>Trim edge</source> <translation>Recortar aresta</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="5776"/> <source>Extend edge</source> <translation>Prolongar aresta</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="5992"/> <source>Add external geometry</source> <translation>Adicionar geometria externa</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6173"/> <source>Add carbon copy</source> <translation>Adicionar cópia de carbono</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6354"/> <source>Add slot</source> <translation>Adicionar fresta</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6567"/> <source>Add hexagon</source> <translation>Adicionar hexágono</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="413"/> <source>Convert to NURBS</source> <translation>Converter para NURBS</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="479"/> <source>Increase spline degree</source> <translation>Aumentar grau de 
spline</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="554"/> <source>Decrease spline degree</source> <translation>Diminuir grau de spline</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="647"/> <source>Increase knot multiplicity</source> <translation>Aumentar a multiplicidade de nós</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="801"/> <source>Decrease knot multiplicity</source> <translation>Diminuir a multiplicidade de nós</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="964"/> <source>Exposing Internal Geometry</source> <translation>Exposição da geometria interna</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="1174"/> <source>Create symmetric geometry</source> <translation>Criar geometria simétrica</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="1316"/> <source>Copy/clone/move geometry</source> <translation>Copiar/clonar/mover geometria</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="1840"/> <source>Create copy of geometry</source> <translation>Criar cópia da geometria</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="2060"/> <source>Delete all geometry</source> <translation>Excluir toda a geometria</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="2120"/> <source>Delete All Constraints</source> <translation>Excluir todas as restrições</translation> </message> <message> <location filename="../../CommandSketcherVirtualSpace.cpp" line="155"/> <source>Toggle constraints to the other virtual space</source> <translation>Enviar restrições para o outro espaço virtual</translation> </message> <message> <location filename="../../CommandSketcherVirtualSpace.cpp" 
line="163"/> <location filename="../../TaskSketcherConstrains.cpp" line="862"/> <source>Update constraint's virtual space</source> <translation>Atualizar espaço virtual das restrições</translation> </message> <message> <location filename="../../DrawSketchHandler.cpp" line="601"/> <source>Add auto constraints</source> <translation>Adicionar restrições automáticas</translation> </message> <message> <location filename="../../TaskSketcherConstrains.cpp" line="625"/> <source>Swap constraint names</source> <translation>Trocar nomes de restrição</translation> </message> <message> <location filename="../../TaskSketcherConstrains.cpp" line="847"/> <source>Rename sketch constraint</source> <translation>Renomear restrição do esboço</translation> </message> <message> <location filename="../../ViewProviderSketch.cpp" line="840"/> <source>Drag Point</source> <translation>Arrastar Ponto</translation> </message> <message> <location filename="../../ViewProviderSketch.cpp" line="871"/> <source>Drag Curve</source> <translation>Arrastar Curva</translation> </message> <message> <location filename="../../ViewProviderSketch.cpp" line="922"/> <source>Drag Constraint</source> <translation>Restrição de arrasto</translation> </message> <message> <location filename="../../ViewProviderSketch.cpp" line="1099"/> <source>Modify sketch constraints</source> <translation>Modificar restrições do esboço</translation> </message> </context> <context> <name>Exceptions</name> <message> <location filename="../../../App/SketchAnalysis.cpp" line="373"/> <source>Autoconstrain error: Unsolvable sketch while applying coincident constraints.</source> <translation>Erro de restrição automática: esboço insolúvel ao aplicar restrições coincidentes.</translation> </message> <message> <location filename="../../../App/SketchAnalysis.cpp" line="454"/> <source>Autoconstrain error: Unsolvable sketch while applying vertical/horizontal constraints.</source> <translation>Erro de restrição automática: esboço insolúvel ao 
aplicar restrições verticais/horizontais.</translation> </message> <message> <location filename="../../../App/SketchAnalysis.cpp" line="651"/> <source>Autoconstrain error: Unsolvable sketch while applying equality constraints.</source> <translation>Erro de restrição automática: esboço insolúvel ao aplicar restrições de igualdade.</translation> </message> <message> <location filename="../../../App/SketchAnalysis.cpp" line="705"/> <source>Autoconstrain error: Unsolvable sketch without constraints.</source> <translation>Erro de restrição automática: esboço insolúvel sem restrições.</translation> </message> <message> <location filename="../../../App/SketchAnalysis.cpp" line="742"/> <source>Autoconstrain error: Unsolvable sketch after applying horizontal and vertical constraints.</source> <translation>Erro de restrição automática: esboço irresolvível após a aplicação de restrições horizontais e verticais.</translation> </message> <message> <location filename="../../../App/SketchAnalysis.cpp" line="764"/> <source>Autoconstrain error: Unsolvable sketch after applying point-on-point constraints.</source> <translation>Erro de restrição automática: esboço insolúvel após a aplicação de restrições de ponto-em-ponto.</translation> </message> <message> <location filename="../../../App/SketchAnalysis.cpp" line="792"/> <source>Autoconstrain error: Unsolvable sketch after applying equality constraints.</source> <translation>Erro de restrição automática: esboço insolúvel após a aplicação de restrições de igualdade.</translation> </message> <message> <location filename="../../../App/SketchObject.cpp" line="1966"/> <source>Unable to guess intersection of curves. Try adding a coincident constraint between the vertices of the curves you are intending to fillet.</source> <translation>Não é possível adivinhar a intersecção das curvas. 
Tente adicionar uma restrição coincidente entre os vértices das curvas que você pretende filetar.</translation> </message> <message> <location filename="../../../App/SketchObject.cpp" line="5606"/> <source>This version of OCE/OCC does not support knot operation. You need 6.9.0 or higher.</source> <translation>Esta versão do OCE / OCC não suporta operação de nó. Você precisa de 6.9.0 ou superior.</translation> </message> <message> <location filename="../../../App/SketchObject.cpp" line="5610"/> <source>BSpline Geometry Index (GeoID) is out of bounds.</source> <translation>Índice de geometria BSpline (GeoID) está fora dos limites.</translation> </message> <message> <location filename="../../../App/SketchObject.cpp" line="5613"/> <source>You are requesting no change in knot multiplicity.</source> <translation>Você não solicitou nenhuma mudança de multiplicidade em nós.</translation> </message> <message> <location filename="../../../App/SketchObject.cpp" line="5618"/> <source>The Geometry Index (GeoId) provided is not a B-spline curve.</source> <translation>O índice de geometria (GeoId) fornecido não é uma curva B-spline.</translation> </message> <message> <location filename="../../../App/SketchObject.cpp" line="5625"/> <source>The knot index is out of bounds. Note that in accordance with OCC notation, the first knot has index 1 and not zero.</source> <translation>O índice do nó está fora dos limites. 
Note que, de acordo com a notação do OCC, o primeiro nó tem índice 1 e não zero.</translation> </message> <message> <location filename="../../../App/SketchObject.cpp" line="5632"/> <source>The multiplicity cannot be increased beyond the degree of the B-spline.</source> <translation>A multiplicidade não pode ser aumentada além do grau de B-spline.</translation> </message> <message> <location filename="../../../App/SketchObject.cpp" line="5635"/> <source>The multiplicity cannot be decreased beyond zero.</source> <translation>A multiplicidade não pode ser diminuída abaixo de zero.</translation> </message> <message> <location filename="../../../App/SketchObject.cpp" line="5647"/> <source>OCC is unable to decrease the multiplicity within the maximum tolerance.</source> <translation>O OCC não consegue diminuir a multiplicidade dentro de tolerância máxima.</translation> </message> </context> <context> <name>Gui::TaskView::TaskSketcherCreateCommands</name> <message> <location filename="../../TaskSketcherCreateCommands.cpp" line="41"/> <source>Appearance</source> <translation>Aparência</translation> </message> </context> <context> <name>QObject</name> <message> <location filename="../../AppSketcherGui.cpp" line="135"/> <location filename="../../AppSketcherGui.cpp" line="136"/> <location filename="../../AppSketcherGui.cpp" line="137"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../Command.cpp" line="111"/> <source>There are no modes that accept the selected set of subelements</source> <translation>Não existem modos que aceitem o conjunto de sub-elementos selecionado</translation> </message> <message> <location filename="../../Command.cpp" line="114"/> <source>Broken link to support subelements</source> <translation>Ligação perdida aos sub-elementos de suporte</translation> </message> <message> <location filename="../../Command.cpp" line="117"/> <location filename="../../Command.cpp" line="126"/> <source>Unexpected 
error</source> <translation>Erro inesperado</translation> </message> <message> <location filename="../../Command.cpp" line="121"/> <source>Face is non-planar</source> <translation>A face não é plana</translation> </message> <message> <location filename="../../Command.cpp" line="123"/> <source>Selected shapes are of wrong form (e.g., a curved edge where a straight one is needed)</source> <translation>As formas selecionadas são do tipo errado (por exemplo, uma aresta curva onde uma reta é necessária)</translation> </message> <message> <location filename="../../Command.cpp" line="164"/> <source>Sketch mapping</source> <translation>Mapeamento de esboço</translation> </message> <message> <location filename="../../Command.cpp" line="165"/> <source>Can't map the sketch to selected object. %1.</source> <translation>Não é possível mapear o esboço no objeto selecionado. %1.</translation> </message> <message> <location filename="../../Command.cpp" line="172"/> <location filename="../../Command.cpp" line="590"/> <source>Don't attach</source> <translation>Não anexar</translation> </message> <message> <location filename="../../CommandAlterGeometry.cpp" line="123"/> <location filename="../../CommandAlterGeometry.cpp" line="131"/> <location filename="../../CommandConstraints.cpp" line="127"/> <location filename="../../CommandConstraints.cpp" line="133"/> <location filename="../../CommandConstraints.cpp" line="1085"/> <location filename="../../CommandConstraints.cpp" line="1323"/> <location filename="../../CommandConstraints.cpp" line="1559"/> <location filename="../../CommandConstraints.cpp" line="1582"/> <location filename="../../CommandConstraints.cpp" line="1586"/> <location filename="../../CommandConstraints.cpp" line="1782"/> <location filename="../../CommandConstraints.cpp" line="1812"/> <location filename="../../CommandConstraints.cpp" line="1816"/> <location filename="../../CommandConstraints.cpp" line="2093"/> <location filename="../../CommandConstraints.cpp" 
line="2104"/> <location filename="../../CommandConstraints.cpp" line="2114"/> <location filename="../../CommandConstraints.cpp" line="2284"/> <location filename="../../CommandConstraints.cpp" line="2295"/> <location filename="../../CommandConstraints.cpp" line="2393"/> <location filename="../../CommandConstraints.cpp" line="2426"/> <location filename="../../CommandConstraints.cpp" line="2518"/> <location filename="../../CommandConstraints.cpp" line="2632"/> <location filename="../../CommandConstraints.cpp" line="2672"/> <location filename="../../CommandConstraints.cpp" line="2678"/> <location filename="../../CommandConstraints.cpp" line="2695"/> <location filename="../../CommandConstraints.cpp" line="2706"/> <location filename="../../CommandConstraints.cpp" line="2754"/> <location filename="../../CommandConstraints.cpp" line="2762"/> <location filename="../../CommandConstraints.cpp" line="2778"/> <location filename="../../CommandConstraints.cpp" line="2839"/> <location filename="../../CommandConstraints.cpp" line="2850"/> <location filename="../../CommandConstraints.cpp" line="2879"/> <location filename="../../CommandConstraints.cpp" line="2929"/> <location filename="../../CommandConstraints.cpp" line="2958"/> <location filename="../../CommandConstraints.cpp" line="2987"/> <location filename="../../CommandConstraints.cpp" line="3091"/> <location filename="../../CommandConstraints.cpp" line="3102"/> <location filename="../../CommandConstraints.cpp" line="3127"/> <location filename="../../CommandConstraints.cpp" line="3177"/> <location filename="../../CommandConstraints.cpp" line="3207"/> <location filename="../../CommandConstraints.cpp" line="3236"/> <location filename="../../CommandConstraints.cpp" line="3337"/> <location filename="../../CommandConstraints.cpp" line="3350"/> <location filename="../../CommandConstraints.cpp" line="3364"/> <location filename="../../CommandConstraints.cpp" line="3380"/> <location filename="../../CommandConstraints.cpp" line="3418"/> 
<location filename="../../CommandConstraints.cpp" line="3505"/> <location filename="../../CommandConstraints.cpp" line="3518"/> <location filename="../../CommandConstraints.cpp" line="3548"/> <location filename="../../CommandConstraints.cpp" line="3601"/> <location filename="../../CommandConstraints.cpp" line="3638"/> <location filename="../../CommandConstraints.cpp" line="3647"/> <location filename="../../CommandConstraints.cpp" line="3653"/> <location filename="../../CommandConstraints.cpp" line="3677"/> <location filename="../../CommandConstraints.cpp" line="3686"/> <location filename="../../CommandConstraints.cpp" line="3695"/> <location filename="../../CommandConstraints.cpp" line="3823"/> <location filename="../../CommandConstraints.cpp" line="3858"/> <location filename="../../CommandConstraints.cpp" line="3867"/> <location filename="../../CommandConstraints.cpp" line="3876"/> <location filename="../../CommandConstraints.cpp" line="4027"/> <location filename="../../CommandConstraints.cpp" line="4139"/> <location filename="../../CommandConstraints.cpp" line="4152"/> <location filename="../../CommandConstraints.cpp" line="4182"/> <location filename="../../CommandConstraints.cpp" line="4234"/> <location filename="../../CommandConstraints.cpp" line="4255"/> <location filename="../../CommandConstraints.cpp" line="4264"/> <location filename="../../CommandConstraints.cpp" line="4270"/> <location filename="../../CommandConstraints.cpp" line="4294"/> <location filename="../../CommandConstraints.cpp" line="4300"/> <location filename="../../CommandConstraints.cpp" line="4482"/> <location filename="../../CommandConstraints.cpp" line="4518"/> <location filename="../../CommandConstraints.cpp" line="4524"/> <location filename="../../CommandConstraints.cpp" line="4663"/> <location filename="../../CommandConstraints.cpp" line="4699"/> <location filename="../../CommandConstraints.cpp" line="4793"/> <location filename="../../CommandConstraints.cpp" line="4804"/> <location 
filename="../../CommandConstraints.cpp" line="4865"/> <location filename="../../CommandConstraints.cpp" line="4870"/> <location filename="../../CommandConstraints.cpp" line="5117"/> <location filename="../../CommandConstraints.cpp" line="5304"/> <location filename="../../CommandConstraints.cpp" line="5315"/> <location filename="../../CommandConstraints.cpp" line="5357"/> <location filename="../../CommandConstraints.cpp" line="5372"/> <location filename="../../CommandConstraints.cpp" line="5596"/> <location filename="../../CommandConstraints.cpp" line="5602"/> <location filename="../../CommandConstraints.cpp" line="5890"/> <location filename="../../CommandConstraints.cpp" line="5902"/> <location filename="../../CommandConstraints.cpp" line="5933"/> <location filename="../../CommandConstraints.cpp" line="5995"/> <location filename="../../CommandConstraints.cpp" line="6091"/> <location filename="../../CommandConstraints.cpp" line="6147"/> <location filename="../../CommandConstraints.cpp" line="6287"/> <location filename="../../CommandConstraints.cpp" line="6404"/> <location filename="../../CommandConstraints.cpp" line="6417"/> <location filename="../../CommandConstraints.cpp" line="6433"/> <location filename="../../CommandConstraints.cpp" line="6438"/> <location filename="../../CommandConstraints.cpp" line="6457"/> <location filename="../../CommandConstraints.cpp" line="6487"/> <location filename="../../CommandConstraints.cpp" line="6502"/> <location filename="../../CommandConstraints.cpp" line="6554"/> <location filename="../../CommandConstraints.cpp" line="6636"/> <location filename="../../CommandConstraints.cpp" line="6649"/> <location filename="../../CommandConstraints.cpp" line="6674"/> <location filename="../../CommandConstraints.cpp" line="6696"/> <location filename="../../CommandConstraints.cpp" line="6726"/> <location filename="../../CommandConstraints.cpp" line="6763"/> <location filename="../../CommandConstraints.cpp" line="6786"/> <location 
filename="../../CommandConstraints.cpp" line="6828"/> <location filename="../../CommandConstraints.cpp" line="6844"/> <location filename="../../CommandConstraints.cpp" line="6971"/> <location filename="../../CommandConstraints.cpp" line="6977"/> <location filename="../../CommandConstraints.cpp" line="7076"/> <location filename="../../CommandConstraints.cpp" line="7089"/> <location filename="../../CommandConstraints.cpp" line="7110"/> <location filename="../../CommandConstraints.cpp" line="7133"/> <location filename="../../CommandConstraints.cpp" line="7155"/> <location filename="../../CommandConstraints.cpp" line="7163"/> <location filename="../../CommandConstraints.cpp" line="7169"/> <location filename="../../CommandConstraints.cpp" line="7325"/> <location filename="../../CommandConstraints.cpp" line="7333"/> <location filename="../../CommandConstraints.cpp" line="7341"/> <location filename="../../CommandConstraints.cpp" line="7347"/> <location filename="../../CommandConstraints.cpp" line="7496"/> <location filename="../../CommandConstraints.cpp" line="7556"/> <location filename="../../CommandConstraints.cpp" line="7564"/> <location filename="../../CommandConstraints.cpp" line="7596"/> <location filename="../../CommandConstraints.cpp" line="7669"/> <location filename="../../CommandConstraints.cpp" line="7680"/> <location filename="../../CommandSketcherBSpline.cpp" line="431"/> <location filename="../../CommandSketcherBSpline.cpp" line="502"/> <location filename="../../CommandSketcherBSpline.cpp" line="580"/> <location filename="../../CommandSketcherBSpline.cpp" line="640"/> <location filename="../../CommandSketcherBSpline.cpp" line="703"/> <location filename="../../CommandSketcherBSpline.cpp" line="794"/> <location filename="../../CommandSketcherBSpline.cpp" line="843"/> <location filename="../../CommandSketcherTools.cpp" line="125"/> <location filename="../../CommandSketcherTools.cpp" line="133"/> <location filename="../../CommandSketcherTools.cpp" line="175"/> 
<location filename="../../CommandSketcherTools.cpp" line="235"/> <location filename="../../CommandSketcherTools.cpp" line="243"/> <location filename="../../CommandSketcherTools.cpp" line="320"/> <location filename="../../CommandSketcherTools.cpp" line="931"/> <location filename="../../CommandSketcherTools.cpp" line="1024"/> <location filename="../../CommandSketcherTools.cpp" line="1032"/> <location filename="../../CommandSketcherTools.cpp" line="1137"/> <location filename="../../CommandSketcherTools.cpp" line="1146"/> <location filename="../../CommandSketcherTools.cpp" line="1384"/> <location filename="../../CommandSketcherTools.cpp" line="1393"/> <location filename="../../CommandSketcherTools.cpp" line="1452"/> <location filename="../../CommandSketcherTools.cpp" line="1915"/> <location filename="../../CommandSketcherTools.cpp" line="1924"/> <location filename="../../CommandSketcherTools.cpp" line="1986"/> <location filename="../../CommandSketcherVirtualSpace.cpp" line="115"/> <location filename="../../CommandSketcherVirtualSpace.cpp" line="123"/> <location filename="../../CommandSketcherVirtualSpace.cpp" line="146"/> <source>Wrong selection</source> <translation>Seleção errada</translation> </message> <message> <location filename="../../CommandAlterGeometry.cpp" line="124"/> <location filename="../../CommandAlterGeometry.cpp" line="132"/> <source>Select edge(s) from the sketch.</source> <translation>Selecione aresta(s) no esboço.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="5062"/> <location filename="../../CommandConstraints.cpp" line="5211"/> <location filename="../../CommandConstraints.cpp" line="5541"/> <location filename="../../CommandConstraints.cpp" line="5677"/> <location filename="../../EditDatumDialog.cpp" line="206"/> <source>Dimensional constraint</source> <translation>Restrição de dimensão</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="704"/> <location 
filename="../../CommandConstraints.cpp" line="715"/> <location filename="../../CommandConstraints.cpp" line="727"/> <source>Only sketch and its support is allowed to select</source> <translation>É permitido selecionar somente um esboço e seu suporte</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="736"/> <source>One of the selected has to be on the sketch</source> <translation>Um dos selecionados tem que estar no esboço</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="1086"/> <location filename="../../CommandConstraints.cpp" line="1324"/> <source>Select an edge from the sketch.</source> <translation>Selecione uma aresta do esboço.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="1111"/> <location filename="../../CommandConstraints.cpp" line="1125"/> <location filename="../../CommandConstraints.cpp" line="1131"/> <location filename="../../CommandConstraints.cpp" line="1150"/> <location filename="../../CommandConstraints.cpp" line="1178"/> <location filename="../../CommandConstraints.cpp" line="1206"/> <location filename="../../CommandConstraints.cpp" line="1220"/> <location filename="../../CommandConstraints.cpp" line="1226"/> <location filename="../../CommandConstraints.cpp" line="1349"/> <location filename="../../CommandConstraints.cpp" line="1363"/> <location filename="../../CommandConstraints.cpp" line="1369"/> <location filename="../../CommandConstraints.cpp" line="1387"/> <location filename="../../CommandConstraints.cpp" line="1413"/> <location filename="../../CommandConstraints.cpp" line="1442"/> <location filename="../../CommandConstraints.cpp" line="1451"/> <location filename="../../CommandConstraints.cpp" line="1462"/> <location filename="../../CommandSketcherTools.cpp" line="165"/> <location filename="../../CommandSketcherTools.cpp" line="267"/> <source>Impossible constraint</source> <translation>Restrição impossível</translation> 
</message> <message> <location filename="../../CommandConstraints.cpp" line="1112"/> <location filename="../../CommandConstraints.cpp" line="1207"/> <location filename="../../CommandConstraints.cpp" line="1350"/> <location filename="../../CommandConstraints.cpp" line="1443"/> <source>The selected edge is not a line segment</source> <translation>A aresta selecionada não é um segmento de linha</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="1120"/> <location filename="../../CommandConstraints.cpp" line="1215"/> <location filename="../../CommandConstraints.cpp" line="1358"/> <location filename="../../CommandConstraints.cpp" line="1456"/> <location filename="../../CommandConstraints.cpp" line="1826"/> <location filename="../../CommandConstraints.cpp" line="1874"/> <source>Double constraint</source> <translation>Restrição dupla</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="1121"/> <location filename="../../CommandConstraints.cpp" line="1216"/> <location filename="../../CommandConstraints.cpp" line="1364"/> <location filename="../../CommandConstraints.cpp" line="1452"/> <source>The selected edge already has a horizontal constraint!</source> <translation>A aresta selecionada já tem uma restrição horizontal!</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="1126"/> <location filename="../../CommandConstraints.cpp" line="1221"/> <location filename="../../CommandConstraints.cpp" line="1359"/> <location filename="../../CommandConstraints.cpp" line="1457"/> <source>The selected edge already has a vertical constraint!</source> <translation>A aresta selecionada já tem uma restrição vertical!</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="1132"/> <location filename="../../CommandConstraints.cpp" line="1227"/> <location filename="../../CommandConstraints.cpp" line="1370"/> <location 
filename="../../CommandConstraints.cpp" line="1463"/> <location filename="../../CommandConstraints.cpp" line="1827"/> <location filename="../../CommandConstraints.cpp" line="1875"/> <source>The selected edge already has a Block constraint!</source> <translation>A aresta selecionada já possui uma restrição de Bloqueio!</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="1151"/> <source>The selected item(s) can't accept a horizontal constraint!</source> <translation>Os itens selecionados não podem aceitar uma restrição horizontal!</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="1179"/> <source>There are more than one fixed point selected. Select a maximum of one fixed point!</source> <translation>Mais de um ponto fixo selecionado. Selecione um único ponto fixo!</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="1388"/> <source>The selected item(s) can't accept a vertical constraint!</source> <translation>Os itens selecionados não podem aceitar uma restrição vertical!</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="1414"/> <source>There are more than one fixed points selected. Select a maximum of one fixed point!</source> <translation>Há mais de um ponto fixo selecionado. 
Selecione no máximo um ponto fixo!</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="1560"/> <location filename="../../CommandConstraints.cpp" line="1783"/> <source>Select vertices from the sketch.</source> <translation>Selecione vértices do esboço.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="1583"/> <source>Select one vertex from the sketch other than the origin.</source> <translation>Selecione um vértice do esboço que não seja a origem.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="1587"/> <source>Select only vertices from the sketch. The last selected vertex may be the origin.</source> <translation>Selecione somente os vértices do esboço. O último vértice selecionado pode ser a origem.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="1794"/> <source>Wrong solver status</source> <translation>Erro no status do calculador</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="128"/> <source>Cannot add a constraint between two external geometries.</source> <translation>Não é possível adicionar uma restrição entre duas geometrias externas.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="134"/> <source>Cannot add a constraint between two fixed geometries. Fixed geometries involve external geometry, blocked geometry or special points as B-spline knot points.</source> <translation>Não é possível adicionar uma restrição entre duas geometrias fixas. 
Geometrias fixas podem ser geometria externa, geometria bloqueada, ou pontos especiais como nós de B-spline.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="1795"/> <source>A Block constraint cannot be added if the sketch is unsolved or there are redundant and conflicting constraints.</source> <translation>Uma restrição de bloqueio não pode ser adicionada se o esboço não estiver resolvido ou se existirem restrições redundantes e/ou conflitantes.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="1813"/> <source>Select one edge from the sketch.</source> <translation>Selecione uma aresta do esboço.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="1817"/> <source>Select only edges from the sketch.</source> <translation>Selecione somente arestas do esboço.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="1845"/> <location filename="../../CommandConstraints.cpp" line="1890"/> <location filename="../../CommandConstraints.cpp" line="3577"/> <location filename="../../CommandConstraints.cpp" line="4056"/> <location filename="../../CommandConstraints.cpp" line="4211"/> <location filename="../../CommandConstraints.cpp" line="4728"/> <location filename="../../CommandConstraints.cpp" line="7039"/> <location filename="../../CommandSketcherBSpline.cpp" line="833"/> <source>Error</source> <translation>Erro</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="2094"/> <source>Select two or more points from the sketch.</source> <translation>Selecione dois ou mais pontos no esboço.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="2105"/> <location filename="../../CommandConstraints.cpp" line="2115"/> <source>Select two or more vertexes from the sketch.</source> <translation>Selecione dois ou mais vértices do esboço.</translation> </message> 
<message> <location filename="../../CommandConstraints.cpp" line="2168"/> <location filename="../../CommandConstraints.cpp" line="4330"/> <source>Constraint Substitution</source> <translation>Substituição de restrição</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="2169"/> <source>Endpoint to endpoint tangency was applied instead.</source> <translation>Uma tangência de ponto a ponto de extremidade foi aplicada em vez disso.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="2285"/> <source>Select vertexes from the sketch.</source> <translation>Selecione vértices do esboço.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="2296"/> <location filename="../../CommandConstraints.cpp" line="2427"/> <source>Select exactly one line or one point and one line or two points from the sketch.</source> <translation>Selecione exatamente uma linha ou um ponto e uma linha ou dois pontos no esboço.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="2394"/> <source>Cannot add a length constraint on an axis!</source> <translation>Não é possível adicionar uma restrição de comprimento em um eixo!</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="2519"/> <source>This constraint does not make sense for non-linear curves</source> <translation>Essa restrição não faz sentido para curvas não-lineares</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="2633"/> <location filename="../../CommandConstraints.cpp" line="2840"/> <location filename="../../CommandConstraints.cpp" line="3092"/> <location filename="../../CommandConstraints.cpp" line="4794"/> <location filename="../../CommandConstraints.cpp" line="5305"/> <location filename="../../CommandConstraints.cpp" line="5891"/> <source>Select the right things from the sketch.</source> <translation>Selecione 
as coisas corretas no esboço.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="2673"/> <location filename="../../CommandConstraints.cpp" line="2755"/> <source>Point on B-spline edge currently unsupported.</source> <translation>Ponto em aresta de Bspline ainda não está suportado.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="2779"/> <source>None of the selected points were constrained onto the respective curves, either because they are parts of the same element, or because they are both external geometry.</source> <translation>Nenhum dos pontos selecionados foi restringido para as respectivas curvas, eles são partes do mesmo elemento, ou ambos são geometria externa.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="2707"/> <source>Select either one point and several curves, or one curve and several points. You have selected %1 curves and %2 points.</source> <translation>Selecione um ponto e várias curvas, ou uma curva e vários pontos. 
Você selecionou %1 curvas e %2 pontos.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="2679"/> <location filename="../../CommandConstraints.cpp" line="2763"/> <location filename="../../CommandConstraints.cpp" line="3549"/> <location filename="../../CommandConstraints.cpp" line="3654"/> <location filename="../../CommandConstraints.cpp" line="3696"/> <location filename="../../CommandConstraints.cpp" line="3877"/> <location filename="../../CommandConstraints.cpp" line="4028"/> <location filename="../../CommandConstraints.cpp" line="4183"/> <location filename="../../CommandConstraints.cpp" line="4271"/> <location filename="../../CommandConstraints.cpp" line="4301"/> <location filename="../../CommandConstraints.cpp" line="4525"/> <location filename="../../CommandConstraints.cpp" line="4700"/> <location filename="../../CommandConstraints.cpp" line="5358"/> <location filename="../../CommandConstraints.cpp" line="5603"/> <location filename="../../CommandConstraints.cpp" line="5934"/> <location filename="../../CommandConstraints.cpp" line="5996"/> <location filename="../../CommandConstraints.cpp" line="6288"/> <location filename="../../CommandConstraints.cpp" line="6978"/> <source>Select an edge that is not a B-spline weight</source> <translation>Selecione uma aresta que não seja um peso de B-spline</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="2696"/> <source>None of the selected points were constrained onto the respective curves, because they are parts of the same element, because they are both external geometry, or because the edge is not eligible.</source> <translation>Nenhum dos pontos selecionados foi restrito sobre as respectivas curvas, porque elas são partes do mesmo elemento, porque são ambos geometria externa, ou porque a aresta não é elegível.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="2851"/> <location 
filename="../../CommandConstraints.cpp" line="2959"/> <location filename="../../CommandConstraints.cpp" line="3103"/> <location filename="../../CommandConstraints.cpp" line="3208"/> <source>Select exactly one line or up to two points from the sketch.</source> <translation>Selecione exatamente uma linha ou até dois pontos no esboço.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="2880"/> <source>Cannot add a horizontal length constraint on an axis!</source> <translation>Não é possível adicionar uma restrição de comprimento horizontal em um eixo!</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="2930"/> <source>Cannot add a fixed x-coordinate constraint on the origin point!</source> <translation>Não é possível adicionar uma restrição de coordenada-x fixa no ponto de origem!</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="2988"/> <location filename="../../CommandConstraints.cpp" line="3237"/> <source>This constraint only makes sense on a line segment or a pair of points</source> <translation>Esta restrição só faz sentido num segmento reto ou num par de pontos</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="3128"/> <source>Cannot add a vertical length constraint on an axis!</source> <translation>Não é possível adicionar uma restrição de comprimento vertical em um eixo!</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="3178"/> <source>Cannot add a fixed y-coordinate constraint on the origin point!</source> <translation>Não é possível adicionar uma restrição de coordenada-y fixa no ponto de origem!</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="3338"/> <source>Select two or more lines from the sketch.</source> <translation>Selecione duas ou mais linhas no esboço.</translation> </message> <message> <location 
filename="../../CommandConstraints.cpp" line="3351"/> <location filename="../../CommandConstraints.cpp" line="6418"/> <source>Select at least two lines from the sketch.</source> <translation>Selecione pelo menos duas linhas no esboço.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="3365"/> <source>Select a valid line</source> <translation>Selecione uma linha válida</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="3381"/> <location filename="../../CommandConstraints.cpp" line="3419"/> <source>The selected edge is not a valid line</source> <translation>A aresta selecionada não é uma linha válida</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="3483"/> <source>There is a number of ways this constraint can be applied. Accepted combinations: two curves; an endpoint and a curve; two endpoints; two curves and a point.</source> <comment>perpendicular constraint</comment> <translation>Há um número de maneiras em que essa restrição pode ser aplicada. 
Combinações possíveis: duas curvas; um ponto de extremidade e uma curva; dois pontos de extremidade; duas curvas e um ponto.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="3503"/> <source>Select some geometry from the sketch.</source> <comment>perpendicular constraint</comment> <translation>Selecione alguma geometria do esboço.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="3516"/> <source>Wrong number of selected objects!</source> <comment>perpendicular constraint</comment> <translation>Número errado de objetos selecionados!</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="3593"/> <location filename="../../CommandConstraints.cpp" line="4226"/> <source>With 3 objects, there must be 2 curves and 1 point.</source> <comment>tangent constraint</comment> <translation>Com 3 objetos, deve haver 2 curvas e 1 ponto.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="3602"/> <location filename="../../CommandConstraints.cpp" line="3639"/> <source>Cannot add a perpendicularity constraint at an unconnected point!</source> <translation>Não é possível adicionar uma restrição de perpendicularidade em um ponto não conectado!</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="3648"/> <location filename="../../CommandConstraints.cpp" line="3687"/> <location filename="../../CommandConstraints.cpp" line="3868"/> <source>Perpendicular to B-spline edge currently unsupported.</source> <translation>Perpendicular à aresta de Bspline ainda não está suportado.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="3678"/> <location filename="../../CommandConstraints.cpp" line="3859"/> <source>One of the selected edges should be a line.</source> <translation>Uma das arestas selecionadas deve ser uma linha.</translation> </message> <message> <location 
filename="../../CommandConstraints.cpp" line="4118"/> <source>There are a number of ways this constraint can be applied. Accepted combinations: two curves; an endpoint and a curve; two endpoints; two curves and a point.</source> <comment>tangent constraint</comment> <translation>Há uma quantidade de maneiras de aplicar esta restrição. Combinações possíveis: duas curvas; um ponto de extremidade e uma curva; dois pontos de extremidade; duas curvas e um ponto.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="4137"/> <source>Select some geometry from the sketch.</source> <comment>tangent constraint</comment> <translation>Selecione alguma geometria do esboço.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="4150"/> <source>Wrong number of selected objects!</source> <comment>tangent constraint</comment> <translation>Número errado de objetos selecionados!</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="4235"/> <location filename="../../CommandConstraints.cpp" line="4256"/> <location filename="../../CommandConstraints.cpp" line="4664"/> <source>Cannot add a tangency constraint at an unconnected point!</source> <translation>Não é possível adicionar uma restrição de tangência em um ponto não conectado!</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="4265"/> <location filename="../../CommandConstraints.cpp" line="4295"/> <location filename="../../CommandConstraints.cpp" line="4519"/> <source>Tangency to B-spline edge currently unsupported.</source> <translation>Tangência à aresta de Bspline ainda não está suportado.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="4331"/> <source>Endpoint to endpoint tangency was applied. The coincident constraint was deleted.</source> <translation>Uma tangência de ponto a ponto foi aplicada. 
A restrição de coincidência foi excluída.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="4805"/> <location filename="../../CommandConstraints.cpp" line="4866"/> <location filename="../../CommandConstraints.cpp" line="5316"/> <location filename="../../CommandConstraints.cpp" line="5373"/> <source>Select one or more arcs or circles from the sketch.</source> <translation>Selecione um ou mais arcos ou círculos no esboço.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="4871"/> <source>Select either only one or more B-Spline poles or only one or more arcs or circles from the sketch, but not mixed.</source> <translation>Selecione somente um ou mais polos B-Spline ou apenas um ou mais arcos ou círculos do esboço, mas não misturados.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="4927"/> <location filename="../../CommandConstraints.cpp" line="5422"/> <source>Constrain equal</source> <translation>Restrição igual</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="4928"/> <source>Do you want to share the same radius for all selected elements?</source> <translation>Deseja compartilhar o mesmo raio para todos os elementos selecionados?</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="5118"/> <location filename="../../CommandConstraints.cpp" line="5597"/> <source>Constraint only applies to arcs or circles.</source> <translation>Restrição aplicável somente em arcos ou círculos.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="5423"/> <source>Do you want to share the same diameter for all selected elements?</source> <translation>Deseja compartilhar o mesmo diâmetro para todos os elementos selecionados?</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="5903"/> <location 
filename="../../CommandConstraints.cpp" line="6148"/> <source>Select one or two lines from the sketch. Or select two edges and a point.</source> <translation>Selecione uma ou duas linhas no esboço. Ou selecione um ponto e duas arestas.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="6059"/> <location filename="../../CommandConstraints.cpp" line="6226"/> <source>Parallel lines</source> <translation>Linhas paralelas</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="6060"/> <location filename="../../CommandConstraints.cpp" line="6227"/> <source>An angle constraint cannot be set for two parallel lines.</source> <translation>Uma restrição de ângulo não pode ser aplicada em duas linhas paralelas.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="6092"/> <source>Cannot add an angle constraint on an axis!</source> <translation>Não é possível adicionar uma restrição de ângulo em um eixo!</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="6405"/> <source>Select two edges from the sketch.</source> <translation>Selecione duas arestas no esboço.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="6434"/> <location filename="../../CommandConstraints.cpp" line="7134"/> <source>Select two or more compatible edges</source> <translation>Selecione duas ou mais arestas compatíveis</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="6439"/> <source>Sketch axes cannot be used in equality constraints</source> <translation>Os eixos do esboço não podem ser usados em restrições de igualdade</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="6458"/> <source>Equality for B-spline edge currently unsupported.</source> <translation>Igualdade para aresta de Bspline ainda não está suportada.</translation> </message> 
<message> <location filename="../../CommandConstraints.cpp" line="6488"/> <location filename="../../CommandConstraints.cpp" line="6503"/> <location filename="../../CommandConstraints.cpp" line="6555"/> <source>Select two or more edges of similar type</source> <translation>Selecione duas ou mais arestas do mesmo tipo</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="6637"/> <location filename="../../CommandConstraints.cpp" line="6650"/> <location filename="../../CommandConstraints.cpp" line="6697"/> <location filename="../../CommandConstraints.cpp" line="6764"/> <location filename="../../CommandConstraints.cpp" line="6845"/> <source>Select two points and a symmetry line, two points and a symmetry point or a line and a symmetry point from the sketch.</source> <translation>Selecione dois pontos e uma linha de simetria, dois pontos e um ponto de simetria ou uma linha e um ponto de simetria no esboço.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="6675"/> <location filename="../../CommandConstraints.cpp" line="6829"/> <source>Cannot add a symmetry constraint between a line and its end points.</source> <translation>Não é possível adicionar uma restrição de simetria entre uma linha e seus pontos finais.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="6727"/> <location filename="../../CommandConstraints.cpp" line="6787"/> <source>Cannot add a symmetry constraint between a line and its end points!</source> <translation>Não é possível adicionar uma restrição de simetria entre uma linha e seus pontos finais!</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="6907"/> <source>Select two endpoints of lines to act as rays, and an edge representing a boundary. 
The first selected point corresponds to index n1, second to n2, and datum value sets the ratio n2/n1.</source> <comment>Constraint_SnellsLaw</comment> <translation>Selecione dois pontos finais de linhas para agir como raios e uma aresta que representa um limite. O primeiro ponto selecionado corresponde ao índice n1, o segundo ao n2, e o valor de datum define a proporção n2/n1.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="6955"/> <source>Cannot create constraint with external geometry only.</source> <translation>Não é possível criar restrições somente com geometria externa.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="6962"/> <source>Incompatible geometry is selected.</source> <translation>Geometria incompatível selecionada.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="6972"/> <source>SnellsLaw on B-spline edge is currently unsupported.</source> <translation>Restrições SnellsLaw em arestas de Bspline ainda não são suportadas.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="7156"/> <source>You cannot internally constrain an ellipse on another ellipse. Select only one ellipse.</source> <translation>Não é possível restringir internamente uma elipse sobre outra elipse. 
Selecione apenas uma elipse.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="7209"/> <location filename="../../CommandConstraints.cpp" line="7387"/> <source>Currently all internal geometrical elements of the ellipse are already exposed.</source> <translation>Atualmente todos os elementos geométricos internos da elipse já estão expostos.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="7557"/> <location filename="../../CommandConstraints.cpp" line="7565"/> <location filename="../../CommandConstraints.cpp" line="7597"/> <location filename="../../CommandConstraints.cpp" line="7670"/> <location filename="../../CommandConstraints.cpp" line="7681"/> <source>Select constraints from the sketch.</source> <translation>Selecione restrições do esboço.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="6923"/> <source>Selected objects are not just geometry from one sketch.</source> <translation>Objetos selecionados não são apenas geometria de um esboço só.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="6933"/> <source>Number of selected objects is not 3 (is %1).</source> <translation>Número de objetos selecionados não é 3 (é %1).</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="7077"/> <location filename="../../CommandConstraints.cpp" line="7090"/> <source>Select at least one ellipse and one edge from the sketch.</source> <translation>Selecione pelo menos uma elipse e uma aresta do esboço.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="7111"/> <source>Sketch axes cannot be used in internal alignment constraint</source> <translation>Eixos do esboço não podem ser usados para uma restrição de alinhamento interno</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="7164"/> <location 
filename="../../CommandConstraints.cpp" line="7342"/> <source>Maximum 2 points are supported.</source> <translation>Máximo 2 pontos são suportados.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="7170"/> <location filename="../../CommandConstraints.cpp" line="7348"/> <source>Maximum 2 lines are supported.</source> <translation>Máximo 2 linhas são suportadas.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="7208"/> <location filename="../../CommandConstraints.cpp" line="7386"/> <source>Nothing to constrain</source> <translation>Nada para restringir</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="7299"/> <location filename="../../CommandConstraints.cpp" line="7310"/> <location filename="../../CommandConstraints.cpp" line="7477"/> <location filename="../../CommandConstraints.cpp" line="7488"/> <source>Extra elements</source> <translation>Elementos extra</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="7300"/> <location filename="../../CommandConstraints.cpp" line="7311"/> <location filename="../../CommandConstraints.cpp" line="7478"/> <source>More elements than possible for the given ellipse were provided. These were ignored.</source> <translation>Foram fornecidos mais elementos do que o possível para a elipse dada. Estes foram ignorados.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="7326"/> <source>You cannot internally constrain an arc of ellipse on another arc of ellipse. Select only one arc of ellipse.</source> <translation>Você não pode restringir internamente um arco de elipse em outro arco de elipse. Selecione apenas um arco de elipse.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="7334"/> <source>You cannot internally constrain an ellipse on an arc of ellipse. 
Select only one ellipse or arc of ellipse.</source> <translation>Não é possível restringir internamente uma elipse sobre um arco de elipse. Selecione apenas uma elipse ou um arco de elipse.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="7489"/> <source>More elements than possible for the given arc of ellipse were provided. These were ignored.</source> <translation>Foram fornecidos mais elementos do que o possível para o arco de elipse dado. Estes foram ignorados.</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="7497"/> <source>Currently internal geometry is only supported for ellipse or arc of ellipse. The last selected element must be an ellipse or an arc of ellipse.</source> <translation>Atualmente a geometria interna só é suportada para elipses ou arcos de elipse. O último elemento selecionado deve ser uma elipse ou um arco de elipse.</translation> </message> <message> <location filename="../../CommandSketcherVirtualSpace.cpp" line="116"/> <location filename="../../CommandSketcherVirtualSpace.cpp" line="124"/> <location filename="../../CommandSketcherVirtualSpace.cpp" line="147"/> <source>Select constraint(s) from the sketch.</source> <translation>Selecione restrições do esboço.</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="5233"/> <location filename="../../CommandSketcherBSpline.cpp" line="682"/> <source>CAD Kernel Error</source> <translation>Erro de Kernel CAD</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="432"/> <source>None of the selected elements is an edge.</source> <translation>Nenhum dos elementos selecionados é uma aresta.</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="503"/> <location filename="../../CommandSketcherBSpline.cpp" line="581"/> <source>At least one of the selected objects was not a B-Spline and was ignored.</source> 
<translation>Pelo menos um dos objetos selecionados não era um B-Spline e foi ignorado.</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="618"/> <location filename="../../CommandSketcherBSpline.cpp" line="772"/> <source>Wrong OCE/OCC version</source> <translation>Versão errada do OCE/OCC</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="619"/> <location filename="../../CommandSketcherBSpline.cpp" line="773"/> <source>This version of OCE/OCC does not support knot operation. You need 6.9.0 or higher</source> <translation>Esta versão do OCE/OCC não suporta operações com nós. Você precisa da versão 6.9.0 ou superior</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="641"/> <location filename="../../CommandSketcherBSpline.cpp" line="795"/> <source>The selection comprises more than one item. Please select just one knot.</source> <translation>A seleção engloba mais de um item. 
Por favor, selecione apenas um nó.</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="691"/> <source>Input Error</source> <translation>Erro de entrada</translation> </message> <message> <location filename="../../CommandSketcherBSpline.cpp" line="704"/> <location filename="../../CommandSketcherBSpline.cpp" line="844"/> <source>None of the selected elements is a knot of a B-spline</source> <translation>Nenhum dos elementos selecionados é um nó de uma B-spline</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="126"/> <location filename="../../CommandSketcherTools.cpp" line="134"/> <location filename="../../CommandSketcherTools.cpp" line="236"/> <location filename="../../CommandSketcherTools.cpp" line="244"/> <source>Select at least two edges from the sketch.</source> <translation>Selecione pelo menos duas arestas do esboço.</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="166"/> <location filename="../../CommandSketcherTools.cpp" line="268"/> <source>One selected edge is not connectable</source> <translation>Uma aresta selecionada não é conectável</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="176"/> <source>Closing a shape formed by exactly two lines makes no sense.</source> <translation>Fechar uma forma formada por exatamente duas linhas não faz sentido.</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="321"/> <location filename="../../CommandSketcherTools.cpp" line="932"/> <location filename="../../CommandSketcherTools.cpp" line="1025"/> <location filename="../../CommandSketcherTools.cpp" line="1033"/> <location filename="../../CommandSketcherTools.cpp" line="1385"/> <location filename="../../CommandSketcherTools.cpp" line="1394"/> <location filename="../../CommandSketcherTools.cpp" line="1916"/> <location filename="../../CommandSketcherTools.cpp" 
line="1925"/> <source>Select elements from a single sketch.</source> <translation>Selecione elementos de um esboço único.</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="807"/> <source>No constraint selected</source> <translation>Nenhuma restrição selecionada</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="808"/> <source>At least one constraint must be selected</source> <translation>Pelo menos uma restrição deve ser selecionada</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="1138"/> <source>A symmetric construction requires at least two geometric elements, the last geometric element being the reference for the symmetry construction.</source> <translation>Uma construção simétrica requer pelo menos dois elementos geométricos, sendo o último elemento geométrico a referência para a construção da simetria.</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="1147"/> <source>The last element must be a point or a line serving as reference for the symmetry construction.</source> <translation>O último elemento deve ser um ponto ou uma linha, servindo como referência para a construção da simetria.</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="1453"/> <location filename="../../CommandSketcherTools.cpp" line="1987"/> <source>A copy requires at least one selected non-external geometric element</source> <translation>Uma cópia requer pelo menos um elemento geométrico não-externo selecionado</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="2048"/> <source>Delete All Geometry</source> <translation>Excluir toda a geometria</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="2049"/> <source>Are you really sure you want to delete all geometry and constraints?</source> 
<translation>Tem certeza de que deseja excluir toda a geometria e todas as restrições?</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="2108"/> <source>Delete All Constraints</source> <translation>Excluir todas as restrições</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="2109"/> <source>Are you really sure you want to delete all the constraints?</source> <translation>Tem certeza de que deseja excluir todas as restrições?</translation> </message> <message> <location filename="../../EditDatumDialog.cpp" line="80"/> <source>Distance constraint</source> <translation>Restrição de distância</translation> </message> <message> <location filename="../../EditDatumDialog.cpp" line="81"/> <source>Not allowed to edit the datum because the sketch contains conflicting constraints</source> <translation>Não é possível editar o dado porque o esboço contém restrições conflitantes</translation> </message> </context> <context> <name>SketcherGui::CarbonCopySelection</name> <message> <location filename="../../CommandCreateGeo.cpp" line="6075"/> <source>Carbon copy would cause a circular dependency.</source> <translation>Cópia de carbono causaria uma dependência circular.</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6078"/> <source>This object is in another document.</source> <translation>Este objeto está em um outro documento.</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6081"/> <source>This object belongs to another body. Hold Ctrl to allow cross-references.</source> <translation>Este objeto pertence a outro corpo. Pressione a tecla Ctrl para permitir referências cruzadas.</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6084"/> <source>This object belongs to another body and it contains external geometry. 
Cross-reference not allowed.</source> <translation>Este objeto pertence a outro corpo, e contém geometria externa. Referências cruzadas não são permitidas.</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6087"/> <source>This object belongs to another part.</source> <translation>Este objeto pertence a outra peça.</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6090"/> <source>The selected sketch is not parallel to this sketch. Hold Ctrl+Alt to allow non-parallel sketches.</source> <translation>O esboço selecionado não é paralelo a este esboço. Pressione Ctrl+Alt para permitir esboços não paralelos.</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6093"/> <source>The XY axes of the selected sketch do not have the same direction as this sketch. Hold Ctrl+Alt to disregard it.</source> <translation>Os eixos XY do esboço selecionado não tem a mesma direção que este esboço. Pressione Ctrl + Alt para ignorar isto.</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6096"/> <source>The origin of the selected sketch is not aligned with the origin of this sketch. Hold Ctrl+Alt to disregard it.</source> <translation>A origem do esboço selecionado não está alinhada com a origem deste esboço. 
Pressione Ctrl + Alt para ignorar isto.</translation> </message> </context> <context> <name>SketcherGui::ConstraintView</name> <message> <location filename="../../TaskSketcherConstrains.cpp" line="485"/> <source>Change value</source> <translation>Mudar o valor</translation> </message> <message> <location filename="../../TaskSketcherConstrains.cpp" line="489"/> <source>Toggle to/from reference</source> <translation>Alternar para/de referência</translation> </message> <message> <location filename="../../TaskSketcherConstrains.cpp" line="492"/> <source>Deactivate</source> <translation>Desativar</translation> </message> <message> <location filename="../../TaskSketcherConstrains.cpp" line="492"/> <source>Activate</source> <translation>Ativar</translation> </message> <message> <location filename="../../TaskSketcherConstrains.cpp" line="496"/> <source>Show constraints</source> <translation>Mostrar as restrições</translation> </message> <message> <location filename="../../TaskSketcherConstrains.cpp" line="498"/> <source>Hide constraints</source> <translation>Ocultar as restrições</translation> </message> <message> <location filename="../../TaskSketcherConstrains.cpp" line="504"/> <source>Rename</source> <translation>Renomear</translation> </message> <message> <location filename="../../TaskSketcherConstrains.cpp" line="511"/> <source>Center sketch</source> <translation>Croqui do centro</translation> </message> <message> <location filename="../../TaskSketcherConstrains.cpp" line="514"/> <source>Delete</source> <translation>Excluir</translation> </message> <message> <location filename="../../TaskSketcherConstrains.cpp" line="518"/> <source>Swap constraint names</source> <translation>Trocar nomes de restrição</translation> </message> <message> <location filename="../../TaskSketcherConstrains.cpp" line="616"/> <source>Unnamed constraint</source> <translation>Restrição sem nome</translation> </message> <message> <location filename="../../TaskSketcherConstrains.cpp" line="617"/> 
<source>Only the names of named constraints can be swapped.</source> <translation>Apenas os nomes das restrições nomeadas podem ser trocados.</translation> </message> </context> <context> <name>SketcherGui::EditDatumDialog</name> <message> <location filename="../../EditDatumDialog.cpp" line="96"/> <source>Insert angle</source> <translation>Insira o ângulo</translation> </message> <message> <location filename="../../EditDatumDialog.cpp" line="98"/> <source>Angle:</source> <translation>Ângulo:</translation> </message> <message> <location filename="../../EditDatumDialog.cpp" line="102"/> <source>Insert radius</source> <translation>Insira o raio</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="5015"/> <location filename="../../CommandConstraints.cpp" line="5173"/> <location filename="../../EditDatumDialog.cpp" line="104"/> <source>Radius:</source> <translation>Raio:</translation> </message> <message> <location filename="../../EditDatumDialog.cpp" line="108"/> <source>Insert diameter</source> <translation>Inserir diâmetro</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="5494"/> <location filename="../../CommandConstraints.cpp" line="5641"/> <location filename="../../EditDatumDialog.cpp" line="110"/> <source>Diameter:</source> <translation>Diâmetro:</translation> </message> <message> <location filename="../../EditDatumDialog.cpp" line="114"/> <source>Insert weight</source> <translation>Inserir peso</translation> </message> <message> <location filename="../../EditDatumDialog.cpp" line="119"/> <source>Refractive index ratio</source> <comment>Constraint_SnellsLaw</comment> <translation>Relação de índice de refração</translation> </message> <message> <location filename="../../EditDatumDialog.cpp" line="120"/> <source>Ratio n2/n1:</source> <comment>Constraint_SnellsLaw</comment> <translation>Relação n2/n1:</translation> </message> <message> <location filename="../../EditDatumDialog.cpp" 
line="125"/> <source>Insert length</source> <translation>Insira o comprimento</translation> </message> <message> <location filename="../../EditDatumDialog.cpp" line="127"/> <source>Length:</source> <translation>Comprimento:</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="5009"/> <location filename="../../CommandConstraints.cpp" line="5167"/> <source>Change weight</source> <translation>Alterar peso</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="5010"/> <location filename="../../CommandConstraints.cpp" line="5168"/> <location filename="../../EditDatumDialog.cpp" line="115"/> <source>Weight:</source> <translation>Peso:</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="5014"/> <location filename="../../CommandConstraints.cpp" line="5172"/> <source>Change radius</source> <translation>Mudar raio</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="5493"/> <location filename="../../CommandConstraints.cpp" line="5640"/> <source>Change diameter</source> <translation>Alterar o diâmetro</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="6989"/> <source>Refractive index ratio</source> <translation>Relação de índice de refração</translation> </message> <message> <location filename="../../CommandConstraints.cpp" line="6990"/> <source>Ratio n2/n1:</source> <translation>Relação n2/n1:</translation> </message> </context> <context> <name>SketcherGui::ElementView</name> <message> <location filename="../../TaskSketcherElements.cpp" line="184"/> <source>Delete</source> <translation>Excluir</translation> </message> </context> <context> <name>SketcherGui::ExternalSelection</name> <message> <location filename="../../CommandCreateGeo.cpp" line="5884"/> <source>Linking this will cause circular dependency.</source> <translation>Esta ligação irá causar dependência circular.</translation> 
</message> <message> <location filename="../../CommandCreateGeo.cpp" line="5887"/> <source>This object is in another document.</source> <translation>Este objeto está em um outro documento.</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="5890"/> <source>This object belongs to another body, can't link.</source> <translation>Este objeto pertence a outro corpo, não é possível vincular.</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="5893"/> <source>This object belongs to another part, can't link.</source> <translation>Este objeto pertence a outra parte, não é possível vincular.</translation> </message> </context> <context> <name>SketcherGui::InsertDatum</name> <message> <location filename="../../InsertDatum.ui" line="23"/> <source>Insert datum</source> <translation>Inserir datum</translation> </message> <message> <location filename="../../InsertDatum.ui" line="31"/> <source>datum:</source> <translation>datum:</translation> </message> <message> <location filename="../../InsertDatum.ui" line="48"/> <source>Name (optional)</source> <translation>Nome (opcional)</translation> </message> <message> <location filename="../../InsertDatum.ui" line="61"/> <source>Constraint name (available for expressions)</source> <translation>Nome da restrição (disponível para expressões)</translation> </message> <message> <location filename="../../InsertDatum.ui" line="76"/> <source>Reference (or constraint) dimension</source> <translation>Dimensão de referência (ou restrição)</translation> </message> <message> <location filename="../../InsertDatum.ui" line="79"/> <source>Reference</source> <translation>Referência</translation> </message> </context> <context> <name>SketcherGui::PropertyConstraintListItem</name> <message> <location filename="../../PropertyConstraintListItem.cpp" line="131"/> <location filename="../../PropertyConstraintListItem.cpp" line="184"/> <source>Unnamed</source> <translation>Sem 
nome</translation> </message> </context> <context> <name>SketcherGui::SketchMirrorDialog</name> <message> <location filename="../../SketchMirrorDialog.ui" line="14"/> <location filename="../../SketchMirrorDialog.ui" line="20"/> <source>Select Mirror Axis/Point</source> <translation>Selecione o eixo/ponto de simetria</translation> </message> <message> <location filename="../../SketchMirrorDialog.ui" line="26"/> <source>X-Axis</source> <translation>Eixo-X</translation> </message> <message> <location filename="../../SketchMirrorDialog.ui" line="36"/> <source>Y-Axis</source> <translation>Eixo-Y</translation> </message> <message> <location filename="../../SketchMirrorDialog.ui" line="43"/> <source>Origin</source> <translation>Origem</translation> </message> </context> <context> <name>SketcherGui::SketchOrientationDialog</name> <message> <location filename="../../SketchOrientationDialog.ui" line="14"/> <source>Choose orientation</source> <translation>Escolher a orientação</translation> </message> <message> <location filename="../../SketchOrientationDialog.ui" line="20"/> <source>Sketch orientation</source> <translation>Orientação do esboço</translation> </message> <message> <location filename="../../SketchOrientationDialog.ui" line="26"/> <source>XY-Plane</source> <translation>Plano XY</translation> </message> <message> <location filename="../../SketchOrientationDialog.ui" line="36"/> <source>XZ-Plane</source> <translation>Plano XZ</translation> </message> <message> <location filename="../../SketchOrientationDialog.ui" line="43"/> <source>YZ-Plane</source> <translation>Plano YZ</translation> </message> <message> <location filename="../../SketchOrientationDialog.ui" line="72"/> <source>Reverse direction</source> <translation>Inverter direção</translation> </message> <message> <location filename="../../SketchOrientationDialog.ui" line="81"/> <source>Offset:</source> <translation>Offset:</translation> </message> </context> <context> 
<name>SketcherGui::SketchRectangularArrayDialog</name> <message> <location filename="../../SketchRectangularArrayDialog.ui" line="17"/> <source>Create array</source> <translation>Criar matriz</translation> </message> <message> <location filename="../../SketchRectangularArrayDialog.ui" line="25"/> <source>Columns:</source> <translation>Colunas:</translation> </message> <message> <location filename="../../SketchRectangularArrayDialog.ui" line="32"/> <source>Number of columns of the linear array</source> <translation>Número de colunas da matriz linear</translation> </message> <message> <location filename="../../SketchRectangularArrayDialog.ui" line="52"/> <source>Rows:</source> <translation>Linhas:</translation> </message> <message> <location filename="../../SketchRectangularArrayDialog.ui" line="59"/> <source>Number of rows of the linear array</source> <translation>Número de linhas da matriz linear</translation> </message> <message> <location filename="../../SketchRectangularArrayDialog.ui" line="77"/> <source>Makes the inter-row and inter-col spacing the same if clicked</source> <translation>Faz o espaço entre linhas e entre colunas iguais se ativado</translation> </message> <message> <location filename="../../SketchRectangularArrayDialog.ui" line="80"/> <source>Equal vertical/horizontal spacing</source> <translation>Igualar espaçamento vertical/horizontal</translation> </message> <message> <location filename="../../SketchRectangularArrayDialog.ui" line="93"/> <source>If selected, each element in the array is constrained with respect to the others using construction lines</source> <translation>Se selecionado, cada elemento na rede é restrito em relação aos outros usando linhas de construção</translation> </message> <message> <location filename="../../SketchRectangularArrayDialog.ui" line="116"/> <source>If selected, it substitutes dimensional constraints by geometric constraints in the copies, so that a change in the original element is directly reflected on 
copies</source> <translation>Se selecionado, substitui restrições de dimensão por restrições geométricas nas cópias, para que uma alteração no elemento original seja refletida diretamente nas cópias</translation> </message> <message> <location filename="../../SketchRectangularArrayDialog.ui" line="100"/> <source>Constrain inter-element separation</source> <translation>Restringir a separação entre elementos</translation> </message> <message> <location filename="../../SketchRectangularArrayDialog.ui" line="121"/> <source>Clone</source> <translation>Clonar</translation> </message> </context> <context> <name>SketcherGui::SketcherGeneralWidget</name> <message> <location filename="../../TaskSketcherGeneral.cpp" line="132"/> <location filename="../../TaskSketcherGeneral.cpp" line="137"/> <location filename="../../TaskSketcherGeneral.cpp" line="142"/> <source>Normal Geometry</source> <translation>Geometria normal</translation> </message> <message> <location filename="../../TaskSketcherGeneral.cpp" line="132"/> <location filename="../../TaskSketcherGeneral.cpp" line="137"/> <location filename="../../TaskSketcherGeneral.cpp" line="142"/> <source>Construction Geometry</source> <translation>Geometria de construção</translation> </message> <message> <location filename="../../TaskSketcherGeneral.cpp" line="132"/> <location filename="../../TaskSketcherGeneral.cpp" line="137"/> <location filename="../../TaskSketcherGeneral.cpp" line="142"/> <source>External Geometry</source> <translation>Geometria externa</translation> </message> </context> <context> <name>SketcherGui::SketcherRegularPolygonDialog</name> <message> <location filename="../../SketcherRegularPolygonDialog.ui" line="17"/> <source>Create array</source> <translation>Criar matriz</translation> </message> <message> <location filename="../../SketcherRegularPolygonDialog.ui" line="25"/> <source>Number of Sides:</source> <translation>Número de lados:</translation> </message> <message> <location 
filename="../../SketcherRegularPolygonDialog.ui" line="32"/> <source>Number of columns of the linear array</source> <translation>Número de colunas da matriz linear</translation> </message> </context> <context> <name>SketcherGui::SketcherSettings</name> <message> <location filename="../../SketcherSettings.ui" line="14"/> <location filename="../../SketcherSettings.ui" line="109"/> <source>General</source> <translation>Geral</translation> </message> <message> <location filename="../../SketcherSettings.ui" line="27"/> <source>Sketcher solver</source> <translation>Calculador do esboço</translation> </message> <message> <location filename="../../SketcherSettings.ui" line="33"/> <source>Sketcher dialog will have additional section 'Advanced solver control' to adjust solver settings</source> <translation>A caixa de diálogo do esboço terá uma seção adicional 'Controle avançado do calculador' para ajustar as configurações de resolução</translation> </message> <message> <location filename="../../SketcherSettings.ui" line="37"/> <source>Show section 'Advanced solver control' in task dialog</source> <translation>Mostrar seção 'Controle avançado do calculador' no painel de tarefas</translation> </message> <message> <location filename="../../SketcherSettings.ui" line="53"/> <source>Dragging performance</source> <translation>Performance durante a manipulação</translation> </message><|fim▁hole|>Requires to re-enter edit mode to take effect.</source> <translation>Um algoritmo especial do calculador será usado quando arrastar elementos de esboço. 
Necessita sair e reentrar no modo de edição para ter efeito.</translation> </message> <message> <location filename="../../SketcherSettings.ui" line="63"/> <source>Improve solving while dragging</source> <translation>Melhorar resolução ao arrastar</translation> </message> <message> <location filename="../../SketcherSettings.ui" line="115"/> <source>New constraints that would be redundant will automatically be removed</source> <translation>Novas restrições que seriam redundantes serão automaticamente removidas</translation> </message> <message> <location filename="../../SketcherSettings.ui" line="118"/> <source>Auto remove redundants</source> <translation>Remover redundâncias automaticamente</translation> </message> <message> <location filename="../../SketcherSettings.ui" line="134"/> <source>Allow to leave sketch edit mode when pressing Esc button</source> <translation>Permitir a saída do modo de edição do esboço pressionando o botão Esc</translation> </message> <message> <location filename="../../SketcherSettings.ui" line="137"/> <source>Esc can leave sketch edit mode</source> <translation>Esc para sair do modo de edição</translation> </message> <message> <location filename="../../SketcherSettings.ui" line="153"/> <source>Notifies about automatic constraint substitutions</source> <translation>Notificar substituições automáticas de restrições</translation> </message> <message> <location filename="../../SketcherSettings.ui" line="156"/> <source>Notify automatic constraint substitutions</source> <translation>Notificar substituições automáticas de restrições</translation> </message> <message> <location filename="../../SketcherSettings.ui" line="20"/> <source>Sketcher</source> <translation>Esboço</translation> </message> </context> <context> <name>SketcherGui::SketcherSettingsColors</name> <message> <location filename="../../SketcherSettingsColors.ui" line="14"/> <source>Colors</source> <translation>Cores</translation> </message> <message> <location 
filename="../../SketcherSettingsColors.ui" line="20"/> <source>Sketcher colors</source> <translation>Cores de esboço</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="28"/> <source>Default edge color</source> <translation>Cor padrão da aresta</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="55"/> <source>Default vertex color</source> <translation>Cor padrão dos vértices</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="82"/> <source>Making line color</source> <translation>Cor de linha sendo desenhada</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="109"/> <source>Edit edge color</source> <translation>Editar cor da aresta</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="136"/> <source>Edit vertex color</source> <translation>Editar a cor do vértice</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="163"/> <source>Construction geometry</source> <translation>Geometria de construção</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="217"/> <source>External geometry</source> <translation>Geometria externa</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="271"/> <source>Fully constrained geometry</source> <translation>Geometria totalmente restrita</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="406"/> <source>Constraint color</source> <translation>Cor das restrições</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="460"/> <source>Expression dependent constraint color</source> <translation>Cor das restrições que dependem de expressões</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="35"/> 
<source>Color of edges</source> <translation>Cor das bordas</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="62"/> <source>Color of vertices</source> <translation>Cor dos vértices</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="89"/> <source>Color used while new sketch elements are created</source> <translation>Cor usada enquanto novos elementos de esboço são criados</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="116"/> <source>Color of edges being edited</source> <translation>Cor das arestas sendo editadas</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="143"/> <source>Color of vertices being edited</source> <translation>Cor dos vértices sendo editados</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="170"/> <source>Color of construction geometry in edit mode</source> <translation>Cor da geometria de construção no modo de edição</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="190"/> <source>Internal alignment edge color</source> <translation>Cor de alinhamento interno de arestas</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="197"/> <source>Color of edges of internal alignment geometry</source> <translation>Cor das arestas da geometria de alinhamento interno</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="224"/> <source>Color of external geometry in edit mode</source> <translation>Cor da geometria externa no modo de edição</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="244"/> <source>Invalid Sketch</source> <translation>Esboço inválido</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="251"/> <source>Color of geometry 
indicating an invalid sketch</source> <translation>Cor da geometria indicando um esboço inválido</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="278"/> <source>Color of fully constrained geometry in edit mode</source> <translation>Cor de geometria totalmente restrita no modo de edição</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="298"/> <source>Fully constrained edit edge color</source> <translation>Cor das arestas totalmente restritas sendo editadas</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="305"/> <source>Color of fully constrained edge color in edit mode</source> <translation>Cor das arestas totalmente restritas no modo de edição</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="325"/> <source>Fully constrained edit construction edge color</source> <translation>Cor das arestas de construção totalmente restritas sendo editadas</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="332"/> <source>Color of fully constrained construction edge color in edit mode</source> <translation>Cor das arestas de construção completamente restritas no modo de edição</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="352"/> <source>Fully constrained edit internal alignment edge color</source> <translation>Cor das arestas de alinhamento interno totalmente restritas sendo editadas</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="359"/> <source>Color of fully constrained internal alignment edge color in edit mode</source> <translation>Cor das arestas de alinhamento interno completamente restritas no modo de edição</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="379"/> <source>Fully constrained edit vertex color</source> 
<translation>Cor dos vértices totalmente restritos sendo editados</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="386"/> <source>Color of fully constrained vertex color in edit mode</source> <translation>Cor dos vértices totalmente restritos no modo de edição</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="413"/> <source>Color of driving constraints in edit mode</source> <translation>Cor das restrições ativas no modo de edição</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="433"/> <source>Reference constraint color</source> <translation>Cor das restrições de referência</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="440"/> <source>Color of reference constraints in edit mode</source> <translation>Cor das restrições de referência no modo de edição</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="467"/> <source>Color of expression dependent constraints in edit mode</source> <translation>Cor das restrições dependentes da expressão no modo de edição</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="487"/> <source>Deactivated constraint color</source> <translation>Cor das restrições desativadas</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="494"/> <source>Color of deactivated constraints in edit mode</source> <translation>Cor das restrições desativadas no modo de edição</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="514"/> <source>Dimensional constraint color</source> <translation>Cor das restrições dimensionais</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="521"/> <source>Color of dimensional driving constraints</source> <translation>Cor das restrições dimensional 
atuantes</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="541"/> <source>Coordinate text color</source> <translation>Cor do texto das coordenadas</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="548"/> <source>Text color of the coordinates</source> <translation>Cor do texto das coordenadas</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="575"/> <source>Color of crosshair cursor. (The one you get when creating a new sketch element.)</source> <translation>Cor do cursor cruzado do mouse. (Que é usado ao criar um novo elemento de esboço.)</translation> </message> <message> <location filename="../../SketcherSettingsColors.ui" line="568"/> <source>Cursor crosshair color</source> <translation>Cor do ponteiro do mouse</translation> </message> </context> <context> <name>SketcherGui::SketcherSettingsDisplay</name> <message> <location filename="../../SketcherSettingsDisplay.ui" line="14"/> <source>Display</source> <translation>Tela</translation> </message> <message> <location filename="../../SketcherSettingsDisplay.ui" line="33"/> <source>Sketch editing</source> <translation>Edição de esboço</translation> </message> <message> <location filename="../../SketcherSettingsDisplay.ui" line="39"/> <source>A dialog will pop up to input a value for new dimensional constraints</source> <translation>Um diálogo irá aparecer para inserir um valor para novas restrições de dimensão</translation> </message> <message> <location filename="../../SketcherSettingsDisplay.ui" line="42"/> <source>Ask for value after creating a dimensional constraint</source> <translation>Pedir o valor depois de criar uma restrição de dimensão</translation> </message> <message> <location filename="../../SketcherSettingsDisplay.ui" line="58"/> <source>Segments per geometry</source> <translation>Segmentos por geometria</translation> </message> <message> <location 
filename="../../SketcherSettingsDisplay.ui" line="65"/> <source>Current constraint creation tool will remain active after creation</source> <translation>A ferramenta de criação de restrições atual permanecerá ativa após a criação</translation> </message> <message> <location filename="../../SketcherSettingsDisplay.ui" line="68"/> <source>Constraint creation "Continue Mode"</source> <translation>Modo continuo de criação de restrições</translation> </message> <message> <location filename="../../SketcherSettingsDisplay.ui" line="84"/> <source>Line pattern used for grid lines</source> <translation>Padrão de linha usado para linhas de grade</translation> </message> <message> <location filename="../../SketcherSettingsDisplay.ui" line="94"/> <source>Base length units will not be displayed in constraints. Supports all unit systems except 'US customary' and 'Building US/Euro'.</source> <translation>Unidades de comprimento não serão exibidas em restrições. Todos os sistemas de unidades são suportados, exceto 'US customy' e 'Building US/Euro'.</translation> </message> <message> <location filename="../../SketcherSettingsDisplay.ui" line="98"/> <source>Hide base length units for supported unit systems</source> <translation>Ocultar unidades de comprimento base para sistemas de unidades suportados</translation> </message> <message> <location filename="../../SketcherSettingsDisplay.ui" line="117"/> <source>Font size</source> <translation>Tamanho da fonte</translation> </message> <message> <location filename="../../SketcherSettingsDisplay.ui" line="151"/> <source>Visibility automation</source> <translation>Automação de visibilidade</translation> </message> <message> <location filename="../../SketcherSettingsDisplay.ui" line="157"/> <source>When opening sketch, hide all features that depend on it</source> <translation>Ao abrir um esboço, ocultar todos os objetos que dependem dele</translation> </message> <message> <location filename="../../SketcherSettingsDisplay.ui" line="160"/> 
<source>Hide all objects that depend on the sketch</source> <translation>Esconder todos os objetos que dependem o esboço</translation> </message> <message> <location filename="../../SketcherSettingsDisplay.ui" line="176"/> <source>When opening sketch, show sources for external geometry links</source> <translation>Ao abrir um esboço, mostrar fontes de vínculos de geometria externa</translation> </message> <message> <location filename="../../SketcherSettingsDisplay.ui" line="179"/> <source>Show objects used for external geometry</source> <translation>Mostrar objetos usados para geometria externa</translation> </message> <message> <location filename="../../SketcherSettingsDisplay.ui" line="195"/> <source>When opening sketch, show objects the sketch is attached to</source> <translation>Ao abrir um esboço, mostrar os objetos aos quais o esboço está anexado</translation> </message> <message> <location filename="../../SketcherSettingsDisplay.ui" line="198"/> <source>Show objects that the sketch is attached to</source> <translation>Mostrar objetos aos quais o esboço está anexado</translation> </message> <message> <location filename="../../SketcherSettingsDisplay.ui" line="239"/> <source>Note: these settings are defaults applied to new sketches. The behavior is remembered for each sketch individually as properties on the View tab.</source> <translation>Nota: estas configurações são padrões aplicados aos novos esboços. 
Estas configurações são lembradas para cada esboço individualmente, e guardadas como propriedades de vista.</translation> </message> <message> <location filename="../../SketcherSettingsDisplay.ui" line="347"/> <source>View scale ratio</source> <translation>Proporção de escala de vista</translation> </message> <message> <location filename="../../SketcherSettingsDisplay.ui" line="354"/> <source>The 3D view is scaled based on this factor</source> <translation>A vista 3D é dimensionada com base neste fator</translation> </message> <message> <location filename="../../SketcherSettingsDisplay.ui" line="214"/> <source>When closing sketch, move camera back to where it was before sketch was opened</source> <translation>Ao fechar o esboço, a câmera será colocada de volta onde estava antes do esboço ser aberto</translation> </message> <message> <location filename="../../SketcherSettingsDisplay.ui" line="217"/> <source>Restore camera position after editing</source> <translation>Restaurar a posição da câmara após a edição</translation> </message> <message> <location filename="../../SketcherSettingsDisplay.ui" line="258"/> <source>Applies current visibility automation settings to all sketches in open documents</source> <translation>Aplica as configurações atuais de automação de vista a todos os esboços em documentos abertos</translation> </message> <message> <location filename="../../SketcherSettingsDisplay.ui" line="261"/> <source>Apply to existing sketches</source> <translation>Aplicar aos esboços existentes</translation> </message> <message> <location filename="../../SketcherSettingsDisplay.ui" line="271"/> <source>Font size used for labels and constraints</source> <translation>Tamanho de texto usado para rótulos e restrições</translation> </message> <message> <location filename="../../SketcherSettingsDisplay.ui" line="274"/> <source>px</source> <translation>px</translation> </message> <message> <location filename="../../SketcherSettingsDisplay.ui" line="296"/> <source>Current 
sketcher creation tool will remain active after creation</source> <translation>A ferramenta atual de criação de esboço permanecerá ativa após a criação</translation> </message> <message> <location filename="../../SketcherSettingsDisplay.ui" line="299"/> <source>Geometry creation "Continue Mode"</source> <translation>Modo contínuo de criação da geometria</translation> </message> <message> <location filename="../../SketcherSettingsDisplay.ui" line="315"/> <source>Grid line pattern</source> <translation>Padrão das linhas da grade</translation> </message> <message> <location filename="../../SketcherSettingsDisplay.ui" line="322"/> <source>Number of polygons for geometry approximation</source> <translation>Número de polígonos para aproximação da geometria</translation> </message> <message> <location filename="../../SketcherSettings.cpp" line="219"/> <source>Unexpected C++ exception</source> <translation>Exceção inesperada de C++</translation> </message> <message> <location filename="../../SketcherSettings.cpp" line="222"/> <source>Sketcher</source> <translation>Esboço</translation> </message> </context> <context> <name>SketcherGui::SketcherValidation</name> <message> <location filename="../../TaskSketcherValidation.cpp" line="139"/> <source>No missing coincidences</source> <translation>Nenhuma coincidência faltante</translation> </message> <message> <location filename="../../TaskSketcherValidation.cpp" line="140"/> <source>No missing coincidences found</source> <translation>Nenhuma coincidência faltante foi encontrada</translation> </message> <message> <location filename="../../TaskSketcherValidation.cpp" line="145"/> <source>Missing coincidences</source> <translation>Coincidências faltantes</translation> </message> <message> <location filename="../../TaskSketcherValidation.cpp" line="146"/> <source>%1 missing coincidences found</source> <translation>%1 coincidências faltantes encontradas</translation> </message> <message> <location 
filename="../../TaskSketcherValidation.cpp" line="182"/> <source>No invalid constraints</source> <translation>Nenhuma restrição inválida</translation> </message> <message> <location filename="../../TaskSketcherValidation.cpp" line="183"/> <source>No invalid constraints found</source> <translation>Nenhuma restrição inválida encontrada</translation> </message> <message> <location filename="../../TaskSketcherValidation.cpp" line="187"/> <source>Invalid constraints</source> <translation>Restrições inválidas</translation> </message> <message> <location filename="../../TaskSketcherValidation.cpp" line="188"/> <source>Invalid constraints found</source> <translation>Restrições inválidas encontradas</translation> </message> <message> <location filename="../../TaskSketcherValidation.cpp" line="219"/> <location filename="../../TaskSketcherValidation.cpp" line="230"/> <location filename="../../TaskSketcherValidation.cpp" line="237"/> <location filename="../../TaskSketcherValidation.cpp" line="248"/> <source>Reversed external geometry</source> <translation>Geometria externa invertida</translation> </message> <message> <location filename="../../TaskSketcherValidation.cpp" line="220"/> <source>%1 reversed external-geometry arcs were found. Their endpoints are encircled in 3d view. %2 constraints are linking to the endpoints. The constraints have been listed in Report view (menu View -&gt; Panels -&gt; Report view). Click "Swap endpoints in constraints" button to reassign endpoints. Do this only once to sketches created in FreeCAD older than v0.15</source> <translation>%1 arcos invertidos foram encontrados na geometria externa. Seus pontos de extremidade são destacados na vista 3d. %2 restrições estão ligadas aos pontos de extremidade. Essas restrições estão indicadas no relatório (menu Vista -&gt; Vistas -&gt; Relatório). Clique em "Trocar pontos de extremidade em restrições" para reatribuir os pontos de extremidade. 
Faça isto apenas uma vez em esboços criados com versões de FreeCAD anteriores à 0.15</translation> </message> <message> <location filename="../../TaskSketcherValidation.cpp" line="231"/> <source>%1 reversed external-geometry arcs were found. Their endpoints are encircled in 3d view. However, no constraints linking to the endpoints were found.</source> <translation>%1 arcos invertidos foram encontrados na geometria externa. Seus pontos de extremidade são destacados na vista 3D. No entanto, nenhuma restrição foi encontrada nos pontos de extremidade.</translation> </message> <message> <location filename="../../TaskSketcherValidation.cpp" line="238"/> <source>No reversed external-geometry arcs were found.</source> <translation>Nenhum arco invertido foi encontrado na geometria externa.</translation> </message> <message> <location filename="../../TaskSketcherValidation.cpp" line="249"/> <source>%1 changes were made to constraints linking to endpoints of reversed arcs.</source> <translation>%1 alterações foram feitas nas restrições ligadas a pontos de extremidade de arcos invertidos.</translation> </message> <message> <location filename="../../TaskSketcherValidation.cpp" line="262"/> <location filename="../../TaskSketcherValidation.cpp" line="276"/> <source>Constraint orientation locking</source> <translation>Restrição de bloqueio de orientação</translation> </message> <message> <location filename="../../TaskSketcherValidation.cpp" line="263"/> <source>Orientation locking was enabled and recomputed for %1 constraints. The constraints have been listed in Report view (menu View -&gt; Panels -&gt; Report view).</source> <translation>O bloqueio de orientação foi habilitado e recalculado para %1 restrições. Essas restrições estão indicadas no relatório (menu Vista -&gt; Painéis -&gt; Relatório).</translation> </message> <message> <location filename="../../TaskSketcherValidation.cpp" line="277"/> <source>Orientation locking was disabled for %1 constraints. 
The constraints have been listed in Report view (menu View -&gt; Panels -&gt; Report view). Note that for all future constraints, the locking still defaults to ON.</source> <translation>O bloqueio de orientação foi desativado para %1 restrições. Essas restrições estão indicadas no relatório (menu Vista -&gt; Painéis -&gt; Relatório). Note que para todas as futuras restrições, o bloqueio permanece ativado por padrão.</translation> </message> <message> <location filename="../../TaskSketcherValidation.cpp" line="289"/> <location filename="../../TaskSketcherValidation.cpp" line="301"/> <source>Delete constraints to external geom.</source> <translation>Excluir restrições para geometria externa</translation> </message> <message> <location filename="../../TaskSketcherValidation.cpp" line="290"/> <source>You are about to delete ALL constraints that deal with external geometry. This is useful to rescue a sketch with broken/changed links to external geometry. Are you sure you want to delete the constraints?</source> <translation>Você está prestes a excluir todas as restrições conectadas com geometria externa. Isso é útil para resgatar um esboço com links para geometria externa quebrados ou alterados. 
Tem certeza que deseja excluir essas restrições?</translation> </message> <message> <location filename="../../TaskSketcherValidation.cpp" line="302"/> <source>All constraints that deal with external geometry were deleted.</source> <translation>Todas as restrições conectadas com geometria externa foram eliminadas.</translation> </message> <message> <location filename="../../TaskSketcherValidation.cpp" line="359"/> <source>No degenerated geometry</source> <translation>Nenhuma geometria corrompida</translation> </message> <message> <location filename="../../TaskSketcherValidation.cpp" line="360"/> <source>No degenerated geometry found</source> <translation>Nenhuma geometria corrompida encontrada</translation> </message> <message> <location filename="../../TaskSketcherValidation.cpp" line="364"/> <source>Degenerated geometry</source> <translation>Geometria corrompida</translation> </message> <message> <location filename="../../TaskSketcherValidation.cpp" line="365"/> <source>%1 degenerated geometry found</source> <translation>%1 geometrias corrompidas encontradas</translation> </message> </context> <context> <name>SketcherGui::TaskSketcherConstrains</name> <message> <location filename="../../TaskSketcherConstrains.ui" line="26"/> <source>Form</source> <translation>Formulário</translation> </message> <message> <location filename="../../TaskSketcherConstrains.ui" line="34"/> <source>Filter:</source> <translation>Filtro:</translation> </message> <message> <location filename="../../TaskSketcherConstrains.ui" line="45"/> <source>All</source> <translation>Todos</translation> </message> <message> <location filename="../../TaskSketcherConstrains.ui" line="50"/> <source>Normal</source> <translation>Normal</translation> </message> <message> <location filename="../../TaskSketcherConstrains.ui" line="55"/> <source>Datums</source> <translation>Datums</translation> </message> <message> <location filename="../../TaskSketcherConstrains.ui" line="60"/> <source>Named</source> 
<translation>Nomeado</translation> </message> <message> <location filename="../../TaskSketcherConstrains.ui" line="65"/> <source>Reference</source> <translation>Referência</translation> </message> <message> <location filename="../../TaskSketcherConstrains.ui" line="75"/> <source>Internal alignments will be hidden</source> <translation>Os alinhamentos internos serão ocultos</translation> </message> <message> <location filename="../../TaskSketcherConstrains.ui" line="78"/> <source>Hide internal alignment</source> <translation>Ocultar alinhamento interno</translation> </message> <message> <location filename="../../TaskSketcherConstrains.ui" line="94"/> <source>Extended information will be added to the list</source> <translation>Informações adicionais serão acrescidas à lista</translation> </message> <message> <location filename="../../TaskSketcherConstrains.ui" line="97"/> <source>Extended information</source> <translation>Informação adicional</translation> </message> <message> <location filename="../../TaskSketcherConstrains.cpp" line="638"/> <source>Constraints</source> <translation>Restrições</translation> </message> <message> <location filename="../../TaskSketcherConstrains.cpp" line="856"/> <location filename="../../TaskSketcherConstrains.cpp" line="872"/> <source>Error</source> <translation>Erro</translation> </message> </context> <context> <name>SketcherGui::TaskSketcherElements</name> <message> <location filename="../../TaskSketcherElements.ui" line="14"/> <source>Form</source> <translation>Formulário</translation> </message> <message> <location filename="../../TaskSketcherElements.ui" line="22"/> <source>Type:</source> <translation>Tipo:</translation> </message> <message> <location filename="../../TaskSketcherElements.ui" line="42"/> <source>Edge</source> <translation>Aresta</translation> </message> <message> <location filename="../../TaskSketcherElements.ui" line="47"/> <source>Starting Point</source> <translation>Ponto de partida</translation> </message> 
<message> <location filename="../../TaskSketcherElements.ui" line="52"/> <source>End Point</source> <translation>Ponto final</translation> </message> <message> <location filename="../../TaskSketcherElements.ui" line="57"/> <source>Center Point</source> <translation>Ponto central</translation> </message> <message> <location filename="../../TaskSketcherElements.ui" line="69"/> <source>Mode:</source> <translation>Modo:</translation> </message> <message> <location filename="../../TaskSketcherElements.ui" line="89"/> <source>All</source> <translation>Todos</translation> </message> <message> <location filename="../../TaskSketcherElements.ui" line="94"/> <source>Normal</source> <translation>Normal</translation> </message> <message> <location filename="../../TaskSketcherElements.ui" line="104"/> <source>External</source> <translation>Externo</translation> </message> <message> <location filename="../../TaskSketcherElements.ui" line="133"/> <source>Extended naming containing info about element mode</source> <translation>Nomeação estendida, com informações sobre o modo do elemento</translation> </message> <message> <location filename="../../TaskSketcherElements.ui" line="136"/> <source>Extended naming</source> <translation>Nomeação estendida</translation> </message> <message> <location filename="../../TaskSketcherElements.ui" line="146"/> <source>Only the type 'Edge' will be available for the list</source> <translation>Somente o tipo 'Aresta' estará disponível para a lista</translation> </message> <message> <location filename="../../TaskSketcherElements.ui" line="149"/> <source>Auto-switch to Edge</source> <translation>Auto-seleção de aresta</translation> </message> <message> <location filename="../../TaskSketcherElements.cpp" line="257"/> <source>Elements</source> <translation>Elementos</translation> </message> <message> <location filename="../../TaskSketcherElements.cpp" line="278"/> <source>&lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p&gt;&amp;quot;%1&amp;quot;: multiple 
selection&lt;/p&gt;&lt;p&gt;&amp;quot;%2&amp;quot;: switch to next valid type&lt;/p&gt;&lt;/body&gt;&lt;/html&gt;</source> <translation>&lt;html&gt;&lt;head/&gt;&lt;body&gt;&lt;p&gt;&amp;quot;%1&amp;quot;: seleção múltipla&lt;/p&gt;&lt;p&gt;&amp;quot;%2&amp;quot;: mudar para o próximo tipo válido&lt;/p&gt;&lt;/body&gt;&lt;/html&gt;</translation> </message> <message> <location filename="../../TaskSketcherElements.cpp" line="747"/> <location filename="../../TaskSketcherElements.cpp" line="748"/> <location filename="../../TaskSketcherElements.cpp" line="853"/> <location filename="../../TaskSketcherElements.cpp" line="854"/> <source>Point</source> <translation>Ponto</translation> </message> <message> <location filename="../../TaskSketcherElements.cpp" line="750"/> <location filename="../../TaskSketcherElements.cpp" line="752"/> <location filename="../../TaskSketcherElements.cpp" line="856"/> <location filename="../../TaskSketcherElements.cpp" line="857"/> <source>Line</source> <translation>Linha</translation> </message> <message> <location filename="../../TaskSketcherElements.ui" line="99"/> <location filename="../../TaskSketcherElements.cpp" line="751"/> <location filename="../../TaskSketcherElements.cpp" line="755"/> <location filename="../../TaskSketcherElements.cpp" line="759"/> <location filename="../../TaskSketcherElements.cpp" line="763"/> <location filename="../../TaskSketcherElements.cpp" line="767"/> <location filename="../../TaskSketcherElements.cpp" line="771"/> <location filename="../../TaskSketcherElements.cpp" line="775"/> <location filename="../../TaskSketcherElements.cpp" line="779"/> <location filename="../../TaskSketcherElements.cpp" line="783"/> <source>Construction</source> <translation>Construção</translation> </message> <message> <location filename="../../TaskSketcherElements.cpp" line="754"/> <location filename="../../TaskSketcherElements.cpp" line="756"/> <location filename="../../TaskSketcherElements.cpp" line="859"/> <location 
filename="../../TaskSketcherElements.cpp" line="860"/> <source>Arc</source> <translation>Arco</translation> </message> <message> <location filename="../../TaskSketcherElements.cpp" line="758"/> <location filename="../../TaskSketcherElements.cpp" line="760"/> <location filename="../../TaskSketcherElements.cpp" line="862"/> <location filename="../../TaskSketcherElements.cpp" line="863"/> <source>Circle</source> <translation>Círculo</translation> </message> <message> <location filename="../../TaskSketcherElements.cpp" line="762"/> <location filename="../../TaskSketcherElements.cpp" line="764"/> <location filename="../../TaskSketcherElements.cpp" line="865"/> <location filename="../../TaskSketcherElements.cpp" line="866"/> <source>Ellipse</source> <translation>Elipse</translation> </message> <message> <location filename="../../TaskSketcherElements.cpp" line="766"/> <location filename="../../TaskSketcherElements.cpp" line="768"/> <location filename="../../TaskSketcherElements.cpp" line="868"/> <location filename="../../TaskSketcherElements.cpp" line="869"/> <source>Elliptical Arc</source> <translation>Arco elíptico</translation> </message> <message> <location filename="../../TaskSketcherElements.cpp" line="770"/> <location filename="../../TaskSketcherElements.cpp" line="772"/> <location filename="../../TaskSketcherElements.cpp" line="871"/> <location filename="../../TaskSketcherElements.cpp" line="872"/> <source>Hyperbolic Arc</source> <translation>Arco hiperbólico</translation> </message> <message> <location filename="../../TaskSketcherElements.cpp" line="774"/> <location filename="../../TaskSketcherElements.cpp" line="776"/> <location filename="../../TaskSketcherElements.cpp" line="874"/> <location filename="../../TaskSketcherElements.cpp" line="875"/> <source>Parabolic Arc</source> <translation>Arco parabólico</translation> </message> <message> <location filename="../../TaskSketcherElements.cpp" line="778"/> <location filename="../../TaskSketcherElements.cpp" 
line="780"/> <location filename="../../TaskSketcherElements.cpp" line="877"/> <location filename="../../TaskSketcherElements.cpp" line="878"/> <source>BSpline</source> <translation>BSpline</translation> </message> <message> <location filename="../../TaskSketcherElements.cpp" line="782"/> <location filename="../../TaskSketcherElements.cpp" line="784"/> <location filename="../../TaskSketcherElements.cpp" line="880"/> <location filename="../../TaskSketcherElements.cpp" line="881"/> <source>Other</source> <translation>Outro</translation> </message> </context> <context> <name>SketcherGui::TaskSketcherGeneral</name> <message> <location filename="../../TaskSketcherGeneral.ui" line="14"/> <source>Form</source> <translation>Formulário</translation> </message> <message> <location filename="../../TaskSketcherGeneral.ui" line="23"/> <source>A grid will be shown</source> <translation>Uma grade será mostrada</translation> </message> <message> <location filename="../../TaskSketcherGeneral.ui" line="26"/> <source>Show grid</source> <translation>Mostrar grade</translation> </message> <message> <location filename="../../TaskSketcherGeneral.ui" line="44"/> <source>Grid size:</source> <translation>Tamanho da grade:</translation> </message> <message> <location filename="../../TaskSketcherGeneral.ui" line="51"/> <source>Distance between two subsequent grid lines</source> <translation>Distância entre duas linhas da grade</translation> </message> <message> <location filename="../../TaskSketcherGeneral.ui" line="87"/> <source>New points will snap to the nearest grid line. Points must be set closer than a fifth of the grid size to a grid line to snap.</source> <translation>Novos pontos serão colocados no nó mais próximo da grade. 
Os pontos devem ser mais próximos de um quinto do tamanho de grade para serem colocados em um nó da grade.</translation> </message> <message> <location filename="../../TaskSketcherGeneral.ui" line="91"/> <source>Grid snap</source> <translation>Alinhar à grade</translation> </message> <message> <location filename="../../TaskSketcherGeneral.ui" line="107"/> <source>Sketcher proposes automatically sensible constraints.</source> <translation>O esboço propõe automaticamente restrições adequadas.</translation> </message> <message> <location filename="../../TaskSketcherGeneral.ui" line="110"/> <source>Auto constraints</source> <translation>Restrições automáticas</translation> </message> <message> <location filename="../../TaskSketcherGeneral.ui" line="126"/> <source>Sketcher tries not to propose redundant auto constraints</source> <translation>O esboço tenta não propor restrições redundantes automáticamente</translation> </message> <message> <location filename="../../TaskSketcherGeneral.ui" line="129"/> <source>Avoid redundant auto constraints</source> <translation>Evitar auto-restrições redundantes</translation> </message> <message> <location filename="../../TaskSketcherGeneral.ui" line="145"/> <source>Rendering order (global):</source> <translation>Ordem de renderização (global):</translation> </message> <message> <location filename="../../TaskSketcherGeneral.ui" line="164"/> <source>To change, drag and drop a geometry type to top or bottom</source> <translation>Para alterar, arraste e solte um tipo de geometria para cima ou para baixo</translation> </message> <message> <location filename="../../TaskSketcherGeneral.cpp" line="195"/> <source>Edit controls</source> <translation>Controles de edição</translation> </message> </context> <context> <name>SketcherGui::TaskSketcherMessages</name> <message> <location filename="../../TaskSketcherMessages.cpp" line="51"/> <source>Solver messages</source> <translation>Mensagens</translation> </message> </context> <context> 
<name>SketcherGui::TaskSketcherSolverAdvanced</name> <message> <location filename="../../TaskSketcherSolverAdvanced.cpp" line="64"/> <source>Advanced solver control</source> <translation>Controle avançado do Solver</translation> </message> </context> <context> <name>SketcherGui::TaskSketcherValidation</name> <message> <location filename="../../TaskSketcherValidation.ui" line="14"/> <source>Sketcher validation</source> <translation>Validação de esboço</translation> </message> <message> <location filename="../../TaskSketcherValidation.ui" line="70"/> <source>Invalid constraints</source> <translation>Restrições inválidas</translation> </message> <message> <location filename="../../TaskSketcherValidation.ui" line="53"/> <location filename="../../TaskSketcherValidation.ui" line="83"/> <location filename="../../TaskSketcherValidation.ui" line="113"/> <source>Fix</source> <translation>Consertar</translation> </message> <message> <location filename="../../TaskSketcherValidation.ui" line="46"/> <location filename="../../TaskSketcherValidation.ui" line="76"/> <location filename="../../TaskSketcherValidation.ui" line="106"/> <location filename="../../TaskSketcherValidation.ui" line="129"/> <source>Find</source> <translation>Procurar</translation> </message> <message> <location filename="../../TaskSketcherValidation.ui" line="90"/> <source>Delete constraints to external geom.</source> <translation>Excluir restrições para geometria externa</translation> </message> <message> <location filename="../../TaskSketcherValidation.ui" line="20"/> <source>Missing coincidences</source> <translation>Coincidências faltantes</translation> </message> <message> <location filename="../../TaskSketcherValidation.ui" line="26"/> <source>Tolerance:</source> <translation>Tolerância:</translation> </message> <message> <location filename="../../TaskSketcherValidation.ui" line="60"/> <source>Highlight open vertexes</source> <translation>Destacar os vértices abertos</translation> </message> <message> 
<location filename="../../TaskSketcherValidation.ui" line="36"/> <source>Ignore construction geometry</source> <translation>Ignorar a geometria de construção</translation> </message> <message> <location filename="../../TaskSketcherValidation.ui" line="100"/> <source>Degenerated geometry</source> <translation>Geometria corrompida</translation> </message> <message> <location filename="../../TaskSketcherValidation.ui" line="123"/> <source>Reversed external geometry</source> <translation>Geometria externa invertida</translation> </message> <message> <location filename="../../TaskSketcherValidation.ui" line="136"/> <source>Swap endpoints in constraints</source> <translation>Trocar pontos de extremidade em restrições</translation> </message> <message> <location filename="../../TaskSketcherValidation.ui" line="146"/> <source>Constraint orientation locking</source> <translation>Restrição de bloqueio de orientação</translation> </message> <message> <location filename="../../TaskSketcherValidation.ui" line="152"/> <source>Enable/Update</source> <translation>Ativar/Atualizar</translation> </message> <message> <location filename="../../TaskSketcherValidation.ui" line="159"/> <source>Disable</source> <translation>Desativar</translation> </message> </context> <context> <name>SketcherGui::ViewProviderSketch</name> <message> <location filename="../../ViewProviderSketch.cpp" line="6162"/> <source>Edit sketch</source> <translation>Editar esboço</translation> </message> <message> <location filename="../../ViewProviderSketch.cpp" line="6176"/> <source>A dialog is already open in the task panel</source> <translation>Uma caixa de diálogo já está aberta no painel de tarefas</translation> </message> <message> <location filename="../../ViewProviderSketch.cpp" line="6177"/> <source>Do you want to close this dialog?</source> <translation>Deseja fechar este diálogo?</translation> </message> <message> <location filename="../../ViewProviderSketch.cpp" line="6191"/> <source>Invalid 
sketch</source> <translation>Esboço inválido</translation> </message> <message> <location filename="../../ViewProviderSketch.cpp" line="6192"/> <source>Do you want to open the sketch validation tool?</source> <translation>Você quer abrir a ferramenta de validação de esboço?</translation> </message> <message> <location filename="../../ViewProviderSketch.cpp" line="6193"/> <source>The sketch is invalid and cannot be edited.</source> <translation>O esboço é inválido e não pode ser editado.</translation> </message> <message> <location filename="../../ViewProviderSketch.cpp" line="6407"/> <source>Please remove the following constraint:</source> <translation>Por favor, remova a seguinte restrição:</translation> </message> <message> <location filename="../../ViewProviderSketch.cpp" line="6408"/> <source>Please remove at least one of the following constraints:</source> <translation>Por favor remova pelo menos uma das seguintes restrições:</translation> </message> <message> <location filename="../../ViewProviderSketch.cpp" line="6414"/> <source>Please remove the following redundant constraint:</source> <translation>Por favor, remova a seguinte restrição redundante:</translation> </message> <message> <location filename="../../ViewProviderSketch.cpp" line="6415"/> <source>Please remove the following redundant constraints:</source> <translation>Por favor, remova as seguintes restrições redundantes:</translation> </message> <message> <location filename="../../ViewProviderSketch.cpp" line="6421"/> <source>The following constraint is partially redundant:</source> <translation>A restrição seguinte é parcialmente redundante:</translation> </message> <message> <location filename="../../ViewProviderSketch.cpp" line="6422"/> <source>The following constraints are partially redundant:</source> <translation>As restrições seguintes são parcialmente redundantes:</translation> </message> <message> <location filename="../../ViewProviderSketch.cpp" line="6428"/> <source>Please remove the 
following malformed constraint:</source> <translation>Por favor, remova a seguinte restrição malformada:</translation> </message> <message> <location filename="../../ViewProviderSketch.cpp" line="6429"/> <source>Please remove the following malformed constraints:</source> <translation>Por favor remova as seguintes restrições malformadas:</translation> </message> <message> <location filename="../../ViewProviderSketch.cpp" line="6464"/> <source>Empty sketch</source> <translation>Esboço vazio</translation> </message> <message> <location filename="../../ViewProviderSketch.cpp" line="6471"/> <source>Over-constrained sketch </source> <translation>Esboço superrestrito </translation> </message> <message> <location filename="../../ViewProviderSketch.cpp" line="6472"/> <location filename="../../ViewProviderSketch.cpp" line="6479"/> <location filename="../../ViewProviderSketch.cpp" line="6486"/> <location filename="../../ViewProviderSketch.cpp" line="6494"/> <location filename="../../ViewProviderSketch.cpp" line="6503"/> <source>(click to select)</source> <translation>(clique para selecionar)</translation> </message> <message> <location filename="../../ViewProviderSketch.cpp" line="6478"/> <source>Sketch contains malformed constraints </source> <translation>O esboço contém restrições malformadas </translation> </message> <message> <location filename="../../ViewProviderSketch.cpp" line="6485"/> <source>Sketch contains conflicting constraints </source> <translation>Esboço contém restrições conflitantes </translation> </message> <message> <location filename="../../ViewProviderSketch.cpp" line="6493"/> <source>Sketch contains redundant constraints </source> <translation>Esboço contém restrições redundantes </translation> </message> <message> <location filename="../../ViewProviderSketch.cpp" line="6502"/> <source>Sketch contains partially redundant constraints </source> <translation>O esboço contém restrições parcialmente redundantes </translation> </message> <message> <location 
filename="../../ViewProviderSketch.cpp" line="6514"/> <source>Fully constrained sketch</source> <translation>Esboço totalmente restrito</translation> </message> <message> <location filename="../../ViewProviderSketch.cpp" line="6521"/> <source>Under-constrained sketch with &lt;a href="#dofs"&gt;&lt;span style=" text-decoration: underline; color:#0000ff; background-color: #F8F8FF;"&gt;1 degree&lt;/span&gt;&lt;/a&gt; of freedom. %1</source> <translation>Esboço sub-restrito com &lt;a href="#dofs"&gt;&lt;span style=" text-decoration: underline; color:#0000ff; background-color: #F8F8FF;"&gt;1 grau&lt;/span&gt;&lt;/a&gt; de liberdade. %1</translation> </message> <message> <location filename="../../ViewProviderSketch.cpp" line="6524"/> <source>Under-constrained sketch with &lt;a href="#dofs"&gt;&lt;span style=" text-decoration: underline; color:#0000ff; background-color: #F8F8FF;"&gt;%1 degrees&lt;/span&gt;&lt;/a&gt; of freedom. %2</source> <translation>Esboço sub-restrito com &lt;a href="#dofs"&gt;&lt;span style=" text-decoration: underline; color:#0000ff; background-color: #F8F8FF;"&gt;%1 graus&lt;/span&gt;&lt;/a&gt; de liberdade. 
%2</translation> </message> <message> <location filename="../../ViewProviderSketch.cpp" line="6529"/> <source>Solved in %1 sec</source> <translation>Resolvido em %1 seg</translation> </message> <message> <location filename="../../ViewProviderSketch.cpp" line="6532"/> <source>Unsolved (%1 sec)</source> <translation>Não resolvido (%1 seg)</translation> </message> </context> <context> <name>Sketcher_BSplineComb</name> <message> <location filename="../../CommandSketcherBSpline.cpp" line="347"/> <location filename="../../CommandSketcherBSpline.cpp" line="349"/> <source>Switches between showing and hiding the curvature comb for all B-splines</source> <translation>Alterna entre mostrar e ocultar o pente de curvatura para todas as B-splines</translation> </message> </context> <context> <name>Sketcher_BSplineDecreaseKnotMultiplicity</name> <message> <location filename="../../CommandSketcherBSpline.cpp" line="970"/> <location filename="../../CommandSketcherBSpline.cpp" line="972"/> <source>Decreases the multiplicity of the selected knot of a B-spline</source> <translation>Diminui a multiplicidade do nó selecionado de uma B-spline</translation> </message> </context> <context> <name>Sketcher_BSplineDegree</name> <message> <location filename="../../CommandSketcherBSpline.cpp" line="333"/> <location filename="../../CommandSketcherBSpline.cpp" line="335"/> <source>Switches between showing and hiding the degree for all B-splines</source> <translation>Alterna entre mostrar e ocultar o grau para todas as B-splines</translation> </message> </context> <context> <name>Sketcher_BSplineIncreaseKnotMultiplicity</name> <message> <location filename="../../CommandSketcherBSpline.cpp" line="963"/> <location filename="../../CommandSketcherBSpline.cpp" line="965"/> <source>Increases the multiplicity of the selected knot of a B-spline</source> <translation>Aumenta a multiplicidade do nó selecionado de uma B-spline</translation> </message> </context> <context> 
<name>Sketcher_BSplineKnotMultiplicity</name> <message> <location filename="../../CommandSketcherBSpline.cpp" line="354"/> <location filename="../../CommandSketcherBSpline.cpp" line="356"/> <source>Switches between showing and hiding the knot multiplicity for all B-splines</source> <translation>Alterna entre mostrar e ocultar a multiplicidade de nós para todas as B-splines</translation> </message> </context> <context> <name>Sketcher_BSplinePoleWeight</name> <message> <location filename="../../CommandSketcherBSpline.cpp" line="362"/> <location filename="../../CommandSketcherBSpline.cpp" line="364"/> <source>Switches between showing and hiding the control point weight for all B-splines</source> <translation>Alterna entre mostrar e ocultar o peso dos pontos de controle para todas as B-splines</translation> </message> </context> <context> <name>Sketcher_BSplinePolygon</name> <message> <location filename="../../CommandSketcherBSpline.cpp" line="340"/> <location filename="../../CommandSketcherBSpline.cpp" line="342"/> <source>Switches between showing and hiding the control polygons for all B-splines</source> <translation>Alterna entre mostrar e ocultar os polígonos de controle para todas as B-splines</translation> </message> </context> <context> <name>Sketcher_Clone</name> <message> <location filename="../../CommandSketcherTools.cpp" line="1708"/> <location filename="../../CommandSketcherTools.cpp" line="1709"/> <source>Creates a clone of the geometry taking as reference the last selected point</source> <translation>Cria um clone da geometria tomando como referência o último ponto selecionado</translation> </message> </context> <context> <name>Sketcher_CompCopy</name> <message> <location filename="../../CommandSketcherTools.cpp" line="1707"/> <source>Clone</source> <translation>Clonar</translation> </message> <message> <location filename="../../CommandSketcherTools.cpp" line="1711"/> <source>Copy</source> <translation>Copiar</translation> </message> <message> <location 
filename="../../CommandSketcherTools.cpp" line="1715"/> <source>Move</source> <translation>Mover</translation> </message> </context> <context> <name>Sketcher_ConstrainDiameter</name> <message> <location filename="../../CommandConstraints.cpp" line="5820"/> <location filename="../../CommandConstraints.cpp" line="5821"/> <source>Fix the diameter of a circle or an arc</source> <translation>Corrigir o diâmetro de um círculo ou arco</translation> </message> </context> <context> <name>Sketcher_ConstrainRadius</name> <message> <location filename="../../CommandConstraints.cpp" line="5816"/> <location filename="../../CommandConstraints.cpp" line="5817"/> <source>Fix the radius of a circle or an arc</source> <translation>Fixar o raio de um círculo ou um arco</translation> </message> </context> <context> <name>Sketcher_Copy</name> <message> <location filename="../../CommandSketcherTools.cpp" line="1712"/> <location filename="../../CommandSketcherTools.cpp" line="1713"/> <source>Creates a simple copy of the geometry taking as reference the last selected point</source> <translation>Cria uma cópia simples da geometria tomando como referência o último ponto selecionado</translation> </message> </context> <context> <name>Sketcher_Create3PointArc</name> <message> <location filename="../../CommandCreateGeo.cpp" line="1893"/> <location filename="../../CommandCreateGeo.cpp" line="1894"/> <source>Create an arc by its end points and a point along the arc</source> <translation>Criar um arco a partir de seus pontos de extremidade e um ponto ao longo do arco</translation> </message> </context> <context> <name>Sketcher_Create3PointCircle</name> <message> <location filename="../../CommandCreateGeo.cpp" line="4847"/> <location filename="../../CommandCreateGeo.cpp" line="4848"/> <source>Create a circle by 3 rim points</source> <translation>Criar um círculo a partir de 3 pontos de borda</translation> </message> </context> <context> <name>Sketcher_CreateArc</name> <message> <location 
filename="../../CommandCreateGeo.cpp" line="1889"/> <location filename="../../CommandCreateGeo.cpp" line="1890"/> <source>Create an arc by its center and by its end points</source> <translation>Criar um arco a partir do seu centro e por seus pontos de extremidade</translation> </message> </context> <context> <name>Sketcher_CreateArcOfEllipse</name> <message> <location filename="../../CommandCreateGeo.cpp" line="3984"/> <location filename="../../CommandCreateGeo.cpp" line="3985"/> <source>Create an arc of ellipse by its center, major radius, and endpoints</source> <translation>Criar um arco de elipse pelo centro, raio principal e pontos de extremidade</translation> </message> </context> <context> <name>Sketcher_CreateArcOfHyperbola</name> <message> <location filename="../../CommandCreateGeo.cpp" line="3988"/> <location filename="../../CommandCreateGeo.cpp" line="3989"/> <source>Create an arc of hyperbola by its center, major radius, and endpoints</source> <translation>Criar um arco de hipérbole pelo centro, raio principal e pontos de extremidade</translation> </message> </context> <context> <name>Sketcher_CreateArcOfParabola</name> <message> <location filename="../../CommandCreateGeo.cpp" line="3992"/> <location filename="../../CommandCreateGeo.cpp" line="3993"/> <source>Create an arc of parabola by its focus, vertex, and endpoints</source> <translation>Criar um arco de parábola pelo foco, vértice e pontos de extremidade</translation> </message> </context> <context> <name>Sketcher_CreateBSpline</name> <message> <location filename="../../CommandCreateGeo.cpp" line="4531"/> <source>B-spline by control points</source> <translation>B-spline por pontos de controle</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="4532"/> <location filename="../../CommandCreateGeo.cpp" line="4533"/> <source>Create a B-spline by control points</source> <translation>Criar uma B-spline por pontos de controle</translation> </message> </context> <context> 
<name>Sketcher_CreateCircle</name> <message> <location filename="../../CommandCreateGeo.cpp" line="4843"/> <location filename="../../CommandCreateGeo.cpp" line="4844"/> <source>Create a circle by its center and by a rim point</source> <translation>Criar um círculo a partir do seu centro e por um ponto de borda</translation> </message> </context> <context> <name>Sketcher_CreateEllipseBy3Points</name> <message> <location filename="../../CommandCreateGeo.cpp" line="3980"/> <location filename="../../CommandCreateGeo.cpp" line="3981"/> <source>Create a ellipse by periapsis, apoapsis, and minor radius</source> <translation>Criar uma elipse por periapsis, apoapsis e raio menor</translation> </message> </context> <context> <name>Sketcher_CreateEllipseByCenter</name> <message> <location filename="../../CommandCreateGeo.cpp" line="3976"/> <location filename="../../CommandCreateGeo.cpp" line="3977"/> <source>Create an ellipse by center, major radius and point</source> <translation>Criar uma elipse pelo centro, raio maior e ponto</translation> </message> </context> <context> <name>Sketcher_CreateFillet</name> <message> <location filename="../../CommandCreateGeo.cpp" line="5418"/> <location filename="../../CommandCreateGeo.cpp" line="5419"/> <source>Creates a radius between two lines</source> <translation>Cria um raio entre duas linhas</translation> </message> </context> <context> <name>Sketcher_CreateHeptagon</name> <message> <location filename="../../CommandCreateGeo.cpp" line="6967"/> <location filename="../../CommandCreateGeo.cpp" line="6968"/> <source>Create a heptagon by its center and by one corner</source> <translation>Criar um heptágono pelo seu centro e por um canto</translation> </message> </context> <context> <name>Sketcher_CreateHexagon</name> <message> <location filename="../../CommandCreateGeo.cpp" line="6963"/> <location filename="../../CommandCreateGeo.cpp" line="6964"/> <source>Create a hexagon by its center and by one corner</source> <translation>Criar um 
hexágono por seu centro e um canto</translation> </message> </context> <context> <name>Sketcher_CreateOctagon</name> <message> <location filename="../../CommandCreateGeo.cpp" line="6971"/> <location filename="../../CommandCreateGeo.cpp" line="6972"/> <source>Create an octagon by its center and by one corner</source> <translation>Criar um octógono pelo seu centro e por um canto</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="6975"/> <location filename="../../CommandCreateGeo.cpp" line="6976"/> <source>Create a regular polygon by its center and by one corner</source> <translation>Criar um polígono regular pelo seu centro e por um vértice</translation> </message> </context> <context> <name>Sketcher_CreatePentagon</name> <message> <location filename="../../CommandCreateGeo.cpp" line="6959"/> <location filename="../../CommandCreateGeo.cpp" line="6960"/> <source>Create a pentagon by its center and by one corner</source> <translation>Criar um pentágono pelo seu centro e por um canto</translation> </message> </context> <context> <name>Sketcher_CreatePointFillet</name> <message> <location filename="../../CommandCreateGeo.cpp" line="5422"/> <location filename="../../CommandCreateGeo.cpp" line="5423"/> <source>Fillet that preserves constraints and intersection point</source> <translation>Filete que preserva restrições e ponto de interseção</translation> </message> </context> <context> <name>Sketcher_CreateSquare</name> <message> <location filename="../../CommandCreateGeo.cpp" line="6955"/> <location filename="../../CommandCreateGeo.cpp" line="6956"/> <source>Create a square by its center and by one corner</source> <translation>Criar um quadrado pelo seu centro e por um canto</translation> </message> </context> <context> <name>Sketcher_CreateTriangle</name> <message> <location filename="../../CommandCreateGeo.cpp" line="6951"/> <location filename="../../CommandCreateGeo.cpp" line="6952"/> <source>Create an equilateral triangle by its 
center and by one corner</source> <translation>Criar um triângulo equilátero, pelo seu centro e por um canto</translation> </message> </context> <context> <name>Sketcher_Create_Periodic_BSpline</name> <message> <location filename="../../CommandCreateGeo.cpp" line="4535"/> <source>Periodic B-spline by control points</source> <translation>B-spline periódica por pontos de controle</translation> </message> <message> <location filename="../../CommandCreateGeo.cpp" line="4536"/> <location filename="../../CommandCreateGeo.cpp" line="4537"/> <source>Create a periodic B-spline by control points</source> <translation>Criar uma B-spline periódica por pontos de controle</translation> </message> </context> <context> <name>Sketcher_MapSketch</name> <message> <location filename="../../Command.cpp" line="533"/> <source>No sketch found</source> <translation>Nenhum esboço encontrado</translation> </message> <message> <location filename="../../Command.cpp" line="534"/> <source>The document doesn't have a sketch</source> <translation>O documento não tem um esboço</translation> </message> <message> <location filename="../../Command.cpp" line="543"/> <source>Select sketch</source> <translation>Selecione o esboço</translation> </message> <message> <location filename="../../Command.cpp" line="544"/> <source>Select a sketch from the list</source> <translation>Selecione um esboço da lista</translation> </message> <message> <location filename="../../Command.cpp" line="598"/> <source> (incompatible with selection)</source> <translation> (incompatível com a seleção)</translation> </message> <message> <location filename="../../Command.cpp" line="600"/> <source> (current)</source> <translation> (atual)</translation> </message> <message> <location filename="../../Command.cpp" line="607"/> <source> (suggested)</source> <translation> (sugerido)</translation> </message> <message> <location filename="../../Command.cpp" line="613"/> <source>Sketch attachment</source> <translation>Esboço 
anexado</translation> </message> <message> <location filename="../../Command.cpp" line="615"/> <source>Current attachment mode is incompatible with the new selection. Select the method to attach this sketch to selected objects.</source> <translation>O modo de anexo atual é incompatível com a nova seleção. Selecione um outro método para anexar este esboço aos objetos selecionados.</translation> </message> <message> <location filename="../../Command.cpp" line="619"/> <source>Select the method to attach this sketch to selected objects.</source> <translation>Selecione o método para anexar este esboço aos objetos selecionados.</translation> </message> <message> <location filename="../../Command.cpp" line="656"/> <source>Map sketch</source> <translation>Mapear esboço</translation> </message> <message> <location filename="../../Command.cpp" line="657"/> <source>Can't map a sketch to support: %1</source> <translation>Não é possível mapear um esboço para suporte: %1</translation> </message> </context> <context> <name>Sketcher_Move</name> <message> <location filename="../../CommandSketcherTools.cpp" line="1716"/> <location filename="../../CommandSketcherTools.cpp" line="1717"/> <source>Moves the geometry taking as reference the last selected point</source> <translation>Move a geometria usando como referência o último ponto selecionado</translation> </message> </context> <context> <name>Sketcher_NewSketch</name> <message> <location filename="../../Command.cpp" line="180"/> <source>Sketch attachment</source> <translation>Esboço anexado</translation> </message> <message> <location filename="../../Command.cpp" line="181"/> <source>Select the method to attach this sketch to selected object</source> <translation>Selecione o método para anexar este esboço para o objeto selecionado</translation> </message> </context> <context> <name>Sketcher_ReorientSketch</name> <message> <location filename="../../Command.cpp" line="395"/> <source>Sketch has support</source> <translation>O esboço 
tem suporte</translation> </message> <message> <location filename="../../Command.cpp" line="396"/> <source>Sketch with a support face cannot be reoriented. Do you want to detach it from the support?</source> <translation>Um esboço com uma face de suporte não pode ser reorientado. Deseja separá-lo do seu suporte?</translation> </message> </context> <context> <name>TaskSketcherMessages</name> <message> <location filename="../../TaskSketcherMessages.ui" line="14"/> <source>Form</source> <translation>Formulário</translation> </message> <message> <location filename="../../TaskSketcherMessages.ui" line="20"/> <source>Undefined degrees of freedom</source> <translation>Graus de liberdade indefinidos</translation> </message> <message> <location filename="../../TaskSketcherMessages.ui" line="30"/> <source>Not solved yet</source> <translation>Não resolvido ainda</translation> </message> <message> <location filename="../../TaskSketcherMessages.ui" line="40"/> <source>New constraints that would be redundant will automatically be removed</source> <translation>Novas restrições que seriam redundantes serão automaticamente removidas</translation> </message> <message> <location filename="../../TaskSketcherMessages.ui" line="43"/> <source>Auto remove redundants</source> <translation>Remover redundâncias automaticamente</translation> </message> <message> <location filename="../../TaskSketcherMessages.ui" line="61"/> <source>Executes a recomputation of active document after every sketch action</source> <translation>Executa um recálculo do documento ativo após cada comando</translation> </message> <message> <location filename="../../TaskSketcherMessages.ui" line="64"/> <source>Auto update</source> <translation>Atualização automática</translation> </message> <message> <location filename="../../TaskSketcherMessages.ui" line="80"/> <source>Forces recomputation of active document</source> <translation>Força um recálculo do documento ativo</translation> </message> <message> <location 
filename="../../TaskSketcherMessages.ui" line="83"/> <source>Update</source> <translation>Atualizar</translation> </message> </context> <context> <name>TaskSketcherSolverAdvanced</name> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="14"/> <source>Form</source> <translation>Formulário</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="22"/> <source>Default algorithm used for Sketch solving</source> <translation>Algoritmo padrão usado para resolver o Esboço</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="25"/> <source>Default solver:</source> <translation>Calculador padrão:</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="32"/> <source>Solver is used for solving the geometry. LevenbergMarquardt and DogLeg are trust region optimization algorithms. BFGS solver uses the Broyden–Fletcher–Goldfarb–Shanno algorithm.</source> <translation>O calculador usado para resolver a geometria. LevenbergMarquardt e DogLeg são algoritmos de otimização de região de confiança. 
O calculador BFGS usa o algoritmo Broyden–Fletcher–Goldfarb–Shanno.</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="47"/> <location filename="../../TaskSketcherSolverAdvanced.ui" line="393"/> <source>BFGS</source> <translation>BFGS</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="52"/> <location filename="../../TaskSketcherSolverAdvanced.ui" line="398"/> <source>LevenbergMarquardt</source> <translation>LevenbergMarquardt</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="57"/> <location filename="../../TaskSketcherSolverAdvanced.ui" line="403"/> <source>DogLeg</source> <translation>DogLeg</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="69"/> <source>Type of function to apply in DogLeg for the Gauss step</source> <translation>Tipo de função para aplicar em DogLeg para a etapa de Gauss</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="72"/> <source>DogLeg Gauss step:</source> <translation>DogLeg passo de Gauss:</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="79"/> <source>Step type used in the DogLeg algorithm</source> <translation>Tipo de etapa usado no algoritmo de DogLeg</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="92"/> <source>FullPivLU</source> <translation>FullPivLU</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="97"/> <source>LeastNorm-FullPivLU</source> <translation>LeastNorm-FullPivLU</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="102"/> <source>LeastNorm-LDLT</source> <translation>LeastNorm-LDLT</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="114"/> <source>Maximum 
number of iterations of the default algorithm</source> <translation>Número máximo de iterações do algoritmo padrão</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="117"/> <source>Maximum iterations:</source> <translation>Iterações máximas:</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="124"/> <source>Maximum iterations to find convergence before solver is stopped</source> <translation>Máximo de iterações para encontrar a convergência antes do calculador ser interrompido</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="301"/> <source>QR algorithm:</source> <translation>Algoritmo QR:</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="308"/> <source>During diagnosing the QR rank of matrix is calculated. Eigen Dense QR is a dense matrix QR with full pivoting; usually slower Eigen Sparse QR algorithm is optimized for sparse matrices; usually faster</source> <translation>Durante o diagnóstico, a classificação QR da matriz é calculada. Eigen Dense QR é uma matriz densa QR com pivô total; geralmente é mais lento o algorítimo Eigen Sparse QR é otimizado para matrizes escassas; geralmente é mais rápido</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="373"/> <source>Redundant solver:</source> <translation>Calculador de redundâncias:</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="380"/> <source>Solver used to determine whether a group is redundant or conflicting</source> <translation>Calculador usado para determinar se um grupo é redundante ou conflitante</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="418"/> <source>Redundant max. 
iterations:</source> <translation>Iterações máximas de redundâncias:</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="425"/> <source>Same as 'Maximum iterations', but for redundant solving</source> <translation>O mesmo que 'Iterações máximas', mas para resolução de redundâncias</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="454"/> <source>Redundant sketch size multiplier:</source> <translation>Multiplicador de tamanho do esboço para redundâncias:</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="461"/> <source>Same as 'Sketch size multiplier', but for redundant solving</source> <translation>Igual ao 'multiplicador de tamanho do esboço', mas para resolução de redundâncias</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="487"/> <source>Redundant convergence</source> <translation>Convergência redundante</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="494"/> <source>Same as 'Convergence', but for redundant solving</source> <translation>O mesmo que 'Convergência', mas para resolução de redundâncias</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="517"/> <source>Redundant param1</source> <translation>Parâmetro de redundância 1</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="541"/> <source>Redundant param2</source> <translation>Parâmetro de redundância 2</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="565"/> <source>Redundant param3</source> <translation>Parâmetro de redundância 3</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="592"/> <source>Console debug mode:</source> <translation>Modo de depuração do console:</translation> 
</message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="599"/> <source>Verbosity of console output</source> <translation>Verbosidade da saída do console</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="150"/> <source>If selected, the Maximum iterations value is multiplied by the sketch size</source> <translation>Se selecionado, o valor de iterações máximo é multiplicado pelo tamanho do esboço</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="153"/> <source>Sketch size multiplier:</source> <translation>Multiplicador de tamanho do esboço:</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="166"/> <source>Maximum iterations will be multiplied by number of parameters</source> <translation>Máximo de iterações será multiplicado pelo número de parâmetros</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="189"/> <source>Error threshold under which convergence is reached</source> <translation>Limite de erro sob as quais a convergência é alcançada</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="192"/> <source>Convergence:</source> <translation>Convergência:</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="199"/> <source>Threshold for squared error that is used to determine whether a solution converges or not</source> <translation>Limite para o erro ao quadrado que é usado para determinar se uma solução converge ou não</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="226"/> <source>Param1</source> <translation>Param1</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="250"/> <source>Param2</source> <translation>Param2</translation> </message> <message> <location 
filename="../../TaskSketcherSolverAdvanced.ui" line="274"/> <source>Param3</source> <translation>Param3</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="298"/> <source>Algorithm used for the rank revealing QR decomposition</source> <translation>Algoritmo usado para a classificação revelando decomposição QR</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="323"/> <source>Eigen Dense QR</source> <translation>Eigen densa QR</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="328"/> <source>Eigen Sparse QR</source> <translation>Eigen esparsas QR</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="340"/> <source>Pivot threshold</source> <translation>Limiar de pivô</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="347"/> <source>During a QR, values under the pivot threshold are treated as zero</source> <translation>Durante um QR, valores abaixo do limiar de pivô são tratados como zero</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="350"/> <source>1E-13</source> <translation>1E-13</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="370"/> <source>Solving algorithm used for determination of Redundant constraints</source> <translation>Resolvendo algoritmo usado para determinação de restrições redundantes</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="415"/> <source>Maximum number of iterations of the solver used for determination of Redundant constraints</source> <translation>Número máximo de iterações do solver usado para determinação de restrições redundantes</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="451"/> <source>If selected, the Maximum 
iterations value for the redundant algorithm is multiplied by the sketch size</source> <translation>Se selecionado, o valor de iterações máximo para o algoritmo redundante é multiplicado pelo tamanho do esboço</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="484"/> <source>Error threshold under which convergence is reached for the solving of redundant constraints</source> <translation>Limite de erro sob as quais a convergência é alcançada para a resolução de restrições redundantes</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="497"/> <source>1E-10</source> <translation>1E-10</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="589"/> <source>Degree of verbosity of the debug output to the console</source> <translation>Grau de detalhamento da saída de depuração para o console</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="612"/> <source>None</source> <translation>Nenhum</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="617"/> <source>Minimum</source> <translation>Mínimo</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="622"/> <source>Iteration Level</source> <translation>Nível de iteração</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="634"/> <source>Solve</source> <translation>Resolver</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="641"/> <source>Resets all solver values to their default values</source> <translation>Redefine todos os valores do solver para seus valores predefinidos</translation> </message> <message> <location filename="../../TaskSketcherSolverAdvanced.ui" line="644"/> <source>Restore Defaults</source> <translation>Restaurar Predefinições</translation> </message> 
</context> <context> <name>Workbench</name> <message> <location filename="../../Workbench.cpp" line="37"/> <source>Sketcher</source> <translation>Esboço</translation> </message> <message> <location filename="../../Workbench.cpp" line="38"/> <source>Sketcher geometries</source> <translation>Geometrias do esboço</translation> </message> <message> <location filename="../../Workbench.cpp" line="39"/> <source>Sketcher constraints</source> <translation>Restrições de esboço</translation> </message> <message> <location filename="../../Workbench.cpp" line="40"/> <source>Sketcher tools</source> <translation>Ferramentas de esboço</translation> </message> <message> <location filename="../../Workbench.cpp" line="41"/> <source>Sketcher B-spline tools</source> <translation>Ferramentas de B-spline</translation> </message> <message> <location filename="../../Workbench.cpp" line="42"/> <source>Sketcher virtual space</source> <translation>Espaço virtual de esboço</translation> </message> </context> </TS><|fim▁end|>
<message> <location filename="../../SketcherSettings.ui" line="59"/> <source>Special solver algorithm will be used while dragging sketch elements.