Copyright (c) 2015, core
All rights reserved.

Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.

* Neither the name of silverstripe-print-pretty nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
{ "content_hash": "359d4fcc9a2845530ee472d8a3895a5c", "timestamp": "", "source": "github", "line_count": 27, "max_line_length": 78, "avg_line_length": 55.074074074074076, "alnum_prop": 0.8117014122394082, "repo_name": "coreiho/silverstripe-print-pretty", "id": "3530da75362625ab22871cf89d4ffc07cab5bb88", "size": "1487", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "license.md", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "PHP", "bytes": "394" } ], "symlink_target": "" }
ACCEPTED

#### According to
International Plant Names Index

#### Published in
null

#### Original name
null

### Remarks
null
{ "content_hash": "63b3c9a4e7555cb376e4d6dfcefa1620", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 31, "avg_line_length": 9.692307692307692, "alnum_prop": 0.7063492063492064, "repo_name": "mdoering/backbone", "id": "831c16603417ea5006ca53ef0c6c4ac06d489694", "size": "193", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Magnoliopsida/Caryophyllales/Aizoaceae/Antimima/Antimima hamatilis/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
SYNONYM

#### According to
The Catalogue of Life, 3rd January 2011

#### Published in
null

#### Original name
null

### Remarks
null
{ "content_hash": "f62f8397ed40ff540059619ea1807e14", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 39, "avg_line_length": 10.23076923076923, "alnum_prop": 0.6917293233082706, "repo_name": "mdoering/backbone", "id": "b68e683a6d547bd95e36585ab930ba42598dc5f0", "size": "180", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Liliopsida/Poales/Cyperaceae/Scleria/Scleria latifolia/ Syn. Scleria nervosa/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
import {
  GraphQLID as ID,
  GraphQLInputObjectType,
  GraphQLString,
} from 'graphql';

const TagInputType = new GraphQLInputObjectType({
  name: 'TagInput',
  fields: {
    text: {
      type: GraphQLString,
    },
    deName: {
      type: GraphQLString,
    },
    itName: {
      type: GraphQLString,
    },
    lldName: {
      type: GraphQLString,
    },
    id: {
      type: ID,
      description: 'Must be provided for mutations',
    },
  },
});

export default TagInputType;
{ "content_hash": "4fa392c9e05e26447292238f9a2c40d6", "timestamp": "", "source": "github", "line_count": 29, "max_line_length": 52, "avg_line_length": 16.82758620689655, "alnum_prop": 0.5860655737704918, "repo_name": "nambawan/g-old", "id": "1fadc86be50c3f976f35110d69d5e8dc7164d774", "size": "488", "binary": false, "copies": "1", "ref": "refs/heads/dev", "path": "src/data/types/TagInputType.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "50550" }, { "name": "HTML", "bytes": "36241" }, { "name": "JavaScript", "bytes": "1424466" } ], "symlink_target": "" }
<?xml version="1.0" encoding="utf-8"?> <!DOCTYPE TS> <TS version="2.1" language="en"> <context> <name>AboutDialog</name> <message> <location filename="../forms/aboutdialog.ui" line="+14"/> <source>About Neutron</source> <translation>About Neutron</translation> </message> <message> <location line="+39"/> <source>&lt;b&gt;Neutron&lt;/b&gt; version</source> <translation>&lt;b&gt;Neutron&lt;/b&gt; version</translation> </message> <message> <location line="+41"/> <source>Copyright © 2009-2014 The Bitcoin developers Copyright © 2012-2014 The NovaCoin developers Copyright © 2014 The Neutron developers</source> <translation type="unfinished"></translation> </message> <message> <location line="+15"/> <source> This is experimental software. Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php. This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source> <translation> This is experimental software. Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php. This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</translation> </message> </context> <context> <name>AddressBookPage</name> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>Address Book</source> <translation>Address Book</translation> </message> <message> <location line="+22"/> <source>Double-click to edit address or label</source> <translation>Double-click to edit address or label</translation> </message> <message> <location line="+27"/> <source>Create a new address</source> <translation>Create a new address</translation> </message> <message> <location line="+14"/> <source>Copy the currently selected address to the system clipboard</source> <translation>Copy the currently selected address to the system clipboard</translation> </message> <message> <location line="-11"/> <source>&amp;New Address</source> <translation>&amp;New Address</translation> </message> <message> <location line="-46"/> <source>These are your Neutron addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source> <translation>These are your Neutron addresses for receiving payments. 
You may want to give a different one to each sender so you can keep track of who is paying you.</translation> </message> <message> <location line="+60"/> <source>&amp;Copy Address</source> <translation>&amp;Copy Address</translation> </message> <message> <location line="+11"/> <source>Show &amp;QR Code</source> <translation>Show &amp;QR Code</translation> </message> <message> <location line="+11"/> <source>Sign a message to prove you own a Neutron address</source> <translation>Sign a message to prove you own a Neutron address</translation> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation></translation> </message> <message> <location line="+25"/> <source>Delete the currently selected address from the list</source> <translation></translation> </message> <message> <location line="-14"/> <source>Verify a message to ensure it was signed with a specified Neutron address</source> <translation>Verify a message to ensure it was signed with a specified Neutron address</translation> </message> <message> <location line="+3"/> <source>&amp;Verify Message</source> <translation>&amp;Verify Message</translation> </message> <message> <location line="+14"/> <source>&amp;Delete</source> <translation>&amp;Delete</translation> </message> <message> <location filename="../addressbookpage.cpp" line="+65"/> <source>Copy &amp;Label</source> <translation>Copy &amp;Label</translation> </message> <message> <location line="+2"/> <source>&amp;Edit</source> <translation>&amp;Edit</translation> </message> <message> <location line="+250"/> <source>Export Address Book Data</source> <translation>Export Address Book Data</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Comma separated file (*.csv)</translation> </message> <message> <location line="+13"/> <source>Error exporting</source> <translation>Error exporting</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>Could not write to file %1.</translation> </message> </context> <context> <name>AddressTableModel</name> <message> <location filename="../addresstablemodel.cpp" line="+144"/> <source>Label</source> <translation>Label</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Address</translation> </message> <message> <location line="+36"/> <source>(no label)</source> <translation>(no label)</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <location filename="../forms/askpassphrasedialog.ui" line="+26"/> <source>Passphrase Dialog</source> <translation>Passphrase Dialog</translation> </message> <message> <location line="+21"/> <source>Enter passphrase</source> <translation>Enter passphrase</translation> </message> <message> <location line="+14"/> <source>New passphrase</source> <translation>New passphrase</translation> </message> <message> <location line="+14"/> <source>Repeat new passphrase</source> <translation>Repeat new passphrase</translation> </message> <message> <location line="+33"/> <source>Serves to disable the trivial sendmoney when OS account compromised. 
Provides no real security.</source> <translation type="unfinished"></translation> </message> <message> <location line="+3"/> <source>For staking only</source> <translation type="unfinished"></translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="+35"/> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;10 or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;10 or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</translation> </message> <message> <location line="+1"/> <source>Encrypt wallet</source> <translation>Encrypt wallet</translation> </message> <message> <location line="+7"/> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation>This operation needs your wallet passphrase to unlock the wallet.</translation> </message> <message> <location line="+5"/> <source>Unlock wallet</source> <translation>Unlock wallet</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation>This operation needs your wallet passphrase to decrypt the wallet.</translation> </message> <message> <location line="+5"/> <source>Decrypt wallet</source> <translation>Decrypt wallet</translation> </message> <message> <location line="+3"/> <source>Change passphrase</source> <translation>Change passphrase</translation> </message> <message> <location line="+1"/> <source>Enter the old and new passphrase to the wallet.</source> <translation>Enter the old and new passphrase to the wallet.</translation> </message> <message> <location line="+46"/> <source>Confirm wallet encryption</source> <translation>Confirm wallet encryption</translation> </message> <message> <location line="+1"/> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR COINS&lt;/b&gt;!</source> <translation>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR COINS&lt;/b&gt;!</translation> </message> <message> <location line="+0"/> <source>Are you sure you wish to encrypt your wallet?</source> <translation>Are you sure you wish to encrypt your wallet?</translation> </message> <message> <location line="+15"/> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation></translation> </message> <message> <location line="+103"/> <location line="+24"/> <source>Warning: The Caps Lock key is on!</source> <translation>Warning: The Caps Lock key is on!</translation> </message> <message> <location line="-133"/> <location line="+60"/> <source>Wallet encrypted</source> <translation>Wallet encrypted</translation> </message> <message> <location line="-58"/> <source>Neutron will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your coins from being stolen by malware infecting your computer.</source> <translation>Neutron will close now to finish the encryption process. 
Remember that encrypting your wallet cannot fully protect your coins from being stolen by malware infecting your computer.</translation> </message> <message> <location line="+13"/> <location line="+7"/> <location line="+44"/> <location line="+6"/> <source>Wallet encryption failed</source> <translation>Wallet encryption failed</translation> </message> <message> <location line="-56"/> <source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source> <translation>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</translation> </message> <message> <location line="+7"/> <location line="+50"/> <source>The supplied passphrases do not match.</source> <translation>The supplied passphrases do not match.</translation> </message> <message> <location line="-38"/> <source>Wallet unlock failed</source> <translation>Wallet unlock failed</translation> </message> <message> <location line="+1"/> <location line="+12"/> <location line="+19"/> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation>The passphrase entered for the wallet decryption was incorrect.</translation> </message> <message> <location line="-20"/> <source>Wallet decryption failed</source> <translation>Wallet decryption failed</translation> </message> <message> <location line="+14"/> <source>Wallet passphrase was successfully changed.</source> <translation>Wallet passphrase was successfully changed.</translation> </message> </context> <context> <name>BitcoinGUI</name> <message> <location filename="../bitcoingui.cpp" line="+280"/> <source>Sign &amp;message...</source> <translation>Sign &amp;message...</translation> </message> <message> <location line="+242"/> <source>Synchronizing with network...</source> <translation>Synchronizing with network...</translation> </message> <message> <location line="-308"/> <source>&amp;Overview</source> <translation>&amp;Overview</translation> </message> <message> <location line="+1"/> <source>Show general overview of wallet</source> <translation>Show general overview of wallet</translation> </message> <message> <location line="+17"/> <source>&amp;Transactions</source> <translation>&amp;Transactions</translation> </message> <message> <location line="+1"/> <source>Browse transaction history</source> <translation>Browse transaction history</translation> </message> <message> <location line="+5"/> <source>&amp;Address Book</source> <translation>&amp;Address Book</translation> </message> <message> <location line="+1"/> <source>Edit the list of stored addresses and labels</source> <translation>Edit the list of stored addresses and labels</translation> </message> <message> <location line="-13"/> <source>&amp;Receive coins</source> <translation>&amp;Receive coins</translation> </message> <message> <location line="+1"/> <source>Show the list of addresses for receiving payments</source> <translation>Show the list of addresses for receiving payments</translation> </message> <message> <location line="-7"/> <source>&amp;Send coins</source> <translation>&amp;Send coins</translation> </message> <message> <location line="+35"/> <source>E&amp;xit</source> <translation>E&amp;xit</translation> </message> <message> <location line="+1"/> <source>Quit application</source> <translation>Quit application</translation> </message> <message> <location line="+4"/> <source>Show information about Neutron</source> <translation>Show information about Neutron</translation> </message> <message> <location line="+2"/> <source>About 
&amp;Qt</source> <translation>About &amp;Qt</translation> </message> <message> <location line="+1"/> <source>Show information about Qt</source> <translation>Show information about Qt</translation> </message> <message> <location line="+2"/> <source>&amp;Options...</source> <translation>&amp;Options...</translation> </message> <message> <location line="+4"/> <source>&amp;Encrypt Wallet...</source> <translation>&amp;Encrypt Wallet...</translation> </message> <message> <location line="+3"/> <source>&amp;Backup Wallet...</source> <translation>&amp;Backup Wallet...</translation> </message> <message> <location line="+2"/> <source>&amp;Change Passphrase...</source> <translation>&amp;Change Passphrase...</translation> </message> <message numerus="yes"> <location line="+250"/> <source>~%n block(s) remaining</source> <translation> <numerusform>~%n block remaining</numerusform> <numerusform>~%n blocks remaining</numerusform> </translation> </message> <message> <location line="+6"/> <source>Downloaded %1 of %2 blocks of transaction history (%3% done).</source> <translation>Downloaded %1 of %2 blocks of transaction history (%3% done).</translation> </message> <message> <location line="-247"/> <source>&amp;Export...</source> <translation>&amp;Export...</translation> </message> <message> <location line="-62"/> <source>Send coins to a Neutron address</source> <translation>Send coins to a Neutron address</translation> </message> <message> <location line="+45"/> <source>Modify configuration options for Neutron</source> <translation>Modify configuration options for Neutron</translation> </message> <message> <location line="+18"/> <source>Export the data in the current tab to a file</source> <translation>Export the data in the current tab to a file</translation> </message> <message> <location line="-14"/> <source>Encrypt or decrypt wallet</source> <translation>Encrypt or decrypt wallet</translation> </message> <message> <location line="+3"/> <source>Backup wallet to another location</source> <translation>Backup wallet to another location</translation> </message> <message> <location line="+2"/> <source>Change the passphrase used for wallet encryption</source> <translation>Change the passphrase used for wallet encryption</translation> </message> <message> <location line="+10"/> <source>&amp;Debug window</source> <translation>&amp;Debug window</translation> </message> <message> <location line="+1"/> <source>Open debugging and diagnostic console</source> <translation>Open debugging and diagnostic console</translation> </message> <message> <location line="-5"/> <source>&amp;Verify message...</source> <translation>&amp;Verify message...</translation> </message> <message> <location line="-200"/> <source>Neutron</source> <translation>Neutron</translation> </message> <message> <location line="+0"/> <source>Wallet</source> <translation>Wallet</translation> </message> <message> <location line="+178"/> <source>&amp;About Neutron</source> <translation>&amp;About Neutron</translation> </message> <message> <location line="+9"/> <source>&amp;Show / Hide</source> <translation>&amp;Show / Hide</translation> </message> <message> <location line="+9"/> <source>Unlock wallet</source> <translation type="unfinished">Unlock wallet</translation> </message> <message> <location line="+1"/> <source>&amp;Lock Wallet</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Lock wallet</source> <translation type="unfinished"></translation> </message> <message> <location line="+34"/> 
<source>&amp;File</source> <translation>&amp;File</translation> </message> <message> <location line="+8"/> <source>&amp;Settings</source> <translation>&amp;Settings</translation> </message> <message> <location line="+8"/> <source>&amp;Help</source> <translation>&amp;Help</translation> </message> <message> <location line="+9"/> <source>Tabs toolbar</source> <translation>Tabs toolbar</translation> </message> <message> <location line="+8"/> <source>Actions toolbar</source> <translation>Actions toolbar</translation> </message> <message> <location line="+13"/> <location line="+9"/> <source>[testnet]</source> <translation>[testnet]</translation> </message> <message> <location line="+0"/> <location line="+60"/> <source>Neutron client</source> <translation>Neutron client</translation> </message> <message numerus="yes"> <location line="+70"/> <source>%n active connection(s) to Neutron network</source> <translation> <numerusform>%n active connection to Neutron network</numerusform> <numerusform>%n active connections to Neutron network</numerusform> </translation> </message> <message> <location line="+40"/> <source>Downloaded %1 blocks of transaction history.</source> <translation>Downloaded %1 blocks of transaction history.</translation> </message> <message> <location line="+428"/> <source>Staking.&lt;br&gt;Your weight is %1&lt;br&gt;Network weight is %2&lt;br&gt;Expected time to earn reward is %3</source> <translation></translation> </message> <message> <location line="+6"/> <source>Not staking because wallet is locked</source> <translation></translation> </message> <message> <location line="+2"/> <source>Not staking because wallet is offline</source> <translation></translation> </message> <message> <location line="+2"/> <source>Not staking because wallet is syncing</source> <translation></translation> </message> <message> <location line="+2"/> <source>Not staking because you don&apos;t have mature coins</source> <translation></translation> </message> <message numerus="yes"> <location line="-418"/> <source>%n second(s) ago</source> <translation> <numerusform>%n second ago</numerusform> <numerusform>%n seconds ago</numerusform> </translation> </message> <message> <location line="-284"/> <source>&amp;Unlock Wallet...</source> <translation></translation> </message> <message numerus="yes"> <location line="+288"/> <source>%n minute(s) ago</source> <translation> <numerusform>%n minute ago</numerusform> <numerusform>%n minutes ago</numerusform> </translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n hour(s) ago</source> <translation> <numerusform>%n hour ago</numerusform> <numerusform>%n hours ago</numerusform> </translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n day(s) ago</source> <translation> <numerusform>%n day ago</numerusform> <numerusform>%n days ago</numerusform> </translation> </message> <message> <location line="+6"/> <source>Up to date</source> <translation>Up to date</translation> </message> <message> <location line="+7"/> <source>Catching up...</source> <translation>Catching up...</translation> </message> <message> <location line="+10"/> <source>Last received block was generated %1.</source> <translation>Last received block was generated %1.</translation> </message> <message> <location line="+59"/> <source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. 
Do you want to pay the fee?</source> <translation></translation> </message> <message> <location line="+5"/> <source>Confirm transaction fee</source> <translation>Confirm transaction fee</translation> </message> <message> <location line="+27"/> <source>Sent transaction</source> <translation>Sent transaction</translation> </message> <message> <location line="+1"/> <source>Incoming transaction</source> <translation>Incoming transaction</translation> </message> <message> <location line="+1"/> <source>Date: %1 Amount: %2 Type: %3 Address: %4 </source> <translation>Date: %1 Amount: %2 Type: %3 Address: %4 </translation> </message> <message> <location line="+100"/> <location line="+15"/> <source>URI handling</source> <translation>URI handling</translation> </message> <message> <location line="-15"/> <location line="+15"/> <source>URI can not be parsed! This can be caused by an invalid Neutron address or malformed URI parameters.</source> <translation>URI can not be parsed! This can be caused by an invalid Neutron address or malformed URI parameters.</translation> </message> <message> <location line="+18"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</translation> </message> <message> <location line="+10"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</translation> </message> <message> <location line="+25"/> <source>Backup Wallet</source> <translation>Backup Wallet</translation> </message> <message> <location line="+0"/> <source>Wallet Data (*.dat)</source> <translation>Wallet Data (*.dat)</translation> </message> <message> <location line="+3"/> <source>Backup Failed</source> <translation>Backup Failed</translation> </message> <message> <location line="+0"/> <source>There was an error trying to save the wallet data to the new location.</source> <translation>There was an error trying to save the wallet data to the new location.</translation> </message> <message numerus="yes"> <location line="+91"/> <source>%n second(s)</source> <translation> <numerusform>%n second</numerusform> <numerusform>%n seconds</numerusform> </translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n minute(s)</source> <translation> <numerusform>%n minute</numerusform> <numerusform>%n minutes</numerusform> </translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n hour(s)</source> <translation> <numerusform>%n hour</numerusform> <numerusform>%n hours</numerusform> </translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n day(s)</source> <translation> <numerusform>%n day</numerusform> <numerusform>%n days</numerusform> </translation> </message> <message> <location line="+18"/> <source>Not staking</source> <translation type="unfinished"></translation> </message> <message> <location filename="../bitcoin.cpp" line="+109"/> <source>A fatal error occurred. Neutron can no longer continue safely and will quit.</source> <translation>A fatal error occurred. 
Neutron can no longer continue safely and will quit.</translation> </message> </context> <context> <name>ClientModel</name> <message> <location filename="../clientmodel.cpp" line="+90"/> <source>Network Alert</source> <translation>Network Alert</translation> </message> </context> <context> <name>CoinControlDialog</name> <message> <location filename="../forms/coincontroldialog.ui" line="+14"/> <source>Coin Control</source> <translation type="unfinished"></translation> </message> <message> <location line="+31"/> <source>Quantity:</source> <translation type="unfinished"></translation> </message> <message> <location line="+32"/> <source>Bytes:</source> <translation type="unfinished"></translation> </message> <message> <location line="+48"/> <source>Amount:</source> <translation type="unfinished">Amount:</translation> </message> <message> <location line="+32"/> <source>Priority:</source> <translation type="unfinished"></translation> </message> <message> <location line="+48"/> <source>Fee:</source> <translation type="unfinished"></translation> </message> <message> <location line="+35"/> <source>Low Output:</source> <translation type="unfinished"></translation> </message> <message> <location filename="../coincontroldialog.cpp" line="+551"/> <source>no</source> <translation type="unfinished"></translation> </message> <message> <location filename="../forms/coincontroldialog.ui" line="+51"/> <source>After Fee:</source> <translation type="unfinished"></translation> </message> <message> <location line="+35"/> <source>Change:</source> <translation type="unfinished"></translation> </message> <message> <location line="+69"/> <source>(un)select all</source> <translation type="unfinished"></translation> </message> <message> <location line="+13"/> <source>Tree mode</source> <translation type="unfinished"></translation> </message> <message> <location line="+16"/> <source>List mode</source> <translation type="unfinished"></translation> </message> <message> <location line="+45"/> <source>Amount</source> <translation type="unfinished">Amount</translation> </message> <message> <location line="+5"/> <source>Label</source> <translation type="unfinished">Label</translation> </message> <message> <location line="+5"/> <source>Address</source> <translation type="unfinished">Address</translation> </message> <message> <location line="+5"/> <source>Date</source> <translation type="unfinished">Date</translation> </message> <message> <location line="+5"/> <source>Confirmations</source> <translation type="unfinished"></translation> </message> <message> <location line="+3"/> <source>Confirmed</source> <translation type="unfinished">Confirmed</translation> </message> <message> <location line="+5"/> <source>Priority</source> <translation type="unfinished"></translation> </message> <message> <location filename="../coincontroldialog.cpp" line="-515"/> <source>Copy address</source> <translation type="unfinished">Copy address</translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation type="unfinished">Copy label</translation> </message> <message> <location line="+1"/> <location line="+26"/> <source>Copy amount</source> <translation type="unfinished">Copy amount</translation> </message> <message> <location line="-25"/> <source>Copy transaction ID</source> <translation type="unfinished">Copy transaction ID</translation> </message> <message> <location line="+24"/> <source>Copy quantity</source> <translation type="unfinished"></translation> </message> <message> <location line="+2"/> <source>Copy 
fee</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Copy after fee</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Copy bytes</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Copy priority</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Copy low output</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Copy change</source> <translation type="unfinished"></translation> </message> <message> <location line="+317"/> <source>highest</source> <translation></translation> </message> <message> <location line="+1"/> <source>high</source> <translation></translation> </message> <message> <location line="+1"/> <source>medium-high</source> <translation></translation> </message> <message> <location line="+1"/> <source>medium</source> <translation></translation> </message> <message> <location line="+4"/> <source>low-medium</source> <translation></translation> </message> <message> <location line="+1"/> <source>low</source> <translation></translation> </message> <message> <location line="+1"/> <source>lowest</source> <translation></translation> </message> <message> <location line="+155"/> <source>DUST</source> <translation type="unfinished"></translation> </message> <message> <location line="+0"/> <source>yes</source> <translation type="unfinished"></translation> </message> <message> <location line="+10"/> <source>This label turns red, if the transaction size is bigger than 10000 bytes. This means a fee of at least %1 per kb is required. Can vary +/- 1 Byte per input.</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>Transactions with higher priority get more likely into a block. This label turns red, if the priority is smaller than &quot;medium&quot;. This means a fee of at least %1 per kb is required.</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>This label turns red, if any recipient receives an amount smaller than %1. This means a fee of at least %2 is required. Amounts below 0.546 times the minimum relay fee are shown as DUST.</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>This label turns red, if the change is smaller than %1. 
This means a fee of at least %2 is required.</source> <translation type="unfinished"></translation> </message> <message> <location line="+37"/> <location line="+66"/> <source>(no label)</source> <translation type="unfinished">(no label)</translation> </message> <message> <location line="-9"/> <source>change from %1 (%2)</source> <translation type="unfinished"></translation> </message> <message> <location line="+1"/> <source>(change)</source> <translation type="unfinished"></translation> </message> </context> <context> <name>EditAddressDialog</name> <message> <location filename="../forms/editaddressdialog.ui" line="+14"/> <source>Edit Address</source> <translation>Edit Address</translation> </message> <message> <location line="+11"/> <source>&amp;Label</source> <translation>&amp;Label</translation> </message> <message> <location line="+10"/> <source>The label associated with this address book entry</source> <translation>The label associated with this address book entry</translation> </message> <message> <location line="+7"/> <source>&amp;Address</source> <translation>&amp;Address</translation> </message> <message> <location line="+10"/> <source>The address associated with this address book entry. This can only be modified for sending addresses.</source> <translation>The address associated with this address book entry. This can only be modified for sending addresses.</translation> </message> <message> <location filename="../editaddressdialog.cpp" line="+20"/> <source>New receiving address</source> <translation>New receiving address</translation> </message> <message> <location line="+4"/> <source>New sending address</source> <translation>New sending address</translation> </message> <message> <location line="+3"/> <source>Edit receiving address</source> <translation>Edit receiving address</translation> </message> <message> <location line="+4"/> <source>Edit sending address</source> <translation>Edit sending address</translation> </message> <message> <location line="+76"/> <source>The entered address &quot;%1&quot; is already in the address book.</source> <translation>The entered address &quot;%1&quot; is already in the address book.</translation> </message> <message> <location line="-5"/> <source>The entered address &quot;%1&quot; is not a valid Neutron address.</source> <translation>The entered address &quot;%1&quot; is not a valid Neutron address.</translation> </message> <message> <location line="+10"/> <source>Could not unlock wallet.</source> <translation>Could not unlock wallet.</translation> </message> <message> <location line="+5"/> <source>New key generation failed.</source> <translation>New key generation failed.</translation> </message> </context> <context> <name>GUIUtil::HelpMessageBox</name> <message> <location filename="../guiutil.cpp" line="+424"/> <location line="+12"/> <source>Neutron-Qt</source> <translation>Neutron-Qt</translation> </message> <message> <location line="-12"/> <source>version</source> <translation>version</translation> </message> <message> <location line="+2"/> <source>Usage:</source> <translation>Usage:</translation> </message> <message> <location line="+1"/> <source>command-line options</source> <translation>command-line options</translation> </message> <message> <location line="+4"/> <source>UI options</source> <translation>UI options</translation> </message> <message> <location line="+1"/> <source>Set language, for example &quot;de_DE&quot; (default: system locale)</source> <translation>Set language, for example &quot;de_DE&quot; (default: system 
locale)</translation> </message> <message> <location line="+1"/> <source>Start minimized</source> <translation>Start minimized</translation> </message> <message> <location line="+1"/> <source>Show splash screen on startup (default: 1)</source> <translation>Show splash screen on startup (default: 1)</translation> </message> </context> <context> <name>OptionsDialog</name> <message> <location filename="../forms/optionsdialog.ui" line="+14"/> <source>Options</source> <translation>Options</translation> </message> <message> <location line="+16"/> <source>&amp;Main</source> <translation>&amp;Main</translation> </message> <message> <location line="+6"/> <source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB. Fee 5.00 recommended.</source> <translation>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB. Fee 5.00 recommended.</translation> </message> <message> <location line="+15"/> <source>Pay transaction &amp;fee</source> <translation>Pay transaction &amp;fee</translation> </message> <message> <location line="+31"/> <source>Reserved amount does not participate in staking and is therefore spendable at any time.</source> <translation></translation> </message> <message> <location line="+15"/> <source>Reserve</source> <translation></translation> </message> <message> <location line="+31"/> <source>Automatically start Neutron after logging in to the system.</source> <translation>Automatically start Neutron after logging in to the system.</translation> </message> <message> <location line="+3"/> <source>&amp;Start Neutron on system login</source> <translation>&amp;Start Neutron on system login</translation> </message> <message> <location line="+7"/> <source>Detach block and address databases at shutdown. This means they can be moved to another data directory, but it slows down shutdown. The wallet is always detached.</source> <translation>Detach block and address databases at shutdown. This means they can be moved to another data directory, but it slows down shutdown. The wallet is always detached.</translation> </message> <message> <location line="+3"/> <source>&amp;Detach databases at shutdown</source> <translation>&amp;Detach databases at shutdown</translation> </message> <message> <location line="+21"/> <source>&amp;Network</source> <translation>&amp;Network</translation> </message> <message> <location line="+6"/> <source>Automatically open the Neutron client port on the router. This only works when your router supports UPnP and it is enabled.</source> <translation>Automatically open the Neutron client port on the router. This only works when your router supports UPnP and it is enabled.</translation> </message> <message> <location line="+3"/> <source>Map port using &amp;UPnP</source> <translation>Map port using &amp;UPnP</translation> </message> <message> <location line="+7"/> <source>Connect to the Neutron network through a SOCKS proxy (e.g. when connecting through Tor).</source> <translation>Connect to the Neutron network through a SOCKS proxy (e.g. when connecting through Tor).</translation> </message> <message> <location line="+3"/> <source>&amp;Connect through SOCKS proxy:</source> <translation>&amp;Connect through SOCKS proxy:</translation> </message> <message> <location line="+9"/> <source>Proxy &amp;IP:</source> <translation>Proxy &amp;IP:</translation> </message> <message> <location line="+19"/> <source>IP address of the proxy (e.g. 
127.0.0.1)</source> <translation>IP address of the proxy (e.g. 127.0.0.1)</translation> </message> <message> <location line="+7"/> <source>&amp;Port:</source> <translation>&amp;Port:</translation> </message> <message> <location line="+19"/> <source>Port of the proxy (e.g. 9050)</source> <translation>Port of the proxy (e.g. 9050)</translation> </message> <message> <location line="+7"/> <source>SOCKS &amp;Version:</source> <translation>SOCKS &amp;Version:</translation> </message> <message> <location line="+13"/> <source>SOCKS version of the proxy (e.g. 5)</source> <translation>SOCKS version of the proxy (e.g. 5)</translation> </message> <message> <location line="+36"/> <source>&amp;Window</source> <translation>&amp;Window</translation> </message> <message> <location line="+6"/> <source>Show only a tray icon after minimizing the window.</source> <translation>Show only a tray icon after minimizing the window.</translation> </message> <message> <location line="+3"/> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation>&amp;Minimize to the tray instead of the taskbar</translation> </message> <message> <location line="+7"/> <source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source> <translation>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</translation> </message> <message> <location line="+3"/> <source>M&amp;inimize on close</source> <translation>M&amp;inimize on close</translation> </message> <message> <location line="+21"/> <source>&amp;Display</source> <translation>&amp;Display</translation> </message> <message> <location line="+8"/> <source>User Interface &amp;language:</source> <translation>User Interface &amp;language:</translation> </message> <message> <location line="+13"/> <source>The user interface language can be set here. This setting will take effect after restarting Neutron.</source> <translation>The user interface language can be set here. 
This setting will take effect after restarting Neutron.</translation> </message> <message> <location line="+11"/> <source>&amp;Unit to show amounts in:</source> <translation>&amp;Unit to show amounts in:</translation> </message> <message> <location line="+13"/> <source>Choose the default subdivision unit to show in the interface and when sending coins.</source> <translation>Choose the default subdivision unit to show in the interface and when sending coins.</translation> </message> <message> <location line="+9"/> <source>Whether to show Neutron addresses in the transaction list or not.</source> <translation>Whether to show Neutron addresses in the transaction list or not.</translation> </message> <message> <location line="+3"/> <source>&amp;Display addresses in transaction list</source> <translation>&amp;Display addresses in transaction list</translation> </message> <message> <location line="+7"/> <source>Whether to show coin control features or not.</source> <translation></translation> </message> <message> <location line="+3"/> <source>Display coin &amp;control features (experts only!)</source> <translation></translation> </message> <message> <location line="+71"/> <source>&amp;OK</source> <translation>&amp;OK</translation> </message> <message> <location line="+7"/> <source>&amp;Cancel</source> <translation>&amp;Cancel</translation> </message> <message> <location line="+10"/> <source>&amp;Apply</source> <translation>&amp;Apply</translation> </message> <message> <location filename="../optionsdialog.cpp" line="+55"/> <source>default</source> <translation>default</translation> </message> <message> <location line="+149"/> <location line="+9"/> <source>Warning</source> <translation>Warning</translation> </message> <message> <location line="-9"/> <location line="+9"/> <source>This setting will take effect after restarting Neutron.</source> <translation>This setting will take effect after restarting Neutron.</translation> </message> <message> <location line="+29"/> <source>The supplied proxy address is invalid.</source> <translation>The supplied proxy address is invalid.</translation> </message> </context> <context> <name>OverviewPage</name> <message> <location filename="../forms/overviewpage.ui" line="+14"/> <source>Form</source> <translation>Form</translation> </message> <message> <location line="+33"/> <location line="+231"/> <source>The displayed information may be out of date. Your wallet automatically synchronizes with the Neutron network after a connection is established, but this process has not completed yet.</source> <translation>The displayed information may be out of date. 
Your wallet automatically synchronizes with the Neutron network after a connection is established, but this process has not completed yet.</translation> </message> <message> <location line="-160"/> <source>Stake:</source> <translation>Stake:</translation> </message> <message> <location line="+29"/> <source>Unconfirmed:</source> <translation>Unconfirmed:</translation> </message> <message> <location line="-107"/> <source>Wallet</source> <translation>Wallet</translation> </message> <message> <location line="+49"/> <source>Spendable:</source> <translation type="unfinished"></translation> </message> <message> <location line="+16"/> <source>Your current spendable balance</source> <translation type="unfinished"></translation> </message> <message> <location line="+71"/> <source>Immature:</source> <translation>Immature:</translation> </message> <message> <location line="+13"/> <source>Mined balance that has not yet matured</source> <translation>Mined balance that has not yet matured</translation> </message> <message> <location line="+20"/> <source>Total:</source> <translation type="unfinished"></translation> </message> <message> <location line="+16"/> <source>Your current total balance</source> <translation type="unfinished"></translation> </message> <message> <location line="+46"/> <source>&lt;b&gt;Recent transactions&lt;/b&gt;</source> <translation>&lt;b&gt;Recent transactions&lt;/b&gt;</translation> </message> <message> <location line="-108"/> <source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source> <translation>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</translation> </message> <message> <location line="-29"/> <source>Total of coins that was staked, and do not yet count toward the current balance</source> <translation>Total of coins that was staked, and do not yet count toward the current balance</translation> </message> <message> <location filename="../overviewpage.cpp" line="+113"/> <location line="+1"/> <source>out of sync</source> <translation>out of sync</translation> </message> </context> <context> <name>QRCodeDialog</name> <message> <location filename="../forms/qrcodedialog.ui" line="+14"/> <source>QR Code Dialog</source> <translation>QR Code Dialog</translation> </message> <message> <location line="+59"/> <source>Request Payment</source> <translation>Request Payment</translation> </message> <message> <location line="+56"/> <source>Amount:</source> <translation>Amount:</translation> </message> <message> <location line="-44"/> <source>Label:</source> <translation>Label:</translation> </message> <message> <location line="+19"/> <source>Message:</source> <translation>Message:</translation> </message> <message> <location line="+71"/> <source>&amp;Save As...</source> <translation>&amp;Save As...</translation> </message> <message> <location filename="../qrcodedialog.cpp" line="+62"/> <source>Error encoding URI into QR Code.</source> <translation>Error encoding URI into QR Code.</translation> </message> <message> <location line="+40"/> <source>The entered amount is invalid, please check.</source> <translation>The entered amount is invalid, please check.</translation> </message> <message> <location line="+23"/> <source>Resulting URI too long, try to reduce the text for label / message.</source> <translation>Resulting URI too long, try to reduce the text for label / message.</translation> </message> <message> <location line="+25"/> <source>Save QR Code</source> <translation>Save 
QR Code</translation> </message> <message> <location line="+0"/> <source>PNG Images (*.png)</source> <translation>PNG Images (*.png)</translation> </message> </context> <context> <name>RPCConsole</name> <message> <location filename="../forms/rpcconsole.ui" line="+46"/> <source>Client name</source> <translation>Client name</translation> </message> <message> <location line="+10"/> <location line="+23"/> <location line="+26"/> <location line="+23"/> <location line="+23"/> <location line="+36"/> <location line="+53"/> <location line="+23"/> <location line="+23"/> <location filename="../rpcconsole.cpp" line="+348"/> <source>N/A</source> <translation>N/A</translation> </message> <message> <location line="-217"/> <source>Client version</source> <translation>Client version</translation> </message> <message> <location line="-45"/> <source>&amp;Information</source> <translation>&amp;Information</translation> </message> <message> <location line="+68"/> <source>Using OpenSSL version</source> <translation>Using OpenSSL version</translation> </message> <message> <location line="+49"/> <source>Startup time</source> <translation>Startup time</translation> </message> <message> <location line="+29"/> <source>Network</source> <translation>Network</translation> </message> <message> <location line="+7"/> <source>Number of connections</source> <translation>Number of connections</translation> </message> <message> <location line="+23"/> <source>On testnet</source> <translation>On testnet</translation> </message> <message> <location line="+23"/> <source>Block chain</source> <translation>Block chain</translation> </message> <message> <location line="+7"/> <source>Current number of blocks</source> <translation>Current number of blocks</translation> </message> <message> <location line="+23"/> <source>Estimated total blocks</source> <translation>Estimated total blocks</translation> </message> <message> <location line="+23"/> <source>Last block time</source> <translation>Last block time</translation> </message> <message> <location line="+52"/> <source>&amp;Open</source> <translation>&amp;Open</translation> </message> <message> <location line="+16"/> <source>Command-line options</source> <translation>Command-line options</translation> </message> <message> <location line="+7"/> <source>Show the Neutron-Qt help message to get a list with possible Neutron command-line options.</source> <translation>Show the Neutron-Qt help message to get a list with possible Neutron command-line options.</translation> </message> <message> <location line="+3"/> <source>&amp;Show</source> <translation>&amp;Show</translation> </message> <message> <location line="+24"/> <source>&amp;Console</source> <translation>&amp;Console</translation> </message> <message> <location line="-260"/> <source>Build date</source> <translation>Build date</translation> </message> <message> <location line="-104"/> <source>Neutron - Debug window</source> <translation>Neutron - Debug window</translation> </message> <message> <location line="+25"/> <source>Neutron Core</source> <translation>Neutron Core</translation> </message> <message> <location line="+279"/> <source>Debug log file</source> <translation>Debug log file</translation> </message> <message> <location line="+7"/> <source>Open the Neutron debug log file from the current data directory. This can take a few seconds for large log files.</source> <translation>Open the Neutron debug log file from the current data directory. 
This can take a few seconds for large log files.</translation> </message> <message> <location line="+102"/> <source>Clear console</source> <translation>Clear console</translation> </message> <message> <location filename="../rpcconsole.cpp" line="-33"/> <source>Welcome to the Neutron RPC console.</source> <translation>Welcome to the Neutron RPC console.</translation> </message> <message> <location line="+1"/> <source>Use up and down arrows to navigate history, and &lt;b&gt;Ctrl-L&lt;/b&gt; to clear screen.</source> <translation>Use up and down arrows to navigate history, and &lt;b&gt;Ctrl-L&lt;/b&gt; to clear screen.</translation> </message> <message> <location line="+1"/> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</translation> </message> </context> <context> <name>SendCoinsDialog</name> <message> <location filename="../forms/sendcoinsdialog.ui" line="+14"/> <location filename="../sendcoinsdialog.cpp" line="+182"/> <location line="+5"/> <location line="+5"/> <location line="+5"/> <location line="+6"/> <location line="+5"/> <location line="+5"/> <source>Send Coins</source> <translation>Send Coins</translation> </message> <message> <location line="+76"/> <source>Coin Control Features</source> <translation type="unfinished"></translation> </message> <message> <location line="+20"/> <source>Inputs...</source> <translation type="unfinished"></translation> </message> <message> <location line="+7"/> <source>automatically selected</source> <translation type="unfinished"></translation> </message> <message> <location line="+19"/> <source>Insufficient funds!</source> <translation type="unfinished"></translation> </message> <message> <location line="+77"/> <source>Quantity:</source> <translation type="unfinished"></translation> </message> <message> <location line="+22"/> <location line="+35"/> <source>0</source> <translation type="unfinished"></translation> </message> <message> <location line="-19"/> <source>Bytes:</source> <translation type="unfinished"></translation> </message> <message> <location line="+51"/> <source>Amount:</source> <translation type="unfinished">Amount:</translation> </message> <message> <location line="+22"/> <location line="+86"/> <location line="+86"/> <location line="+32"/> <source>0.00 BC</source> <translation type="unfinished">123.456 BC {0.00 ?}</translation> </message> <message> <location line="-191"/> <source>Priority:</source> <translation type="unfinished"></translation> </message> <message> <location line="+19"/> <source>medium</source> <translation type="unfinished"></translation> </message> <message> <location line="+32"/> <source>Fee:</source> <translation type="unfinished"></translation> </message> <message> <location line="+35"/> <source>Low Output:</source> <translation type="unfinished"></translation> </message> <message> <location line="+19"/> <source>no</source> <translation type="unfinished"></translation> </message> <message> <location line="+32"/> <source>After Fee:</source> <translation type="unfinished"></translation> </message> <message> <location line="+35"/> <source>Change</source> <translation type="unfinished"></translation> </message> <message> <location line="+50"/> <source>custom change address</source> <translation type="unfinished"></translation> </message> <message> <location line="+106"/> <source>Send to multiple recipients at once</source> <translation>Send to multiple recipients at once</translation> </message> 
<message> <location line="+3"/> <source>Add &amp;Recipient</source> <translation type="unfinished"></translation> </message> <message> <location line="+20"/> <source>Remove all transaction fields</source> <translation>Remove all transaction fields</translation> </message> <message> <location line="+3"/> <source>Clear &amp;All</source> <translation>Clear &amp;All</translation> </message> <message> <location line="+28"/> <source>Balance:</source> <translation>Balance:</translation> </message> <message> <location line="+16"/> <source>123.456 BC</source> <translation>123.456 BC</translation> </message> <message> <location line="+31"/> <source>Confirm the send action</source> <translation>Confirm the send action</translation> </message> <message> <location line="+3"/> <source>S&amp;end</source> <translation type="unfinished"></translation> </message> <message> <location filename="../sendcoinsdialog.cpp" line="-173"/> <source>Enter a Neutron address (e.g. 2JhbfkAFvXqYkreSgJfrRLS9DepUcxbQci)</source> <translation>Enter a Neutron address (e.g. 2JhbfkAFvXqYkreSgJfrRLS9DepUcxbQci)</translation> </message> <message> <location line="+15"/> <source>Copy quantity</source> <translation></translation> </message> <message> <location line="+1"/> <source>Copy amount</source> <translation>Copy amount</translation> </message> <message> <location line="+1"/> <source>Copy fee</source> <translation></translation> </message> <message> <location line="+1"/> <source>Copy after fee</source> <translation></translation> </message> <message> <location line="+1"/> <source>Copy bytes</source> <translation></translation> </message> <message> <location line="+1"/> <source>Copy priority</source> <translation></translation> </message> <message> <location line="+1"/> <source>Copy low output</source> <translation></translation> </message> <message> <location line="+1"/> <source>Copy change</source> <translation></translation> </message> <message> <location line="+86"/> <source>&lt;b&gt;%1&lt;/b&gt; to %2 (%3)</source> <translation>&lt;b&gt;%1&lt;/b&gt; to %2 (%3)</translation> </message> <message> <location line="+5"/> <source>Confirm send coins</source> <translation>Confirm send coins</translation> </message> <message> <location line="+1"/> <source>Are you sure you want to send %1?</source> <translation>Are you sure you want to send %1?</translation> </message> <message> <location line="+0"/> <source> and </source> <translation> and </translation> </message> <message> <location line="+29"/> <source>The recipient address is not valid, please recheck.</source> <translation>The recipient address is not valid, please recheck.</translation> </message> <message> <location line="+5"/> <source>The amount to pay must be larger than 0.</source> <translation>The amount to pay must be larger than 0.</translation> </message> <message> <location line="+5"/> <source>The amount exceeds your balance.</source> <translation>The amount exceeds your balance.</translation> </message> <message> <location line="+5"/> <source>The total exceeds your balance when the %1 transaction fee is included.</source> <translation>The total exceeds your balance when the %1 transaction fee is included.</translation> </message> <message> <location line="+6"/> <source>Duplicate address found, can only send to each address once per send operation.</source> <translation>Duplicate address found, can only send to each address once per send operation.</translation> </message> <message> <location line="+5"/> <source>Error: Transaction creation failed.</source> 
<translation>Error: Transaction creation failed.</translation> </message> <message> <location line="+5"/> <source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</translation> </message> <message> <location line="+251"/> <source>WARNING: Invalid Neutron address</source> <translation></translation> </message> <message> <location line="+13"/> <source>(no label)</source> <translation type="unfinished">(no label)</translation> </message> <message> <location line="+4"/> <source>WARNING: unknown change address</source> <translation></translation> </message> </context> <context> <name>SendCoinsEntry</name> <message> <location filename="../forms/sendcoinsentry.ui" line="+14"/> <source>Form</source> <translation>Form</translation> </message> <message> <location line="+15"/> <source>A&amp;mount:</source> <translation>A&amp;mount:</translation> </message> <message> <location line="+13"/> <source>Pay &amp;To:</source> <translation>Pay &amp;To:</translation> </message> <message> <location line="+24"/> <location filename="../sendcoinsentry.cpp" line="+25"/> <source>Enter a label for this address to add it to your address book</source> <translation>Enter a label for this address to add it to your address book</translation> </message> <message> <location line="+9"/> <source>&amp;Label:</source> <translation>&amp;Label:</translation> </message> <message> <location line="+18"/> <source>The address to send the payment to (e.g. 2JhbfkAFvXqYkreSgJfrRLS9DepUcxbQci)</source> <translation></translation> </message> <message> <location line="+10"/> <source>Choose address from address book</source> <translation>Choose address from address book</translation> </message> <message> <location line="+10"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="+7"/> <source>Paste address from clipboard</source> <translation>Paste address from clipboard</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+7"/> <source>Remove this recipient</source> <translation>Remove this recipient</translation> </message> <message> <location filename="../sendcoinsentry.cpp" line="+1"/> <source>Enter a Neutron address (e.g. 2JhbfkAFvXqYkreSgJfrRLS9DepUcxbQci)</source> <translation>Enter a Neutron address (e.g. 2JhbfkAFvXqYkreSgJfrRLS9DepUcxbQci)</translation> </message> </context> <context> <name>SignVerifyMessageDialog</name> <message> <location filename="../forms/signverifymessagedialog.ui" line="+14"/> <source>Signatures - Sign / Verify a Message</source> <translation>Signatures - Sign / Verify a Message</translation> </message> <message> <location line="+13"/> <location line="+124"/> <source>&amp;Sign Message</source> <translation>&amp;Sign Message</translation> </message> <message> <location line="-118"/> <source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. 
Only sign fully-detailed statements you agree to.</source> <translation>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</translation> </message> <message> <location line="+18"/> <source>The address to sign the message with (e.g. 2JhbfkAFvXqYkreSgJfrRLS9DepUcxbQci)</source> <translation>The address to sign the message with (e.g. 2JhbfkAFvXqYkreSgJfrRLS9DepUcxbQci)</translation> </message> <message> <location line="+10"/> <location line="+203"/> <source>Choose an address from the address book</source> <translation>Choose an address from the address book</translation> </message> <message> <location line="-193"/> <location line="+203"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="-193"/> <source>Paste address from clipboard</source> <translation>Paste address from clipboard</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+12"/> <source>Enter the message you want to sign here</source> <translation>Enter the message you want to sign here</translation> </message> <message> <location line="+24"/> <source>Copy the current signature to the system clipboard</source> <translation>Copy the current signature to the system clipboard</translation> </message> <message> <location line="+21"/> <source>Sign the message to prove you own this Neutron address</source> <translation>Sign the message to prove you own this Neutron address</translation> </message> <message> <location line="+17"/> <source>Reset all sign message fields</source> <translation>Reset all sign message fields</translation> </message> <message> <location line="+3"/> <location line="+146"/> <source>Clear &amp;All</source> <translation>Clear &amp;All</translation> </message> <message> <location line="-87"/> <location line="+70"/> <source>&amp;Verify Message</source> <translation>&amp;Verify Message</translation> </message> <message> <location line="-64"/> <source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source> <translation>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</translation> </message> <message> <location line="+21"/> <source>The address the message was signed with (e.g. 2JhbfkAFvXqYkreSgJfrRLS9DepUcxbQci)</source> <translation>The address the message was signed with (e.g. 
2JhbfkAFvXqYkreSgJfrRLS9DepUcxbQci)</translation> </message> <message> <location line="+40"/> <source>Verify the message to ensure it was signed with the specified Neutron address</source> <translation>Verify the message to ensure it was signed with the specified Neutron address</translation> </message> <message> <location line="+17"/> <source>Reset all verify message fields</source> <translation>Reset all verify message fields</translation> </message> <message> <location filename="../signverifymessagedialog.cpp" line="+27"/> <location line="+3"/> <source>Enter a Neutron address (e.g. 2JhbfkAFvXqYkreSgJfrRLS9DepUcxbQci)</source> <translation>Enter a Neutron address (e.g. 2JhbfkAFvXqYkreSgJfrRLS9DepUcxbQci)</translation> </message> <message> <location line="-2"/> <source>Click &quot;Sign Message&quot; to generate signature</source> <translation>Click &quot;Sign Message&quot; to generate signature</translation> </message> <message> <location line="+3"/> <source>Enter Neutron signature</source> <translation>Enter Neutron signature</translation> </message> <message> <location line="+82"/> <location line="+81"/> <source>The entered address is invalid.</source> <translation>The entered address is invalid.</translation> </message> <message> <location line="-81"/> <location line="+8"/> <location line="+73"/> <location line="+8"/> <source>Please check the address and try again.</source> <translation>Please check the address and try again.</translation> </message> <message> <location line="-81"/> <location line="+81"/> <source>The entered address does not refer to a key.</source> <translation>The entered address does not refer to a key.</translation> </message> <message> <location line="-73"/> <source>Wallet unlock was cancelled.</source> <translation>Wallet unlock was cancelled.</translation> </message> <message> <location line="+8"/> <source>Private key for the entered address is not available.</source> <translation>Private key for the entered address is not available.</translation> </message> <message> <location line="+12"/> <source>Message signing failed.</source> <translation>Message signing failed.</translation> </message> <message> <location line="+5"/> <source>Message signed.</source> <translation>Message signed.</translation> </message> <message> <location line="+59"/> <source>The signature could not be decoded.</source> <translation>The signature could not be decoded.</translation> </message> <message> <location line="+0"/> <location line="+13"/> <source>Please check the signature and try again.</source> <translation>Please check the signature and try again.</translation> </message> <message> <location line="+0"/> <source>The signature did not match the message digest.</source> <translation>The signature did not match the message digest.</translation> </message> <message> <location line="+7"/> <source>Message verification failed.</source> <translation>Message verification failed.</translation> </message> <message> <location line="+5"/> <source>Message verified.</source> <translation>Message verified.</translation> </message> </context> <context> <name>TransactionDesc</name> <message> <location filename="../transactiondesc.cpp" line="+19"/> <source>Open until %1</source> <translation>Open until %1</translation> </message> <message numerus="yes"> <location line="-2"/> <source>Open for %n block(s)</source> <translation> <numerusform>Open for %n block</numerusform> <numerusform>Open for %n blocks</numerusform> </translation> </message> <message> <location line="+8"/> 
<source>conflicted</source> <translation></translation> </message> <message> <location line="+2"/> <source>%1/offline</source> <translation>%1/offline</translation> </message> <message> <location line="+2"/> <source>%1/unconfirmed</source> <translation>%1/unconfirmed</translation> </message> <message> <location line="+2"/> <source>%1 confirmations</source> <translation>%1 confirmations</translation> </message> <message> <location line="+18"/> <source>Status</source> <translation>Status</translation> </message> <message numerus="yes"> <location line="+7"/> <source>, broadcast through %n node(s)</source> <translation> <numerusform>, broadcast through %n node</numerusform> <numerusform>, broadcast through %n nodes</numerusform> </translation> </message> <message> <location line="+4"/> <source>Date</source> <translation>Date</translation> </message> <message> <location line="+7"/> <source>Source</source> <translation>Source</translation> </message> <message> <location line="+0"/> <source>Generated</source> <translation>Generated</translation> </message> <message> <location line="+5"/> <location line="+17"/> <source>From</source> <translation>From</translation> </message> <message> <location line="+1"/> <location line="+22"/> <location line="+58"/> <source>To</source> <translation>To</translation> </message> <message> <location line="-77"/> <location line="+2"/> <source>own address</source> <translation>own address</translation> </message> <message> <location line="-2"/> <source>label</source> <translation>label</translation> </message> <message> <location line="+37"/> <location line="+12"/> <location line="+45"/> <location line="+17"/> <location line="+30"/> <source>Credit</source> <translation>Credit</translation> </message> <message numerus="yes"> <location line="-102"/> <source>matures in %n more block(s)</source> <translation> <numerusform>matures in %n more block</numerusform> <numerusform>matures in %n more blocks</numerusform> </translation> </message> <message> <location line="+2"/> <source>not accepted</source> <translation>not accepted</translation> </message> <message> <location line="+44"/> <location line="+8"/> <location line="+15"/> <location line="+30"/> <source>Debit</source> <translation>Debit</translation> </message> <message> <location line="-39"/> <source>Transaction fee</source> <translation>Transaction fee</translation> </message> <message> <location line="+16"/> <source>Net amount</source> <translation>Net amount</translation> </message> <message> <location line="+6"/> <source>Message</source> <translation>Message</translation> </message> <message> <location line="+2"/> <source>Comment</source> <translation>Comment</translation> </message> <message> <location line="+2"/> <source>Transaction ID</source> <translation>Transaction ID</translation> </message> <message> <location line="+3"/> <source>Generated coins must mature 110 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to &quot;not accepted&quot; and it won&apos;t be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source> <translation>Generated coins must mature 110 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to &quot;not accepted&quot; and it won&apos;t be spendable. 
This may occasionally happen if another node generates a block within a few seconds of yours.</translation> </message> <message> <location line="+7"/> <source>Debug information</source> <translation>Debug information</translation> </message> <message> <location line="+8"/> <source>Transaction</source> <translation>Transaction</translation> </message> <message> <location line="+5"/> <source>Inputs</source> <translation>Inputs</translation> </message> <message> <location line="+23"/> <source>Amount</source> <translation>Amount</translation> </message> <message> <location line="+1"/> <source>true</source> <translation>true</translation> </message> <message> <location line="+0"/> <source>false</source> <translation>false</translation> </message> <message> <location line="-211"/> <source>, has not been successfully broadcast yet</source> <translation>, has not been successfully broadcast yet</translation> </message> <message> <location line="+35"/> <source>unknown</source> <translation>unknown</translation> </message> </context> <context> <name>TransactionDescDialog</name> <message> <location filename="../forms/transactiondescdialog.ui" line="+14"/> <source>Transaction details</source> <translation>Transaction details</translation> </message> <message> <location line="+6"/> <source>This pane shows a detailed description of the transaction</source> <translation>This pane shows a detailed description of the transaction</translation> </message> </context> <context> <name>TransactionTableModel</name> <message> <location filename="../transactiontablemodel.cpp" line="+226"/> <source>Date</source> <translation>Date</translation> </message> <message> <location line="+0"/> <source>Type</source> <translation>Type</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Address</translation> </message> <message> <location line="+0"/> <source>Amount</source> <translation>Amount</translation> </message> <message> <location line="+60"/> <source>Open until %1</source> <translation>Open until %1</translation> </message> <message> <location line="+12"/> <source>Confirmed (%1 confirmations)</source> <translation>Confirmed (%1 confirmations)</translation> </message> <message numerus="yes"> <location line="-15"/> <source>Open for %n more block(s)</source> <translation> <numerusform>Open for %n more block</numerusform> <numerusform>Open for %n more blocks</numerusform> </translation> </message> <message> <location line="+6"/> <source>Offline</source> <translation></translation> </message> <message> <location line="+3"/> <source>Unconfirmed</source> <translation></translation> </message> <message> <location line="+3"/> <source>Confirming (%1 of %2 recommended confirmations)</source> <translation></translation> </message> <message> <location line="+6"/> <source>Conflicted</source> <translation></translation> </message> <message> <location line="+3"/> <source>Immature (%1 confirmations, will be available after %2)</source> <translation></translation> </message> <message> <location line="+3"/> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation>This block was not received by any other nodes and will probably not be accepted!</translation> </message> <message> <location line="+3"/> <source>Generated but not accepted</source> <translation>Generated but not accepted</translation> </message> <message> <location line="+42"/> <source>Received with</source> <translation>Received with</translation> </message> <message> <location 
line="+2"/> <source>Received from</source> <translation>Received from</translation> </message> <message> <location line="+3"/> <source>Sent to</source> <translation>Sent to</translation> </message> <message> <location line="+2"/> <source>Payment to yourself</source> <translation>Payment to yourself</translation> </message> <message> <location line="+2"/> <source>Mined</source> <translation>Mined</translation> </message> <message> <location line="+38"/> <source>(n/a)</source> <translation>(n/a)</translation> </message> <message> <location line="+190"/> <source>Transaction status. Hover over this field to show number of confirmations.</source> <translation>Transaction status. Hover over this field to show number of confirmations.</translation> </message> <message> <location line="+2"/> <source>Date and time that the transaction was received.</source> <translation>Date and time that the transaction was received.</translation> </message> <message> <location line="+2"/> <source>Type of transaction.</source> <translation>Type of transaction.</translation> </message> <message> <location line="+2"/> <source>Destination address of transaction.</source> <translation>Destination address of transaction.</translation> </message> <message> <location line="+2"/> <source>Amount removed from or added to balance.</source> <translation>Amount removed from or added to balance.</translation> </message> </context> <context> <name>TransactionView</name> <message> <location filename="../transactionview.cpp" line="+55"/> <location line="+16"/> <source>All</source> <translation>All</translation> </message> <message> <location line="-15"/> <source>Today</source> <translation>Today</translation> </message> <message> <location line="+1"/> <source>This week</source> <translation>This week</translation> </message> <message> <location line="+1"/> <source>This month</source> <translation>This month</translation> </message> <message> <location line="+1"/> <source>Last month</source> <translation>Last month</translation> </message> <message> <location line="+1"/> <source>This year</source> <translation>This year</translation> </message> <message> <location line="+1"/> <source>Range...</source> <translation>Range...</translation> </message> <message> <location line="+11"/> <source>Received with</source> <translation>Received with</translation> </message> <message> <location line="+2"/> <source>Sent to</source> <translation>Sent to</translation> </message> <message> <location line="+2"/> <source>To yourself</source> <translation>To yourself</translation> </message> <message> <location line="+1"/> <source>Mined</source> <translation>Mined</translation> </message> <message> <location line="+1"/> <source>Other</source> <translation>Other</translation> </message> <message> <location line="+7"/> <source>Enter address or label to search</source> <translation>Enter address or label to search</translation> </message> <message> <location line="+7"/> <source>Min amount</source> <translation>Min amount</translation> </message> <message> <location line="+34"/> <source>Copy address</source> <translation>Copy address</translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation>Copy label</translation> </message> <message> <location line="+1"/> <source>Copy amount</source> <translation>Copy amount</translation> </message> <message> <location line="+1"/> <source>Copy transaction ID</source> <translation>Copy transaction ID</translation> </message> <message> <location line="+1"/> <source>Edit 
label</source> <translation>Edit label</translation> </message> <message> <location line="+1"/> <source>Show transaction details</source> <translation>Show transaction details</translation> </message> <message> <location line="+144"/> <source>Export Transaction Data</source> <translation>Export Transaction Data</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Comma separated file (*.csv)</translation> </message> <message> <location line="+8"/> <source>Confirmed</source> <translation>Confirmed</translation> </message> <message> <location line="+1"/> <source>Date</source> <translation>Date</translation> </message> <message> <location line="+1"/> <source>Type</source> <translation>Type</translation> </message> <message> <location line="+1"/> <source>Label</source> <translation>Label</translation> </message> <message> <location line="+1"/> <source>Address</source> <translation>Address</translation> </message> <message> <location line="+1"/> <source>Amount</source> <translation>Amount</translation> </message> <message> <location line="+1"/> <source>ID</source> <translation>ID</translation> </message> <message> <location line="+4"/> <source>Error exporting</source> <translation>Error exporting</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>Could not write to file %1.</translation> </message> <message> <location line="+100"/> <source>Range:</source> <translation>Range:</translation> </message> <message> <location line="+8"/> <source>to</source> <translation>to</translation> </message> </context> <context> <name>WalletModel</name> <message> <location filename="../walletmodel.cpp" line="+206"/> <source>Sending...</source> <translation>Sending...</translation> </message> </context> <context> <name>bitcoin-core</name> <message> <location filename="../bitcoinstrings.cpp" line="+33"/> <source>Neutron version</source> <translation>Neutron version</translation> </message> <message> <location line="+1"/> <source>Usage:</source> <translation>Usage:</translation> </message> <message> <location line="+1"/> <source>Send command to -server or neutrond</source> <translation>Send command to -server or neutrond</translation> </message> <message> <location line="+1"/> <source>List commands</source> <translation>List commands</translation> </message> <message> <location line="+1"/> <source>Get help for a command</source> <translation>Get help for a command</translation> </message> <message> <location line="+2"/> <source>Options:</source> <translation>Options:</translation> </message> <message> <location line="+2"/> <source>Specify configuration file (default: neutron.conf)</source> <translation>Specify configuration file (default: neutron.conf)</translation> </message> <message> <location line="+1"/> <source>Specify pid file (default: neutrond.pid)</source> <translation>Specify pid file (default: neutrond.pid)</translation> </message> <message> <location line="+2"/> <source>Specify wallet file (within data directory)</source> <translation>Specify wallet file (within data directory)</translation> </message> <message> <location line="-1"/> <source>Specify data directory</source> <translation>Specify data directory</translation> </message> <message> <location line="+2"/> <source>Set database cache size in megabytes (default: 25)</source> <translation>Set database cache size in megabytes (default: 25)</translation> </message> <message> <location line="+1"/> <source>Set database disk log 
size in megabytes (default: 100)</source> <translation>Set database disk log size in megabytes (default: 100)</translation> </message> <message> <location line="+6"/> <source>Listen for connections on &lt;port&gt; (default: 15714 or testnet: 25714)</source> <translation>Listen for connections on &lt;port&gt; (default: 15714 or testnet: 25714)</translation> </message> <message> <location line="+1"/> <source>Maintain at most &lt;n&gt; connections to peers (default: 125)</source> <translation>Maintain at most &lt;n&gt; connections to peers (default: 125)</translation> </message> <message> <location line="+3"/> <source>Connect to a node to retrieve peer addresses, and disconnect</source> <translation>Connect to a node to retrieve peer addresses, and disconnect</translation> </message> <message> <location line="+1"/> <source>Specify your own public address</source> <translation>Specify your own public address</translation> </message> <message> <location line="+5"/> <source>Bind to given address. Use [host]:port notation for IPv6</source> <translation>Bind to given address. Use [host]:port notation for IPv6</translation> </message> <message> <location line="+2"/> <source>Stake your coins to support network and gain reward (default: 1)</source> <translation></translation> </message> <message> <location line="+5"/> <source>Threshold for disconnecting misbehaving peers (default: 100)</source> <translation>Threshold for disconnecting misbehaving peers (default: 100)</translation> </message> <message> <location line="+1"/> <source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source> <translation>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</translation> </message> <message> <location line="-44"/> <source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source> <translation></translation> </message> <message> <location line="+51"/> <source>Detach block and address databases. Increases shutdown time (default: 0)</source> <translation>Detach block and address databases. Increases shutdown time (default: 0)</translation> </message> <message> <location line="+109"/> <source>Error: The transaction was rejected. 
This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation></translation> </message> <message> <location line="-5"/> <source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds </source> <translation></translation> </message> <message> <location line="-87"/> <source>Listen for JSON-RPC connections on &lt;port&gt; (default: 15715 or testnet: 25715)</source> <translation></translation> </message> <message> <location line="-11"/> <source>Accept command line and JSON-RPC commands</source> <translation>Accept command line and JSON-RPC commands</translation> </message> <message> <location line="+101"/> <source>Error: Transaction creation failed </source> <translation></translation> </message> <message> <location line="-5"/> <source>Error: Wallet locked, unable to create transaction </source> <translation></translation> </message> <message> <location line="-8"/> <source>Importing blockchain data file.</source> <translation></translation> </message> <message> <location line="+1"/> <source>Importing bootstrap blockchain data file.</source> <translation></translation> </message> <message> <location line="-88"/> <source>Run in the background as a daemon and accept commands</source> <translation>Run in the background as a daemon and accept commands</translation> </message> <message> <location line="+1"/> <source>Use the test network</source> <translation>Use the test network</translation> </message> <message> <location line="-24"/> <source>Accept connections from outside (default: 1 if no -proxy or -connect)</source> <translation>Accept connections from outside (default: 1 if no -proxy or -connect)</translation> </message> <message> <location line="-38"/> <source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source> <translation></translation> </message> <message> <location line="+117"/> <source>Error initializing database environment %s! To recover, BACKUP THAT DIRECTORY, then remove everything from it except for wallet.dat.</source> <translation></translation> </message> <message> <location line="-20"/> <source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source> <translation>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</translation> </message> <message> <location line="+11"/> <source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source> <translation>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</translation> </message> <message> <location line="+61"/> <source>Warning: Please check that your computer&apos;s date and time are correct! If your clock is wrong Neutron will not work properly.</source> <translation>Warning: Please check that your computer&apos;s date and time are correct! If your clock is wrong Neutron will not work properly.</translation> </message> <message> <location line="-31"/> <source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source> <translation>Warning: error reading wallet.dat! 
All keys read correctly, but transaction data or address book entries might be missing or incorrect.</translation> </message> <message> <location line="-18"/> <source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source> <translation>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</translation> </message> <message> <location line="-30"/> <source>Attempt to recover private keys from a corrupt wallet.dat</source> <translation>Attempt to recover private keys from a corrupt wallet.dat</translation> </message> <message> <location line="+4"/> <source>Block creation options:</source> <translation>Block creation options:</translation> </message> <message> <location line="-62"/> <source>Connect only to the specified node(s)</source> <translation>Connect only to the specified node(s)</translation> </message> <message> <location line="+4"/> <source>Discover own IP address (default: 1 when listening and no -externalip)</source> <translation>Discover own IP address (default: 1 when listening and no -externalip)</translation> </message> <message> <location line="+94"/> <source>Failed to listen on any port. Use -listen=0 if you want this.</source> <translation>Failed to listen on any port. Use -listen=0 if you want this.</translation> </message> <message> <location line="-90"/> <source>Find peers using DNS lookup (default: 1)</source> <translation>Find peers using DNS lookup (default: 1)</translation> </message> <message> <location line="+5"/> <source>Sync checkpoints policy (default: strict)</source> <translation>Sync checkpoints policy (default: strict)</translation> </message> <message> <location line="+83"/> <source>Invalid -tor address: &apos;%s&apos;</source> <translation>Invalid -tor address: &apos;%s&apos;</translation> </message> <message> <location line="+4"/> <source>Invalid amount for -reservebalance=&lt;amount&gt;</source> <translation></translation> </message> <message> <location line="-82"/> <source>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: 5000)</source> <translation>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: 5000)</translation> </message> <message> <location line="+1"/> <source>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: 1000)</source> <translation>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: 1000)</translation> </message> <message> <location line="-16"/> <source>Only connect to nodes in network &lt;net&gt; (IPv4, IPv6 or Tor)</source> <translation>Only connect to nodes in network &lt;net&gt; (IPv4, IPv6 or Tor)</translation> </message> <message> <location line="+28"/> <source>Output extra debugging information. Implies all other -debug* options</source> <translation>Output extra debugging information. 
Implies all other -debug* options</translation> </message> <message> <location line="+1"/> <source>Output extra network debugging information</source> <translation>Output extra network debugging information</translation> </message> <message> <location line="+1"/> <source>Prepend debug output with timestamp</source> <translation>Prepend debug output with timestamp</translation> </message> <message> <location line="+35"/> <source>SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source> <translation>SSL options: (see the Bitcoin Wiki for SSL setup instructions)</translation> </message> <message> <location line="-74"/> <source>Select the version of socks proxy to use (4-5, default: 5)</source> <translation>Select the version of socks proxy to use (4-5, default: 5)</translation> </message> <message> <location line="+41"/> <source>Send trace/debug info to console instead of debug.log file</source> <translation>Send trace/debug info to console instead of debug.log file</translation> </message> <message> <location line="+1"/> <source>Send trace/debug info to debugger</source> <translation>Send trace/debug info to debugger</translation> </message> <message> <location line="+28"/> <source>Set maximum block size in bytes (default: 250000)</source> <translation>Set maximum block size in bytes (default: 250000)</translation> </message> <message> <location line="-1"/> <source>Set minimum block size in bytes (default: 0)</source> <translation>Set minimum block size in bytes (default: 0)</translation> </message> <message> <location line="-29"/> <source>Shrink debug.log file on client startup (default: 1 when no -debug)</source> <translation>Shrink debug.log file on client startup (default: 1 when no -debug)</translation> </message> <message> <location line="-42"/> <source>Specify connection timeout in milliseconds (default: 5000)</source> <translation>Specify connection timeout in milliseconds (default: 5000)</translation> </message> <message> <location line="+109"/> <source>Unable to sign checkpoint, wrong checkpointkey? 
</source> <translation></translation> </message> <message> <location line="-80"/> <source>Use UPnP to map the listening port (default: 0)</source> <translation>Use UPnP to map the listening port (default: 0)</translation> </message> <message> <location line="-1"/> <source>Use UPnP to map the listening port (default: 1 when listening)</source> <translation>Use UPnP to map the listening port (default: 1 when listening)</translation> </message> <message> <location line="-25"/> <source>Use proxy to reach tor hidden services (default: same as -proxy)</source> <translation>Use proxy to reach tor hidden services (default: same as -proxy)</translation> </message> <message> <location line="+42"/> <source>Username for JSON-RPC connections</source> <translation>Username for JSON-RPC connections</translation> </message> <message> <location line="+47"/> <source>Verifying database integrity...</source> <translation></translation> </message> <message> <location line="+57"/> <source>WARNING: syncronized checkpoint violation detected, but skipped!</source> <translation></translation> </message> <message> <location line="+1"/> <source>Warning: Disk space is low!</source> <translation>Warning: Disk space is low!</translation> </message> <message> <location line="-2"/> <source>Warning: This version is obsolete, upgrade required!</source> <translation>Warning: This version is obsolete, upgrade required!</translation> </message> <message> <location line="-48"/> <source>wallet.dat corrupt, salvage failed</source> <translation></translation> </message> <message> <location line="-54"/> <source>Password for JSON-RPC connections</source> <translation>Password for JSON-RPC connections</translation> </message> <message> <location line="-84"/> <source>%s, you must set a rpcpassword in the configuration file: %s It is recommended you use the following random password: rpcuser=Neutronrpc rpcpassword=%s (you do not need to remember this password) The username and password MUST NOT be the same. If the file does not exist, create it with owner-readable-only file permissions. It is also recommended to set alertnotify so you are notified of problems; for example: alertnotify=echo %%s | mail -s &quot;Neutron Alert&quot; [email protected] </source> <translation type="unfinished"></translation> </message> <message> <location line="+51"/> <source>Find peers using internet relay chat (default: 0)</source> <translation type="unfinished">Find peers using internet relay chat (default: 1) {0)?}</translation> </message> <message> <location line="+5"/> <source>Sync time with other nodes. Disable if time on your system is precise e.g. 
syncing with NTP (default: 1)</source> <translation type="unfinished"></translation> </message> <message> <location line="+15"/> <source>When creating transactions, ignore inputs with value less than this (default: 0.01)</source> <translation></translation> </message> <message> <location line="+16"/> <source>Allow JSON-RPC connections from specified IP address</source> <translation>Allow JSON-RPC connections from specified IP address</translation> </message> <message> <location line="+1"/> <source>Send commands to node running on &lt;ip&gt; (default: 127.0.0.1)</source> <translation>Send commands to node running on &lt;ip&gt; (default: 127.0.0.1)</translation> </message> <message> <location line="+1"/> <source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source> <translation>Execute command when the best block changes (%s in cmd is replaced by block hash)</translation> </message> <message> <location line="+3"/> <source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source> <translation>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</translation> </message> <message> <location line="+3"/> <source>Require a confirmations for change (default: 0)</source> <translation></translation> </message> <message> <location line="+1"/> <source>Enforce transaction scripts to use canonical PUSH operators (default: 1)</source> <translation></translation> </message> <message> <location line="+2"/> <source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source> <translation type="unfinished"></translation> </message> <message> <location line="+3"/> <source>Upgrade wallet to latest format</source> <translation>Upgrade wallet to latest format</translation> </message> <message> <location line="+1"/> <source>Set key pool size to &lt;n&gt; (default: 100)</source> <translation>Set key pool size to &lt;n&gt; (default: 100)</translation> </message> <message> <location line="+1"/> <source>Rescan the block chain for missing wallet transactions</source> <translation>Rescan the block chain for missing wallet transactions</translation> </message> <message> <location line="+2"/> <source>How many blocks to check at startup (default: 2500, 0 = all)</source> <translation>How many blocks to check at startup (default: 2500, 0 = all)</translation> </message> <message> <location line="+1"/> <source>How thorough the block verification is (0-6, default: 1)</source> <translation>How thorough the block verification is (0-6, default: 1)</translation> </message> <message> <location line="+1"/> <source>Imports blocks from external blk000?.dat file</source> <translation>Imports blocks from external blk000?.dat file</translation> </message> <message> <location line="+8"/> <source>Use OpenSSL (https) for JSON-RPC connections</source> <translation>Use OpenSSL (https) for JSON-RPC connections</translation> </message> <message> <location line="+1"/> <source>Server certificate file (default: server.cert)</source> <translation>Server certificate file (default: server.cert)</translation> </message> <message> <location line="+1"/> <source>Server private key (default: server.pem)</source> <translation>Server private key (default: server.pem)</translation> </message> <message> <location line="+1"/> <source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source> <translation>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</translation> 
</message> <message> <location line="+53"/> <source>Error: Wallet unlocked for staking only, unable to create transaction.</source> <translation type="unfinished"></translation> </message> <message> <location line="+18"/> <source>WARNING: Invalid checkpoint found! Displayed transactions may not be correct! You may need to upgrade, or notify developers.</source> <translation></translation> </message> <message> <location line="-158"/> <source>This help message</source> <translation>This help message</translation> </message> <message> <location line="+95"/> <source>Wallet %s resides outside data directory %s.</source> <translation>Wallet %s resides outside data directory %s.</translation> </message> <message> <location line="+1"/> <source>Cannot obtain a lock on data directory %s. Neutron is probably already running.</source> <translation>Cannot obtain a lock on data directory %s. Neutron is probably already running.</translation> </message> <message> <location line="-98"/> <source>Neutron</source> <translation>Neutron</translation> </message> <message> <location line="+140"/> <source>Unable to bind to %s on this computer (bind returned error %d, %s)</source> <translation>Unable to bind to %s on this computer (bind returned error %d, %s)</translation> </message> <message> <location line="-130"/> <source>Connect through socks proxy</source> <translation>Connect through socks proxy</translation> </message> <message> <location line="+3"/> <source>Allow DNS lookups for -addnode, -seednode and -connect</source> <translation>Allow DNS lookups for -addnode, -seednode and -connect</translation> </message> <message> <location line="+122"/> <source>Loading addresses...</source> <translation>Loading addresses...</translation> </message> <message> <location line="-15"/> <source>Error loading blkindex.dat</source> <translation>Error loading blkindex.dat</translation> </message> <message> <location line="+2"/> <source>Error loading wallet.dat: Wallet corrupted</source> <translation>Error loading wallet.dat: Wallet corrupted</translation> </message> <message> <location line="+4"/> <source>Error loading wallet.dat: Wallet requires newer version of Neutron</source> <translation>Error loading wallet.dat: Wallet requires newer version of Neutron</translation> </message> <message> <location line="+1"/> <source>Wallet needed to be rewritten: restart Neutron to complete</source> <translation>Wallet needed to be rewritten: restart Neutron to complete</translation> </message> <message> <location line="+1"/> <source>Error loading wallet.dat</source> <translation>Error loading wallet.dat</translation> </message> <message> <location line="-16"/> <source>Invalid -proxy address: &apos;%s&apos;</source> <translation>Invalid -proxy address: &apos;%s&apos;</translation> </message> <message> <location line="-1"/> <source>Unknown network specified in -onlynet: &apos;%s&apos;</source> <translation>Unknown network specified in -onlynet: &apos;%s&apos;</translation> </message> <message> <location line="-1"/> <source>Unknown -socks proxy version requested: %i</source> <translation>Unknown -socks proxy version requested: %i</translation> </message> <message> <location line="+4"/> <source>Cannot resolve -bind address: &apos;%s&apos;</source> <translation>Cannot resolve -bind address: &apos;%s&apos;</translation> </message> <message> <location line="+2"/> <source>Cannot resolve -externalip address: &apos;%s&apos;</source> <translation>Cannot resolve -externalip address: &apos;%s&apos;</translation> </message> <message> <location 
line="-24"/> <source>Invalid amount for -paytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Invalid amount for -paytxfee=&lt;amount&gt;: &apos;%s&apos;</translation> </message> <message> <location line="+44"/> <source>Error: could not start node</source> <translation>Error: could not start node</translation> </message> <message> <location line="+11"/> <source>Sending...</source> <translation>Sending...</translation> </message> <message> <location line="+5"/> <source>Invalid amount</source> <translation>Invalid amount</translation> </message> <message> <location line="+1"/> <source>Insufficient funds</source> <translation>Insufficient funds</translation> </message> <message> <location line="-34"/> <source>Loading block index...</source> <translation>Loading block index...</translation> </message> <message> <location line="-103"/> <source>Add a node to connect to and attempt to keep the connection open</source> <translation>Add a node to connect to and attempt to keep the connection open</translation> </message> <message> <location line="+122"/> <source>Unable to bind to %s on this computer. Neutron is probably already running.</source> <translation>Unable to bind to %s on this computer. Neutron is probably already running.</translation> </message> <message> <location line="-97"/> <source>Fee per KB to add to transactions you send</source> <translation>Fee per KB to add to transactions you send</translation> </message> <message> <location line="+55"/> <source>Invalid amount for -mininput=&lt;amount&gt;: &apos;%s&apos;</source> <translation></translation> </message> <message> <location line="+25"/> <source>Loading wallet...</source> <translation>Loading wallet...</translation> </message> <message> <location line="+8"/> <source>Cannot downgrade wallet</source> <translation>Cannot downgrade wallet</translation> </message> <message> <location line="+1"/> <source>Cannot initialize keypool</source> <translation>Cannot initialize keypool</translation> </message> <message> <location line="+1"/> <source>Cannot write default address</source> <translation>Cannot write default address</translation> </message> <message> <location line="+1"/> <source>Rescanning...</source> <translation>Rescanning...</translation> </message> <message> <location line="+5"/> <source>Done loading</source> <translation>Done loading</translation> </message> <message> <location line="-167"/> <source>To use the %s option</source> <translation>To use the %s option</translation> </message> <message> <location line="+14"/> <source>Error</source> <translation>Error</translation> </message> <message> <location line="+6"/> <source>You must set rpcpassword=&lt;password&gt; in the configuration file: %s If the file does not exist, create it with owner-readable-only file permissions.</source> <translation>You must set rpcpassword=&lt;password&gt; in the configuration file: %s If the file does not exist, create it with owner-readable-only file permissions.</translation> </message> </context> </TS>
{ "content_hash": "4e2f243d9bb05de239e26b08c9559358", "timestamp": "", "source": "github", "line_count": 3330, "max_line_length": 405, "avg_line_length": 37.12792792792793, "alnum_prop": 0.6158319583292892, "repo_name": "audiotopix/Neutron", "id": "d0bb7ee5bd32f65a24770693c14a4e9b5d383a4e", "size": "123639", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/qt/locale/bitcoin_en.ts", "mode": "33188", "license": "mit", "language": [ { "name": "Assembly", "bytes": "51312" }, { "name": "C", "bytes": "31428" }, { "name": "C++", "bytes": "2915631" }, { "name": "CSS", "bytes": "1127" }, { "name": "HTML", "bytes": "50620" }, { "name": "Makefile", "bytes": "429161" }, { "name": "NSIS", "bytes": "5781" }, { "name": "Objective-C", "bytes": "858" }, { "name": "Objective-C++", "bytes": "5931" }, { "name": "Python", "bytes": "69973" }, { "name": "QMake", "bytes": "16825" }, { "name": "Roff", "bytes": "12553" }, { "name": "Shell", "bytes": "40379" } ], "symlink_target": "" }
use std::marker;

struct Heap;

struct Vec<T, A = Heap>(
    marker::PhantomData<(T,A)>);

fn main() {
    let _: Vec<isize, Heap, bool>;
    //~^ ERROR wrong number of type arguments: expected at most 2, found 3 [E0107]
}
{ "content_hash": "38e609bf4abfc641a1ad815bc41cfeda", "timestamp": "", "source": "github", "line_count": 11, "max_line_length": 82, "avg_line_length": 20.272727272727273, "alnum_prop": 0.6188340807174888, "repo_name": "GBGamer/rust", "id": "84c3a2f2230242022c41f01127989deaf158f3f0", "size": "690", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "src/test/ui/generic/generic-type-more-params-with-defaults.rs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ANTLR", "bytes": "27014" }, { "name": "Awk", "bytes": "271" }, { "name": "C", "bytes": "391581" }, { "name": "C++", "bytes": "64107" }, { "name": "CSS", "bytes": "27753" }, { "name": "HTML", "bytes": "496" }, { "name": "JavaScript", "bytes": "45051" }, { "name": "Lex", "bytes": "9270" }, { "name": "Makefile", "bytes": "306032" }, { "name": "PHP", "bytes": "265" }, { "name": "Pascal", "bytes": "13456" }, { "name": "Puppet", "bytes": "3296" }, { "name": "Python", "bytes": "197397" }, { "name": "Rust", "bytes": "21265061" }, { "name": "Shell", "bytes": "270571" }, { "name": "Yacc", "bytes": "81068" } ], "symlink_target": "" }
ACCEPTED

#### According to
International Plant Names Index

#### Published in
null

#### Original name
null

### Remarks
null
{ "content_hash": "705c8305e1a853ce7a92e99ae7977982", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 31, "avg_line_length": 9.692307692307692, "alnum_prop": 0.7063492063492064, "repo_name": "mdoering/backbone", "id": "2caa08ecfe5d462545747e5f1b6c00e5ea1b7cca", "size": "179", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Magnoliopsida/Lamiales/Lamiaceae/Phlomis/Phlomis vierhapperi/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
<?php

class ItemTest extends PHPUnit\Framework\TestCase
{
    /**
     * Run a test using a sprintf template and data
     *
     * @param string $template
     */
    protected function checkFromTemplate($template, $data)
    {
        if (!is_array($data)) {
            $data = [$data];
        }

        $xml = vsprintf($template, $data);

        $feed = new SimplePie();
        $feed->set_raw_data($xml);
        $feed->enable_cache(false);
        $feed->init();

        return $feed;
    }

    public static function titleprovider()
    {
        return [
            ['Feed Title', 'Feed Title'],
            // RSS Profile tests
            ['AT&amp;T', 'AT&amp;T'],
            ['AT&#x26;T', 'AT&amp;T'],
            ["Bill &amp; Ted's Excellent Adventure", "Bill &amp; Ted's Excellent Adventure"],
            ["Bill &#x26; Ted's Excellent Adventure", "Bill &amp; Ted's Excellent Adventure"],
            ['The &amp; entity', 'The &amp; entity'],
            ['The &#x26; entity', 'The &amp; entity'],
            ['The &amp;amp; entity', 'The &amp;amp; entity'],
            ['The &#x26;amp; entity', 'The &amp;amp; entity'],
            ['I &lt;3 Phil Ringnalda', 'I &lt;3 Phil Ringnalda'],
            ['I &#x3C;3 Phil Ringnalda', 'I &lt;3 Phil Ringnalda'],
            ['A &lt; B', 'A &lt; B'],
            ['A &#x3C; B', 'A &lt; B'],
            ['A&lt;B', 'A&lt;B'],
            ['A&#x3C;B', 'A&lt;B'],
            ["Nice &lt;gorilla&gt; what's he weigh?", "Nice &lt;gorilla&gt; what's he weigh?"],
            ["Nice &#x3C;gorilla&gt; what's he weigh?", "Nice &lt;gorilla&gt; what's he weigh?"],
        ];
    }

    /**
     * @dataProvider titleprovider
     */
    public function testTitleRSS20($title, $expected)
    {
        $data = '<rss version="2.0"> <channel> <title>%s</title> </channel> </rss>';
        $feed = $this->checkFromTemplate($data, $title);
        $this->assertSame($expected, $feed->get_title());
    }

    /**
     * @dataProvider titleprovider
     */
    public function testTitleRSS20WithDC10($title, $expected)
    {
        $data = '<rss version="2.0" xmlns:dc="http://purl.org/dc/elements/1.0/"> <channel> <dc:title>%s</dc:title> </channel> </rss>';
        $feed = $this->checkFromTemplate($data, $title);
        $this->assertSame($expected, $feed->get_title());
    }

    /**
     * @dataProvider titleprovider
     */
    public function testTitleRSS20WithDC11($title, $expected)
    {
        $data = '<rss version="2.0" xmlns:dc="http://purl.org/dc/elements/1.1/"> <channel> <dc:title>%s</dc:title> </channel> </rss>';
        $feed = $this->checkFromTemplate($data, $title);
        $this->assertSame($expected, $feed->get_title());
    }

    /**
     * @dataProvider titleprovider
     */
    public function testTitleRSS20WithAtom03($title, $expected)
    {
        $data = '<rss version="2.0" xmlns:a="http://purl.org/atom/ns#"> <channel> <a:title>%s</a:title> </channel> </rss>';
        $feed = $this->checkFromTemplate($data, $title);
        $this->assertSame($expected, $feed->get_title());
    }

    /**
     * @dataProvider titleprovider
     */
    public function testTitleRSS20WithAtom10($title, $expected)
    {
        $data = '<rss version="2.0" xmlns:a="http://www.w3.org/2005/Atom"> <channel> <a:title>%s</a:title> </channel> </rss>';
        $feed = $this->checkFromTemplate($data, $title);
        $this->assertSame($expected, $feed->get_title());
    }

    /**
     * Based on a test from old bug 18
     *
     * @dataProvider titleprovider
     */
    public function testTitleRSS20WithImageTitle($title, $expected)
    {
        $data = '<rss version="2.0"> <channel> <title>%s</title> <image> <title>Image title</title> </image> </channel> </rss>';
        $feed = $this->checkFromTemplate($data, $title);
        $this->assertSame($expected, $feed->get_title());
    }

    /**
     * Based on a test from old bug 18
     *
     * @dataProvider titleprovider
     */
    public function testTitleRSS20WithImageTitleReversed($title, $expected)
    {
        $data = '<rss version="2.0"> <channel> <image> <title>Image title</title> </image> <title>%s</title> </channel> </rss>';
        $feed = $this->checkFromTemplate($data, $title);
        $this->assertSame($expected, $feed->get_title());
    }

    public function testItemWithEmptyContent()
    {
        $data = '<rss version="2.0" xmlns:content="http://purl.org/rss/1.0/modules/content/"> <channel> <item> <description>%s</description> <content:encoded><![CDATA[ <script> ]]></content:encoded> </item> </channel> </rss>';
        $content = 'item description';

        $feed = $this->checkFromTemplate($data, $content);
        $item = $feed->get_item();
        $this->assertSame($content, $item->get_content());
    }
}
{ "content_hash": "2396b7ffbc4ec4e636a4a2a6f783fdfd", "timestamp": "", "source": "github", "line_count": 181, "max_line_length": 97, "avg_line_length": 26.69060773480663, "alnum_prop": 0.5450217346305113, "repo_name": "FreshRSS/simplepie", "id": "117330d1117d4147b3001589c835da95c22b64f8", "size": "6889", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "tests/ItemTest.php", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "HTML", "bytes": "5746" }, { "name": "PHP", "bytes": "2280453" } ], "symlink_target": "" }
""" pyhik.constants ~~~~~~~~~~~~~~~~~~~~ Constants list Copyright (c) 2016-2021 John Mihalic <https://github.com/mezz64> Licensed under the MIT license. """ MAJOR_VERSION = 0 MINOR_VERSION = 3 SUB_MINOR_VERSION = 1 __version__ = '{}.{}.{}'.format( MAJOR_VERSION, MINOR_VERSION, SUB_MINOR_VERSION) CONNECT_TIMEOUT = 10 READ_TIMEOUT = 60 DEFAULT_PORT = 80 XML_ENCODING = 'UTF-8' XML_NAMESPACE = 'http://www.hikvision.com/ver20/XMLSchema' DEFAULT_HEADERS = { 'Content-Type': "application/xml; charset='UTF-8'", 'Accept': "*/*" } SENSOR_MAP = { 'vmd': 'Motion', 'linedetection': 'Line Crossing', 'fielddetection': 'Field Detection', 'tamperdetection': 'Tamper Detection', 'shelteralarm': 'Tamper Detection', 'defocus': 'Tamper Detection', 'diskfull': 'Disk Full', 'diskerror': 'Disk Error', 'nicbroken': 'Net Interface Broken', 'ipconflict': 'IP Conflict', 'illaccess': 'Illegal Access', 'videomismatch': 'Video Mismatch', 'badvideo': 'Bad Video', 'pir': 'PIR Alarm', 'facedetection': 'Face Detection', 'scenechangedetection': 'Scene Change Detection', 'io': 'I/O', 'unattendedbaggage': 'Unattended Baggage', 'attendedbaggage': 'Attended Baggage', 'recordingfailure': 'Recording Failure', 'regionexiting': "Exiting Region", 'regionentrance': "Entering Region", 'duration': "Ongoing Events" } # The name 'id' should always be last CHANNEL_NAMES = ['dynVideoInputChannelID', 'videoInputChannelID', 'dynInputIOPortID', 'inputIOPortID', 'id'] ID_TYPES = ['channelID', 'dynChannelID', 'inputIOPortID', 'dynInputIOPortID'] CAM_DEVICE = 'CAM' NVR_DEVICE = 'NVR' CONTEXT_INFO = 'INFO' CONTEXT_TRIG = 'TRIGGERS' CONTEXT_ALERT = 'ALERTS' CONTEXT_MOTION = 'MOTION'
{ "content_hash": "b3c3f6a3891ebffb55c472c09aeef341", "timestamp": "", "source": "github", "line_count": 67, "max_line_length": 65, "avg_line_length": 27, "alnum_prop": 0.6434494195688225, "repo_name": "mezz64/pyHik", "id": "fbad3e15501d4087647e6ab40dab0f41c6174dc4", "size": "1809", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "pyhik/constants.py", "mode": "33261", "license": "mit", "language": [ { "name": "Python", "bytes": "33500" } ], "symlink_target": "" }
package kr.ac.kumoh.railroApplication.classes; import java.util.ArrayList; /** * Created by Woocha on 2015-09-07. */ public class WeatherConditionList { public ArrayList<WeatherCondition> mListSnow; public ArrayList<WeatherCondition> mListClearSky; public ArrayList<WeatherCondition> mListFew_Clouds; public ArrayList<WeatherCondition> mListScattered_Clouds; public ArrayList<WeatherCondition> mListBroken_Clouds; public ArrayList<WeatherCondition> mListShower_Rain; public ArrayList<WeatherCondition> mListRain; public ArrayList<WeatherCondition> mListThunderStorm; public ArrayList<WeatherCondition> mListMist; public ArrayList<WeatherCondition> mListWind; public ArrayList<WeatherCondition> mListSnowToHangeul; public ArrayList<WeatherCondition> mListClearSkyToHangeul; public ArrayList<WeatherCondition> mListFew_CloudsToHangeul; public ArrayList<WeatherCondition> mListScattered_CloudsToHangeul; public ArrayList<WeatherCondition> mListBroken_CloudsToHangeul; public ArrayList<WeatherCondition> mListShower_RainToHangeul; public ArrayList<WeatherCondition> mListRainToHangeul; public ArrayList<WeatherCondition> mListThunderStormToHangeul; public ArrayList<WeatherCondition> mListMistToHangeul; public ArrayList<WeatherCondition> mListWindToHangeul; public WeatherConditionList() { //http://openweathermap.org/weather-conditions mListThunderStorm = new ArrayList<WeatherCondition>(); // 11 mListMist = new ArrayList<WeatherCondition>(); // 50 mListRain = new ArrayList<WeatherCondition>(); // 10 mListShower_Rain = new ArrayList<WeatherCondition>(); // 09 mListBroken_Clouds = new ArrayList<WeatherCondition>(); // 04 mListScattered_Clouds = new ArrayList<WeatherCondition>(); // 03 mListFew_Clouds = new ArrayList<WeatherCondition>(); // 02 mListClearSky = new ArrayList<WeatherCondition>(); // 01 mListSnow = new ArrayList<WeatherCondition>(); // 13 mListWind = new ArrayList<WeatherCondition>(); mListThunderStormToHangeul = new ArrayList<WeatherCondition>(); // 11 mListMistToHangeul = new ArrayList<WeatherCondition>(); // 50 mListRainToHangeul = new ArrayList<WeatherCondition>(); // 10 mListShower_RainToHangeul = new ArrayList<WeatherCondition>(); // 09 mListBroken_CloudsToHangeul = new ArrayList<WeatherCondition>(); // 04 mListScattered_CloudsToHangeul = new ArrayList<WeatherCondition>(); // 03 mListFew_CloudsToHangeul = new ArrayList<WeatherCondition>(); // 02 mListClearSkyToHangeul = new ArrayList<WeatherCondition>(); // 01 mListSnowToHangeul = new ArrayList<WeatherCondition>(); // 13 mListWindToHangeul = new ArrayList<WeatherCondition>(); //-------------ThunderStrom------------------// mListThunderStorm.add(new WeatherCondition("200","thunderstorm with light rain")); mListThunderStorm.add(new WeatherCondition("201","thunderstorm with rain")); mListThunderStorm.add(new WeatherCondition("202","thunderstorm with heavy rain")); mListThunderStorm.add(new WeatherCondition("210","light thunderstorm")); mListThunderStorm.add(new WeatherCondition("211","thunderstorm")); mListThunderStorm.add(new WeatherCondition("212","heavy thunderstorm")); mListThunderStorm.add(new WeatherCondition("221","ragged thunderstorm")); mListThunderStorm.add(new WeatherCondition("230","thunderstorm with light drizzle")); mListThunderStorm.add(new WeatherCondition("231","thunderstorm with drizzle")); mListThunderStorm.add(new WeatherCondition("232","thunderstorm with heavy drizzle")); mListThunderStormToHangeul.add(new WeatherCondition("200","번개와 보슬비")); mListThunderStormToHangeul.add(new WeatherCondition("201","번개와 비")); 
mListThunderStormToHangeul.add(new WeatherCondition("202","번개와 집중 호우")); mListThunderStormToHangeul.add(new WeatherCondition("210","천둥")); mListThunderStormToHangeul.add(new WeatherCondition("211","천둥 번개")); mListThunderStormToHangeul.add(new WeatherCondition("212","강한 천둥 번개")); mListThunderStormToHangeul.add(new WeatherCondition("221","매우 강한 천둥 번개")); mListThunderStormToHangeul.add(new WeatherCondition("230","번개와 가벼운 이슬비")); mListThunderStormToHangeul.add(new WeatherCondition("231","번개와 이슬비")); mListThunderStormToHangeul.add(new WeatherCondition("232","번개와 집중 호우")); //------------Drizzle-------------------// mListShower_Rain.add(new WeatherCondition("300","light intensity drizzle")); mListShower_Rain.add(new WeatherCondition("301","drizzle")); mListShower_Rain.add(new WeatherCondition("302","heavy intensity drizzle")); mListShower_Rain.add(new WeatherCondition("310","light intensity drizzle rain")); mListShower_Rain.add(new WeatherCondition("311","drizzle rain")); mListShower_Rain.add(new WeatherCondition("312","heavy intensity drizzle rain")); mListShower_Rain.add(new WeatherCondition("313","shower rain and drizzle")); mListShower_Rain.add(new WeatherCondition("314","heavy shower rain and drizzle")); mListShower_Rain.add(new WeatherCondition("321","shower drizzle")); mListShower_RainToHangeul.add(new WeatherCondition("300","약한 이슬비")); mListShower_RainToHangeul.add(new WeatherCondition("301","이슬비")); mListShower_RainToHangeul.add(new WeatherCondition("302","강한 이슬비")); mListShower_RainToHangeul.add(new WeatherCondition("310","약한 이슬비")); mListShower_RainToHangeul.add(new WeatherCondition("311","이슬비")); mListShower_RainToHangeul.add(new WeatherCondition("312","강한 이슬비")); mListShower_RainToHangeul.add(new WeatherCondition("313","소나기")); mListShower_RainToHangeul.add(new WeatherCondition("314","강한 소나기")); mListShower_RainToHangeul.add(new WeatherCondition("321","소나기")); //------------Rain----------------------// mListRain.add(new WeatherCondition("500","light rain")); mListRain.add(new WeatherCondition("501","moderate rain")); mListRain.add(new WeatherCondition("502","heavy intensity rain")); mListRain.add(new WeatherCondition("503","very heavy rain")); mListRain.add(new WeatherCondition("504","extreme rain")); mListSnow.add(new WeatherCondition("511","freezing rain")); mListShower_Rain.add(new WeatherCondition("520","light intensity shower rain")); mListShower_Rain.add(new WeatherCondition("521","shower rain")); mListShower_Rain.add(new WeatherCondition("522","heavy intensity shower rain")); mListShower_Rain.add(new WeatherCondition("531","ragged shower rain")); mListRainToHangeul.add(new WeatherCondition("500","가벼운 비")); mListRainToHangeul.add(new WeatherCondition("501","비")); mListRainToHangeul.add(new WeatherCondition("502","강한 비")); mListRainToHangeul.add(new WeatherCondition("503","집중 호우")); mListRainToHangeul.add(new WeatherCondition("504","집중 호우")); mListSnowToHangeul.add(new WeatherCondition("511","어는 비")); mListShower_RainToHangeul.add(new WeatherCondition("520","가벼운 소나기")); mListShower_RainToHangeul.add(new WeatherCondition("521","소나기")); mListShower_RainToHangeul.add(new WeatherCondition("522","강한 소나기")); mListShower_RainToHangeul.add(new WeatherCondition("531","매우 강한 소나기")); //------------Snow----------------------// mListSnow.add(new WeatherCondition("600","light snow")); mListSnow.add(new WeatherCondition("601","snow")); mListSnow.add(new WeatherCondition("602","heavy snow")); mListSnow.add(new WeatherCondition("611","sleet")); mListSnow.add(new WeatherCondition("612","shower 
sleet")); mListSnow.add(new WeatherCondition("615","light rain and snow")); mListSnow.add(new WeatherCondition("616","rain and snow")); mListSnow.add(new WeatherCondition("620","light shower snow")); mListSnow.add(new WeatherCondition("621","shower snow")); mListSnow.add(new WeatherCondition("622","heavy shower snow")); mListSnowToHangeul.add(new WeatherCondition("600","약한 눈")); mListSnowToHangeul.add(new WeatherCondition("601","눈")); mListSnowToHangeul.add(new WeatherCondition("602","거센 눈")); mListSnowToHangeul.add(new WeatherCondition("611","진눈 깨비")); mListSnowToHangeul.add(new WeatherCondition("612","급 진눈 깨비")); mListSnowToHangeul.add(new WeatherCondition("615","약한 눈과 비")); mListSnowToHangeul.add(new WeatherCondition("616","눈과 비")); mListSnowToHangeul.add(new WeatherCondition("620","눈")); mListSnowToHangeul.add(new WeatherCondition("621","소낙눈")); mListSnowToHangeul.add(new WeatherCondition("622","강한 소낙눈")); //------------Atmosphere----------------------// mListMist.add(new WeatherCondition("701","mist")); mListMist.add(new WeatherCondition("711","smoke")); mListMist.add(new WeatherCondition("721","haze")); mListMist.add(new WeatherCondition("731","sand, dust whirls")); mListMist.add(new WeatherCondition("741","fog")); mListMist.add(new WeatherCondition("751","sand")); mListMist.add(new WeatherCondition("761","dust")); mListMist.add(new WeatherCondition("762","volcanic ash")); mListMist.add(new WeatherCondition("771","squalls")); mListMist.add(new WeatherCondition("781","tornado")); mListMistToHangeul.add(new WeatherCondition("701","안개")); mListMistToHangeul.add(new WeatherCondition("711","연기")); mListMistToHangeul.add(new WeatherCondition("721","실안개")); mListMistToHangeul.add(new WeatherCondition("731","황사 바람")); mListMistToHangeul.add(new WeatherCondition("741","안개")); mListMistToHangeul.add(new WeatherCondition("751","황사")); mListMistToHangeul.add(new WeatherCondition("761","황사")); mListMistToHangeul.add(new WeatherCondition("762","화산재")); mListMistToHangeul.add(new WeatherCondition("771","돌풍")); mListMistToHangeul.add(new WeatherCondition("781","태풍")); //------------clouds----------------------// mListClearSky.add(new WeatherCondition("800","clear sky")); mListFew_Clouds.add(new WeatherCondition("801","few clouds")); mListScattered_Clouds.add(new WeatherCondition("802","scattered clouds")); mListBroken_Clouds.add(new WeatherCondition("803","broken clouds")); mListBroken_Clouds.add(new WeatherCondition("804","overcast clouds")); mListClearSkyToHangeul.add(new WeatherCondition("800","맑은 하늘")); mListFew_CloudsToHangeul.add(new WeatherCondition("801","구름 조금")); mListScattered_CloudsToHangeul.add(new WeatherCondition("802","조각 구름")); mListBroken_CloudsToHangeul.add(new WeatherCondition("803","조각 구름")); mListBroken_CloudsToHangeul.add(new WeatherCondition("804","흐림")); // //------------Extreme----------------------// // mList.add(new WeatherCondition("900","tornado")); // mList.add(new WeatherCondition("901","tropical storm")); // mList.add(new WeatherCondition("902","hurricane")); // mList.add(new WeatherCondition("903","cold")); // mList.add(new WeatherCondition("904","hot")); // mList.add(new WeatherCondition("905","windy")); // mList.add(new WeatherCondition("906","hail")); // // //-----------------Additional----------// mListWind.add(new WeatherCondition("951","calm")); mListWind.add(new WeatherCondition("952","light breeze")); mListWind.add(new WeatherCondition("953","gentle breeze")); mListWind.add(new WeatherCondition("954","moderate breeze")); mListWind.add(new 
WeatherCondition("955","fresh breeze")); mListWind.add(new WeatherCondition("956","strong breeze")); mListWind.add(new WeatherCondition("957","high wind, near gale")); mListWind.add(new WeatherCondition("958","gale")); mListWind.add(new WeatherCondition("959","severe gale")); mListWind.add(new WeatherCondition("960","storm")); mListWind.add(new WeatherCondition("961","violent storm")); mListWind.add(new WeatherCondition("962","hurricane")); mListWindToHangeul.add(new WeatherCondition("951","바람 없음")); mListWindToHangeul.add(new WeatherCondition("952","산들 바람")); mListWindToHangeul.add(new WeatherCondition("953","gentle breeze")); mListWindToHangeul.add(new WeatherCondition("954","moderate breeze")); mListWindToHangeul.add(new WeatherCondition("955","fresh breeze")); mListWindToHangeul.add(new WeatherCondition("956","strong breeze")); mListWindToHangeul.add(new WeatherCondition("957","high wind, near gale")); mListWindToHangeul.add(new WeatherCondition("958","gale")); mListWindToHangeul.add(new WeatherCondition("959","severe gale")); mListWindToHangeul.add(new WeatherCondition("960","storm")); mListWindToHangeul.add(new WeatherCondition("961","violent storm")); mListWindToHangeul.add(new WeatherCondition("962","hurricane")); } public class WeatherCondition { String id; String meaning; public WeatherCondition(String id, String meaning) { this.id = id; this.meaning = meaning; } public String getId() { return id; } public String getMeaning() { return meaning; } } }
{ "content_hash": "9d78e056983d3ed1ce16d5c1add58b5b", "timestamp": "", "source": "github", "line_count": 239, "max_line_length": 93, "avg_line_length": 57.33054393305439, "alnum_prop": 0.6764705882352942, "repo_name": "kumohND/RailroApplication", "id": "c73b105792ba9a0561e4fcb96f7022cfa15e73e9", "size": "14140", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/src/main/java/kr/ac/kumoh/railroApplication/classes/WeatherConditionList.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "378900" } ], "symlink_target": "" }
/**
 * @file Simple glyph drawing
 * @author mengke01([email protected])
 */


define(
    function (require) {

        var ShapeConstructor = require('./Shape');
        var drawPath = require('../util/drawPath');

        var proto = {

            type: 'glyf',

            adjust: function (shape, camera) {
                var center = camera.center;
                var ratio = camera.ratio;
                shape.x = ratio * (shape.x - center.x) + center.x;
                shape.y = ratio * (shape.y - center.y) + center.y;
                return shape;
            },

            move: function (shape, mx, my) {
                shape.x += mx;
                shape.y += my;
                return shape;
            },

            getRect: function (shape) {
                return false;
            },

            isIn: function (shape, x, y) {
                return false;
            },

            draw: function (ctx, shape, camera) {
                ctx.save();
                ctx.translate(shape.x, shape.y);
                ctx.scale(camera.scale, -camera.scale);

                var transform = shape.transform;
                ctx.transform(
                    transform.a,
                    transform.b,
                    transform.c,
                    transform.d,
                    transform.e,
                    transform.f
                );

                var contours = shape.glyf.contours;
                for (var i = 0, l = contours.length; i < l; i++) {
                    drawPath(ctx, contours[i]);
                }

                ctx.restore();
            }
        };

        return ShapeConstructor.derive(proto);
    }
);
{ "content_hash": "40db298e63846cddd2abba05e049bc2f", "timestamp": "", "source": "github", "line_count": 72, "max_line_length": 66, "avg_line_length": 23.36111111111111, "alnum_prop": 0.4084423305588585, "repo_name": "moyogo/fonteditor", "id": "2eb15c4f8142fd6d1bf993a31590dc03eafcd553", "size": "1694", "binary": false, "copies": "7", "ref": "refs/heads/master", "path": "src/render/shape/Glyf.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "34237" }, { "name": "HTML", "bytes": "60240" }, { "name": "JavaScript", "bytes": "1382608" }, { "name": "PHP", "bytes": "7600" }, { "name": "Shell", "bytes": "908" }, { "name": "Smarty", "bytes": "38787" } ], "symlink_target": "" }
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2011.12.14 at 03:30:44 PM CET // package ch.epfl.bbp.uima.xml.archivearticle3; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAnyAttribute; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlID; import javax.xml.bind.annotation.XmlIDREF; import javax.xml.bind.annotation.XmlSchemaType; import javax.xml.bind.annotation.XmlType; import javax.xml.bind.annotation.adapters.CollapsedStringAdapter; import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter; import javax.xml.namespace.QName; /** * <p>Java class for prsubset.type complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType name="prsubset.type"> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;attGroup ref="{http://www.w3.org/1998/Math/MathML}prsubset.attlist"/> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "prsubset.type", namespace = "http://www.w3.org/1998/Math/MathML") public class PrsubsetType { @XmlAttribute(name = "class") @XmlSchemaType(name = "NMTOKENS") protected List<String> clazz; @XmlAttribute(name = "style") protected String style; @XmlAttribute(name = "xref") @XmlIDREF @XmlSchemaType(name = "IDREF") protected Object xref; @XmlAttribute(name = "id") @XmlJavaTypeAdapter(CollapsedStringAdapter.class) @XmlID @XmlSchemaType(name = "ID") protected String id; @XmlAttribute(name = "href", namespace = "http://www.w3.org/1999/xlink") @XmlSchemaType(name = "anySimpleType") protected String href; @XmlAttribute(name = "encoding") protected String encoding; @XmlAttribute(name = "definitionURL") @XmlSchemaType(name = "anyURI") protected String definitionURL; @XmlAnyAttribute private Map<QName, String> otherAttributes = new HashMap<QName, String>(); /** * Gets the value of the clazz property. * * <p> * This accessor method returns a reference to the live list, * not a snapshot. Therefore any modification you make to the * returned list will be present inside the JAXB object. * This is why there is not a <CODE>set</CODE> method for the clazz property. * * <p> * For example, to add a new item, do as follows: * <pre> * getClazz().add(newItem); * </pre> * * * <p> * Objects of the following type(s) are allowed in the list * {@link String } * * */ public List<String> getClazz() { if (clazz == null) { clazz = new ArrayList<String>(); } return this.clazz; } /** * Gets the value of the style property. * * @return * possible object is * {@link String } * */ public String getStyle() { return style; } /** * Sets the value of the style property. * * @param value * allowed object is * {@link String } * */ public void setStyle(String value) { this.style = value; } /** * Gets the value of the xref property. * * @return * possible object is * {@link Object } * */ public Object getXref() { return xref; } /** * Sets the value of the xref property. 
* * @param value * allowed object is * {@link Object } * */ public void setXref(Object value) { this.xref = value; } /** * Gets the value of the id property. * * @return * possible object is * {@link String } * */ public String getId() { return id; } /** * Sets the value of the id property. * * @param value * allowed object is * {@link String } * */ public void setId(String value) { this.id = value; } /** * Gets the value of the href property. * * @return * possible object is * {@link String } * */ public String getHref() { return href; } /** * Sets the value of the href property. * * @param value * allowed object is * {@link String } * */ public void setHref(String value) { this.href = value; } /** * Gets the value of the encoding property. * * @return * possible object is * {@link String } * */ public String getEncoding() { return encoding; } /** * Sets the value of the encoding property. * * @param value * allowed object is * {@link String } * */ public void setEncoding(String value) { this.encoding = value; } /** * Gets the value of the definitionURL property. * * @return * possible object is * {@link String } * */ public String getDefinitionURL() { return definitionURL; } /** * Sets the value of the definitionURL property. * * @param value * allowed object is * {@link String } * */ public void setDefinitionURL(String value) { this.definitionURL = value; } /** * Gets a map that contains attributes that aren't bound to any typed property on this class. * * <p> * the map is keyed by the name of the attribute and * the value is the string value of the attribute. * * the map returned by this method is live, and you can add new attribute * by updating the map directly. Because of this design, there's no setter. * * * @return * always non-null */ public Map<QName, String> getOtherAttributes() { return otherAttributes; } }
{ "content_hash": "ccad74331760873f44ec6a0cdd3f7c02", "timestamp": "", "source": "github", "line_count": 265, "max_line_length": 109, "avg_line_length": 24.739622641509435, "alnum_prop": 0.5817571690054911, "repo_name": "BlueBrain/bluima", "id": "2c5ac5717c84f3b9813ab784ca89d06938d86642", "size": "6556", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "modules/bluima_xml/src/main/java/ch/epfl/bbp/uima/xml/archivearticle3/PrsubsetType.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ApacheConf", "bytes": "962" }, { "name": "C", "bytes": "84537" }, { "name": "CSS", "bytes": "61917" }, { "name": "Groff", "bytes": "2511" }, { "name": "HTML", "bytes": "240735" }, { "name": "Java", "bytes": "15064102" }, { "name": "JavaScript", "bytes": "365145" }, { "name": "Makefile", "bytes": "964" }, { "name": "Matlab", "bytes": "587" }, { "name": "Perl", "bytes": "9852" }, { "name": "Python", "bytes": "910928" }, { "name": "R", "bytes": "16070" }, { "name": "Scala", "bytes": "117040" }, { "name": "Shell", "bytes": "1481466" }, { "name": "Tcl", "bytes": "15" }, { "name": "TeX", "bytes": "61732" }, { "name": "Web Ontology Language", "bytes": "6749276" }, { "name": "XSLT", "bytes": "19324" } ], "symlink_target": "" }
<?php /** * IdentityChangeWhatsAppApi * PHP version 7.2 * * @category Class * @package Infobip * @author Infobip Support * @link https://www.infobip.com */ /** * Infobip Client API Libraries OpenAPI Specification * * OpenAPI specification containing public endpoints supported in client API libraries. * * Contact: [email protected] * * NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). * Do not edit the class manually. */ namespace Infobip\Api; use GuzzleHttp\Client; use GuzzleHttp\ClientInterface; use GuzzleHttp\Exception\RequestException; use GuzzleHttp\Psr7\MultipartStream; use GuzzleHttp\Psr7\Request; use GuzzleHttp\Psr7\Utils; use GuzzleHttp\RequestOptions; use Infobip\ApiException; use Infobip\Configuration; use Infobip\HeaderSelector; use Infobip\ObjectSerializer; /** * IdentityChangeWhatsAppApi Class Doc Comment * * @category Class * @package Infobip * @author Infobip Support * @link https://www.infobip.com */ class IdentityChangeWhatsAppApi { /** * @var ClientInterface */ protected $client; /** * @var Configuration */ protected $config; /** * @var HeaderSelector */ protected $headerSelector; /** * @param ClientInterface $client * @param Configuration $config * @param HeaderSelector $selector */ public function __construct( ClientInterface $client = null, Configuration $config = null, HeaderSelector $selector = null ) { $this->client = $client ?: new Client(); $this->config = $config ?: new Configuration(); $this->headerSelector = $selector ?: new HeaderSelector(); } /** * @return Configuration */ public function getConfig() { return $this->config; } /** * Operation confirmWhatsappIdentity * * Confirm identity * * @param string $sender Registered WhatsApp sender number. Must be in international format. (required) * @param string $userNumber End user&#39;s number. Must be in international format. (required) * @param \Infobip\Model\WhatsAppIdentityConfirmation $whatsAppIdentityConfirmation whatsAppIdentityConfirmation (required) * * @throws \Infobip\ApiException on non-2xx response * @throws \InvalidArgumentException * @return void */ public function confirmWhatsappIdentity($sender, $userNumber, $whatsAppIdentityConfirmation) { $this->confirmWhatsappIdentityWithHttpInfo($sender, $userNumber, $whatsAppIdentityConfirmation); } /** * Operation confirmWhatsappIdentityWithHttpInfo * * Confirm identity * * @param string $sender Registered WhatsApp sender number. Must be in international format. (required) * @param string $userNumber End user&#39;s number. Must be in international format. (required) * @param \Infobip\Model\WhatsAppIdentityConfirmation $whatsAppIdentityConfirmation (required) * * @throws \Infobip\ApiException on non-2xx response * @throws \InvalidArgumentException * @return array of null, HTTP status code, HTTP response headers (array of strings) */ public function confirmWhatsappIdentityWithHttpInfo($sender, $userNumber, $whatsAppIdentityConfirmation) { $request = $this->confirmWhatsappIdentityRequest($sender, $userNumber, $whatsAppIdentityConfirmation); try { $options = $this->createHttpClientOption(); try { $response = $this->client->send($request, $options); return $this->confirmWhatsappIdentityResponse($response, $request->getUri()); } catch (RequestException $e) { throw new ApiException( "[{$e->getCode()}] {$e->getMessage()}", $e->getCode(), $e->getResponse() ? $e->getResponse()->getHeaders() : null, $e->getResponse() ? 
(string) $e->getResponse()->getBody() : null ); } } catch (ApiException $e) { throw $this->confirmWhatsappIdentityApiException($e); } } /** * Operation confirmWhatsappIdentityAsync * * Confirm identity * * @param string $sender Registered WhatsApp sender number. Must be in international format. (required) * @param string $userNumber End user&#39;s number. Must be in international format. (required) * @param \Infobip\Model\WhatsAppIdentityConfirmation $whatsAppIdentityConfirmation (required) * * @throws \InvalidArgumentException * @return \GuzzleHttp\Promise\PromiseInterface */ public function confirmWhatsappIdentityAsync($sender, $userNumber, $whatsAppIdentityConfirmation) { return $this->confirmWhatsappIdentityAsyncWithHttpInfo($sender, $userNumber, $whatsAppIdentityConfirmation) ->then( function ($response) { return $response[0]; } ); } /** * Operation confirmWhatsappIdentityAsyncWithHttpInfo * * Confirm identity * * @param string $sender Registered WhatsApp sender number. Must be in international format. (required) * @param string $userNumber End user&#39;s number. Must be in international format. (required) * @param \Infobip\Model\WhatsAppIdentityConfirmation $whatsAppIdentityConfirmation (required) * * @throws \InvalidArgumentException * @return \GuzzleHttp\Promise\PromiseInterface */ public function confirmWhatsappIdentityAsyncWithHttpInfo($sender, $userNumber, $whatsAppIdentityConfirmation) { $request = $this->confirmWhatsappIdentityRequest($sender, $userNumber, $whatsAppIdentityConfirmation); return $this->client ->sendAsync($request, $this->createHttpClientOption()) ->then( function ($response) use ($request) { return $this->confirmWhatsappIdentityResponse($response, $request->getUri()); }, function ($exception) { $statusCode = $exception->getCode(); $response = $exception->getResponse(); $e = new ApiException( "[{$statusCode}] {$exception->getMessage()}", $statusCode, $response ? $response->getHeaders() : null, $response ? (string) $response->getBody() : null ); throw $this->confirmWhatsappIdentityApiException($e); } ); } /** * Create request for operation 'confirmWhatsappIdentity' * * @param string $sender Registered WhatsApp sender number. Must be in international format. (required) * @param string $userNumber End user&#39;s number. Must be in international format. 
(required) * @param \Infobip\Model\WhatsAppIdentityConfirmation $whatsAppIdentityConfirmation (required) * * @throws \InvalidArgumentException * @return \GuzzleHttp\Psr7\Request */ protected function confirmWhatsappIdentityRequest($sender, $userNumber, $whatsAppIdentityConfirmation) { // verify the required parameter 'sender' is set if ($sender === null || (is_array($sender) && count($sender) === 0)) { throw new \InvalidArgumentException( 'Missing the required parameter $sender when calling confirmWhatsappIdentity' ); } // verify the required parameter 'userNumber' is set if ($userNumber === null || (is_array($userNumber) && count($userNumber) === 0)) { throw new \InvalidArgumentException( 'Missing the required parameter $userNumber when calling confirmWhatsappIdentity' ); } // verify the required parameter 'whatsAppIdentityConfirmation' is set if ($whatsAppIdentityConfirmation === null || (is_array($whatsAppIdentityConfirmation) && count($whatsAppIdentityConfirmation) === 0)) { throw new \InvalidArgumentException( 'Missing the required parameter $whatsAppIdentityConfirmation when calling confirmWhatsappIdentity' ); } $resourcePath = '/whatsapp/1/{sender}/contacts/{userNumber}/identity'; $formParams = []; $queryParams = []; $headerParams = []; $httpBody = ''; // path params if ($sender !== null) { $resourcePath = str_replace( '{' . 'sender' . '}', ObjectSerializer::toPathValue($sender), $resourcePath ); } // path params if ($userNumber !== null) { $resourcePath = str_replace( '{' . 'userNumber' . '}', ObjectSerializer::toPathValue($userNumber), $resourcePath ); } $headers = $this->headerSelector->selectHeaders( ['application/json'], ['application/json'] ); // for model (json/xml) if (isset($whatsAppIdentityConfirmation)) { if ($headers['Content-Type'] === 'application/json') { $httpBody = \GuzzleHttp\json_encode(ObjectSerializer::sanitizeForSerialization($whatsAppIdentityConfirmation)); } else { $httpBody = $whatsAppIdentityConfirmation; } } elseif (count($formParams) > 0) { if ($headers['Content-Type'] === 'multipart/form-data') { $boundary = '----'.hash('sha256', uniqid('', true)); $headers['Content-Type'] .= '; boundary=' . $boundary; $multipartContents = []; foreach ($formParams as $formParamName => $formParamValue) { $formParamValueItems = is_array($formParamValue) ? $formParamValue : [$formParamValue]; foreach ($formParamValueItems as $formParamValueItem) { $multipartContents[] = [ 'name' => $formParamName, 'contents' => $formParamValueItem ]; } } // for HTTP post (form) $httpBody = new MultipartStream($multipartContents, $boundary); } elseif ($headers['Content-Type'] === 'application/json') { $httpBody = \GuzzleHttp\json_encode($formParams); } else { // for HTTP post (form) $httpBody = \GuzzleHttp\Psr7\Query::build($formParams); } } // this endpoint requires API key authentication $apiKey = $this->config->getApiKeyWithPrefix('Authorization'); if ($apiKey !== null) { $headers['Authorization'] = $apiKey; } // this endpoint requires HTTP basic authentication if (!empty($this->config->getUsername()) || !(empty($this->config->getPassword()))) { $headers['Authorization'] = 'Basic ' . base64_encode($this->config->getUsername() . ":" . $this->config->getPassword()); } // this endpoint requires API key authentication $apiKey = $this->config->getApiKeyWithPrefix('Authorization'); if ($apiKey !== null) { $headers['Authorization'] = $apiKey; } // this endpoint requires OAuth (access token) if (!empty($this->config->getAccessToken())) { $headers['Authorization'] = 'Bearer ' . 
$this->config->getAccessToken(); } $defaultHeaders = []; if ($this->config->getUserAgent()) { $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); } $headers = array_merge( $defaultHeaders, $headerParams, $headers ); $query = \GuzzleHttp\Psr7\Query::build($queryParams); return new Request( 'PUT', $this->config->getHost() . $resourcePath . ($query ? "?{$query}" : ''), $headers, $httpBody ); } /** * Create response for operation 'confirmWhatsappIdentity' * * @param \GuzzleHttp\Psr7\Response $response * @param string $requestUri * * @throws \Infobip\ApiException on non-2xx response * @return array of , HTTP status code, HTTP response headers (array of strings) */ protected function confirmWhatsappIdentityResponse($response, $requestUri) { $statusCode = $response->getStatusCode(); $responseBody = $response->getBody(); $responseHeaders = $response->getHeaders(); if ($statusCode < 200 || $statusCode > 299) { throw new ApiException( sprintf('[%d] Error connecting to the API (%s)', $statusCode, $requestUri), $statusCode, $responseHeaders, $responseBody ); } $responseObject = null; return [ $responseObject, $statusCode, $responseHeaders ]; } /** * Adapt given \Infobip\ApiException for operation 'confirmWhatsappIdentity' * * @param \Infobip\ApiException $apiException * * @return \Infobip\ApiException */ protected function confirmWhatsappIdentityApiException($apiException) { $statusCode = $apiException->getCode(); if ($statusCode === 401) { $data = ObjectSerializer::deserialize( $apiException->getResponseBody(), '\Infobip\Model\WhatsAppApiException', $apiException->getResponseHeaders() ); $apiException->setResponseObject($data); return $apiException; } if ($statusCode === 429) { $data = ObjectSerializer::deserialize( $apiException->getResponseBody(), '\Infobip\Model\WhatsAppApiException', $apiException->getResponseHeaders() ); $apiException->setResponseObject($data); return $apiException; } return $apiException; } /** * Operation getWhatsappIdentity * * Get identity * * @param string $sender Registered WhatsApp sender number. Must be in international format. (required) * @param string $userNumber End user&#39;s number. Must be in international format. (required) * * @throws \Infobip\ApiException on non-2xx response * @throws \InvalidArgumentException * @return \Infobip\Model\WhatsAppIdentityInfo|\Infobip\Model\WhatsAppApiException|\Infobip\Model\WhatsAppApiException */ public function getWhatsappIdentity($sender, $userNumber) { list($response) = $this->getWhatsappIdentityWithHttpInfo($sender, $userNumber); return $response; } /** * Operation getWhatsappIdentityWithHttpInfo * * Get identity * * @param string $sender Registered WhatsApp sender number. Must be in international format. (required) * @param string $userNumber End user&#39;s number. Must be in international format. 
(required) * * @throws \Infobip\ApiException on non-2xx response * @throws \InvalidArgumentException * @return array of \Infobip\Model\WhatsAppIdentityInfo|\Infobip\Model\WhatsAppApiException|\Infobip\Model\WhatsAppApiException, HTTP status code, HTTP response headers (array of strings) */ public function getWhatsappIdentityWithHttpInfo($sender, $userNumber) { $request = $this->getWhatsappIdentityRequest($sender, $userNumber); try { $options = $this->createHttpClientOption(); try { $response = $this->client->send($request, $options); return $this->getWhatsappIdentityResponse($response, $request->getUri()); } catch (RequestException $e) { throw new ApiException( "[{$e->getCode()}] {$e->getMessage()}", $e->getCode(), $e->getResponse() ? $e->getResponse()->getHeaders() : null, $e->getResponse() ? (string) $e->getResponse()->getBody() : null ); } } catch (ApiException $e) { throw $this->getWhatsappIdentityApiException($e); } } /** * Operation getWhatsappIdentityAsync * * Get identity * * @param string $sender Registered WhatsApp sender number. Must be in international format. (required) * @param string $userNumber End user&#39;s number. Must be in international format. (required) * * @throws \InvalidArgumentException * @return \GuzzleHttp\Promise\PromiseInterface */ public function getWhatsappIdentityAsync($sender, $userNumber) { return $this->getWhatsappIdentityAsyncWithHttpInfo($sender, $userNumber) ->then( function ($response) { return $response[0]; } ); } /** * Operation getWhatsappIdentityAsyncWithHttpInfo * * Get identity * * @param string $sender Registered WhatsApp sender number. Must be in international format. (required) * @param string $userNumber End user&#39;s number. Must be in international format. (required) * * @throws \InvalidArgumentException * @return \GuzzleHttp\Promise\PromiseInterface */ public function getWhatsappIdentityAsyncWithHttpInfo($sender, $userNumber) { $request = $this->getWhatsappIdentityRequest($sender, $userNumber); return $this->client ->sendAsync($request, $this->createHttpClientOption()) ->then( function ($response) use ($request) { return $this->getWhatsappIdentityResponse($response, $request->getUri()); }, function ($exception) { $statusCode = $exception->getCode(); $response = $exception->getResponse(); $e = new ApiException( "[{$statusCode}] {$exception->getMessage()}", $statusCode, $response ? $response->getHeaders() : null, $response ? (string) $response->getBody() : null ); throw $this->getWhatsappIdentityApiException($e); } ); } /** * Create request for operation 'getWhatsappIdentity' * * @param string $sender Registered WhatsApp sender number. Must be in international format. (required) * @param string $userNumber End user&#39;s number. Must be in international format. 
(required) * * @throws \InvalidArgumentException * @return \GuzzleHttp\Psr7\Request */ protected function getWhatsappIdentityRequest($sender, $userNumber) { // verify the required parameter 'sender' is set if ($sender === null || (is_array($sender) && count($sender) === 0)) { throw new \InvalidArgumentException( 'Missing the required parameter $sender when calling getWhatsappIdentity' ); } // verify the required parameter 'userNumber' is set if ($userNumber === null || (is_array($userNumber) && count($userNumber) === 0)) { throw new \InvalidArgumentException( 'Missing the required parameter $userNumber when calling getWhatsappIdentity' ); } $resourcePath = '/whatsapp/1/{sender}/contacts/{userNumber}/identity'; $formParams = []; $queryParams = []; $headerParams = []; $httpBody = ''; // path params if ($sender !== null) { $resourcePath = str_replace( '{' . 'sender' . '}', ObjectSerializer::toPathValue($sender), $resourcePath ); } // path params if ($userNumber !== null) { $resourcePath = str_replace( '{' . 'userNumber' . '}', ObjectSerializer::toPathValue($userNumber), $resourcePath ); } $headers = $this->headerSelector->selectHeaders( ['application/json'], [] ); // for model (json/xml) if (count($formParams) > 0) { if ($headers['Content-Type'] === 'multipart/form-data') { $boundary = '----'.hash('sha256', uniqid('', true)); $headers['Content-Type'] .= '; boundary=' . $boundary; $multipartContents = []; foreach ($formParams as $formParamName => $formParamValue) { $formParamValueItems = is_array($formParamValue) ? $formParamValue : [$formParamValue]; foreach ($formParamValueItems as $formParamValueItem) { $multipartContents[] = [ 'name' => $formParamName, 'contents' => $formParamValueItem ]; } } // for HTTP post (form) $httpBody = new MultipartStream($multipartContents, $boundary); } elseif ($headers['Content-Type'] === 'application/json') { $httpBody = \GuzzleHttp\json_encode($formParams); } else { // for HTTP post (form) $httpBody = \GuzzleHttp\Psr7\Query::build($formParams); } } // this endpoint requires API key authentication $apiKey = $this->config->getApiKeyWithPrefix('Authorization'); if ($apiKey !== null) { $headers['Authorization'] = $apiKey; } // this endpoint requires HTTP basic authentication if (!empty($this->config->getUsername()) || !(empty($this->config->getPassword()))) { $headers['Authorization'] = 'Basic ' . base64_encode($this->config->getUsername() . ":" . $this->config->getPassword()); } // this endpoint requires API key authentication $apiKey = $this->config->getApiKeyWithPrefix('Authorization'); if ($apiKey !== null) { $headers['Authorization'] = $apiKey; } // this endpoint requires OAuth (access token) if (!empty($this->config->getAccessToken())) { $headers['Authorization'] = 'Bearer ' . $this->config->getAccessToken(); } $defaultHeaders = []; if ($this->config->getUserAgent()) { $defaultHeaders['User-Agent'] = $this->config->getUserAgent(); } $headers = array_merge( $defaultHeaders, $headerParams, $headers ); $query = \GuzzleHttp\Psr7\Query::build($queryParams); return new Request( 'GET', $this->config->getHost() . $resourcePath . ($query ? 
"?{$query}" : ''), $headers, $httpBody ); } /** * Create response for operation 'getWhatsappIdentity' * * @param \GuzzleHttp\Psr7\Response $response * @param string $requestUri * * @throws \Infobip\ApiException on non-2xx response * @return array of \Infobip\Model\WhatsAppIdentityInfo|\Infobip\Model\WhatsAppApiException|\Infobip\Model\WhatsAppApiException|null, HTTP status code, HTTP response headers (array of strings) */ protected function getWhatsappIdentityResponse($response, $requestUri) { $statusCode = $response->getStatusCode(); $responseBody = $response->getBody(); $responseHeaders = $response->getHeaders(); if ($statusCode < 200 || $statusCode > 299) { throw new ApiException( sprintf('[%d] Error connecting to the API (%s)', $statusCode, $requestUri), $statusCode, $responseHeaders, $responseBody ); } $responseObject = null; if ($statusCode === 200) { $type = '\Infobip\Model\WhatsAppIdentityInfo'; if ($type === '\SplFileObject') { $content = $responseBody; //stream goes to serializer } else { $content = (string) $responseBody; } $responseObject = ObjectSerializer::deserialize($content, $type, $responseHeaders); return [ $responseObject, $statusCode, $responseHeaders ]; } return [ $responseObject, $statusCode, $responseHeaders ]; } /** * Adapt given \Infobip\ApiException for operation 'getWhatsappIdentity' * * @param \Infobip\ApiException $apiException * * @return \Infobip\ApiException */ protected function getWhatsappIdentityApiException($apiException) { $statusCode = $apiException->getCode(); if ($statusCode === 401) { $data = ObjectSerializer::deserialize( $apiException->getResponseBody(), '\Infobip\Model\WhatsAppApiException', $apiException->getResponseHeaders() ); $apiException->setResponseObject($data); return $apiException; } if ($statusCode === 429) { $data = ObjectSerializer::deserialize( $apiException->getResponseBody(), '\Infobip\Model\WhatsAppApiException', $apiException->getResponseHeaders() ); $apiException->setResponseObject($data); return $apiException; } return $apiException; } /** * Create http client option * * @throws \RuntimeException on file opening failure * @return array of http client options */ protected function createHttpClientOption() { $options = []; if ($this->config->getDebug()) { $options[RequestOptions::DEBUG] = fopen($this->config->getDebugFile(), 'a'); if (!$options[RequestOptions::DEBUG]) { throw new \RuntimeException('Failed to open the debug file: ' . $this->config->getDebugFile()); } } return $options; } }
{ "content_hash": "052f2eaf77390380d81ca5619031927f", "timestamp": "", "source": "github", "line_count": 720, "max_line_length": 196, "avg_line_length": 36.86666666666667, "alnum_prop": 0.5733499095840868, "repo_name": "infobip/infobip-api-php-client", "id": "9eed326411ed0e45e5db3c710621d04aa4d1a650", "size": "26544", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Infobip/Api/IdentityChangeWhatsAppApi.php", "mode": "33188", "license": "mit", "language": [ { "name": "PHP", "bytes": "2630558" } ], "symlink_target": "" }
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="de"> <head> <!-- Generated by javadoc (version 1.7.0_55) on Thu Sep 25 12:22:17 CEST 2014 --> <meta http-equiv="Content-Type" content="text/html" charset="UTF-8"> <title>at.irian.ankor.application (Ankor - Project 0.4-SNAPSHOT API)</title> <meta name="date" content="2014-09-25"> <link rel="stylesheet" type="text/css" href="../../../../stylesheet.css" title="Style"> </head> <body> <script type="text/javascript"><!-- if (location.href.indexOf('is-external=true') == -1) { parent.document.title="at.irian.ankor.application (Ankor - Project 0.4-SNAPSHOT API)"; } //--> </script> <noscript> <div>JavaScript is disabled on your browser.</div> </noscript> <!-- ========= START OF TOP NAVBAR ======= --> <div class="topNav"><a name="navbar_top"> <!-- --> </a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../overview-summary.html">Overview</a></li> <li class="navBarCell1Rev">Package</li> <li>Class</li> <li><a href="package-use.html">Use</a></li> <li><a href="package-tree.html">Tree</a></li> <li><a href="../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../index-all.html">Index</a></li> <li><a href="../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li><a href="../../../../at/irian/ankor/annotation/package-summary.html">Prev Package</a></li> <li><a href="../../../../at/irian/ankor/base/package-summary.html">Next Package</a></li> </ul> <ul class="navList"> <li><a href="../../../../index.html?at/irian/ankor/application/package-summary.html" target="_top">Frames</a></li> <li><a href="package-summary.html" target="_top">No Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_top"> <li><a href="../../../../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = document.getElementById("allclasses_navbar_top"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip-navbar_top"> <!-- --> </a></div> <!-- ========= END OF TOP NAVBAR ========= --> <div class="header"> <h1 title="Package" class="title">Package&nbsp;at.irian.ankor.application</h1> </div> <div class="contentContainer"> <ul class="blockList"> <li class="blockList"> <table class="packageSummary" border="0" cellpadding="3" cellspacing="0" summary="Interface Summary table, listing interfaces, and an explanation"> <caption><span>Interface Summary</span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Interface</th> <th class="colLast" scope="col">Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><a href="../../../../at/irian/ankor/application/Application.html" title="interface in at.irian.ankor.application">Application</a></td> <td class="colLast"> <div class="block">By means of an <a href="../../../../at/irian/ankor/application/Application.html" title="interface in at.irian.ankor.application"><code>Application</code></a> the developer tells the Ankor framework the main infos about his Ankor application, like: name of the application (for logging) model lookup behaviour init/cleanup behaviour stateless/stateful </div> </td> </tr> </tbody> </table> </li> <li class="blockList"> <table class="packageSummary" border="0" 
cellpadding="3" cellspacing="0" summary="Class Summary table, listing classes, and an explanation"> <caption><span>Class Summary</span><span class="tabEnd">&nbsp;</span></caption> <tr> <th class="colFirst" scope="col">Class</th> <th class="colLast" scope="col">Description</th> </tr> <tbody> <tr class="altColor"> <td class="colFirst"><a href="../../../../at/irian/ankor/application/BaseApplication.html" title="class in at.irian.ankor.application">BaseApplication</a></td> <td class="colLast"> <div class="block">Convenient base type for Ankor Applications.</div> </td> </tr> <tr class="rowColor"> <td class="colFirst"><a href="../../../../at/irian/ankor/application/CollaborationSingleRootApplication.html" title="class in at.irian.ankor.application">CollaborationSingleRootApplication</a></td> <td class="colLast"> <div class="block">Convenient base class for Ankor applications that support model instance sharing (i.e. collaboration).</div> </td> </tr> <tr class="altColor"> <td class="colFirst"><a href="../../../../at/irian/ankor/application/GenericApplication.html" title="class in at.irian.ankor.application">GenericApplication</a></td> <td class="colLast"> <div class="block">Convenient <a href="../../../../at/irian/ankor/application/Application.html" title="interface in at.irian.ankor.application"><code>Application</code></a> implementation, that is easily configurable and implements reasonable default behaviour.</div> </td> </tr> <tr class="rowColor"> <td class="colFirst"><a href="../../../../at/irian/ankor/application/SimpleClientApplication.html" title="class in at.irian.ankor.application">SimpleClientApplication</a></td> <td class="colLast"> <div class="block">Application implementation for Java-based Ankor clients.</div> </td> </tr> <tr class="altColor"> <td class="colFirst"><a href="../../../../at/irian/ankor/application/SimpleSingleRootApplication.html" title="class in at.irian.ankor.application">SimpleSingleRootApplication</a></td> <td class="colLast"> <div class="block">Convenient Application implementation that supports an arbitrary number of model instances that all have the same model root name (typically called "root").</div> </td> </tr> </tbody> </table> </li> </ul> </div> <!-- ======= START OF BOTTOM NAVBAR ====== --> <div class="bottomNav"><a name="navbar_bottom"> <!-- --> </a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow"> <!-- --> </a> <ul class="navList" title="Navigation"> <li><a href="../../../../overview-summary.html">Overview</a></li> <li class="navBarCell1Rev">Package</li> <li>Class</li> <li><a href="package-use.html">Use</a></li> <li><a href="package-tree.html">Tree</a></li> <li><a href="../../../../deprecated-list.html">Deprecated</a></li> <li><a href="../../../../index-all.html">Index</a></li> <li><a href="../../../../help-doc.html">Help</a></li> </ul> </div> <div class="subNav"> <ul class="navList"> <li><a href="../../../../at/irian/ankor/annotation/package-summary.html">Prev Package</a></li> <li><a href="../../../../at/irian/ankor/base/package-summary.html">Next Package</a></li> </ul> <ul class="navList"> <li><a href="../../../../index.html?at/irian/ankor/application/package-summary.html" target="_top">Frames</a></li> <li><a href="package-summary.html" target="_top">No Frames</a></li> </ul> <ul class="navList" id="allclasses_navbar_bottom"> <li><a href="../../../../allclasses-noframe.html">All Classes</a></li> </ul> <div> <script type="text/javascript"><!-- allClassesLink = 
document.getElementById("allclasses_navbar_bottom"); if(window==top) { allClassesLink.style.display = "block"; } else { allClassesLink.style.display = "none"; } //--> </script> </div> <a name="skip-navbar_bottom"> <!-- --> </a></div> <!-- ======== END OF BOTTOM NAVBAR ======= --> <p class="legalCopy"><small>Copyright &#169; 2014 <a href="https://github.com/orgs/ankor-io/teams/ankor-developers">Ankor Developers Team</a>. All rights reserved.</small></p> </body> </html>
{ "content_hash": "0deeb54296842795cf0839473b224aaa", "timestamp": "", "source": "github", "line_count": 186, "max_line_length": 267, "avg_line_length": 40.95161290322581, "alnum_prop": 0.6640409610082709, "repo_name": "ankor-io/ankor-framework", "id": "69f6c3f651050b6bec118bc7e9bec6f3a45c92f6", "size": "7617", "binary": false, "copies": "1", "ref": "refs/heads/stable", "path": "website/ankorsite/static/javadoc/apidocs-0.4/at/irian/ankor/application/package-summary.html", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ANTLR", "bytes": "526" }, { "name": "C", "bytes": "12173" }, { "name": "C#", "bytes": "157078" }, { "name": "CSS", "bytes": "10636" }, { "name": "CoffeeScript", "bytes": "48194" }, { "name": "HTML", "bytes": "17806" }, { "name": "Java", "bytes": "751745" }, { "name": "JavaScript", "bytes": "290740" }, { "name": "Makefile", "bytes": "811" }, { "name": "Objective-C", "bytes": "114772" }, { "name": "PowerShell", "bytes": "3261" }, { "name": "Python", "bytes": "17835" }, { "name": "Shell", "bytes": "36" } ], "symlink_target": "" }
package com.breakersoft.plow.dao.pgsql; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.UUID; import org.springframework.dao.EmptyResultDataAccessException; import org.springframework.jdbc.core.PreparedStatementCreator; import org.springframework.jdbc.core.RowMapper; import org.springframework.stereotype.Repository; import com.breakersoft.plow.Service; import com.breakersoft.plow.ServiceE; import com.breakersoft.plow.ServiceFull; import com.breakersoft.plow.dao.AbstractDao; import com.breakersoft.plow.dao.ServiceDao; import com.breakersoft.plow.thrift.ServiceT; import com.breakersoft.plow.util.JdbcUtils; import com.breakersoft.plow.util.PlowUtils; @Repository public class ServiceDaoImpl extends AbstractDao implements ServiceDao { public static final RowMapper<Service> MAPPER = new RowMapper<Service>() { @Override public Service mapRow(ResultSet rs, int rowNum) throws SQLException { ServiceE service = new ServiceE(); service.setServiceId((UUID) rs.getObject(1)); service.setName(rs.getString(2)); return service; } }; public static final RowMapper<ServiceFull> MAPPER_FULL = new RowMapper<ServiceFull>() { @Override public ServiceFull mapRow(ResultSet rs, int rowNum) throws SQLException { final ServiceFull service = new ServiceFull(); service.setServiceId((UUID) rs.getObject("pk_service")); service.setName(rs.getString("str_name")); if (rs.getBoolean("isset_int_cores_min")) { service.setMinCores(rs.getInt("int_cores_min")); } if (rs.getBoolean("isset_int_cores_max")) { service.setMaxCores(rs.getInt("int_cores_max")); } if (rs.getBoolean("isset_int_ram_min")) { service.setMinRam(rs.getInt("int_ram_min")); } if (rs.getBoolean("isset_int_ram_max")) { service.setMaxRam(rs.getInt("int_ram_max")); } if (rs.getBoolean("isset_bool_threadable")) { service.setThreadable(rs.getBoolean("bool_threadable")); } if (rs.getBoolean("isset_str_tags")) { service.setTags(JdbcUtils.toList(rs.getArray("str_tags"))); } if (rs.getBoolean("isset_int_retries_max")) { service.setMaxRetries(rs.getInt("int_retries_max")); } return service; } }; @Override public ServiceFull getServiceFull(String name) { try { return jdbc.queryForObject("SELECT * FROM plow.service WHERE str_name=?", MAPPER_FULL, name); } catch (EmptyResultDataAccessException e) { return null; } } private static final String GET = "SELECT " + "pk_service," + "str_name " + "FROM " + "service "; @Override public Service get(UUID id) { return jdbc.queryForObject(GET + "WHERE pk_service=?", MAPPER, id); } @Override public Service get(String name) { return jdbc.queryForObject(GET + "WHERE str_name=?", MAPPER, name); } @Override public boolean exists(String name) { return jdbc.queryForObject("SELECT COUNT(1) FROM plow.service WHERE str_name=?", Integer.class, name) > 0; } private static final String INSERT = JdbcUtils.Insert( "plow.service", "pk_service", "str_name", "str_tags", "int_cores_min", "int_cores_max", "int_ram_min", "int_ram_max", "int_retries_max", "bool_threadable", "isset_int_cores_min", "isset_int_cores_max", "isset_int_ram_min", "isset_int_ram_max", "isset_str_tags", "isset_bool_threadable", "isset_int_retries_max"); @Override public Service create(final ServiceT service) { final UUID id = UUID.randomUUID(); jdbc.update(new PreparedStatementCreator() { @Override public PreparedStatement createPreparedStatement(final Connection conn) throws SQLException { final PreparedStatement ret = conn.prepareStatement(INSERT); ret.setObject(1, id); ret.setString(2, 
service.getName()); if (PlowUtils.isValid(service.getTags())) { ret.setArray(3, conn.createArrayOf("text", PlowUtils.uniquify(service.getTags()))); } else { ret.setArray(3, null); } ret.setInt(4, service.getMinCores()); ret.setInt(5, service.getMaxCores()); ret.setInt(6, service.getMinRam()); ret.setInt(7, service.getMaxRam()); ret.setInt(8, service.getMaxRetries()); ret.setBoolean(9, service.isThreadable()); ret.setBoolean(10, service.isSetMinCores()); ret.setBoolean(11, service.isSetMaxCores()); ret.setBoolean(12, service.isSetMinRam()); ret.setBoolean(13, service.isSetMaxRam()); ret.setBoolean(14, service.isSetTags()); ret.setBoolean(15, service.isSetThreadable()); ret.setBoolean(16, service.isSetMaxRetries()); return ret; } }); service.id = id.toString(); ServiceE svc = new ServiceE(); svc.setServiceId(id); svc.setName(service.name); return svc; } private static final String UPDATE = "UPDATE " + "service " + "SET " + "str_name = ?," + "str_tags = ?,"+ "int_ram_min = ?,"+ "int_ram_max = ?,"+ "int_cores_min = ?,"+ "int_cores_max = ?,"+ "int_retries_max = ?, "+ "bool_threadable = ?,"+ "isset_int_cores_min=?,"+ "isset_int_cores_max=?,"+ "isset_int_ram_min=?,"+ "isset_int_ram_max=?,"+ "isset_str_tags=?,"+ "isset_bool_threadable=?,"+ "isset_int_retries_max=?" + "WHERE " + "pk_service = ?"; @Override public boolean update(final ServiceT service) { return jdbc.update(new PreparedStatementCreator() { @Override public PreparedStatement createPreparedStatement(final Connection conn) throws SQLException { final PreparedStatement ret = conn.prepareStatement(UPDATE); ret.setString(1, service.name); if (PlowUtils.isValid(service.getTags())) { ret.setArray(2, conn.createArrayOf("text", PlowUtils.uniquify(service.getTags()))); } else { ret.setArray(2, null); } ret.setInt(3, service.getMinRam()); ret.setInt(4, service.getMaxRam()); ret.setInt(5, service.getMinCores()); ret.setInt(6, service.getMaxCores()); ret.setInt(7, service.getMaxRetries()); ret.setBoolean(8, service.isThreadable()); ret.setBoolean(9, service.isSetMinCores()); ret.setBoolean(10, service.isSetMaxCores()); ret.setBoolean(11, service.isSetMinRam()); ret.setBoolean(12, service.isSetMaxRam()); ret.setBoolean(13, service.isSetTags()); ret.setBoolean(14, service.isSetThreadable()); ret.setBoolean(15, service.isSetMaxRetries()); ret.setObject(16, UUID.fromString(service.id)); return ret; } }) == 1; } @Override public boolean delete(UUID id) { return jdbc.update("DELETE FROM plow.service WHERE pk_service=?", id) == 1; } }
{ "content_hash": "c77503c800cd59d413119a1cbab91fbb", "timestamp": "", "source": "github", "line_count": 237, "max_line_length": 114, "avg_line_length": 35.72573839662447, "alnum_prop": 0.5340734616747372, "repo_name": "Br3nda/plow", "id": "974c7208014750ccb238d42f49d7f6fe62ed4133", "size": "8467", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "server/src/main/java/com/breakersoft/plow/dao/pgsql/ServiceDaoImpl.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C++", "bytes": "85468" }, { "name": "Java", "bytes": "646022" }, { "name": "JavaScript", "bytes": "125402" }, { "name": "Python", "bytes": "577774" }, { "name": "Scala", "bytes": "577" }, { "name": "Shell", "bytes": "7324" } ], "symlink_target": "" }
<?php namespace Omnipay\Migs\Message; /** * Migs Purchase Request */ class TwoPartyPurchaseRequest extends AbstractRequest { protected $action = 'pay'; public function getData() { $this->validate('amount', 'transactionId', 'card'); $this->getCard()->validate(); $data = $this->getBaseData(); $data['vpc_CardNum'] = $this->getCard()->getNumber(); $data['vpc_CardExp'] = $this->getCard()->getExpiryDate('ym'); $data['vpc_CardSecurityCode'] = $this->getCard()->getCvv(); $data['vpc_SecureHash'] = $this->calculateHash($data); return $data; } public function send() { $httpResponse = $this->httpClient->post($this->getEndpoint(), null, $this->getData())->send(); return $this->response = new Response($this, $httpResponse->getBody()); } public function getEndpoint() { return $this->endpoint.'vpcdps'; } }
{ "content_hash": "83f35211162e0939f7d976c77f5b3ab2", "timestamp": "", "source": "github", "line_count": 38, "max_line_length": 102, "avg_line_length": 24.789473684210527, "alnum_prop": 0.5902335456475584, "repo_name": "williangringo/php-framework", "id": "086d268d7ae392883cfb7fe3017c22fd412e7165", "size": "942", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "lib/Omnipay/Migs/Message/TwoPartyPurchaseRequest.php", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
"use strict"; (function(){ function init() { } function handleQuery(key, val) { Espruino.Core.Code.switchToCode(); // if in blockly switch(key){ case "code": Espruino.Core.EditorJavaScript.setCode(val); break; case "upload": Espruino.Core.MenuPortSelector.ensureConnected(function() { Espruino.Core.Terminal.focus(); // give the terminal focus Espruino.callProcessor("sending"); Espruino.Core.CodeWriter.writeToEspruino(val); Espruino.Core.EditorJavaScript.setCode(val); }); break; case "gist": Espruino.Core.EditorJavaScript.setCode("Loading..."); $.getJSON("https://api.github.com/gists/"+ val, function(data){ if(data && data.files){ var keys = Object.keys(data.files); if(keys.length > 0){ Espruino.Core.EditorJavaScript.setCode(data.files[keys[0]].content); } } }).error(function(){ Espruino.Core.EditorJavaScript.setCode("ERROR"); }); break; } } function handle(url) { console.log("Handling URL "+JSON.stringify(url)); url = (url); var q = url.indexOf("?"); if (q<0) return; var query = url.substr(q+1).split("&"); for (var i in query) { var eq = query[i].split("="); if (eq.length==1) handleQuery(eq[0],undefined); else if (eq.length==2) handleQuery(decodeURIComponent(eq[0]),decodeURIComponent(eq[1])); else console.warn("Didn't understand query section "+JSON.stringify(query[i])); } } Espruino.Plugins.URLHandler = { init : init, handle : handle, // handle a URL }; }());
{ "content_hash": "34760887c12e2231246d67917360fd10", "timestamp": "", "source": "github", "line_count": 61, "max_line_length": 82, "avg_line_length": 28.59016393442623, "alnum_prop": 0.5665137614678899, "repo_name": "SMACproject/SMAC-IRP", "id": "3e59ab5d129924765fb61f0cae91cf039a8779c2", "size": "2257", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "js/plugins/urlHandler.js", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "35748" }, { "name": "HTML", "bytes": "28171" }, { "name": "JavaScript", "bytes": "984170" }, { "name": "Shell", "bytes": "431" } ], "symlink_target": "" }
from django.forms import CharField, Form, ModelChoiceField from django.utils.translation import ugettext_lazy as _ from sendinel.backend.authhelper import format_and_validate_phonenumber from sendinel.backend.models import get_enabled_wocs from sendinel.infoservices.models import InfoMessage, InfoService class RegisterPatientForMedicineForm(Form): phone_number = CharField(validators = [format_and_validate_phonenumber], error_messages={'required':_('Please enter a phone number')}) way_of_communication = ModelChoiceField( queryset = get_enabled_wocs(), error_messages={'required': \ _('Please choose a way of communication')}) medicine = ModelChoiceField( queryset=InfoService.objects.filter(type='medicine'), error_messages={'required': \ _('Please choose a medicine'), 'invalid_choice': _('Please choose a medicine')}) class MedicineMessageValidationForm(Form): medicine = ModelChoiceField( queryset=InfoService.objects.filter(type='medicine'), error_messages={'required': \ _('Please choose a medicine'), \ 'invalid_choice': \ _('Please choose a medicine')}) text = CharField(error_messages={ \ 'required': _('Please enter a text to send'), \ 'invalid': _('The text contains invalid characters')})
{ "content_hash": "fd9f3c9d8371a00981e9f4bf9f12ea3a", "timestamp": "", "source": "github", "line_count": 32, "max_line_length": 91, "avg_line_length": 51.53125, "alnum_prop": 0.5651910248635537, "repo_name": "Sendinel/Sendinel", "id": "02d7b76bb7189e06826d7245fe189090b1d136e7", "size": "1649", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "sendinel/medicines/forms.py", "mode": "33188", "license": "mit", "language": [ { "name": "Java", "bytes": "15388" }, { "name": "JavaScript", "bytes": "25980" }, { "name": "Python", "bytes": "175933" }, { "name": "Shell", "bytes": "22210" } ], "symlink_target": "" }
package org.elasticsearch.index.shard; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.resync.ResyncReplicationRequest; import org.elasticsearch.action.resync.ResyncReplicationResponse; import org.elasticsearch.action.resync.TransportResyncReplicationAction; import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import static java.util.Objects.requireNonNull; public class PrimaryReplicaSyncer { private static final Logger logger = LogManager.getLogger(PrimaryReplicaSyncer.class); private final TaskManager taskManager; private final SyncAction syncAction; public static final ByteSizeValue DEFAULT_CHUNK_SIZE = new ByteSizeValue(512, ByteSizeUnit.KB); private volatile ByteSizeValue chunkSize = DEFAULT_CHUNK_SIZE; @Inject public PrimaryReplicaSyncer(TransportService transportService, TransportResyncReplicationAction syncAction) { this(transportService.getTaskManager(), syncAction); } // for tests public PrimaryReplicaSyncer(TaskManager taskManager, SyncAction syncAction) { this.taskManager = taskManager; this.syncAction = syncAction; } void setChunkSize(ByteSizeValue chunkSize) { // only settable for tests if (chunkSize.bytesAsInt() <= 0) { throw new IllegalArgumentException("chunkSize must be > 0"); } this.chunkSize = chunkSize; } public void resync(final IndexShard indexShard, final ActionListener<ResyncTask> listener) { Translog.Snapshot snapshot = null; try { final long startingSeqNo = indexShard.getLastKnownGlobalCheckpoint() + 1; final long maxSeqNo = indexShard.seqNoStats().getMaxSeqNo(); final ShardId shardId = indexShard.shardId(); // Wrap translog snapshot to make it synchronized as it is accessed by different threads through SnapshotSender. 
// Even though those calls are not concurrent, snapshot.next() uses non-synchronized state and is not multi-thread-compatible // Also fail the resync early if the shard is shutting down snapshot = indexShard.getHistoryOperations("resync", startingSeqNo); final Translog.Snapshot originalSnapshot = snapshot; final Translog.Snapshot wrappedSnapshot = new Translog.Snapshot() { @Override public synchronized void close() throws IOException { originalSnapshot.close(); } @Override public synchronized int totalOperations() { return originalSnapshot.totalOperations(); } @Override public synchronized Translog.Operation next() throws IOException { IndexShardState state = indexShard.state(); if (state == IndexShardState.CLOSED) { throw new IndexShardClosedException(shardId); } else { assert state == IndexShardState.STARTED : "resync should only happen on a started shard, but state was: " + state; } return originalSnapshot.next(); } }; final ActionListener<ResyncTask> resyncListener = new ActionListener<ResyncTask>() { @Override public void onResponse(final ResyncTask resyncTask) { try { wrappedSnapshot.close(); listener.onResponse(resyncTask); } catch (final Exception e) { onFailure(e); } } @Override public void onFailure(final Exception e) { try { wrappedSnapshot.close(); } catch (final Exception inner) { e.addSuppressed(inner); } finally { listener.onFailure(e); } } }; // We must capture the timestamp after snapshotting a snapshot of operations to make sure // that the auto_id_timestamp of every operation in the snapshot is at most this value. final long maxSeenAutoIdTimestamp = indexShard.getMaxSeenAutoIdTimestamp(); resync(shardId, indexShard.routingEntry().allocationId().getId(), indexShard.getPendingPrimaryTerm(), wrappedSnapshot, startingSeqNo, maxSeqNo, maxSeenAutoIdTimestamp, resyncListener); } catch (Exception e) { try { IOUtils.close(snapshot); } catch (IOException inner) { e.addSuppressed(inner); } finally { listener.onFailure(e); } } } private void resync(final ShardId shardId, final String primaryAllocationId, final long primaryTerm, final Translog.Snapshot snapshot, long startingSeqNo, long maxSeqNo, long maxSeenAutoIdTimestamp, ActionListener<ResyncTask> listener) { ResyncRequest request = new ResyncRequest(shardId, primaryAllocationId); ResyncTask resyncTask = (ResyncTask) taskManager.register("transport", "resync", request); // it's not transport :-) ActionListener<Void> wrappedListener = new ActionListener<Void>() { @Override public void onResponse(Void ignore) { resyncTask.setPhase("finished"); taskManager.unregister(resyncTask); listener.onResponse(resyncTask); } @Override public void onFailure(Exception e) { resyncTask.setPhase("finished"); taskManager.unregister(resyncTask); listener.onFailure(e); } }; try { new SnapshotSender(syncAction, resyncTask, shardId, primaryAllocationId, primaryTerm, snapshot, chunkSize.bytesAsInt(), startingSeqNo, maxSeqNo, maxSeenAutoIdTimestamp, wrappedListener).run(); } catch (Exception e) { wrappedListener.onFailure(e); } } public interface SyncAction { void sync(ResyncReplicationRequest request, Task parentTask, String primaryAllocationId, long primaryTerm, ActionListener<ResyncReplicationResponse> listener); } static class SnapshotSender extends AbstractRunnable implements ActionListener<ResyncReplicationResponse> { private final Logger logger; private final SyncAction syncAction; private final ResyncTask task; // to track progress private final String primaryAllocationId; private final long primaryTerm; private final ShardId shardId; private 
final Translog.Snapshot snapshot; private final long startingSeqNo; private final long maxSeqNo; private final long maxSeenAutoIdTimestamp; private final int chunkSizeInBytes; private final ActionListener<Void> listener; private final AtomicBoolean firstMessage = new AtomicBoolean(true); private final AtomicInteger totalSentOps = new AtomicInteger(); private final AtomicInteger totalSkippedOps = new AtomicInteger(); private final AtomicBoolean closed = new AtomicBoolean(); SnapshotSender(SyncAction syncAction, ResyncTask task, ShardId shardId, String primaryAllocationId, long primaryTerm, Translog.Snapshot snapshot, int chunkSizeInBytes, long startingSeqNo, long maxSeqNo, long maxSeenAutoIdTimestamp, ActionListener<Void> listener) { this.logger = PrimaryReplicaSyncer.logger; this.syncAction = syncAction; this.task = task; this.shardId = shardId; this.primaryAllocationId = primaryAllocationId; this.primaryTerm = primaryTerm; this.snapshot = snapshot; this.chunkSizeInBytes = chunkSizeInBytes; this.startingSeqNo = startingSeqNo; this.maxSeqNo = maxSeqNo; this.maxSeenAutoIdTimestamp = maxSeenAutoIdTimestamp; this.listener = listener; task.setTotalOperations(snapshot.totalOperations()); } @Override public void onResponse(ResyncReplicationResponse response) { run(); } @Override public void onFailure(Exception e) { if (closed.compareAndSet(false, true)) { listener.onFailure(e); } } private static final Translog.Operation[] EMPTY_ARRAY = new Translog.Operation[0]; @Override protected void doRun() throws Exception { long size = 0; final List<Translog.Operation> operations = new ArrayList<>(); task.setPhase("collecting_ops"); task.setResyncedOperations(totalSentOps.get()); task.setSkippedOperations(totalSkippedOps.get()); Translog.Operation operation; while ((operation = snapshot.next()) != null) { final long seqNo = operation.seqNo(); if (seqNo == SequenceNumbers.UNASSIGNED_SEQ_NO || seqNo < startingSeqNo) { totalSkippedOps.incrementAndGet(); continue; } assert operation.seqNo() >= 0 : "sending operation with unassigned sequence number [" + operation + "]"; operations.add(operation); size += operation.estimateSize(); totalSentOps.incrementAndGet(); // check if this request is past bytes threshold, and if so, send it off if (size >= chunkSizeInBytes) { break; } } final long trimmedAboveSeqNo = firstMessage.get() ? 
maxSeqNo : SequenceNumbers.UNASSIGNED_SEQ_NO; // have to send sync request even in case of there are no operations to sync - have to sync trimmedAboveSeqNo at least if (!operations.isEmpty() || trimmedAboveSeqNo != SequenceNumbers.UNASSIGNED_SEQ_NO) { task.setPhase("sending_ops"); ResyncReplicationRequest request = new ResyncReplicationRequest(shardId, trimmedAboveSeqNo, maxSeenAutoIdTimestamp, operations.toArray(EMPTY_ARRAY)); logger.trace("{} sending batch of [{}][{}] (total sent: [{}], skipped: [{}])", shardId, operations.size(), new ByteSizeValue(size), totalSentOps.get(), totalSkippedOps.get()); firstMessage.set(false); syncAction.sync(request, task, primaryAllocationId, primaryTerm, this); } else if (closed.compareAndSet(false, true)) { logger.trace("{} resync completed (total sent: [{}], skipped: [{}])", shardId, totalSentOps.get(), totalSkippedOps.get()); listener.onResponse(null); } } } public static class ResyncRequest extends ActionRequest { private final ShardId shardId; private final String allocationId; public ResyncRequest(ShardId shardId, String allocationId) { this.shardId = shardId; this.allocationId = allocationId; } @Override public Task createTask(long id, String type, String action, TaskId parentTaskId, Map<String, String> headers) { return new ResyncTask(id, type, action, getDescription(), parentTaskId, headers); } @Override public String getDescription() { return toString(); } @Override public String toString() { return "ResyncRequest{ " + shardId + ", " + allocationId + " }"; } @Override public ActionRequestValidationException validate() { return null; } } public static class ResyncTask extends Task { private volatile String phase = "starting"; private volatile int totalOperations; private volatile int resyncedOperations; private volatile int skippedOperations; public ResyncTask(long id, String type, String action, String description, TaskId parentTaskId, Map<String, String> headers) { super(id, type, action, description, parentTaskId, headers); } /** * Set the current phase of the task. */ public void setPhase(String phase) { this.phase = phase; } /** * Get the current phase of the task. 
*/ public String getPhase() { return phase; } /** * total number of translog operations that were captured by translog snapshot */ public int getTotalOperations() { return totalOperations; } public void setTotalOperations(int totalOperations) { this.totalOperations = totalOperations; } /** * number of operations that have been successfully replicated */ public int getResyncedOperations() { return resyncedOperations; } public void setResyncedOperations(int resyncedOperations) { this.resyncedOperations = resyncedOperations; } /** * number of translog operations that have been skipped */ public int getSkippedOperations() { return skippedOperations; } public void setSkippedOperations(int skippedOperations) { this.skippedOperations = skippedOperations; } @Override public ResyncTask.Status getStatus() { return new ResyncTask.Status(phase, totalOperations, resyncedOperations, skippedOperations); } public static class Status implements Task.Status { public static final String NAME = "resync"; private final String phase; private final int totalOperations; private final int resyncedOperations; private final int skippedOperations; public Status(StreamInput in) throws IOException { phase = in.readString(); totalOperations = in.readVInt(); resyncedOperations = in.readVInt(); skippedOperations = in.readVInt(); } public Status(String phase, int totalOperations, int resyncedOperations, int skippedOperations) { this.phase = requireNonNull(phase, "Phase cannot be null"); this.totalOperations = totalOperations; this.resyncedOperations = resyncedOperations; this.skippedOperations = skippedOperations; } @Override public String getWriteableName() { return NAME; } @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field("phase", phase); builder.field("totalOperations", totalOperations); builder.field("resyncedOperations", resyncedOperations); builder.field("skippedOperations", skippedOperations); builder.endObject(); return builder; } @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(phase); out.writeVLong(totalOperations); out.writeVLong(resyncedOperations); out.writeVLong(skippedOperations); } @Override public String toString() { return Strings.toString(this); } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Status status = (Status) o; if (totalOperations != status.totalOperations) return false; if (resyncedOperations != status.resyncedOperations) return false; if (skippedOperations != status.skippedOperations) return false; return phase.equals(status.phase); } @Override public int hashCode() { int result = phase.hashCode(); result = 31 * result + totalOperations; result = 31 * result + resyncedOperations; result = 31 * result + skippedOperations; return result; } } } }
{ "content_hash": "80943696312c956e2eb66bd11dbd3436", "timestamp": "", "source": "github", "line_count": 431, "max_line_length": 138, "avg_line_length": 41.438515081206496, "alnum_prop": 0.6170212765957447, "repo_name": "coding0011/elasticsearch", "id": "f4cd1cdb8115ef8f2c1401ba011c85ea64d858f6", "size": "18648", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ANTLR", "bytes": "11081" }, { "name": "Batchfile", "bytes": "18064" }, { "name": "Emacs Lisp", "bytes": "3341" }, { "name": "FreeMarker", "bytes": "45" }, { "name": "Groovy", "bytes": "312193" }, { "name": "HTML", "bytes": "5519" }, { "name": "Java", "bytes": "41505710" }, { "name": "Perl", "bytes": "7271" }, { "name": "Python", "bytes": "55163" }, { "name": "Shell", "bytes": "119286" } ], "symlink_target": "" }
description: Hints, tips and guidelines for writing clean, reliable Dockerfiles keywords: parent image, images, dockerfile, best practices, hub, official image redirect_from: - /articles/dockerfile_best-practices/ - /engine/articles/dockerfile_best-practices/ - /docker-cloud/getting-started/intermediate/optimize-dockerfiles/ - /docker-cloud/tutorials/optimize-dockerfiles/ - /engine/userguide/eng-image/dockerfile_best-practices/ title: Best practices for writing Dockerfiles --- This document covers recommended best practices and methods for building efficient images. Docker builds images automatically by reading the instructions from a `Dockerfile` -- a text file that contains all commands, in order, needed to build a given image. A `Dockerfile` adheres to a specific format and set of instructions which you can find at [Dockerfile reference](/engine/reference/builder/). A Docker image consists of read-only layers each of which represents a Dockerfile instruction. The layers are stacked and each one is a delta of the changes from the previous layer. Consider this `Dockerfile`: ```Dockerfile FROM ubuntu:18.04 COPY . /app RUN make /app CMD python /app/app.py ``` Each instruction creates one layer: - `FROM` creates a layer from the `ubuntu:18.04` Docker image. - `COPY` adds files from your Docker client's current directory. - `RUN` builds your application with `make`. - `CMD` specifies what command to run within the container. When you run an image and generate a container, you add a new _writable layer_ (the "container layer") on top of the underlying layers. All changes made to the running container, such as writing new files, modifying existing files, and deleting files, are written to this thin writable container layer. For more on image layers (and how Docker builds and stores images), see [About storage drivers](/storage/storagedriver/). ## General guidelines and recommendations ### Create ephemeral containers The image defined by your `Dockerfile` should generate containers that are as ephemeral as possible. By "ephemeral", we mean that the container can be stopped and destroyed, then rebuilt and replaced with an absolute minimum set up and configuration. Refer to [Processes](https://12factor.net/processes) under _The Twelve-factor App_ methodology to get a feel for the motivations of running containers in such a stateless fashion. ### Understand build context When you issue a `docker build` command, the current working directory is called the _build context_. By default, the Dockerfile is assumed to be located here, but you can specify a different location with the file flag (`-f`). Regardless of where the `Dockerfile` actually lives, all recursive contents of files and directories in the current directory are sent to the Docker daemon as the build context. > Build context example > > Create a directory for the build context and `cd` into it. Write "hello" into > a text file named `hello` and create a Dockerfile that runs `cat` on it. Build > the image from within the build context (`.`): > > ```shell > mkdir myproject && cd myproject > echo "hello" > hello > echo -e "FROM busybox\nCOPY /hello /\nRUN cat /hello" > Dockerfile > docker build -t helloapp:v1 . > ``` > > Move `Dockerfile` and `hello` into separate directories and build a second > version of the image (without relying on cache from the last build). 
Use `-f` > to point to the Dockerfile and specify the directory of the build context: > > ```shell > mkdir -p dockerfiles context > mv Dockerfile dockerfiles && mv hello context > docker build --no-cache -t helloapp:v2 -f dockerfiles/Dockerfile context > ``` Inadvertently including files that are not necessary for building an image results in a larger build context and larger image size. This can increase the time to build the image, time to pull and push it, and the container runtime size. To see how big your build context is, look for a message like this when building your `Dockerfile`: ```none Sending build context to Docker daemon 187.8MB ``` ### Pipe Dockerfile through `stdin` Docker has the ability to build images by piping `Dockerfile` through `stdin` with a _local or remote build context_. Piping a `Dockerfile` through `stdin` can be useful to perform one-off builds without writing a Dockerfile to disk, or in situations where the `Dockerfile` is generated, and should not persist afterwards. > The examples in this section use [here documents](http://tldp.org/LDP/abs/html/here-docs.html) > for convenience, but any method to provide the `Dockerfile` on `stdin` can be > used. > > For example, the following commands are equivalent: > > ```bash > echo -e 'FROM busybox\nRUN echo "hello world"' | docker build - > ``` > > ```bash > docker build -<<EOF > FROM busybox > RUN echo "hello world" > EOF > ``` > > You can substitute the examples with your preferred approach, or the approach > that best fits your use-case. #### Build an image using a Dockerfile from stdin, without sending build context Use this syntax to build an image using a `Dockerfile` from `stdin`, without sending additional files as build context. The hyphen (`-`) takes the position of the `PATH`, and instructs Docker to read the build context (which only contains a `Dockerfile`) from `stdin` instead of a directory: ```bash docker build [OPTIONS] - ``` The following example builds an image using a `Dockerfile` that is passed through `stdin`. No files are sent as build context to the daemon. ```bash docker build -t myimage:latest -<<EOF FROM busybox RUN echo "hello world" EOF ``` Omitting the build context can be useful in situations where your `Dockerfile` does not require files to be copied into the image, and improves the build-speed, as no files are sent to the daemon. If you want to improve the build-speed by excluding _some_ files from the build- context, refer to [exclude with .dockerignore](#exclude-with-dockerignore). > **Note**: Attempting to build a Dockerfile that uses `COPY` or `ADD` will fail > if this syntax is used. The following example illustrates this: > > ```bash > # create a directory to work in > mkdir example > cd example > > # create an example file > touch somefile.txt > > docker build -t myimage:latest -<<EOF > FROM busybox > COPY somefile.txt . > RUN cat /somefile.txt > EOF > > # observe that the build fails > ... > Step 2/3 : COPY somefile.txt . > COPY failed: stat /var/lib/docker/tmp/docker-builder249218248/somefile.txt: no such file or directory > ``` #### Build from a local build context, using a Dockerfile from stdin Use this syntax to build an image using files on your local filesystem, but using a `Dockerfile` from `stdin`. 
The syntax uses the `-f` (or `--file`) option to specify the `Dockerfile` to use, using a hyphen (`-`) as filename to instruct Docker to read the `Dockerfile` from `stdin`: ```bash docker build [OPTIONS] -f- PATH ``` The example below uses the current directory (`.`) as the build context, and builds an image using a `Dockerfile` that is passed through `stdin` using a [here document](http://tldp.org/LDP/abs/html/here-docs.html). ```bash # create a directory to work in mkdir example cd example # create an example file touch somefile.txt # build an image using the current directory as context, and a Dockerfile passed through stdin docker build -t myimage:latest -f- . <<EOF FROM busybox COPY somefile.txt . RUN cat /somefile.txt EOF ``` #### Build from a remote build context, using a Dockerfile from stdin Use this syntax to build an image using files from a remote `git` repository, using a `Dockerfile` from `stdin`. The syntax uses the `-f` (or `--file`) option to specify the `Dockerfile` to use, using a hyphen (`-`) as filename to instruct Docker to read the `Dockerfile` from `stdin`: ```bash docker build [OPTIONS] -f- PATH ``` This syntax can be useful in situations where you want to build an image from a repository does not contain a `Dockerfile`, or if you want to build with a custom `Dockerfile`, without maintaining your own fork of the repository. The example below builds an image using a `Dockerfile` from `stdin`, and adds the `README.md` file from the ["hello-world" Git repository on GitHub](https://github.com/docker-library/hello-world). ```bash docker build -t myimage:latest -f- https://github.com/docker-library/hello-world.git <<EOF FROM busybox COPY README.md . EOF ``` > **Under the hood** > > When building an image using a remote Git repository as build context, Docker > performs a `git clone` of the repository on the local machine, and sends > those files as build context to the daemon. This feature requires `git` to be > installed on the host where you run the `docker build` command. ### Exclude with .dockerignore To exclude files not relevant to the build (without restructuring your source repository) use a `.dockerignore` file. This file supports exclusion patterns similar to `.gitignore` files. For information on creating one, see the [.dockerignore file](/engine/reference/builder.md#dockerignore-file). ### Use multi-stage builds [Multi-stage builds](multistage-build.md) allow you to drastically reduce the size of your final image, without struggling to reduce the number of intermediate layers and files. Because an image is built during the final stage of the build process, you can minimize image layers by [leveraging build cache](#leverage-build-cache). 
For example, if your build contains several layers, you can order them from the less frequently changed (to ensure the build cache is reusable) to the more frequently changed: * Install tools you need to build your application * Install or update library dependencies * Generate your application A Dockerfile for a Go application could look like: ```Dockerfile FROM golang:1.11-alpine AS build # Install tools required for project # Run `docker build --no-cache .` to update dependencies RUN apk add --no-cache git RUN go get github.com/golang/dep/cmd/dep # List project dependencies with Gopkg.toml and Gopkg.lock # These layers are only re-built when Gopkg files are updated COPY Gopkg.lock Gopkg.toml /go/src/project/ WORKDIR /go/src/project/ # Install library dependencies RUN dep ensure -vendor-only # Copy the entire project and build it # This layer is rebuilt when a file changes in the project directory COPY . /go/src/project/ RUN go build -o /bin/project # This results in a single layer image FROM scratch COPY --from=build /bin/project /bin/project ENTRYPOINT ["/bin/project"] CMD ["--help"] ``` ### Don't install unnecessary packages To reduce complexity, dependencies, file sizes, and build times, avoid installing extra or unnecessary packages just because they might be "nice to have." For example, you don’t need to include a text editor in a database image. ### Decouple applications Each container should have only one concern. Decoupling applications into multiple containers makes it easier to scale horizontally and reuse containers. For instance, a web application stack might consist of three separate containers, each with its own unique image, to manage the web application, database, and an in-memory cache in a decoupled manner. Limiting each container to one process is a good rule of thumb, but it is not a hard and fast rule. For example, not only can containers be [spawned with an init process](/engine/reference/run.md#specify-an-init-process), some programs might spawn additional processes of their own accord. For instance, [Celery](http://www.celeryproject.org/) can spawn multiple worker processes, and [Apache](https://httpd.apache.org/) can create one process per request. Use your best judgment to keep containers as clean and modular as possible. If containers depend on each other, you can use [Docker container networks](/engine/userguide/networking/) to ensure that these containers can communicate. ### Minimize the number of layers In older versions of Docker, it was important that you minimized the number of layers in your images to ensure they were performant. The following features were added to reduce this limitation: - Only the instructions `RUN`, `COPY`, `ADD` create layers. Other instructions create temporary intermediate images, and do not increase the size of the build. - Where possible, use [multi-stage builds](multistage-build.md), and only copy the artifacts you need into the final image. This allows you to include tools and debug information in your intermediate build stages without increasing the size of the final image. ### Sort multi-line arguments Whenever possible, ease later changes by sorting multi-line arguments alphanumerically. This helps to avoid duplication of packages and make the list much easier to update. This also makes PRs a lot easier to read and review. Adding a space before a backslash (`\`) helps as well. 
Here’s an example from the [`buildpack-deps` image](https://github.com/docker-library/buildpack-deps): ```Dockerfile RUN apt-get update && apt-get install -y \ bzr \ cvs \ git \ mercurial \ subversion ``` ### Leverage build cache When building an image, Docker steps through the instructions in your `Dockerfile`, executing each in the order specified. As each instruction is examined, Docker looks for an existing image in its cache that it can reuse, rather than creating a new (duplicate) image. If you do not want to use the cache at all, you can use the `--no-cache=true` option on the `docker build` command. However, if you do let Docker use its cache, it is important to understand when it can, and cannot, find a matching image. The basic rules that Docker follows are outlined below: - Starting with a parent image that is already in the cache, the next instruction is compared against all child images derived from that base image to see if one of them was built using the exact same instruction. If not, the cache is invalidated. - In most cases, simply comparing the instruction in the `Dockerfile` with one of the child images is sufficient. However, certain instructions require more examination and explanation. - For the `ADD` and `COPY` instructions, the contents of the file(s) in the image are examined and a checksum is calculated for each file. The last-modified and last-accessed times of the file(s) are not considered in these checksums. During the cache lookup, the checksum is compared against the checksum in the existing images. If anything has changed in the file(s), such as the contents and metadata, then the cache is invalidated. - Aside from the `ADD` and `COPY` commands, cache checking does not look at the files in the container to determine a cache match. For example, when processing a `RUN apt-get -y update` command the files updated in the container are not examined to determine if a cache hit exists. In that case just the command string itself is used to find a match. Once the cache is invalidated, all subsequent `Dockerfile` commands generate new images and the cache is not used. ## Dockerfile instructions These recommendations are designed to help you create an efficient and maintainable `Dockerfile`. ### FROM [Dockerfile reference for the FROM instruction](/engine/reference/builder.md#from) Whenever possible, use current official images as the basis for your images. We recommend the [Alpine image](https://hub.docker.com/_/alpine/) as it is tightly controlled and small in size (currently under 5 MB), while still being a full Linux distribution. ### LABEL [Understanding object labels](/config/labels-custom-metadata.md) You can add labels to your image to help organize images by project, record licensing information, to aid in automation, or for other reasons. For each label, add a line beginning with `LABEL` and with one or more key-value pairs. The following examples show the different acceptable formats. Explanatory comments are included inline. > Strings with spaces must be quoted **or** the spaces must be escaped. Inner > quote characters (`"`), must also be escaped. ```Dockerfile # Set one or more individual labels LABEL com.example.version="0.0.1-beta" LABEL vendor1="ACME Incorporated" LABEL vendor2=ZENITH\ Incorporated LABEL com.example.release-date="2015-02-12" LABEL com.example.version.is-production="" ``` An image can have more than one label. 
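Labels set in this way travel with the image, so they can be used to select images later on. As a quick illustration (reusing a label key and value from the example above — substitute your own), you can filter the output of `docker images` by label:

```bash
docker images --filter "label=com.example.version=0.0.1-beta"
```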
Prior to Docker 1.10, it was recommended to combine all labels into a single `LABEL` instruction, to prevent extra layers from being created. This is no longer necessary, but combining labels is still supported. ```Dockerfile # Set multiple labels on one line LABEL com.example.version="0.0.1-beta" com.example.release-date="2015-02-12" ``` The above can also be written as: ```Dockerfile # Set multiple labels at once, using line-continuation characters to break long lines LABEL vendor=ACME\ Incorporated \ com.example.is-beta= \ com.example.is-production="" \ com.example.version="0.0.1-beta" \ com.example.release-date="2015-02-12" ``` See [Understanding object labels](/config/labels-custom-metadata.md) for guidelines about acceptable label keys and values. For information about querying labels, refer to the items related to filtering in [Managing labels on objects](/config/labels-custom-metadata.md#managing-labels-on-objects). See also [LABEL](/engine/reference/builder/#label) in the Dockerfile reference. ### RUN [Dockerfile reference for the RUN instruction](/engine/reference/builder.md#run) Split long or complex `RUN` statements on multiple lines separated with backslashes to make your `Dockerfile` more readable, understandable, and maintainable. #### apt-get Probably the most common use-case for `RUN` is an application of `apt-get`. Because it installs packages, the `RUN apt-get` command has several gotchas to look out for. Avoid `RUN apt-get upgrade` and `dist-upgrade`, as many of the "essential" packages from the parent images cannot upgrade inside an [unprivileged container](/engine/reference/run.md#security-configuration). If a package contained in the parent image is out-of-date, contact its maintainers. If you know there is a particular package, `foo`, that needs to be updated, use `apt-get install -y foo` to update automatically. Always combine `RUN apt-get update` with `apt-get install` in the same `RUN` statement. For example: ```Dockerfile RUN apt-get update && apt-get install -y \ package-bar \ package-baz \ package-foo ``` Using `apt-get update` alone in a `RUN` statement causes caching issues and subsequent `apt-get install` instructions fail. For example, say you have a Dockerfile: ```Dockerfile FROM ubuntu:18.04 RUN apt-get update RUN apt-get install -y curl ``` After building the image, all layers are in the Docker cache. Suppose you later modify `apt-get install` by adding extra package: ```Dockerfile FROM ubuntu:18.04 RUN apt-get update RUN apt-get install -y curl nginx ``` Docker sees the initial and modified instructions as identical and reuses the cache from previous steps. As a result the `apt-get update` is _not_ executed because the build uses the cached version. Because the `apt-get update` is not run, your build can potentially get an outdated version of the `curl` and `nginx` packages. Using `RUN apt-get update && apt-get install -y` ensures your Dockerfile installs the latest package versions with no further coding or manual intervention. This technique is known as "cache busting". You can also achieve cache-busting by specifying a package version. This is known as version pinning, for example: ```Dockerfile RUN apt-get update && apt-get install -y \ package-bar \ package-baz \ package-foo=1.3.* ``` Version pinning forces the build to retrieve a particular version regardless of what’s in the cache. This technique can also reduce failures due to unanticipated changes in required packages. 
Below is a well-formed `RUN` instruction that demonstrates all the `apt-get` recommendations. ```Dockerfile RUN apt-get update && apt-get install -y \ aufs-tools \ automake \ build-essential \ curl \ dpkg-sig \ libcap-dev \ libsqlite3-dev \ mercurial \ reprepro \ ruby1.9.1 \ ruby1.9.1-dev \ s3cmd=1.1.* \ && rm -rf /var/lib/apt/lists/* ``` The `s3cmd` argument specifies a version `1.1.*`. If the image previously used an older version, specifying the new one causes a cache bust of `apt-get update` and ensures the installation of the new version. Listing packages on each line can also prevent mistakes in package duplication. In addition, when you clean up the apt cache by removing `/var/lib/apt/lists` it reduces the image size, since the apt cache is not stored in a layer. Since the `RUN` statement starts with `apt-get update`, the package cache is always refreshed prior to `apt-get install`. > Official Debian and Ubuntu images [automatically run `apt-get clean`](https://github.com/moby/moby/blob/03e2923e42446dbb830c654d0eec323a0b4ef02a/contrib/mkimage/debootstrap#L82-L105), > so explicit invocation is not required. #### Using pipes Some `RUN` commands depend on the ability to pipe the output of one command into another, using the pipe character (`|`), as in the following example: ```Dockerfile RUN wget -O - https://some.site | wc -l > /number ``` Docker executes these commands using the `/bin/sh -c` interpreter, which only evaluates the exit code of the last operation in the pipe to determine success. In the example above this build step succeeds and produces a new image so long as the `wc -l` command succeeds, even if the `wget` command fails. If you want the command to fail due to an error at any stage in the pipe, prepend `set -o pipefail &&` to ensure that an unexpected error prevents the build from inadvertently succeeding. For example: ```Dockerfile RUN set -o pipefail && wget -O - https://some.site | wc -l > /number ``` > Not all shells support the `-o pipefail` option. > > In cases such as the `dash` shell on > Debian-based images, consider using the _exec_ form of `RUN` to explicitly > choose a shell that does support the `pipefail` option. For example: > > ```Dockerfile > RUN ["/bin/bash", "-c", "set -o pipefail && wget -O - https://some.site | wc -l > /number"] > ``` ### CMD [Dockerfile reference for the CMD instruction](/engine/reference/builder.md#cmd) The `CMD` instruction should be used to run the software contained by your image, along with any arguments. `CMD` should almost always be used in the form of `CMD ["executable", "param1", "param2"…]`. Thus, if the image is for a service, such as Apache and Rails, you would run something like `CMD ["apache2","-DFOREGROUND"]`. Indeed, this form of the instruction is recommended for any service-based image. In most other cases, `CMD` should be given an interactive shell, such as bash, python and perl. For example, `CMD ["perl", "-de0"]`, `CMD ["python"]`, or `CMD ["php", "-a"]`. Using this form means that when you execute something like `docker run -it python`, you’ll get dropped into a usable shell, ready to go. `CMD` should rarely be used in the manner of `CMD ["param", "param"]` in conjunction with [`ENTRYPOINT`](/engine/reference/builder.md#entrypoint), unless you and your expected users are already quite familiar with how `ENTRYPOINT` works. ### EXPOSE [Dockerfile reference for the EXPOSE instruction](/engine/reference/builder.md#expose) The `EXPOSE` instruction indicates the ports on which a container listens for connections. 
Consequently, you should use the common, traditional port for your application. For example, an image containing the Apache web server would use `EXPOSE 80`, while an image containing MongoDB would use `EXPOSE 27017` and so on. For external access, your users can execute `docker run` with a flag indicating how to map the specified port to the port of their choice. For container linking, Docker provides environment variables for the path from the recipient container back to the source (ie, `MYSQL_PORT_3306_TCP`). ### ENV [Dockerfile reference for the ENV instruction](/engine/reference/builder.md#env) To make new software easier to run, you can use `ENV` to update the `PATH` environment variable for the software your container installs. For example, `ENV PATH /usr/local/nginx/bin:$PATH` ensures that `CMD ["nginx"]` just works. The `ENV` instruction is also useful for providing required environment variables specific to services you wish to containerize, such as Postgres’s `PGDATA`. Lastly, `ENV` can also be used to set commonly used version numbers so that version bumps are easier to maintain, as seen in the following example: ```Dockerfile ENV PG_MAJOR 9.3 ENV PG_VERSION 9.3.4 RUN curl -SL http://example.com/postgres-$PG_VERSION.tar.xz | tar -xJC /usr/src/postgress && … ENV PATH /usr/local/postgres-$PG_MAJOR/bin:$PATH ``` Similar to having constant variables in a program (as opposed to hard-coding values), this approach lets you change a single `ENV` instruction to auto-magically bump the version of the software in your container. Each `ENV` line creates a new intermediate layer, just like `RUN` commands. This means that even if you unset the environment variable in a future layer, it still persists in this layer and its value can be dumped. You can test this by creating a Dockerfile like the following, and then building it. ```Dockerfile FROM alpine ENV ADMIN_USER="mark" RUN echo $ADMIN_USER > ./mark RUN unset ADMIN_USER ``` ```bash $ docker run --rm test sh -c 'echo $ADMIN_USER' mark ``` To prevent this, and really unset the environment variable, use a `RUN` command with shell commands, to set, use, and unset the variable all in a single layer. You can separate your commands with `;` or `&&`. If you use the second method, and one of the commands fails, the `docker build` also fails. This is usually a good idea. Using `\` as a line continuation character for Linux Dockerfiles improves readability. You could also put all of the commands into a shell script and have the `RUN` command just run that shell script. ```Dockerfile FROM alpine RUN export ADMIN_USER="mark" \ && echo $ADMIN_USER > ./mark \ && unset ADMIN_USER CMD sh ``` ```bash $ docker run --rm test sh -c 'echo $ADMIN_USER' ``` ### ADD or COPY - [Dockerfile reference for the ADD instruction](/engine/reference/builder.md#add) - [Dockerfile reference for the COPY instruction](/engine/reference/builder.md#copy) Although `ADD` and `COPY` are functionally similar, generally speaking, `COPY` is preferred. That’s because it’s more transparent than `ADD`. `COPY` only supports the basic copying of local files into the container, while `ADD` has some features (like local-only tar extraction and remote URL support) that are not immediately obvious. Consequently, the best use for `ADD` is local tar file auto-extraction into the image, as in `ADD rootfs.tar.xz /`. If you have multiple `Dockerfile` steps that use different files from your context, `COPY` them individually, rather than all at once. 
This ensures that each step's build cache is only invalidated (forcing the step to be re-run) if the specifically required files change. For example: ```Dockerfile COPY requirements.txt /tmp/ RUN pip install --requirement /tmp/requirements.txt COPY . /tmp/ ``` Results in fewer cache invalidations for the `RUN` step, than if you put the `COPY . /tmp/` before it. Because image size matters, using `ADD` to fetch packages from remote URLs is strongly discouraged; you should use `curl` or `wget` instead. That way you can delete the files you no longer need after they've been extracted and you don't have to add another layer in your image. For example, you should avoid doing things like: ```Dockerfile ADD http://example.com/big.tar.xz /usr/src/things/ RUN tar -xJf /usr/src/things/big.tar.xz -C /usr/src/things RUN make -C /usr/src/things all ``` And instead, do something like: ```Dockerfile RUN mkdir -p /usr/src/things \ && curl -SL http://example.com/big.tar.xz \ | tar -xJC /usr/src/things \ && make -C /usr/src/things all ``` For other items (files, directories) that do not require `ADD`’s tar auto-extraction capability, you should always use `COPY`. ### ENTRYPOINT [Dockerfile reference for the ENTRYPOINT instruction](/engine/reference/builder.md#entrypoint) The best use for `ENTRYPOINT` is to set the image's main command, allowing that image to be run as though it was that command (and then use `CMD` as the default flags). Let's start with an example of an image for the command line tool `s3cmd`: ```Dockerfile ENTRYPOINT ["s3cmd"] CMD ["--help"] ``` Now the image can be run like this to show the command's help: ```bash $ docker run s3cmd ``` Or using the right parameters to execute a command: ```bash $ docker run s3cmd ls s3://mybucket ``` This is useful because the image name can double as a reference to the binary as shown in the command above. The `ENTRYPOINT` instruction can also be used in combination with a helper script, allowing it to function in a similar way to the command above, even when starting the tool may require more than one step. For example, the [Postgres Official Image](https://hub.docker.com/_/postgres/) uses the following script as its `ENTRYPOINT`: ```bash #!/bin/bash set -e if [ "$1" = 'postgres' ]; then chown -R postgres "$PGDATA" if [ -z "$(ls -A "$PGDATA")" ]; then gosu postgres initdb fi exec gosu postgres "$@" fi exec "$@" ``` > Configure app as PID 1 > > This script uses [the `exec` Bash command](http://wiki.bash-hackers.org/commands/builtin/exec) > so that the final running application becomes the container's PID 1. This > allows the application to receive any Unix signals sent to the container. > For more, see the [`ENTRYPOINT` reference](/engine/reference/builder.md#entrypoint). The helper script is copied into the container and run via `ENTRYPOINT` on container start: ```Dockerfile COPY ./docker-entrypoint.sh / ENTRYPOINT ["/docker-entrypoint.sh"] CMD ["postgres"] ``` This script allows the user to interact with Postgres in several ways. 
It can simply start Postgres: ```bash $ docker run postgres ``` Or, it can be used to run Postgres and pass parameters to the server: ```bash $ docker run postgres postgres --help ``` Lastly, it could also be used to start a totally different tool, such as Bash: ```bash $ docker run --rm -it postgres bash ``` ### VOLUME [Dockerfile reference for the VOLUME instruction](/engine/reference/builder.md#volume) The `VOLUME` instruction should be used to expose any database storage area, configuration storage, or files/folders created by your docker container. You are strongly encouraged to use `VOLUME` for any mutable and/or user-serviceable parts of your image. ### USER [Dockerfile reference for the USER instruction](/engine/reference/builder.md#user) If a service can run without privileges, use `USER` to change to a non-root user. Start by creating the user and group in the `Dockerfile` with something like `RUN groupadd -r postgres && useradd --no-log-init -r -g postgres postgres`. > Consider an explicit UID/GID > > Users and groups in an image are assigned a non-deterministic UID/GID in that > the "next" UID/GID is assigned regardless of image rebuilds. So, if it’s > critical, you should assign an explicit UID/GID. > Due to an [unresolved bug](https://github.com/golang/go/issues/13548) in the > Go archive/tar package's handling of sparse files, attempting to create a user > with a significantly large UID inside a Docker container can lead to disk > exhaustion because `/var/log/faillog` in the container layer is filled with > NULL (\0) characters. A workaround is to pass the `--no-log-init` flag to > useradd. The Debian/Ubuntu `adduser` wrapper does not support this flag. Avoid installing or using `sudo` as it has unpredictable TTY and signal-forwarding behavior that can cause problems. If you absolutely need functionality similar to `sudo`, such as initializing the daemon as `root` but running it as non-`root`), consider using ["gosu"](https://github.com/tianon/gosu). Lastly, to reduce layers and complexity, avoid switching `USER` back and forth frequently. ### WORKDIR [Dockerfile reference for the WORKDIR instruction](/engine/reference/builder.md#workdir) For clarity and reliability, you should always use absolute paths for your `WORKDIR`. Also, you should use `WORKDIR` instead of proliferating instructions like `RUN cd … && do-something`, which are hard to read, troubleshoot, and maintain. ### ONBUILD [Dockerfile reference for the ONBUILD instruction](/engine/reference/builder.md#onbuild) An `ONBUILD` command executes after the current `Dockerfile` build completes. `ONBUILD` executes in any child image derived `FROM` the current image. Think of the `ONBUILD` command as an instruction the parent `Dockerfile` gives to the child `Dockerfile`. A Docker build executes `ONBUILD` commands before any command in a child `Dockerfile`. `ONBUILD` is useful for images that are going to be built `FROM` a given image. For example, you would use `ONBUILD` for a language stack image that builds arbitrary user software written in that language within the `Dockerfile`, as you can see in [Ruby’s `ONBUILD` variants](https://github.com/docker-library/ruby/blob/master/2.4/jessie/onbuild/Dockerfile). Images built from `ONBUILD` should get a separate tag, for example: `ruby:1.9-onbuild` or `ruby:2.0-onbuild`. Be careful when putting `ADD` or `COPY` in `ONBUILD`. The "onbuild" image fails catastrophically if the new build's context is missing the resource being added. 
Adding a separate tag, as recommended above, helps mitigate this by allowing the `Dockerfile` author to make a choice. ## Examples for Official Images These Official Images have exemplary `Dockerfile`s: * [Go](https://hub.docker.com/_/golang/) * [Perl](https://hub.docker.com/_/perl/) * [Hy](https://hub.docker.com/_/hylang/) * [Ruby](https://hub.docker.com/_/ruby/) ## Additional resources: * [Dockerfile Reference](/engine/reference/builder.md) * [More about Base Images](baseimages.md) * [More about Automated Builds](/docker-hub/builds/) * [Guidelines for Creating Official Images](/docker-hub/official_images/)
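As a small companion to the [.dockerignore](#exclude-with-dockerignore) guidance earlier in this document, the sketch below shows what a minimal ignore file might look like for a typical Node.js project; the entries are illustrative only and should be tailored to your own build context:

```none
# .dockerignore (illustrative)
.git
node_modules
npm-debug.log
tmp/
```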
{ "content_hash": "2530109466ab65f9dba089afd12353ca", "timestamp": "", "source": "github", "line_count": 923, "max_line_length": 185, "avg_line_length": 37.1614301191766, "alnum_prop": 0.7565889212827989, "repo_name": "londoncalling/docker.github.io", "id": "6beacdc26b50f8556cb6563cfca389e81c48bb17", "size": "34330", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "develop/develop-images/dockerfile_best-practices.md", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "628474" }, { "name": "Dockerfile", "bytes": "9553" }, { "name": "Go", "bytes": "8430" }, { "name": "HTML", "bytes": "84985" }, { "name": "JavaScript", "bytes": "12421593" }, { "name": "Makefile", "bytes": "11149" }, { "name": "Ruby", "bytes": "1132" }, { "name": "Shell", "bytes": "19808" } ], "symlink_target": "" }
// // MARLoginViewController.m // easywayout // // Created by Martin.Liu on 2017/1/4. // Copyright © 2017年 MAIERSI. All rights reserved. // #import "MARLoginViewController.h" @interface MARLoginViewController () #pragma mark IB Property for UI @property (weak, nonatomic) IBOutlet NSLayoutConstraint *constraint_upTitleTop; @property (weak, nonatomic) IBOutlet NSLayoutConstraint *constraint_TFSHeight; @property (weak, nonatomic) IBOutlet NSLayoutConstraint *constraint_loginButtonTop; @property (weak, nonatomic) IBOutlet NSLayoutConstraint *constraint_centerY; #pragma mark IB Property @property (weak, nonatomic) IBOutlet UITextField *userAccountTF; @property (weak, nonatomic) IBOutlet UITextField *passwordTF; #pragma mark IB Action - (IBAction)clickCloseButtonAction:(id)sender; - (IBAction)clickLoginButtonAction:(id)sender; @end @implementation MARLoginViewController - (void)viewDidLoad { [super viewDidLoad]; [self setTapResignTFS:YES]; } - (void)UIGlobal { if (kiPhone4s) { self.constraint_upTitleTop.constant = 50; self.constraint_centerY.constant = -10; self.constraint_TFSHeight.constant = 45; self.constraint_loginButtonTop.constant = 15; } else if (kiPhone5) { self.constraint_TFSHeight.constant = 50; self.constraint_loginButtonTop.constant = 30; } } - (void)viewWillAppear:(BOOL)animated { [super viewWillAppear:animated]; [self.navigationController setNavigationBarHidden:YES animated:YES]; } - (void)viewDidAppear:(BOOL)animated { [super viewDidAppear:animated]; self.navigationController.interactivePopGestureRecognizer.enabled = NO; self.navigationController.interactivePopGestureRecognizer.delegate = nil; } - (void)didReceiveMemoryWarning { [super didReceiveMemoryWarning]; // Dispose of any resources that can be recreated. } /* #pragma mark - Navigation // In a storyboard-based application, you will often want to do a little preparation before navigation - (void)prepareForSegue:(UIStoryboardSegue *)segue sender:(id)sender { // Get the new view controller using [segue destinationViewController]. // Pass the selected object to the new view controller. } */ - (IBAction)clickCloseButtonAction:(id)sender { [self dismissViewControllerAnimated:YES completion:nil]; } - (IBAction)clickLoginButtonAction:(id)sender { [self dismissViewControllerAnimated:YES completion:nil]; ShowSuccessMessage(@"模拟登陆成功", 1.f); } @end
{ "content_hash": "299a711f568b29cb8d55e43da6009c09", "timestamp": "", "source": "github", "line_count": 86, "max_line_length": 102, "avg_line_length": 28.953488372093023, "alnum_prop": 0.7473895582329317, "repo_name": "liulongdev/ProfilerTest", "id": "50dc0a3b82c1df92d32586eeb124cba65dab4865", "size": "2505", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "easywayout/easywayout/Modules/LoginModule/MARLoginViewController.m", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "12212" }, { "name": "Objective-C", "bytes": "3603165" }, { "name": "Objective-C++", "bytes": "71429" }, { "name": "Ruby", "bytes": "578" }, { "name": "Shell", "bytes": "8809" } ], "symlink_target": "" }
import React, { Component, PropTypes } from 'react'

class ImageItem extends Component {
	render() {
		let imgUrl = this.props.url
		// Build the resize suffix (e.g. "300w_200h_") only when a frame size is provided
		let scaleW = (this.props.frameWidth) ? this.props.frameWidth + 'w_' : '';
		let scaleH = (this.props.frameHeight) ? this.props.frameHeight + 'h_' : '';
		// Only append the resize/quality suffix on the 'wx' platform
		imgUrl = window.Core.platform === 'wx' ? imgUrl + '@' + scaleW + scaleH + '90Q' : imgUrl + ''
		return(
			<img src={imgUrl} />
		)
	}
}

ImageItem.propTypes = {
	url: PropTypes.string.isRequired,
	frameWidth: PropTypes.number.isRequired,
	frameHeight: PropTypes.number.isRequired,
}

export default ImageItem
{ "content_hash": "dadc3aeaea3bf5151b24483364bb86d4", "timestamp": "", "source": "github", "line_count": 23, "max_line_length": 97, "avg_line_length": 27.26086956521739, "alnum_prop": 0.6618819776714514, "repo_name": "Hive-Team/venus-lp", "id": "b89e4b2e860a0070c6c58e65ba72a6c1ce4d29d7", "size": "627", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/common/components/general/ImageItem.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "89220" }, { "name": "HTML", "bytes": "1769" }, { "name": "JavaScript", "bytes": "50095" } ], "symlink_target": "" }
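A minimal usage sketch for the `ImageItem` component above; the page URL, the mount node, and the `window.Core.platform` bootstrap are illustrative assumptions, not part of the original file.

```jsx
import React from 'react'
import ReactDOM from 'react-dom'
import ImageItem from './ImageItem' // assumed relative path to the component above

// ImageItem reads window.Core.platform, so a host app is assumed to have set it.
window.Core = window.Core || { platform: 'wx' }

// On the 'wx' platform this renders
// <img src="https://example.com/photo.jpg@300w_200h_90Q" />;
// on any other platform the URL is passed through unchanged.
ReactDOM.render(
  <ImageItem url="https://example.com/photo.jpg" frameWidth={300} frameHeight={200} />,
  document.getElementById('root')
)
```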
package Perl::Critic::Policy::CodeLayout::ProhibitTrailingWhitespace; use 5.006001; use strict; use warnings; use English qw(-no_match_vars); use Readonly; use charnames qw{}; use PPI::Token::Whitespace; use Perl::Critic::Utils qw{ :characters :severities }; use base 'Perl::Critic::Policy'; our $VERSION = '1.118'; #----------------------------------------------------------------------------- Readonly::Scalar my $EXPL => q{Don't use whitespace at the end of lines}; ## no critic (RequireInterpolationOfMetachars) Readonly::Hash my %C_STYLE_ESCAPES => ( ord "\t" => q{\t}, ord "\n" => q{\n}, ord "\r" => q{\r}, ord "\f" => q{\f}, ord "\b" => q{\b}, ord "\a" => q{\a}, ord "\e" => q{\e}, ); ## use critic #----------------------------------------------------------------------------- sub supported_parameters { return qw{ } } sub default_severity { return $SEVERITY_LOWEST } sub default_themes { return qw( core maintenance ) } sub applies_to { return 'PPI::Token::Whitespace' } #----------------------------------------------------------------------------- sub violates { my ( $self, $token, undef ) = @_; if ( $token->content() =~ m< ( (?! \n) \s )+ \n >xms ) { my $extra_whitespace = $1; my $description = q{Found "}; $description .= join $EMPTY, map { _escape($_) } split $EMPTY, $extra_whitespace; $description .= q{" at the end of the line}; return $self->violation( $description, $EXPL, $token ); } return; } sub _escape { my $character = shift; my $ordinal = ord $character; if (my $c_escape = $C_STYLE_ESCAPES{$ordinal}) { return $c_escape; } # Apparently, the charnames.pm that ships with older perls does not # support the C<viacode> function, and newer versions of the module are # not distributed separately from perl itself So if the C<viacode> method # is not supported, then just substitute something. ## no critic (RequireInterpolationOfMetachars) if ( charnames->can( 'viacode' ) ) { return q/\N{/ . charnames::viacode($ordinal) . q/}/; } else { return '\N{WHITESPACE CHAR}'; } } 1; #----------------------------------------------------------------------------- __END__ =pod =for stopwords =head1 NAME Perl::Critic::Policy::CodeLayout::ProhibitTrailingWhitespace - Don't use whitespace at the end of lines. =head1 AFFILIATION This Policy is part of the core L<Perl::Critic|Perl::Critic> distribution. =head1 DESCRIPTION Anything that is not readily visually detectable is a bad thing in general, and more specifically, as different people edit the same code, their editors may automatically strip out trailing whitespace, causing spurious differences between different versions of the same file (i.e. code in a source control system). =head1 CONFIGURATION This Policy is not configurable except for the standard options. =head1 AUTHOR Elliot Shank C<< <[email protected]> >> =head1 COPYRIGHT Copyright (c) 2007-2011 Elliot Shank. This program is free software; you can redistribute it and/or modify it under the same terms as Perl itself. The full text of this license can be found in the LICENSE file included with this module. =cut # Local Variables: # mode: cperl # cperl-indent-level: 4 # fill-column: 78 # indent-tabs-mode: nil # c-indentation-style: bsd # End: # ex: set ts=8 sts=4 sw=4 tw=78 ft=perl expandtab shiftround :
{ "content_hash": "4b7b90d93301d5fcd6c8902166af4b33", "timestamp": "", "source": "github", "line_count": 143, "max_line_length": 104, "avg_line_length": 25.132867132867133, "alnum_prop": 0.5809682804674458, "repo_name": "amidoimidazol/bio_info", "id": "d81098ec190b5fe7efbe282dbe238173a149a230", "size": "4011", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Beginning Perl for Bioinformatics/lib/Perl/Critic/Policy/CodeLayout/ProhibitTrailingWhitespace.pm", "mode": "33188", "license": "mit", "language": [ { "name": "ASP", "bytes": "59131" }, { "name": "C", "bytes": "6411096" }, { "name": "C++", "bytes": "948727" }, { "name": "CSS", "bytes": "19636" }, { "name": "Groff", "bytes": "729471" }, { "name": "HTML", "bytes": "36592112" }, { "name": "JavaScript", "bytes": "276009" }, { "name": "Objective-C", "bytes": "9782" }, { "name": "Perl", "bytes": "35025376" }, { "name": "Perl6", "bytes": "474376" }, { "name": "Prolog", "bytes": "2605820" }, { "name": "Python", "bytes": "1276815" }, { "name": "Tcl", "bytes": "226362" }, { "name": "Visual Basic", "bytes": "269" } ], "symlink_target": "" }
<?xml version="1.0" encoding="utf-8"?> <resources> <string msgid="2869576371154716097" name="status_bar_notification_info_overflow">"၉၉၉+"</string> </resources>
{ "content_hash": "ba011cce17ef20ddcd54c0f43b60920a", "timestamp": "", "source": "github", "line_count": 4, "max_line_length": 100, "avg_line_length": 41, "alnum_prop": 0.7195121951219512, "repo_name": "Rezar/Ubiqlog", "id": "2ca01566a7a2792f75583c51f4fca712cc3e0f39", "size": "170", "binary": false, "copies": "18", "ref": "refs/heads/ma", "path": "app/build/intermediates/exploded-aar/com.android.support/support-compat/26.1.0/res/values-my/values-my.xml", "mode": "33188", "license": "bsd-2-clause", "language": [ { "name": "Java", "bytes": "1646185" } ], "symlink_target": "" }
import { CloseButton, CloseButtonProps } from "~/components/CloseButton" import { Portal, PortalProps } from "~/portal" import { chakra, ChakraProps, forwardRef, HTMLChakraProps, StylesProvider, SystemStyleObject, ThemingProps, useMultiStyleConfig, useStyles, } from "~/system" import { fadeConfig } from "~/transition" import { callAllHandlers, cx, FocusableElement, __DEV__ } from "~/utils" import { createContext } from "~/react-utils" import { AnimatePresence, HTMLMotionProps, motion } from "framer-motion" import * as React from "react" import { RemoveScroll } from "react-remove-scroll" import type { MouseEvent } from "react" import { ModalTransition } from "./modal-transition" import { useModal, UseModalProps, UseModalReturn } from "./use-modal" interface ModalOptions { /** * If `true`, the modal will autofocus the first enabled and interactive * element within the `ModalContent` * * @default true */ autoFocus?: boolean /** * The `ref` of element to receive focus when the modal opens. */ initialFocusRef?: React.RefObject<FocusableElement> /** * The `ref` of element to receive focus when the modal closes. */ finalFocusRef?: React.RefObject<FocusableElement> /** * If `true`, the modal will return focus to the element that triggered it when it closes. * @default true */ returnFocusOnClose?: boolean /** * If `true`, scrolling will be disabled on the `body` when the modal opens. * @default true */ blockScrollOnMount?: boolean /** * Handle zoom/pinch gestures on iOS devices when scroll locking is enabled. * Defaults to `false`. */ allowPinchZoom?: boolean /** * If `true`, a `padding-right` will be applied to the body element * that's equal to the width of the scrollbar. * * This can help prevent some unpleasant flickering effect * and content adjustment when the modal opens */ preserveScrollBarGap?: boolean } type ScrollBehavior = "inside" | "outside" type MotionPreset = "slideInBottom" | "slideInRight" | "scale" | "none" export interface ModalProps extends UseModalProps, ModalOptions, ThemingProps<"Modal"> { children: React.ReactNode /** * If `true`, the modal will be centered on screen. * @default false */ isCentered?: boolean /** * Where scroll behavior should originate. * - If set to `inside`, scroll only occurs within the `ModalBody`. * - If set to `outside`, the entire `ModalContent` will scroll within the viewport. *o * @default "outside" */ scrollBehavior?: ScrollBehavior /** * Props to be forwarded to the portal component */ portalProps?: Pick<PortalProps, "appendToParentPortal" | "containerRef"> /** * The transition that should be used for the modal */ motionPreset?: MotionPreset } interface ModalContext extends ModalOptions, UseModalReturn { /** * The transition that should be used for the modal */ motionPreset?: MotionPreset } const [ModalContextProvider, useModalContext] = createContext<ModalContext>({ strict: true, name: "ModalContext", errorMessage: "useModalContext: `context` is undefined. Seems you forgot to wrap modal components in `<Modal />`", }) export { ModalContextProvider, useModalContext } /** * Modal provides context, theming, and accessibility properties * to all other modal components. * * It doesn't render any DOM node. 
*/ export const Modal: React.FC<ModalProps> = (props) => { const { portalProps, children, autoFocus, initialFocusRef, finalFocusRef, returnFocusOnClose, blockScrollOnMount, allowPinchZoom, preserveScrollBarGap, motionPreset, } = props const styles = useMultiStyleConfig("Modal", props) const modal = useModal(props) const context = { ...modal, autoFocus, initialFocusRef, finalFocusRef, returnFocusOnClose, blockScrollOnMount, allowPinchZoom, preserveScrollBarGap, motionPreset, } return ( <ModalContextProvider value={context}> <StylesProvider value={styles}> <AnimatePresence>{context.isOpen && <Portal {...portalProps}>{children}</Portal>}</AnimatePresence> </StylesProvider> </ModalContextProvider> ) } Modal.defaultProps = { returnFocusOnClose: true, scrollBehavior: "outside", autoFocus: true, blockScrollOnMount: true, allowPinchZoom: false, motionPreset: "scale", } if (__DEV__) { Modal.displayName = "Modal" } export interface ModalContentProps extends HTMLChakraProps<"section"> { /** * The props to forward to the modal's content wrapper */ containerProps?: HTMLChakraProps<"div"> } const Motion = chakra(motion.div) /** * ModalContent is used to group modal's content. It has all the * necessary `aria-*` properties to indicate that it is a modal */ export const ModalContent = forwardRef<ModalContentProps, "section">((props, ref) => { const { className, children, containerProps: rootProps, ...rest } = props const { getDialogProps, getDialogContainerProps, preserveScrollBarGap, allowPinchZoom, blockScrollOnMount } = useModalContext() const dialogProps = getDialogProps(rest, ref) as any const containerProps = getDialogContainerProps(rootProps) const _className = cx("chakra-modal__content", className) const styles = useStyles() const dialogStyles: SystemStyleObject = { display: "flex", flexDirection: "column", position: "relative", width: "100%", outline: 0, ...styles.dialog, } const dialogContainerStyles: SystemStyleObject = { display: "flex", width: "100vw", height: "100vh", position: "fixed", left: 0, top: 0, ...styles.dialogContainer, } const { motionPreset } = useModalContext() return ( <RemoveScroll removeScrollBar={!preserveScrollBarGap} allowPinchZoom={allowPinchZoom} enabled={blockScrollOnMount}> <chakra.div {...containerProps} className="chakra-modal__content-container" __css={dialogContainerStyles}> <ModalTransition preset={motionPreset} className={_className} {...dialogProps} __css={dialogStyles}> {children} </ModalTransition> </chakra.div> </RemoveScroll> ) }) if (__DEV__) { ModalContent.displayName = "ModalContent" } export interface ModalOverlayProps extends Omit<HTMLMotionProps<"div">, "color" | "transition" | "css">, Omit<ChakraProps, "css"> { children?: React.ReactNode } /** * ModalOverlay renders a backdrop behind the modal. It is * also used as a wrapper for the modal content for better positioning. * * @see Docs https://chakra-ui.com/docs/overlay/modal */ export const ModalOverlay = forwardRef<ModalOverlayProps, "div">((props, ref) => { const { className, transition, ...rest } = props const _className = cx("chakra-modal__overlay", className) const styles = useStyles() const overlayStyle: SystemStyleObject = { pos: "fixed", left: "0", top: "0", w: "100vw", h: "100vh", ...styles.overlay, } const { motionPreset } = useModalContext() const motionProps = motionPreset === "none" ? 
{} : fadeConfig return <Motion {...motionProps} __css={overlayStyle} ref={ref} className={_className} {...rest} /> }) if (__DEV__) { ModalOverlay.displayName = "ModalOverlay" } export interface ModalHeaderProps extends HTMLChakraProps<"header"> {} /** * ModalHeader * * React component that houses the title of the modal. * * @see Docs https://chakra-ui.com/docs/components/modal */ export const ModalHeader = forwardRef<ModalHeaderProps, "header">((props, ref) => { const { className, ...rest } = props const { headerId, setHeaderMounted } = useModalContext() /** * Notify us if this component was rendered or used * so we can append `aria-labelledby` automatically */ React.useEffect(() => { setHeaderMounted(true) return () => setHeaderMounted(false) }, [setHeaderMounted]) const _className = cx("chakra-modal__header", className) const styles = useStyles() const headerStyles: SystemStyleObject = { flex: 0, ...styles.header, } return <chakra.header ref={ref} className={_className} id={headerId} {...rest} __css={headerStyles} /> }) if (__DEV__) { ModalHeader.displayName = "ModalHeader" } export interface ModalBodyProps extends HTMLChakraProps<"div"> {} /** * ModalBody * * React component that houses the main content of the modal. * * @see Docs https://chakra-ui.com/docs/components/modal */ export const ModalBody = forwardRef<ModalBodyProps, "div">((props, ref) => { const { className, ...rest } = props const { bodyId, setBodyMounted } = useModalContext() /** * Notify us if this component was rendered or used * so we can append `aria-describedby` automatically */ React.useEffect(() => { setBodyMounted(true) return () => setBodyMounted(false) }, [setBodyMounted]) const _className = cx("chakra-modal__body", className) const styles = useStyles() return <chakra.div ref={ref} className={_className} id={bodyId} {...rest} __css={styles.body} /> }) if (__DEV__) { ModalBody.displayName = "ModalBody" } export interface ModalFooterProps extends HTMLChakraProps<"footer"> {} /** * ModalFooter houses the action buttons of the modal. * @see Docs https://chakra-ui.com/docs/components/modal */ export const ModalFooter = forwardRef<ModalFooterProps, "footer">((props, ref) => { const { className, ...rest } = props const _className = cx("chakra-modal__footer", className) const styles = useStyles() const footerStyles: SystemStyleObject = { display: "flex", alignItems: "center", justifyContent: "flex-end", ...styles.footer, } return <chakra.footer ref={ref} {...rest} __css={footerStyles} className={_className} /> }) if (__DEV__) { ModalFooter.displayName = "ModalFooter" } /** * ModalCloseButton is used closes the modal. * * You don't need to pass the `onClick` to it, it reads the * `onClose` action from the modal context. */ export const ModalCloseButton = forwardRef<CloseButtonProps, "button">((props, ref) => { const { onClick, className, ...rest } = props const { onClose } = useModalContext() const _className = cx("chakra-modal__close-btn", className) const styles = useStyles() return ( <CloseButton ref={ref} __css={styles.closeButton} className={_className} onClick={callAllHandlers(onClick, (event: MouseEvent) => { event.stopPropagation() onClose() })} {...rest} /> ) }) if (__DEV__) { ModalCloseButton.displayName = "ModalCloseButton" }
{ "content_hash": "f93d7f60784088c278115cc12568bf6e", "timestamp": "", "source": "github", "line_count": 387, "max_line_length": 119, "avg_line_length": 27.3953488372093, "alnum_prop": 0.683267308055084, "repo_name": "UgnisSoftware/ugnis", "id": "5f8a98d0022dae7fc9fda593bcd772139d315176", "size": "10602", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/components/Modal/src/Modal.tsx", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "1532" }, { "name": "TypeScript", "bytes": "712619" } ], "symlink_target": "" }
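A short TSX sketch of how the modal pieces above are meant to compose; the local import path and the open/close state wiring are assumptions for illustration, while the component names and the `isOpen`/`onClose` props come from the file itself.

```tsx
import * as React from "react"
import {
  Modal,
  ModalOverlay,
  ModalContent,
  ModalHeader,
  ModalCloseButton,
  ModalBody,
  ModalFooter,
} from "./Modal" // assumed path to the component file above

// Plain component state stands in for whatever disclosure logic the host app uses.
export const ExampleDialog: React.FC = () => {
  const [isOpen, setIsOpen] = React.useState(false)
  const onClose = () => setIsOpen(false)

  return (
    <>
      <button onClick={() => setIsOpen(true)}>Open dialog</button>
      <Modal isOpen={isOpen} onClose={onClose} motionPreset="scale">
        <ModalOverlay />
        <ModalContent>
          <ModalHeader>Title</ModalHeader>
          <ModalCloseButton />
          <ModalBody>Body content goes here.</ModalBody>
          <ModalFooter>
            <button onClick={onClose}>Done</button>
          </ModalFooter>
        </ModalContent>
      </Modal>
    </>
  )
}
```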
using System; using System.Collections; namespace Org.BouncyCastle.Cms { public class RecipientInformationStore { private readonly ArrayList all; //ArrayList[RecipientInformation] private readonly Hashtable table = new Hashtable(); // Hashtable[RecipientID, ArrayList[RecipientInformation]] public RecipientInformationStore( ICollection recipientInfos) { foreach (RecipientInformation recipientInformation in recipientInfos) { RecipientID rid = recipientInformation.RecipientID; ArrayList list = (ArrayList) table[rid]; if (list == null) { table[rid] = list = new ArrayList(1); } list.Add(recipientInformation); } this.all = new ArrayList(recipientInfos); } /** * Return the first RecipientInformation object that matches the * passed in selector. Null if there are no matches. * * @param selector to identify a recipient * @return a single RecipientInformation object. Null if none matches. */ public RecipientInformation GetFirstRecipient( RecipientID selector) { ArrayList list = (ArrayList) table[selector]; return list == null ? null : (RecipientInformation) list[0]; } /** * Return the number of recipients in the collection. * * @return number of recipients identified. */ public int Count { get { return all.Count; } } /** * Return all recipients in the collection * * @return a collection of recipients. */ public ICollection GetRecipients() { return new ArrayList(all); } /** * Return possible empty collection with recipients matching the passed in RecipientID * * @param selector a recipient id to select against. * @return a collection of RecipientInformation objects. */ public ICollection GetRecipients( RecipientID selector) { ArrayList list = (ArrayList) table[selector]; return list == null ? new ArrayList() : new ArrayList(list); } } }
{ "content_hash": "b0d2bb03801f78778210407e0ca085b9", "timestamp": "", "source": "github", "line_count": 79, "max_line_length": 112, "avg_line_length": 24.27848101265823, "alnum_prop": 0.7085505735140771, "repo_name": "maurobennici/LicenseOn", "id": "df8017f04bfad9d6f29987129ece1fced4fcc243", "size": "1918", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "crypto/src/cms/RecipientInformationStore.cs", "mode": "33188", "license": "mit", "language": [ { "name": "ASP", "bytes": "14348" }, { "name": "C#", "bytes": "4370330" }, { "name": "HTML", "bytes": "671" }, { "name": "Smalltalk", "bytes": "20844" } ], "symlink_target": "" }
(function () { 'use strict'; angular.module('guiapp.dashboard') .controller('ChangeNumReplicasCtrl', ChangeNumReplicasCtrl); ChangeNumReplicasCtrl.$inject = ['$modalInstance', 'indexService']; function ChangeNumReplicasCtrl($modalInstance, indexService) { var cnrVm = this; cnrVm.dialog = { "numReplicas": indexService.numReplicas, "name": indexService.name }; cnrVm.close = close; function close (result) { $modalInstance.close(result); } } })();
{ "content_hash": "d26a762f327c79665adb223893663a3a", "timestamp": "", "source": "github", "line_count": 23, "max_line_length": 71, "avg_line_length": 24.565217391304348, "alnum_prop": 0.5982300884955752, "repo_name": "ihsl/elasticsearch-rtf", "id": "090e2d6ddfd7f4c1bd83a4aa1d01f020ef5ddd84", "size": "565", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "plugins/gui/javascript/dashboard/changenumreplicas.controller.js", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "12020" }, { "name": "CSS", "bytes": "377681" }, { "name": "HTML", "bytes": "304992" }, { "name": "JavaScript", "bytes": "1643151" }, { "name": "Shell", "bytes": "56808" }, { "name": "Smarty", "bytes": "1495" } ], "symlink_target": "" }
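For context, a hedged sketch of how a dialog controller like `ChangeNumReplicasCtrl` above is typically opened with angular-ui-bootstrap's `$modal` service; the calling controller, template path, and result handling are assumptions, while the controller name, the `cnrVm` alias, and the `close(result)` contract come from the file itself.

```js
// Hypothetical caller somewhere else in the guiapp.dashboard module.
angular.module('guiapp.dashboard').controller('IndexRowCtrl', ['$modal', function ($modal) {
  var vm = this;

  vm.openChangeNumReplicas = function () {
    var modalInstance = $modal.open({
      templateUrl: 'dashboard/changenumreplicas.html', // assumed template path
      controller: 'ChangeNumReplicasCtrl',
      controllerAs: 'cnrVm'
    });

    // cnrVm.close(result) resolves this promise with whatever value was passed in.
    modalInstance.result.then(function (result) {
      vm.lastDialogResult = result;
    });
  };
}]);
```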
ACCEPTED #### According to International Plant Names Index #### Published in null #### Original name null ### Remarks null
{ "content_hash": "3f3b5c5a8ffab3f0651ca523cfeca790", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 31, "avg_line_length": 9.692307692307692, "alnum_prop": 0.7063492063492064, "repo_name": "mdoering/backbone", "id": "d244ad26c983989601e8183dc624f4133ae15f1c", "size": "184", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Magnoliopsida/Malvales/Malvaceae/Sidastrum/Sidastrum acumenatum/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!--NewPage--> <HTML> <HEAD> <!-- Generated by javadoc (build 1.6.0_45) on Thu Nov 13 21:22:00 UTC 2014 --> <META http-equiv="Content-Type" content="text/html; charset=UTF-8"> <TITLE> Uses of Class org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticatedURL (Apache Hadoop Main 2.6.0 API) </TITLE> <META NAME="date" CONTENT="2014-11-13"> <LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../../../../stylesheet.css" TITLE="Style"> <SCRIPT type="text/javascript"> function windowTitle() { if (location.href.indexOf('is-external=true') == -1) { parent.document.title="Uses of Class org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticatedURL (Apache Hadoop Main 2.6.0 API)"; } } </SCRIPT> <NOSCRIPT> </NOSCRIPT> </HEAD> <BODY BGCOLOR="white" onload="windowTitle();"> <HR> <!-- ========= START OF TOP NAVBAR ======= --> <A NAME="navbar_top"><!-- --></A> <A HREF="#skip-navbar_top" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_top_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../../org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticatedURL.html" title="class in org.apache.hadoop.security.token.delegation.web"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;PREV&nbsp; &nbsp;NEXT</FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="../../../../../../../../index.html?org/apache/hadoop/security/token/delegation/web//class-useDelegationTokenAuthenticatedURL.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="DelegationTokenAuthenticatedURL.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="../../../../../../../../allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="../../../../../../../../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_top"></A> <!-- ========= END OF TOP NAVBAR ========= --> <HR> 
<CENTER> <H2> <B>Uses of Class<br>org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticatedURL</B></H2> </CENTER> No usage of org.apache.hadoop.security.token.delegation.web.DelegationTokenAuthenticatedURL <P> <HR> <!-- ======= START OF BOTTOM NAVBAR ====== --> <A NAME="navbar_bottom"><!-- --></A> <A HREF="#skip-navbar_bottom" title="Skip navigation links"></A> <TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY=""> <TR> <TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A NAME="navbar_bottom_firstrow"><!-- --></A> <TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY=""> <TR ALIGN="center" VALIGN="top"> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../../org/apache/hadoop/security/token/delegation/web/DelegationTokenAuthenticatedURL.html" title="class in org.apache.hadoop.security.token.delegation.web"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> &nbsp;<FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A>&nbsp;</TD> <TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A>&nbsp;</TD> </TR> </TABLE> </TD> <TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM> </EM> </TD> </TR> <TR> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> &nbsp;PREV&nbsp; &nbsp;NEXT</FONT></TD> <TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2"> <A HREF="../../../../../../../../index.html?org/apache/hadoop/security/token/delegation/web//class-useDelegationTokenAuthenticatedURL.html" target="_top"><B>FRAMES</B></A> &nbsp; &nbsp;<A HREF="DelegationTokenAuthenticatedURL.html" target="_top"><B>NO FRAMES</B></A> &nbsp; &nbsp;<SCRIPT type="text/javascript"> <!-- if(window==top) { document.writeln('<A HREF="../../../../../../../../allclasses-noframe.html"><B>All Classes</B></A>'); } //--> </SCRIPT> <NOSCRIPT> <A HREF="../../../../../../../../allclasses-noframe.html"><B>All Classes</B></A> </NOSCRIPT> </FONT></TD> </TR> </TABLE> <A NAME="skip-navbar_bottom"></A> <!-- ======== END OF BOTTOM NAVBAR ======= --> <HR> Copyright &#169; 2014 <a href="http://www.apache.org">Apache Software Foundation</a>. All Rights Reserved. </BODY> </HTML>
{ "content_hash": "ebd12c6fd6898e5c7fd826d7de82dd23", "timestamp": "", "source": "github", "line_count": 145, "max_line_length": 292, "avg_line_length": 46.227586206896554, "alnum_prop": 0.6236013725197672, "repo_name": "SAT-Hadoop/hadoop-2.6.0", "id": "099af2da4ae68045c67dd5d605de32ce50fed389", "size": "6703", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "share/doc/hadoop/api/org/apache/hadoop/security/token/delegation/web/class-use/DelegationTokenAuthenticatedURL.html", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "119024" }, { "name": "C", "bytes": "29572" }, { "name": "C++", "bytes": "16604" }, { "name": "CSS", "bytes": "452213" }, { "name": "HTML", "bytes": "72854691" }, { "name": "JavaScript", "bytes": "18210" }, { "name": "Shell", "bytes": "203634" }, { "name": "XSLT", "bytes": "20437" } ], "symlink_target": "" }
module Hammer class ContentPages def building_contents content_types, input_dir, output_dir, service data = [] @test = Hammer::ContentProxy.new() content_types.each do |content_params| @params = content_params contents(service).each do |content| ContentProxy.register_variable( content_variable_name, content ) text = parse_template( "#{input_dir}/#{@params['template']}", check_service(content, service), input_dir, output_dir ) output_path = write_file(text, content, output_dir, service) ContentProxy.unregister_variable(content_variable_name) data << { filename: content_params['template'], output_filename: output_path, generated: true } end end data end def register_content_file_path autobuild_content_types, service autobuild_content_types.each do |content_params| @params = content_params ContentProxy.add_paths(get_paths(content_params, service)) end @params end def get_paths(content_params, service) @params = content_params contents(service).map do |content| check_service(content, service) end end def contents service cached = ContentCache.get('contents', @params) return cached if cached helper = ensure_helper_service(service) result = helper.send(@params['content_key']) || [] ContentCache.cache('contents', result, @params) end def ensure_helper_service service if service == 'contentful' ContentfulHelper.new( Settings.contentful, @params['space_name']) elsif service == 'chisel' ChiselHelper.new(Settings.chisel) elsif service == 'cockpit' CockpitHelper.new(Settings.cockpit) end end def parse_template(template_path, filename, input_dir, output_dir) parsers = Hammer::Parser.for_filename(template_path) text = File.read(template_path) parsers.each do |parser_class| parser = parser_class.new parser.directory = input_dir parser.input_directory = input_dir parser.output_directory = output_dir parser.path = Pathname.new(File.join(input_dir, filename)).relative_path_from(Pathname.new(input_dir)).to_s text = parser.parse(text, parser.path, @test) end text end def content_path_concern content, service path = '' if service == 'contentful' && content.homePage path = 'index.html' return path end if !@params['urlAliasValue'].to_s.strip.empty? path = @params['urlAliasValue'].to_s unless path.ends_with?('.html') || path.ends_with?('.htm') path += '.html' end elsif @params['urlAliasSource'] path_text = check_service_for_generate_path(service, content) path_text = content.first if path_text.nil? 
path = path_text.to_s.parameterize + '.html' if @params['urlAliasPrefix'].to_s != '' path = "#{@params['urlAliasPrefix'].to_s}/#{path}" end end path end def cockpit_content_path content chains = [] chains << @params['urlAliasPrefix'] if @params['urlAliasPrefix'].to_s.length > 0 if @params['urlAliasSource'].to_s.length > 0 slug = content.send(@params['urlAliasSource']) slug = content.id if slug.to_s.length == 0 slug else slug = content.id end chains << slug.downcase.parameterize path = chains.join('/') path << '.html' unless path =~ /(.html?)$/i path end def check_service_for_generate_path service, content if service == 'chisel' content.fields[@params['urlAliasSource'].to_s] elsif service == 'contentful' content[@params['urlAliasSource'].to_s] end end def check_service content, service if service == 'cockpit' cockpit_content_path(content) else content_path_concern(content, service) end end def write_file(text, content, output_dir, service) output_path = check_service(content, service) filepath = output_dir + '/' + output_path dir = File.dirname(filepath) FileUtils.mkdir_p(dir) unless File.directory?(dir) File.open(filepath, 'w+') { |f| f.write(text) } output_path end def content_variable_name @params['content_key'].singularize.to_sym end end end
{ "content_hash": "c86c682733598ab19f605fd55949f45f", "timestamp": "", "source": "github", "line_count": 156, "max_line_length": 115, "avg_line_length": 29.653846153846153, "alnum_prop": 0.5987894509295287, "repo_name": "beachio/hammer-gem", "id": "6abc8328b226897eebee59be38f37ed415e5e0c9", "size": "4626", "binary": false, "copies": "1", "ref": "refs/heads/latest", "path": "lib/hammer/content/concerns/content_pages.rb", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "16007" }, { "name": "CoffeeScript", "bytes": "19" }, { "name": "HTML", "bytes": "35912" }, { "name": "Hack", "bytes": "325" }, { "name": "JavaScript", "bytes": "1189" }, { "name": "PHP", "bytes": "1457" }, { "name": "Ruby", "bytes": "238897" } ], "symlink_target": "" }
package com.alexrnl.subtitlecorrector.correctionstrategy; import java.util.Collection; import java.util.Collections; import com.alexrnl.commons.translation.Translatable; /** * A parameter of a strategy.<br /> * @author Alex * @param <T> * the type of the parameter. */ public class Parameter<T> implements Translatable { /** The type of parameter represented */ private final ParameterType type; /** The translation key of the parameter */ private final String translationKey; /** <code>true</code> if the parameter is required */ private final boolean required; /** The value of the parameter */ private T value; /** The parser for the parameter */ private final Parser<T> parser; /** The possible values for the parameter */ private final Collection<T> possibleValues; /** * Interface for parsing String into the parameter type. * @author Alex * @param <U> * the target parameter type. */ public static interface Parser<U> { /** * Parse the string value specified into the target type. * @param value * the value to parse. * @return the actual value of the parameter to use. * @throws IllegalArgumentException * if the parsing could not be completed. */ U parse (String value); } /** * Decorator for a parser which allow to validate values when a collection of possible values is * provided. * @author Alex */ private class ParameterValueValidator implements Parser<T> { /** The parser to decorate */ private final Parser<T> innerParser; /** * Constructor #1.<br /> * @param innerParser * the innerParser to decorate. */ private ParameterValueValidator (final Parser<T> innerParser) { super(); this.innerParser = innerParser; } @Override public T parse (final String strValue) { final T parameter = innerParser.parse(strValue); if (!possibleValues.contains(parameter)) { throw new IllegalArgumentException("The value " + parameter + " is not in the allowed values: " + possibleValues); } return parameter; } } /** * Constructor #1.<br /> * @param type * the type of the parameter. * @param translationKey * the translation key of the parameter. * @param required * <code>true</code> if the parameter is required. * @param parser * the parser to convert a string into the parameter type. * @param defaultValue * the default value of the parameter. * @param possibleValues * the possible values for the parameter, or <code>null</code> if it is a free parameter. */ public Parameter (final ParameterType type, final String translationKey, final boolean required, final Parser<T> parser, final T defaultValue, final Collection<T> possibleValues) { super(); this.type = type; this.translationKey = translationKey; this.required = required; this.value = defaultValue; this.possibleValues = possibleValues == null ? null : Collections.unmodifiableCollection(possibleValues); this.parser = this.possibleValues != null ? new ParameterValueValidator(parser) : parser; if (this.type == ParameterType.LIST && this.possibleValues == null) { throw new IllegalArgumentException("Cannot build a LIST type parameter without a " + "possible value collection provided."); } } /** * Constructor #2.<br /> * @param type * the type of the parameter. * @param translationKey * the translation key of the parameter. * @param required * <code>true</code> if the parameter is required. * @param parser * the parser to convert a string into the parameter type. * @param defaultValue * the default value of the parameter. 
*/ public Parameter (final ParameterType type, final String translationKey, final boolean required, final Parser<T> parser, final T defaultValue) { this(type, translationKey, required, parser, defaultValue, null); } /** * Constructor #3.<br /> * Build a parameter with no default value (which is therefore, required). * @param type * the type of the parameter. * @param translationKey * the translation key of the parameter. * @param parser * the parser to convert a string into the parameter type. */ public Parameter (final ParameterType type, final String translationKey, final Parser<T> parser) { this(type, translationKey, parser, null); } /** * Constructor #4.<br /> * Build a parameter with no default value (which is therefore, required). * @param type * the type of the parameter. * @param translationKey * the translation key of the parameter. * @param parser * the parser to convert a string into the parameter type. * @param possibleValues * the possible values for the parameter, or <code>null</code> if it is a free parameter. */ public Parameter (final ParameterType type, final String translationKey, final Parser<T> parser, final Collection<T> possibleValues) { this(type, translationKey, true, parser, null, possibleValues); } /** * Return the attribute type. * @return the attribute type. */ public ParameterType getType () { return type; } @Override public String getTranslationKey () { return translationKey; } /** * Return the attribute required. * @return the attribute required. */ public boolean isRequired () { return required; } /** * Return the attribute value. * @return the attribute value. */ public T getValue () { return value; } /** * Set the attribute value. * @param value the attribute value. */ public void setValue (final String value) { this.value = parser.parse(value); } /** * Return the attribute possibleValues. * @return the attribute possibleValues. */ public Collection<T> getPossibleValues () { return possibleValues; } }
{ "content_hash": "c3fd7a78b60eb613ba07cd4103d33dee", "timestamp": "", "source": "github", "line_count": 201, "max_line_length": 118, "avg_line_length": 29.323383084577113, "alnum_prop": 0.6810315575161181, "repo_name": "AlexRNL/SubtitleCorrector", "id": "737abf30f731fd0fb69b5b1413323f12d362be18", "size": "5894", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/com/alexrnl/subtitlecorrector/correctionstrategy/Parameter.java", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Java", "bytes": "257406" } ], "symlink_target": "" }
#import <UIKit/UIKit.h>

@interface AppDelegate : UIResponder <UIApplicationDelegate>

@property (strong, nonatomic) UIWindow *window;

@end
{ "content_hash": "9f0b4b46de98cfdd41d39780dc2026f1", "timestamp": "", "source": "github", "line_count": 7, "max_line_length": 60, "avg_line_length": 16.857142857142858, "alnum_prop": 0.7796610169491526, "repo_name": "Tobess/TBSplitViewController", "id": "582a60c314490328912fe2ce4ab82289d53a643b", "size": "282", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "TBSplitViewController/AppDelegate.h", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Objective-C", "bytes": "4282" } ], "symlink_target": "" }
package com.crassirostris.cache.controller; import lombok.extern.slf4j.*; import org.springframework.cache.annotation.*; import org.springframework.web.bind.annotation.*; import java.text.*; import java.util.*; /** * Created by crassirostris on 15. 9. 8.. */ @Slf4j @RestController public class SimpleCacheController extends AbstractCacheController { @RequestMapping("/get") public String get() { return getData(this.getClass().getCanonicalName() + " get"); } @Cacheable("simpleCache") @RequestMapping("/simple") public String getcache() throws InterruptedException { Thread.sleep(300L); return getData(this.getClass().getCanonicalName() + " getcache sleep 300ms"); } }
{ "content_hash": "be2e542b12c472d1e9a9da7ce7ce0fa4", "timestamp": "", "source": "github", "line_count": 26, "max_line_length": 79, "avg_line_length": 26.46153846153846, "alnum_prop": 0.747093023255814, "repo_name": "juyoec/CacheBoxTest", "id": "89b431fd3890133406bf6173309067d40193c2b0", "size": "688", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/com/crassirostris/cache/controller/SimpleCacheController.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "5540" }, { "name": "HTML", "bytes": "18795" }, { "name": "JavaScript", "bytes": "165037" } ], "symlink_target": "" }
// --------------------------------------------------------------------------------- // <copyright file="RespectPetMessageComposer.cs" company="https://github.com/sant0ro/Yupi"> // Copyright (c) 2016 Claudio Santoro, TheDoctor // </copyright> // <license> // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. // </license> // --------------------------------------------------------------------------------- namespace Yupi.Messages.Pets { using System; using Yupi.Protocol.Buffers; public class RespectPetMessageComposer : Yupi.Messages.Contracts.RespectPetMessageComposer { #region Methods public override void Compose(Yupi.Protocol.ISender session, int entityId) { using (ServerMessage message = Pool.GetMessageBuffer(Id)) { message.AppendInteger(entityId); message.AppendBool(true); session.Send(message); } } #endregion Methods } }
{ "content_hash": "7735d72bd9b6392bc50f9d0020a1265a", "timestamp": "", "source": "github", "line_count": 47, "max_line_length": 94, "avg_line_length": 43.297872340425535, "alnum_prop": 0.6412776412776413, "repo_name": "TheDoct0r11/Yupi", "id": "78815178d77ce0daad6744a28b46305e8e14c431", "size": "2037", "binary": false, "copies": "2", "ref": "refs/heads/dev", "path": "Yupi.Messages/Composer/Pets/RespectPetMessageComposer.cs", "mode": "33261", "license": "mit", "language": [ { "name": "ASP", "bytes": "49" }, { "name": "Batchfile", "bytes": "48" }, { "name": "C#", "bytes": "3235386" }, { "name": "CSS", "bytes": "1232" }, { "name": "HTML", "bytes": "1769025" }, { "name": "Python", "bytes": "1894" }, { "name": "Shell", "bytes": "1150" } ], "symlink_target": "" }
import {expect} from 'chai'; import adapterManager from 'src/adaptermanager'; import {spec, masSizeOrdering, resetUserSync, hasVideoMediaType} from 'modules/rubiconBidAdapter'; import {parse as parseQuery} from 'querystring'; import {newBidder} from 'src/adapters/bidderFactory'; import {userSync} from 'src/userSync'; import {config} from 'src/config'; import * as utils from 'src/utils'; import find from 'core-js/library/fn/array/find'; var CONSTANTS = require('src/constants.json'); const INTEGRATION = `pbjs_lite_v$prebid.version$`; // $prebid.version$ will be substituted in by gulp in built prebid describe('the rubicon adapter', () => { let sandbox, bidderRequest, sizeMap; /** * @typedef {Object} sizeMapConverted * @property {string} sizeId * @property {string} size * @property {Array.<Array>} sizeAsArray * @property {number} width * @property {number} height */ /** * @param {Array.<sizeMapConverted>} sizesMapConverted * @param {Object} bid * @return {sizeMapConverted} */ function getSizeIdForBid(sizesMapConverted, bid) { return find(sizesMapConverted, item => (item.width === bid.width && item.height === bid.height)); } /** * @param {Array.<Object>} ads * @param {sizeMapConverted} size * @return {Object} */ function getResponseAdBySize(ads, size) { return find(ads, item => item.size_id === size.sizeId); } /** * @param {Array.<BidRequest>} bidRequests * @param {sizeMapConverted} size * @return {BidRequest} */ function getBidRequestBySize(bidRequests, size) { return find(bidRequests, item => item.sizes[0][0] === size.width && item.sizes[0][1] === size.height); } /** * @typedef {Object} overrideProps * @property {string} status * @property {number} cpm * @property {number} zone_id * @property {number} ad_id * @property {string} creative_id * @property {string} targeting_key - rpfl_{id} */ /** * @param {number} i - index * @param {string} sizeId - id that maps to size * @param {Array.<overrideProps>} [indexOverMap] * @return {{status: string, cpm: number, zone_id: *, size_id: *, impression_id: *, ad_id: *, creative_id: string, type: string, targeting: *[]}} */ function createResponseAdByIndex(i, sizeId, indexOverMap) { const overridePropMap = (indexOverMap && indexOverMap[i] && typeof indexOverMap[i] === 'object') ? indexOverMap[i] : {}; const overrideProps = Object.keys(overridePropMap).reduce((aggregate, key) => { aggregate[key] = overridePropMap[key]; return aggregate; }, {}); const getProp = (propName, defaultValue) => { return (overrideProps[propName]) ? 
overridePropMap[propName] : defaultValue; }; return { 'status': getProp('status', 'ok'), 'cpm': getProp('cpm', i / 100), 'zone_id': getProp('zone_id', i + 1), 'size_id': sizeId, 'impression_id': getProp('impression_id', `1-${i}`), 'ad_id': getProp('ad_id', i + 1), 'advertiser': i + 1, 'network': i + 1, 'creative_id': getProp('creative_id', `crid-${i}`), 'type': 'script', 'script': 'alert(\'foo\')', 'campaign_id': i + 1, 'targeting': [ { 'key': getProp('targeting_key', `rpfl_${i}`), 'values': [ '43_tier_all_test' ] } ] }; } /** * @param {number} i * @param {Array.<Array>} size * @return {{ params: {accountId: string, siteId: string, zoneId: string }, adUnitCode: string, code: string, sizes: *[], bidId: string, bidderRequestId: string }} */ function createBidRequestByIndex(i, size) { return { bidder: 'rubicon', params: { accountId: '14062', siteId: '70608', zoneId: (i + 1).toString(), userId: '12346', position: 'atf', referrer: 'localhost' }, adUnitCode: `/19968336/header-bid-tag-${i}`, code: `div-${i}`, sizes: [size], bidId: i.toString(), bidderRequestId: i.toString(), auctionId: 'c45dd708-a418-42ec-b8a7-b70a6c6fab0a', transactionId: 'd45dd707-a418-42ec-b8a7-b70a6c6fab0b' }; } /** * @param {boolean} [gdprApplies] */ function createGdprBidderRequest(gdprApplies) { if (typeof gdprApplies === 'boolean') { bidderRequest.gdprConsent = { 'consentString': 'BOJ/P2HOJ/P2HABABMAAAAAZ+A==', 'gdprApplies': gdprApplies }; } else { bidderRequest.gdprConsent = { 'consentString': 'BOJ/P2HOJ/P2HABABMAAAAAZ+A==' }; } } function createVideoBidderRequest() { createGdprBidderRequest(true); let bid = bidderRequest.bids[0]; bid.mediaTypes = { video: { context: 'instream' } }; bid.params.video = { 'language': 'en', 'p_aso.video.ext.skip': true, 'p_aso.video.ext.skipdelay': 15, 'playerHeight': 320, 'playerWidth': 640, 'size_id': 201, 'aeParams': { 'p_aso.video.ext.skip': '1', 'p_aso.video.ext.skipdelay': '15' } }; } function createLegacyVideoBidderRequest() { createGdprBidderRequest(true); let bid = bidderRequest.bids[0]; // Legacy property (Prebid <1.0) bid.mediaType = 'video'; bid.params.video = { 'language': 'en', 'p_aso.video.ext.skip': true, 'p_aso.video.ext.skipdelay': 15, 'playerHeight': 320, 'playerWidth': 640, 'size_id': 201, 'aeParams': { 'p_aso.video.ext.skip': '1', 'p_aso.video.ext.skipdelay': '15' } }; } function createVideoBidderRequestNoVideo() { let bid = bidderRequest.bids[0]; bid.mediaTypes = { video: { context: 'instream' }, }; bid.params.video = ''; } function createLegacyVideoBidderRequestNoVideo() { let bid = bidderRequest.bids[0]; bid.mediaType = 'video'; bid.params.video = ''; } function createVideoBidderRequestOutstream() { let bid = bidderRequest.bids[0]; bid.mediaTypes = { video: { context: 'outstream' }, }; bid.params.video = { 'language': 'en', 'p_aso.video.ext.skip': true, 'p_aso.video.ext.skipdelay': 15, 'playerHeight': 320, 'playerWidth': 640, 'size_id': 203, 'aeParams': { 'p_aso.video.ext.skip': '1', 'p_aso.video.ext.skipdelay': '15' } }; } function createVideoBidderRequestNoPlayer() { let bid = bidderRequest.bids[0]; bid.mediaTypes = { video: { context: 'instream' }, }; bid.params.video = { 'language': 'en', 'p_aso.video.ext.skip': true, 'p_aso.video.ext.skipdelay': 15, 'size_id': 201, 'aeParams': { 'p_aso.video.ext.skip': '1', 'p_aso.video.ext.skipdelay': '15' } }; } function createLegacyVideoBidderRequestNoPlayer() { let bid = bidderRequest.bids[0]; bid.mediaType = 'video'; bid.params.video = { 'language': 'en', 'p_aso.video.ext.skip': true, 'p_aso.video.ext.skipdelay': 15, 
'size_id': 201, 'aeParams': { 'p_aso.video.ext.skip': '1', 'p_aso.video.ext.skipdelay': '15' } }; } beforeEach(() => { sandbox = sinon.sandbox.create(); bidderRequest = { bidderCode: 'rubicon', auctionId: 'c45dd708-a418-42ec-b8a7-b70a6c6fab0a', bidderRequestId: '178e34bad3658f', bids: [ { bidder: 'rubicon', params: { accountId: '14062', siteId: '70608', zoneId: '335918', userId: '12346', keywords: ['a', 'b', 'c'], inventory: { rating: '5-star', prodtype: ['tech', 'mobile'] }, visitor: { ucat: 'new', lastsearch: 'iphone', likes: ['sports', 'video games'] }, position: 'atf', referrer: 'localhost', latLong: [40.7607823, '111.8910325'] }, adUnitCode: '/19968336/header-bid-tag-0', code: 'div-1', sizes: [[300, 250], [320, 50]], bidId: '2ffb201a808da7', bidderRequestId: '178e34bad3658f', auctionId: 'c45dd708-a418-42ec-b8a7-b70a6c6fab0a', transactionId: 'd45dd707-a418-42ec-b8a7-b70a6c6fab0b' } ], start: 1472239426002, auctionStart: 1472239426000, timeout: 5000 }; sizeMap = [ {sizeId: 1, size: '468x60'}, {sizeId: 2, size: '728x90'}, {sizeId: 5, size: '120x90'}, {sizeId: 8, size: '120x600'}, {sizeId: 9, size: '160x600'}, {sizeId: 10, size: '300x600'}, {sizeId: 13, size: '200x200'}, {sizeId: 14, size: '250x250'}, {sizeId: 15, size: '300x250'}, {sizeId: 16, size: '336x280'}, {sizeId: 19, size: '300x100'}, {sizeId: 31, size: '980x120'}, {sizeId: 32, size: '250x360'} // Create convenience properties for [sizeAsArray, width, height] by parsing the size string ].map(item => { const sizeAsArray = item.size.split('x').map(s => parseInt(s)); return { sizeId: item.sizeId, size: item.size, sizeAsArray: sizeAsArray.slice(), width: sizeAsArray[0], height: sizeAsArray[1] }; }); }); afterEach(() => { sandbox.restore(); }); describe('MAS mapping / ordering', () => { it('should sort values without any MAS priority sizes in regular ascending order', () => { let ordering = masSizeOrdering([126, 43, 65, 16]); expect(ordering).to.deep.equal([16, 43, 65, 126]); }); it('should sort MAS priority sizes in the proper order w/ rest ascending', () => { let ordering = masSizeOrdering([43, 9, 65, 15, 16, 126]); expect(ordering).to.deep.equal([15, 9, 16, 43, 65, 126]); ordering = masSizeOrdering([43, 15, 9, 65, 16, 126, 2]); expect(ordering).to.deep.equal([15, 2, 9, 16, 43, 65, 126]); ordering = masSizeOrdering([8, 43, 9, 65, 16, 126, 2]); expect(ordering).to.deep.equal([2, 9, 8, 16, 43, 65, 126]); }); }); describe('buildRequests implementation', () => { describe('for requests', () => { describe('to fastlane', () => { it('should make a well-formed request objects', () => { sandbox.stub(Math, 'random').callsFake(() => 0.1); let [request] = spec.buildRequests(bidderRequest.bids, bidderRequest); let data = parseQuery(request.data); expect(request.url).to.equal('//fastlane.rubiconproject.com/a/api/fastlane.json'); let expectedQuery = { 'account_id': '14062', 'site_id': '70608', 'zone_id': '335918', 'size_id': '15', 'alt_size_ids': '43', 'p_pos': 'atf', 'rp_floor': '0.01', 'rp_secure': /[01]/, 'rand': '0.1', 'tk_flint': INTEGRATION, 'x_source.tid': 'd45dd707-a418-42ec-b8a7-b70a6c6fab0b', 'p_screen_res': /\d+x\d+/, 'tk_user_key': '12346', 'kw': 'a,b,c', 'tg_v.ucat': 'new', 'tg_v.lastsearch': 'iphone', 'tg_v.likes': 'sports,video games', 'tg_i.rating': '5-star', 'tg_i.prodtype': 'tech,mobile', 'tg_fl.eid': 'div-1', 'rf': 'localhost' }; // test that all values above are both present and correct Object.keys(expectedQuery).forEach(key => { let value = expectedQuery[key]; if (value instanceof RegExp) { expect(data[key]).to.match(value); } 
            else {
              expect(data[key]).to.equal(value);
            }
          });
        });

        it('ad engine query params should be ordered correctly', () => {
          sandbox.stub(Math, 'random').callsFake(() => 0.1);

          let [request] = spec.buildRequests(bidderRequest.bids, bidderRequest);

          const referenceOrdering = ['account_id', 'site_id', 'zone_id', 'size_id', 'alt_size_ids', 'p_pos', 'rf', 'p_geo.latitude', 'p_geo.longitude', 'kw', 'tg_v.ucat', 'tg_v.lastsearch', 'tg_v.likes', 'tg_i.rating', 'tg_i.prodtype', 'tk_flint', 'x_source.tid', 'p_screen_res', 'rp_floor', 'rp_secure', 'tk_user_key', 'tg_fl.eid', 'slots', 'rand'];

          request.data.split('&').forEach((item, i) => {
            expect(item.split('=')[0]).to.equal(referenceOrdering[i]);
          });
        });

        it('should make a well-formed request object without latLong', () => {
          let expectedQuery = {
            'account_id': '14062',
            'site_id': '70608',
            'zone_id': '335918',
            'size_id': '15',
            'alt_size_ids': '43',
            'p_pos': 'atf',
            'rp_floor': '0.01',
            'rp_secure': /[01]/,
            'rand': '0.1',
            'tk_flint': INTEGRATION,
            'x_source.tid': 'd45dd707-a418-42ec-b8a7-b70a6c6fab0b',
            'p_screen_res': /\d+x\d+/,
            'tk_user_key': '12346',
            'kw': 'a,b,c',
            'tg_v.ucat': 'new',
            'tg_v.lastsearch': 'iphone',
            'tg_v.likes': 'sports,video games',
            'tg_i.rating': '5-star',
            'tg_i.prodtype': 'tech,mobile',
            'rf': 'localhost',
            'p_geo.latitude': undefined,
            'p_geo.longitude': undefined
          };

          sandbox.stub(Math, 'random').callsFake(() => 0.1);

          // `request` and `data` are declared with `let` on first use here and reassigned below
          delete bidderRequest.bids[0].params.latLong;
          let [request] = spec.buildRequests(bidderRequest.bids, bidderRequest);
          let data = parseQuery(request.data);

          expect(request.url).to.equal('//fastlane.rubiconproject.com/a/api/fastlane.json');

          // test that all values above are both present and correct
          Object.keys(expectedQuery).forEach(key => {
            let value = expectedQuery[key];
            if (value instanceof RegExp) {
              expect(data[key]).to.match(value);
            } else {
              expect(data[key]).to.equal(value);
            }
          });

          bidderRequest.bids[0].params.latLong = [];
          [request] = spec.buildRequests(bidderRequest.bids, bidderRequest);
          data = parseQuery(request.data);

          expect(request.url).to.equal('//fastlane.rubiconproject.com/a/api/fastlane.json');

          // test that all values above are both present and correct
          Object.keys(expectedQuery).forEach(key => {
            let value = expectedQuery[key];
            if (value instanceof RegExp) {
              expect(data[key]).to.match(value);
            } else {
              expect(data[key]).to.equal(value);
            }
          });
        });

        it('page_url should use params.referrer, config.getConfig("pageUrl"), utils.getTopWindowUrl() in that order', () => {
          sandbox.stub(utils, 'getTopWindowUrl').callsFake(() => 'http://www.prebid.org');

          let [request] = spec.buildRequests(bidderRequest.bids, bidderRequest);
          expect(parseQuery(request.data).rf).to.equal('localhost');

          delete bidderRequest.bids[0].params.referrer;
          [request] = spec.buildRequests(bidderRequest.bids, bidderRequest);
          expect(parseQuery(request.data).rf).to.equal('http://www.prebid.org');

          let origGetConfig = config.getConfig;
          sandbox.stub(config, 'getConfig').callsFake(function (key) {
            if (key === 'pageUrl') {
              return 'http://www.rubiconproject.com';
            }
            return origGetConfig.apply(config, arguments);
          });
          [request] = spec.buildRequests(bidderRequest.bids, bidderRequest);
          expect(parseQuery(request.data).rf).to.equal('http://www.rubiconproject.com');

          bidderRequest.bids[0].params.secure = true;
          [request] = spec.buildRequests(bidderRequest.bids, bidderRequest);
          expect(parseQuery(request.data).rf).to.equal('https://www.rubiconproject.com');
        });

        it('should use rubicon sizes if present (including non-mappable sizes)', () => {
          var sizesBidderRequest = clone(bidderRequest);
sizesBidderRequest.bids[0].params.sizes = [55, 57, 59, 801]; let [request] = spec.buildRequests(sizesBidderRequest.bids, sizesBidderRequest); let data = parseQuery(request.data); expect(data['size_id']).to.equal('55'); expect(data['alt_size_ids']).to.equal('57,59,801'); }); it('should not validate bid request if no valid sizes', () => { var sizesBidderRequest = clone(bidderRequest); sizesBidderRequest.bids[0].sizes = [[621, 250], [300, 251]]; let result = spec.isBidRequestValid(sizesBidderRequest.bids[0]); expect(result).to.equal(false); }); it('should not validate bid request if no account id is present', () => { var noAccountBidderRequest = clone(bidderRequest); delete noAccountBidderRequest.bids[0].params.accountId; let result = spec.isBidRequestValid(noAccountBidderRequest.bids[0]); expect(result).to.equal(false); }); it('should allow a floor override', () => { var floorBidderRequest = clone(bidderRequest); floorBidderRequest.bids[0].params.floor = 2; let [request] = spec.buildRequests(floorBidderRequest.bids, floorBidderRequest); let data = parseQuery(request.data); expect(data['rp_floor']).to.equal('2'); }); it('should send digitrust params', () => { window.DigiTrust = { getUser: function () { } }; sandbox.stub(window.DigiTrust, 'getUser').callsFake(() => ({ success: true, identity: { privacy: {optout: false}, id: 'testId', keyv: 'testKeyV' } }) ); let [request] = spec.buildRequests(bidderRequest.bids, bidderRequest); let data = parseQuery(request.data); let expectedQuery = { 'dt.id': 'testId', 'dt.keyv': 'testKeyV', 'dt.pref': '0' }; // test that all values above are both present and correct Object.keys(expectedQuery).forEach(key => { let value = expectedQuery[key]; expect(data[key]).to.equal(value); }); delete window.DigiTrust; }); it('should not send digitrust params when DigiTrust not loaded', () => { let [request] = spec.buildRequests(bidderRequest.bids, bidderRequest); let data = parseQuery(request.data); let undefinedKeys = ['dt.id', 'dt.keyv']; // Test that none of the DigiTrust keys are part of the query undefinedKeys.forEach(key => { expect(typeof data[key]).to.equal('undefined'); }); }); it('should not send digitrust params due to optout', () => { window.DigiTrust = { getUser: function () { } }; sandbox.stub(window.DigiTrust, 'getUser').callsFake(() => ({ success: true, identity: { privacy: {optout: true}, id: 'testId', keyv: 'testKeyV' } }) ); let [request] = spec.buildRequests(bidderRequest.bids, bidderRequest); let data = parseQuery(request.data); let undefinedKeys = ['dt.id', 'dt.keyv']; // Test that none of the DigiTrust keys are part of the query undefinedKeys.forEach(key => { expect(typeof data[key]).to.equal('undefined'); }); delete window.DigiTrust; }); it('should not send digitrust params due to failure', () => { window.DigiTrust = { getUser: function () { } }; sandbox.stub(window.DigiTrust, 'getUser').callsFake(() => ({ success: false, identity: { privacy: {optout: false}, id: 'testId', keyv: 'testKeyV' } }) ); let [request] = spec.buildRequests(bidderRequest.bids, bidderRequest); let data = parseQuery(request.data); let undefinedKeys = ['dt.id', 'dt.keyv']; // Test that none of the DigiTrust keys are part of the query undefinedKeys.forEach(key => { expect(typeof data[key]).to.equal('undefined'); }); delete window.DigiTrust; }); describe('digiTrustId config', () => { var origGetConfig; beforeEach(() => { window.DigiTrust = { getUser: sandbox.spy() }; }); afterEach(() => { delete window.DigiTrust; }); it('should send digiTrustId config params', () => { 
sandbox.stub(config, 'getConfig').callsFake((key) => { var config = { digiTrustId: { success: true, identity: { privacy: {optout: false}, id: 'testId', keyv: 'testKeyV' } } }; return config[key]; }); let [request] = spec.buildRequests(bidderRequest.bids, bidderRequest); let data = parseQuery(request.data); let expectedQuery = { 'dt.id': 'testId', 'dt.keyv': 'testKeyV' }; // test that all values above are both present and correct Object.keys(expectedQuery).forEach(key => { let value = expectedQuery[key]; expect(data[key]).to.equal(value); }); // should not have called DigiTrust.getUser() expect(window.DigiTrust.getUser.notCalled).to.equal(true); }); it('should not send digiTrustId config params due to optout', () => { sandbox.stub(config, 'getConfig').callsFake((key) => { var config = { digiTrustId: { success: true, identity: { privacy: {optout: true}, id: 'testId', keyv: 'testKeyV' } } } return config[key]; }); let [request] = spec.buildRequests(bidderRequest.bids, bidderRequest); let data = parseQuery(request.data); let undefinedKeys = ['dt.id', 'dt.keyv']; // Test that none of the DigiTrust keys are part of the query undefinedKeys.forEach(key => { expect(typeof data[key]).to.equal('undefined'); }); // should not have called DigiTrust.getUser() expect(window.DigiTrust.getUser.notCalled).to.equal(true); }); it('should not send digiTrustId config params due to failure', () => { sandbox.stub(config, 'getConfig').callsFake((key) => { var config = { digiTrustId: { success: false, identity: { privacy: {optout: false}, id: 'testId', keyv: 'testKeyV' } } } return config[key]; }); let [request] = spec.buildRequests(bidderRequest.bids, bidderRequest); let data = parseQuery(request.data); let undefinedKeys = ['dt.id', 'dt.keyv']; // Test that none of the DigiTrust keys are part of the query undefinedKeys.forEach(key => { expect(typeof data[key]).to.equal('undefined'); }); // should not have called DigiTrust.getUser() expect(window.DigiTrust.getUser.notCalled).to.equal(true); }); it('should not send digiTrustId config params if they do not exist', () => { sandbox.stub(config, 'getConfig').callsFake((key) => { var config = {}; return config[key]; }); let [request] = spec.buildRequests(bidderRequest.bids, bidderRequest); let data = parseQuery(request.data); let undefinedKeys = ['dt.id', 'dt.keyv']; // Test that none of the DigiTrust keys are part of the query undefinedKeys.forEach(key => { expect(typeof data[key]).to.equal('undefined'); }); // should have called DigiTrust.getUser() once expect(window.DigiTrust.getUser.calledOnce).to.equal(true); }); }); describe('GDPR consent config', () => { it('should send "gdpr" and "gdpr_consent", when gdprConsent defines consentString and gdprApplies', () => { createGdprBidderRequest(true); let [request] = spec.buildRequests(bidderRequest.bids, bidderRequest); let data = parseQuery(request.data); expect(data['gdpr']).to.equal('1'); expect(data['gdpr_consent']).to.equal('BOJ/P2HOJ/P2HABABMAAAAAZ+A=='); }); it('should send only "gdpr_consent", when gdprConsent defines only consentString', () => { createGdprBidderRequest(); let [request] = spec.buildRequests(bidderRequest.bids, bidderRequest); let data = parseQuery(request.data); expect(data['gdpr_consent']).to.equal('BOJ/P2HOJ/P2HABABMAAAAAZ+A=='); expect(data['gdpr']).to.equal(undefined); }); it('should not send GDPR params if gdprConsent is not defined', () => { let [request] = spec.buildRequests(bidderRequest.bids, bidderRequest); let data = parseQuery(request.data); expect(data['gdpr']).to.equal(undefined); 
expect(data['gdpr_consent']).to.equal(undefined); }); it('should set "gdpr" value as 1 or 0, using "gdprApplies" value of either true/false', () => { createGdprBidderRequest(true); let [request] = spec.buildRequests(bidderRequest.bids, bidderRequest); let data = parseQuery(request.data); expect(data['gdpr']).to.equal('1'); createGdprBidderRequest(false); [request] = spec.buildRequests(bidderRequest.bids, bidderRequest); data = parseQuery(request.data); expect(data['gdpr']).to.equal('0'); }); }); describe('singleRequest config', () => { it('should group all bid requests with the same site id', () => { sandbox.stub(Math, 'random').callsFake(() => 0.1); sandbox.stub(config, 'getConfig').callsFake((key) => { const config = { 'rubicon.singleRequest': true }; return config[key]; }); const expectedQuery = { 'account_id': '14062', 'site_id': '70608', 'zone_id': '335918', 'size_id': '15', 'alt_size_ids': '43', 'p_pos': 'atf', 'rp_floor': '0.01', 'rp_secure': /[01]/, 'rand': '0.1', 'tk_flint': INTEGRATION, 'x_source.tid': 'd45dd707-a418-42ec-b8a7-b70a6c6fab0b', 'p_screen_res': /\d+x\d+/, 'tk_user_key': '12346', 'kw': 'a,b,c', 'tg_v.ucat': 'new', 'tg_v.lastsearch': 'iphone', 'tg_v.likes': 'sports,video games', 'tg_i.rating': '5-star', 'tg_i.prodtype': 'tech,mobile', 'tg_fl.eid': 'div-1', 'rf': 'localhost' }; const bidCopy = clone(bidderRequest.bids[0]); bidCopy.params.siteId = '70608'; bidCopy.params.zoneId = '1111'; bidderRequest.bids.push(bidCopy); const bidCopy2 = clone(bidderRequest.bids[0]); bidCopy2.params.siteId = '99999'; bidCopy2.params.zoneId = '2222'; bidderRequest.bids.push(bidCopy2); const bidCopy3 = clone(bidderRequest.bids[0]); bidCopy3.params.siteId = '99999'; bidCopy3.params.zoneId = '3333'; bidderRequest.bids.push(bidCopy3); const serverRequests = spec.buildRequests(bidderRequest.bids, bidderRequest); // array length should match the num of unique 'siteIds' expect(serverRequests).to.be.a('array'); expect(serverRequests).to.have.lengthOf(2); // collect all bidRequests so order can be checked against the url param slot order const bidRequests = serverRequests.reduce((aggregator, item) => aggregator.concat(item.bidRequest), []); let bidRequestIndex = 0; serverRequests.forEach(item => { expect(item).to.be.a('object'); expect(item).to.have.property('method'); expect(item).to.have.property('url'); expect(item).to.have.property('data'); expect(item).to.have.property('bidRequest'); expect(item.method).to.equal('GET'); expect(item.url).to.equal('//fastlane.rubiconproject.com/a/api/fastlane.json'); expect(item.data).to.be.a('string'); // 'bidRequest' type must be 'array' if SRA enabled expect(item.bidRequest).to.be.a('array').to.have.lengthOf(2); item.bidRequest.forEach((bidRequestItem, i, array) => { expect(bidRequestItem).to.be.a('object'); // every 'siteId' values need to match expect(bidRequestItem.params.siteId).to.equal(array[0].params.siteId); }); const data = parseQuery(item.data); Object.keys(expectedQuery).forEach(key => { expect(data).to.have.property(key); // extract semicolon delineated values const params = data[key].split(';'); // skip value test for site and zone ids if (key !== 'site_id' && key !== 'zone_id') { if (expectedQuery[key] instanceof RegExp) { params.forEach(paramItem => { expect(paramItem).to.match(expectedQuery[key]); }); } else { expect(params).to.contain(expectedQuery[key]); } } // check parsed url data list order with requestBid list, items must have same index in both lists if (key === 'zone_id') { params.forEach((p) => { 
expect(bidRequests[bidRequestIndex]).to.be.a('object'); expect(bidRequests[bidRequestIndex].params).to.be.a('object'); // 'zone_id' is used to verify so each bid must have a unique 'zone_id' expect(p).to.equal(bidRequests[bidRequestIndex].params.zoneId); // increment to next bidRequest index having verified that item positions match in url params and bidRequest lists bidRequestIndex++; }); } }); }); }); it('should not send more than 10 bids in a request', () => { sandbox.stub(config, 'getConfig').callsFake((key) => { const config = { 'rubicon.singleRequest': true }; return config[key]; }); for (let i = 0; i < 20; i++) { let bidCopy = clone(bidderRequest.bids[0]); bidCopy.params.zoneId = `${i}0000`; bidderRequest.bids.push(bidCopy); } const serverRequests = spec.buildRequests(bidderRequest.bids, bidderRequest); // if bids are greater than 10, additional bids are dropped expect(serverRequests[0].bidRequest).to.have.lengthOf(10); // check that slots param value matches const foundSlotsCount = serverRequests[0].data.indexOf('&slots=10&'); expect(foundSlotsCount !== -1).to.equal(true); // check that zone_id has 10 values (since all zone_ids are unique all should exist in get param) const data = parseQuery(serverRequests[0].data); expect(data).to.be.a('object'); expect(data).to.have.property('zone_id'); expect(data.zone_id.split(';')).to.have.lengthOf(10); }); it('should not group bid requests if singleRequest does not equal true', () => { sandbox.stub(config, 'getConfig').callsFake((key) => { const config = { 'rubicon.singleRequest': false }; return config[key]; }); const bidCopy = clone(bidderRequest.bids[0]); bidderRequest.bids.push(bidCopy); const bidCopy2 = clone(bidderRequest.bids[0]); bidCopy2.params.siteId = '32001'; bidderRequest.bids.push(bidCopy2); const bidCopy3 = clone(bidderRequest.bids[0]); bidCopy3.params.siteId = '32001'; bidderRequest.bids.push(bidCopy3); let serverRequests = spec.buildRequests(bidderRequest.bids, bidderRequest); expect(serverRequests).that.is.an('array').of.length(4); }); it('should not group video bid requests', () => { sandbox.stub(config, 'getConfig').callsFake((key) => { const config = { 'rubicon.singleRequest': true }; return config[key]; }); const bidCopy = clone(bidderRequest.bids[0]); bidderRequest.bids.push(bidCopy); const bidCopy2 = clone(bidderRequest.bids[0]); bidCopy2.params.siteId = '32001'; bidderRequest.bids.push(bidCopy2); const bidCopy3 = clone(bidderRequest.bids[0]); bidCopy3.params.siteId = '32001'; bidderRequest.bids.push(bidCopy3); const bidCopy4 = clone(bidderRequest.bids[0]); bidCopy4.mediaType = 'video'; bidCopy4.params.video = { 'language': 'en', 'p_aso.video.ext.skip': true, 'p_aso.video.ext.skipdelay': 15, 'playerHeight': 320, 'playerWidth': 640, 'size_id': 201, 'aeParams': { 'p_aso.video.ext.skip': '1', 'p_aso.video.ext.skipdelay': '15' } }; bidderRequest.bids.push(bidCopy4); let serverRequests = spec.buildRequests(bidderRequest.bids, bidderRequest); expect(serverRequests).that.is.an('array').of.length(3); }); }); }); describe('for video requests', () => { it('should make a well-formed video request with legacy mediaType config', () => { createLegacyVideoBidderRequest(); sandbox.stub(Date, 'now').callsFake(() => bidderRequest.auctionStart + 100 ); let [request] = spec.buildRequests(bidderRequest.bids, bidderRequest); let post = request.data; let url = request.url; expect(url).to.equal('//fastlane-adv.rubiconproject.com/v1/auction/video'); expect(post).to.have.property('page_url').that.is.a('string'); 
expect(post.resolution).to.match(/\d+x\d+/); expect(post.account_id).to.equal('14062'); expect(post.integration).to.equal(INTEGRATION); expect(post['x_source.tid']).to.equal('d45dd707-a418-42ec-b8a7-b70a6c6fab0b'); expect(post).to.have.property('timeout').that.is.a('number'); expect(post.timeout < 5000).to.equal(true); expect(post.stash_creatives).to.equal(true); expect(post.gdpr_consent).to.equal('BOJ/P2HOJ/P2HABABMAAAAAZ+A=='); expect(post.gdpr).to.equal(1); expect(post).to.have.property('ae_pass_through_parameters'); expect(post.ae_pass_through_parameters) .to.have.property('p_aso.video.ext.skip') .that.equals('1'); expect(post.ae_pass_through_parameters) .to.have.property('p_aso.video.ext.skipdelay') .that.equals('15'); expect(post).to.have.property('slots') .with.length.of(1); let slot = post.slots[0]; expect(slot.site_id).to.equal('70608'); expect(slot.zone_id).to.equal('335918'); expect(slot.position).to.equal('atf'); expect(slot.floor).to.equal(0.01); expect(slot.element_id).to.equal(bidderRequest.bids[0].adUnitCode); expect(slot.name).to.equal(bidderRequest.bids[0].adUnitCode); expect(slot.language).to.equal('en'); expect(slot.width).to.equal(640); expect(slot.height).to.equal(320); expect(slot.size_id).to.equal(201); expect(slot).to.have.property('inventory').that.is.an('object'); expect(slot.inventory).to.have.property('rating').that.equals('5-star'); expect(slot.inventory).to.have.property('prodtype').that.deep.equals(['tech', 'mobile']); expect(slot).to.have.property('keywords') .that.is.an('array') .of.length(3) .that.deep.equals(['a', 'b', 'c']); expect(slot).to.have.property('visitor').that.is.an('object'); expect(slot.visitor).to.have.property('ucat').that.equals('new'); expect(slot.visitor).to.have.property('lastsearch').that.equals('iphone'); expect(slot.visitor).to.have.property('likes').that.deep.equals(['sports', 'video games']); }); it('should make a well-formed video request', () => { createVideoBidderRequest(); sandbox.stub(Date, 'now').callsFake(() => bidderRequest.auctionStart + 100 ); let [request] = spec.buildRequests(bidderRequest.bids, bidderRequest); let post = request.data; let url = request.url; expect(url).to.equal('//fastlane-adv.rubiconproject.com/v1/auction/video'); expect(post).to.have.property('page_url').that.is.a('string'); expect(post.resolution).to.match(/\d+x\d+/); expect(post.account_id).to.equal('14062'); expect(post.integration).to.equal(INTEGRATION); expect(post['x_source.tid']).to.equal('d45dd707-a418-42ec-b8a7-b70a6c6fab0b'); expect(post).to.have.property('timeout').that.is.a('number'); expect(post.timeout < 5000).to.equal(true); expect(post.stash_creatives).to.equal(true); expect(post.gdpr_consent).to.equal('BOJ/P2HOJ/P2HABABMAAAAAZ+A=='); expect(post.gdpr).to.equal(1); expect(post).to.have.property('ae_pass_through_parameters'); expect(post.ae_pass_through_parameters) .to.have.property('p_aso.video.ext.skip') .that.equals('1'); expect(post.ae_pass_through_parameters) .to.have.property('p_aso.video.ext.skipdelay') .that.equals('15'); expect(post).to.have.property('slots') .with.length.of(1); let slot = post.slots[0]; expect(slot.site_id).to.equal('70608'); expect(slot.zone_id).to.equal('335918'); expect(slot.position).to.equal('atf'); expect(slot.floor).to.equal(0.01); expect(slot.element_id).to.equal(bidderRequest.bids[0].adUnitCode); expect(slot.name).to.equal(bidderRequest.bids[0].adUnitCode); expect(slot.language).to.equal('en'); expect(slot.width).to.equal(640); expect(slot.height).to.equal(320); expect(slot.size_id).to.equal(201); 
expect(slot).to.have.property('inventory').that.is.an('object'); expect(slot.inventory).to.have.property('rating').that.equals('5-star'); expect(slot.inventory).to.have.property('prodtype').that.deep.equals(['tech', 'mobile']); expect(slot).to.have.property('keywords') .that.is.an('array') .of.length(3) .that.deep.equals(['a', 'b', 'c']); expect(slot).to.have.property('visitor').that.is.an('object'); expect(slot.visitor).to.have.property('ucat').that.equals('new'); expect(slot.visitor).to.have.property('lastsearch').that.equals('iphone'); expect(slot.visitor).to.have.property('likes').that.deep.equals(['sports', 'video games']); }); it('should send request with proper ad position', () => { createVideoBidderRequest(); var positionBidderRequest = clone(bidderRequest); positionBidderRequest.bids[0].params.position = 'atf'; let [request] = spec.buildRequests(positionBidderRequest.bids, positionBidderRequest); let post = request.data; let slot = post.slots[0]; expect(slot.position).to.equal('atf'); positionBidderRequest = clone(bidderRequest); positionBidderRequest.bids[0].params.position = 'btf'; [request] = spec.buildRequests(positionBidderRequest.bids, positionBidderRequest); post = request.data; slot = post.slots[0]; expect(slot.position).to.equal('btf'); positionBidderRequest = clone(bidderRequest); positionBidderRequest.bids[0].params.position = 'unknown'; [request] = spec.buildRequests(positionBidderRequest.bids, positionBidderRequest); post = request.data; slot = post.slots[0]; expect(slot.position).to.equal('unknown'); positionBidderRequest = clone(bidderRequest); positionBidderRequest.bids[0].params.position = '123'; [request] = spec.buildRequests(positionBidderRequest.bids, positionBidderRequest); post = request.data; slot = post.slots[0]; expect(slot.position).to.equal('unknown'); positionBidderRequest = clone(bidderRequest); delete positionBidderRequest.bids[0].params.position; expect(positionBidderRequest.bids[0].params.position).to.equal(undefined); [request] = spec.buildRequests(positionBidderRequest.bids, positionBidderRequest); post = request.data; slot = post.slots[0]; expect(slot.position).to.equal('unknown'); }); it('should allow a floor price override', () => { createVideoBidderRequest(); sandbox.stub(Date, 'now').callsFake(() => bidderRequest.auctionStart + 100 ); var floorBidderRequest = clone(bidderRequest); // enter an explicit floor price // floorBidderRequest.bids[0].params.floor = 3.25; let [request] = spec.buildRequests(floorBidderRequest.bids, floorBidderRequest); let post = request.data; let floor = post.slots[0].floor; expect(floor).to.equal(3.25); }); it('should validate bid request with invalid video if a mediaTypes banner property is defined', () => { const bidRequest = { mediaTypes: { video: { context: 'instream' }, banner: { sizes: [[300, 250]] } }, params: { accountId: 1001, video: { size_id: 201 } }, sizes: [[300, 250]] } sandbox.stub(Date, 'now').callsFake(() => bidderRequest.auctionStart + 100 ); expect(spec.isBidRequestValid(bidRequest)).to.equal(true); }); it('should not validate bid request when a invalid video object and no banner object is passed in', () => { createVideoBidderRequestNoVideo(); sandbox.stub(Date, 'now').callsFake(() => bidderRequest.auctionStart + 100 ); const bidRequestCopy = clone(bidderRequest.bids[0]); expect(spec.isBidRequestValid(bidRequestCopy)).to.equal(false); bidRequestCopy.params.video = {}; expect(spec.isBidRequestValid(bidRequestCopy)).to.equal(false); bidRequestCopy.params.video = undefined; 
expect(spec.isBidRequestValid(bidRequestCopy)).to.equal(false); bidRequestCopy.params.video = 123; expect(spec.isBidRequestValid(bidRequestCopy)).to.equal(false); bidRequestCopy.params.video = {size_id: undefined}; expect(spec.isBidRequestValid(bidRequestCopy)).to.equal(false); delete bidRequestCopy.params.video; expect(spec.isBidRequestValid(bidRequestCopy)).to.equal(false); }); it('should not validate bid request when an invalid video object is passed in with legacy config mediaType', () => { createLegacyVideoBidderRequestNoVideo(); sandbox.stub(Date, 'now').callsFake(() => bidderRequest.auctionStart + 100 ); const bidderRequestCopy = clone(bidderRequest); expect(spec.isBidRequestValid(bidderRequestCopy.bids[0])).to.equal(false); bidderRequestCopy.bids[0].params.video = {}; expect(spec.isBidRequestValid(bidderRequestCopy.bids[0])).to.equal(false); bidderRequestCopy.bids[0].params.video = undefined; expect(spec.isBidRequestValid(bidderRequestCopy.bids[0])).to.equal(false); bidderRequestCopy.bids[0].params.video = NaN; expect(spec.isBidRequestValid(bidderRequestCopy.bids[0])).to.equal(false); delete bidderRequestCopy.bids[0].params.video; expect(spec.isBidRequestValid(bidderRequestCopy.bids[0])).to.equal(false); }); it('bid request is valid when video context is outstream', () => { createVideoBidderRequestOutstream(); sandbox.stub(Date, 'now').callsFake(() => bidderRequest.auctionStart + 100 ); const bidRequestCopy = clone(bidderRequest); let [request] = spec.buildRequests(bidRequestCopy.bids, bidRequestCopy); expect(spec.isBidRequestValid(bidderRequest.bids[0])).to.equal(true); expect(request.data.slots[0].size_id).to.equal(203); }); it('should get size from bid.sizes too', () => { createVideoBidderRequestNoPlayer(); sandbox.stub(Date, 'now').callsFake(() => bidderRequest.auctionStart + 100 ); const bidRequestCopy = clone(bidderRequest); let [request] = spec.buildRequests(bidRequestCopy.bids, bidRequestCopy); expect(request.data.slots[0].width).to.equal(300); expect(request.data.slots[0].height).to.equal(250); }); it('should get size from bid.sizes too with legacy config mediaType', () => { createLegacyVideoBidderRequestNoPlayer(); sandbox.stub(Date, 'now').callsFake(() => bidderRequest.auctionStart + 100 ); const bidRequestCopy = clone(bidderRequest); let [request] = spec.buildRequests(bidRequestCopy.bids, bidRequestCopy); expect(request.data.slots[0].width).to.equal(300); expect(request.data.slots[0].height).to.equal(250); }); }); describe('combineSlotUrlParams', () => { it('should combine an array of slot url params', () => { expect(spec.combineSlotUrlParams([])).to.deep.equal({}); expect(spec.combineSlotUrlParams([{p1: 'foo', p2: 'test', p3: ''}])).to.deep.equal({p1: 'foo', p2: 'test', p3: ''}); expect(spec.combineSlotUrlParams([{}, {p1: 'foo', p2: 'test'}])).to.deep.equal({p1: ';foo', p2: ';test'}); expect(spec.combineSlotUrlParams([{}, {}, {p1: 'foo', p2: ''}, {}])).to.deep.equal({p1: ';;foo;', p2: ''}); expect(spec.combineSlotUrlParams([{}, {p1: 'foo'}, {p1: ''}])).to.deep.equal({p1: ';foo;'}); expect(spec.combineSlotUrlParams([ {p1: 'foo', p2: 'test'}, {p2: 'test', p3: 'bar'}, {p1: 'bar', p2: 'test', p4: 'bar'} ])).to.deep.equal({p1: 'foo;;bar', p2: 'test', p3: ';bar;', p4: ';;bar'}); expect(spec.combineSlotUrlParams([ {p1: 'foo', p2: 'test', p3: 'baz'}, {p1: 'foo', p2: 'bar'}, {p2: 'test'} ])).to.deep.equal({p1: 'foo;foo;', p2: 'test;bar;test', p3: 'baz;;'}); }); }); describe('createSlotParams', () => { it('should return a valid slot params object', () => { let expectedQuery = { 
'account_id': '14062', 'site_id': '70608', 'zone_id': '335918', 'size_id': 15, 'alt_size_ids': '43', 'p_pos': 'atf', 'rp_floor': 0.01, 'rp_secure': /[01]/, 'tk_flint': INTEGRATION, 'x_source.tid': 'd45dd707-a418-42ec-b8a7-b70a6c6fab0b', 'p_screen_res': /\d+x\d+/, 'tk_user_key': '12346', 'kw': 'a,b,c', 'tg_v.ucat': 'new', 'tg_v.lastsearch': 'iphone', 'tg_v.likes': 'sports,video games', 'tg_i.rating': '5-star', 'tg_i.prodtype': 'tech,mobile', 'tg_fl.eid': 'div-1', 'rf': 'localhost' }; const slotParams = spec.createSlotParams(bidderRequest.bids[0], bidderRequest); // test that all values above are both present and correct Object.keys(expectedQuery).forEach(key => { const value = expectedQuery[key]; if (value instanceof RegExp) { expect(slotParams[key]).to.match(value); } else { expect(slotParams[key]).to.equal(value); } }); }); }); describe('hasVideoMediaType', () => { it('should return true if mediaType is video and size_id is set', () => { createVideoBidderRequest(); const legacyVideoTypeBidRequest = hasVideoMediaType(bidderRequest.bids[0]); expect(legacyVideoTypeBidRequest).is.equal(true); }); it('should return false if mediaType is video and size_id is not defined', () => { expect(spec.isBidRequestValid({ bid: 99, mediaType: 'video', params: { video: {} } })).is.equal(false); }); it('should return false if bidRequest.mediaType is not equal to video', () => { expect(hasVideoMediaType({ mediaType: 'banner' })).is.equal(false); }); it('should return false if bidRequest.mediaType is not defined', () => { expect(hasVideoMediaType({})).is.equal(false); }); it('should return true if bidRequest.mediaTypes.video.context is instream and size_id is defined', () => { expect(hasVideoMediaType({ mediaTypes: { video: { context: 'instream' } }, params: { video: { size_id: 7 } } })).is.equal(true); }); it('should return false if bidRequest.mediaTypes.video.context is instream but size_id is not defined', () => { expect(spec.isBidRequestValid({ mediaTypes: { video: { context: 'instream' } }, params: { video: {} } })).is.equal(false); }); }); }); describe('interpretResponse', () => { describe('for fastlane', () => { it('should handle a success response and sort by cpm', () => { let response = { 'status': 'ok', 'account_id': 14062, 'site_id': 70608, 'zone_id': 530022, 'size_id': 15, 'alt_size_ids': [ 43 ], 'tracking': '', 'inventory': {}, 'ads': [ { 'status': 'ok', 'impression_id': '153dc240-8229-4604-b8f5-256933b9374c', 'size_id': '15', 'ad_id': '6', 'advertiser': 7, 'network': 8, 'creative_id': 'crid-9', 'type': 'script', 'script': 'alert(\'foo\')', 'campaign_id': 10, 'cpm': 0.811, 'targeting': [ { 'key': 'rpfl_14062', 'values': [ '15_tier_all_test' ] } ] }, { 'status': 'ok', 'impression_id': '153dc240-8229-4604-b8f5-256933b9374d', 'size_id': '43', 'ad_id': '7', 'advertiser': 7, 'network': 8, 'creative_id': 'crid-9', 'type': 'script', 'script': 'alert(\'foo\')', 'campaign_id': 10, 'cpm': 0.911, 'targeting': [ { 'key': 'rpfl_14062', 'values': [ '43_tier_all_test' ] } ] } ] }; let bids = spec.interpretResponse({body: response}, { bidRequest: bidderRequest.bids[0] }); expect(bids).to.be.lengthOf(2); expect(bids[0].width).to.equal(320); expect(bids[0].height).to.equal(50); expect(bids[0].cpm).to.equal(0.911); expect(bids[0].ttl).to.equal(300); expect(bids[0].netRevenue).to.equal(false); expect(bids[0].rubicon.advertiserId).to.equal(7); expect(bids[0].rubicon.networkId).to.equal(8); expect(bids[0].creativeId).to.equal('crid-9'); expect(bids[0].currency).to.equal('USD'); 
expect(bids[0].ad).to.contain(`alert('foo')`) .and.to.contain(`<html>`) .and.to.contain(`<div data-rp-impression-id='153dc240-8229-4604-b8f5-256933b9374d'>`); expect(bids[0].rubiconTargeting.rpfl_elemid).to.equal('/19968336/header-bid-tag-0'); expect(bids[0].rubiconTargeting.rpfl_14062).to.equal('43_tier_all_test'); expect(bids[1].width).to.equal(300); expect(bids[1].height).to.equal(250); expect(bids[1].cpm).to.equal(0.811); expect(bids[1].ttl).to.equal(300); expect(bids[1].netRevenue).to.equal(false); expect(bids[1].rubicon.advertiserId).to.equal(7); expect(bids[1].rubicon.networkId).to.equal(8); expect(bids[1].creativeId).to.equal('crid-9'); expect(bids[1].currency).to.equal('USD'); expect(bids[1].ad).to.contain(`alert('foo')`) .and.to.contain(`<html>`) .and.to.contain(`<div data-rp-impression-id='153dc240-8229-4604-b8f5-256933b9374c'>`); expect(bids[1].rubiconTargeting.rpfl_elemid).to.equal('/19968336/header-bid-tag-0'); expect(bids[1].rubiconTargeting.rpfl_14062).to.equal('15_tier_all_test'); }); it('should be fine with a CPM of 0', () => { let response = { 'status': 'ok', 'account_id': 14062, 'site_id': 70608, 'zone_id': 530022, 'size_id': 15, 'alt_size_ids': [ 43 ], 'tracking': '', 'inventory': {}, 'ads': [{ 'status': 'ok', 'cpm': 0, 'size_id': 15 }] }; let bids = spec.interpretResponse({body: response}, { bidRequest: bidderRequest.bids[0] }); expect(bids).to.be.lengthOf(1); expect(bids[0].cpm).to.be.equal(0); }); it('should handle an error with no ads returned', () => { let response = { 'status': 'ok', 'account_id': 14062, 'site_id': 70608, 'zone_id': 530022, 'size_id': 15, 'alt_size_ids': [ 43 ], 'tracking': '', 'inventory': {}, 'ads': [] }; let bids = spec.interpretResponse({body: response}, { bidRequest: bidderRequest.bids[0] }); expect(bids).to.be.lengthOf(0); }); it('should handle an error', () => { let response = { 'status': 'ok', 'account_id': 14062, 'site_id': 70608, 'zone_id': 530022, 'size_id': 15, 'alt_size_ids': [ 43 ], 'tracking': '', 'inventory': {}, 'ads': [{ 'status': 'not_ok', }] }; let bids = spec.interpretResponse({body: response}, { bidRequest: bidderRequest.bids[0] }); expect(bids).to.be.lengthOf(0); }); it('should handle an error because of malformed json response', () => { let response = '{test{'; let bids = spec.interpretResponse({body: response}, { bidRequest: bidderRequest.bids[0] }); expect(bids).to.be.lengthOf(0); }); it('should handle a bidRequest argument of type Array', () => { let response = { 'status': 'ok', 'account_id': 14062, 'site_id': 70608, 'zone_id': 530022, 'size_id': 15, 'alt_size_ids': [ 43 ], 'tracking': '', 'inventory': {}, 'ads': [{ 'status': 'ok', 'cpm': 0, 'size_id': 15 }] }; let bids = spec.interpretResponse({ body: response }, { bidRequest: [clone(bidderRequest.bids[0])] }); expect(bids).to.be.lengthOf(1); expect(bids[0].cpm).to.be.equal(0); }); describe('singleRequest enabled', () => { it('handles bidRequest of type Array and returns associated adUnits', () => { const overrideMap = []; overrideMap[0] = { impression_id: '1' }; const stubAds = []; for (let i = 0; i < 10; i++) { stubAds.push(createResponseAdByIndex(i, sizeMap[i].sizeId, overrideMap)); } const stubBids = []; for (let i = 0; i < 10; i++) { stubBids.push(createBidRequestByIndex(i, sizeMap[i].sizeAsArray.slice())); } const bids = spec.interpretResponse({ body: { 'status': 'ok', 'site_id': '1100', 'account_id': 14062, 'zone_id': 2100, 'size_id': '1', 'tracking': '', 'inventory': {}, 'ads': stubAds }}, { bidRequest: stubBids }); 
expect(bids).to.be.a('array').with.lengthOf(10); bids.forEach((bid) => { expect(bid).to.be.a('object'); expect(bid).to.have.property('cpm').that.is.a('number'); expect(bid).to.have.property('width').that.is.a('number'); expect(bid).to.have.property('height').that.is.a('number'); // verify that result bid 'sizeId' links to a size from the sizeMap const size = getSizeIdForBid(sizeMap, bid); expect(size).to.be.a('object'); // use 'size' to verify that result bid links to the 'response.ad' passed to function const associateAd = getResponseAdBySize(stubAds, size); expect(associateAd).to.be.a('object'); expect(associateAd).to.have.property('creative_id').that.is.a('string'); // use 'size' to verify that result bid links to the 'bidRequest' passed to function const associateBidRequest = getBidRequestBySize(stubBids, size); expect(associateBidRequest).to.be.a('object'); expect(associateBidRequest).to.have.property('bidId').that.is.a('string'); // verify all bid properties set using 'ad' and 'bidRequest' match // 'ad.creative_id === bid.creativeId' expect(bid.requestId).to.equal(associateBidRequest.bidId); // 'bid.requestId === bidRequest.bidId' expect(bid.creativeId).to.equal(associateAd.creative_id); }); }); it('handles incorrect adUnits length by returning all bids with matching ads', () => { const overrideMap = []; overrideMap[0] = { impression_id: '1' }; const stubAds = []; for (let i = 0; i < 6; i++) { stubAds.push(createResponseAdByIndex(i, sizeMap[i].sizeId, overrideMap)); } const stubBids = []; for (let i = 0; i < 10; i++) { stubBids.push(createBidRequestByIndex(i, sizeMap[i].sizeAsArray.slice())); } const bids = spec.interpretResponse({ body: { 'status': 'ok', 'site_id': '1100', 'account_id': 14062, 'zone_id': 2100, 'size_id': '1', 'tracking': '', 'inventory': {}, 'ads': stubAds }}, { bidRequest: stubBids }); // no bids expected because response didn't match requested bid number expect(bids).to.be.a('array').with.lengthOf(6); }); it('skips adUnits with error status and returns all bids with ok status', () => { const stubAds = []; // Create overrides to break associations between bids and ads // Each override should cause one less bid to be returned by interpretResponse const overrideMap = []; overrideMap[0] = { impression_id: '1' }; overrideMap[2] = { status: 'error' }; overrideMap[4] = { status: 'error' }; overrideMap[7] = { status: 'error' }; overrideMap[8] = { status: 'error' }; for (let i = 0; i < 10; i++) { stubAds.push(createResponseAdByIndex(i, sizeMap[i].sizeId, overrideMap)); } const stubBids = []; for (let i = 0; i < 10; i++) { stubBids.push(createBidRequestByIndex(i, sizeMap[i].sizeAsArray.slice())); } const bids = spec.interpretResponse({ body: { 'status': 'error', 'site_id': '1100', 'account_id': 14062, 'zone_id': 2100, 'size_id': '1', 'tracking': '', 'inventory': {}, 'ads': stubAds }}, { bidRequest: stubBids }); expect(bids).to.be.a('array').with.lengthOf(6); bids.forEach((bid) => { expect(bid).to.be.a('object'); expect(bid).to.have.property('cpm').that.is.a('number'); expect(bid).to.have.property('width').that.is.a('number'); expect(bid).to.have.property('height').that.is.a('number'); // verify that result bid 'sizeId' links to a size from the sizeMap const size = getSizeIdForBid(sizeMap, bid); expect(size).to.be.a('object'); // use 'size' to verify that result bid links to the 'response.ad' passed to function const associateAd = getResponseAdBySize(stubAds, size); expect(associateAd).to.be.a('object'); 
expect(associateAd).to.have.property('creative_id').that.is.a('string'); expect(associateAd).to.have.property('status').that.is.a('string'); expect(associateAd.status).to.equal('ok'); // use 'size' to verify that result bid links to the 'bidRequest' passed to function const associateBidRequest = getBidRequestBySize(stubBids, size); expect(associateBidRequest).to.be.a('object'); expect(associateBidRequest).to.have.property('bidId').that.is.a('string'); // verify all bid properties set using 'ad' and 'bidRequest' match // 'ad.creative_id === bid.creativeId' expect(bid.requestId).to.equal(associateBidRequest.bidId); // 'bid.requestId === bidRequest.bidId' expect(bid.creativeId).to.equal(associateAd.creative_id); }); }); }); }); describe('for video', () => { beforeEach(() => { createVideoBidderRequest(); }); it('should register a successful bid', () => { let response = { 'status': 'ok', 'ads': { '/19968336/header-bid-tag-0': [ { 'status': 'ok', 'cpm': 1, 'tier': 'tier0200', 'targeting': { 'rpfl_8000': '201_tier0200', 'rpfl_elemid': '/19968336/header-bid-tag-0' }, 'impression_id': 'a40fe16e-d08d-46a9-869d-2e1573599e0c', 'site_id': 88888, 'zone_id': 54321, 'creative_type': 'video', 'creative_depot_url': 'https://fastlane-adv.rubiconproject.com/v1/creative/a40fe16e-d08d-46a9-869d-2e1573599e0c.xml', 'ad_id': 999999, 'creative_id': 'crid-999999', 'size_id': 201, 'advertiser': 12345 } ] }, 'account_id': 7780 }; let bids = spec.interpretResponse({body: response}, { bidRequest: bidderRequest.bids[0] }); expect(bids).to.be.lengthOf(1); expect(bids[0].creativeId).to.equal('crid-999999'); expect(bids[0].cpm).to.equal(1); expect(bids[0].ttl).to.equal(300); expect(bids[0].netRevenue).to.equal(false); expect(bids[0].vastUrl).to.equal( 'https://fastlane-adv.rubiconproject.com/v1/creative/a40fe16e-d08d-46a9-869d-2e1573599e0c.xml' ); expect(bids[0].impression_id).to.equal('a40fe16e-d08d-46a9-869d-2e1573599e0c'); expect(bids[0].mediaType).to.equal('video'); expect(bids[0].videoCacheKey).to.equal('a40fe16e-d08d-46a9-869d-2e1573599e0c'); }); }); }); }); describe('user sync', () => { const emilyUrl = 'https://eus.rubiconproject.com/usync.html'; beforeEach(() => { resetUserSync(); }); it('should register the Emily iframe', () => { let syncs = spec.getUserSyncs({ iframeEnabled: true }); expect(syncs).to.deep.equal({type: 'iframe', url: emilyUrl}); }); it('should not register the Emily iframe more than once', () => { let syncs = spec.getUserSyncs({ iframeEnabled: true }); expect(syncs).to.deep.equal({type: 'iframe', url: emilyUrl}); // when called again, should still have only been called once syncs = spec.getUserSyncs(); expect(syncs).to.equal(undefined); }); it('should pass gdpr params if consent is true', () => { expect(spec.getUserSyncs({ iframeEnabled: true }, {}, { gdprApplies: true, consentString: 'foo' })).to.deep.equal({ type: 'iframe', url: `${emilyUrl}?gdpr=1&gdpr_consent=foo` }); }); it('should pass gdpr params if consent is false', () => { expect(spec.getUserSyncs({ iframeEnabled: true }, {}, { gdprApplies: false, consentString: 'foo' })).to.deep.equal({ type: 'iframe', url: `${emilyUrl}?gdpr=0&gdpr_consent=foo` }); }); it('should pass gdpr param gdpr_consent only when gdprApplies is undefined', () => { expect(spec.getUserSyncs({ iframeEnabled: true }, {}, { consentString: 'foo' })).to.deep.equal({ type: 'iframe', url: `${emilyUrl}?gdpr_consent=foo` }); }); it('should pass no params if gdpr consentString is not defined', () => { expect(spec.getUserSyncs({ iframeEnabled: true }, {}, {})).to.deep.equal({ 
type: 'iframe', url: `${emilyUrl}` }); }); it('should pass no params if gdpr consentString is a number', () => { expect(spec.getUserSyncs({ iframeEnabled: true }, {}, { consentString: 0 })).to.deep.equal({ type: 'iframe', url: `${emilyUrl}` }); }); it('should pass no params if gdpr consentString is null', () => { expect(spec.getUserSyncs({ iframeEnabled: true }, {}, { consentString: null })).to.deep.equal({ type: 'iframe', url: `${emilyUrl}` }); }); it('should pass no params if gdpr consentString is a object', () => { expect(spec.getUserSyncs({ iframeEnabled: true }, {}, { consentString: {} })).to.deep.equal({ type: 'iframe', url: `${emilyUrl}` }); }); it('should pass no params if gdpr is not defined', () => { expect(spec.getUserSyncs({ iframeEnabled: true }, {}, undefined)).to.deep.equal({ type: 'iframe', url: `${emilyUrl}` }); }); }); }); function clone(obj) { return JSON.parse(JSON.stringify(obj)); }
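// ---------------------------------------------------------------------------
// Illustrative sketch only, not part of the original spec: the assertions above
// read the fastlane GET payload through a parseQuery helper that is defined
// earlier in this file, outside the excerpt shown here. The hypothetical
// exampleParseQuery below shows the behaviour the tests assume, turning a
// query string into a key/value object; the real helper may differ in detail.
function exampleParseQuery(queryString) {
  return queryString.split('&').reduce((params, pair) => {
    // split only on the first '=' so values such as consent strings that
    // contain '=' padding are kept intact
    const idx = pair.indexOf('=');
    const key = idx === -1 ? pair : pair.slice(0, idx);
    const value = idx === -1 ? '' : pair.slice(idx + 1);
    params[decodeURIComponent(key)] = decodeURIComponent(value);
    return params;
  }, {});
}

// Usage: exampleParseQuery('account_id=14062&p_pos=atf').p_pos === 'atf'
// ---------------------------------------------------------------------------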
{ "content_hash": "cabee96269113f96bf9b6f982551ce20", "timestamp": "", "source": "github", "line_count": 1942, "max_line_length": 350, "avg_line_length": 36.12152420185376, "alnum_prop": 0.5273992130923191, "repo_name": "Somoaudience/Prebid.js", "id": "c02a4c9f86c005c73826bb4eabfac30d1e6ce0b7", "size": "70148", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "test/spec/modules/rubiconBidAdapter_spec.js", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "HTML", "bytes": "201871" }, { "name": "JavaScript", "bytes": "3011299" } ], "symlink_target": "" }
ENV['RAILS_ENV'] ||= 'test' require File.expand_path('../../config/environment', __FILE__) require 'rails/test_help' class ActiveSupport::TestCase # Setup all fixtures in test/fixtures/*.yml for all tests in alphabetical order. fixtures :all # Add more helper methods to be used by all tests here... end class ActionController::TestCase include Devise::TestHelpers end
{ "content_hash": "700585740a5d28191fd0360b2dc139ea", "timestamp": "", "source": "github", "line_count": 14, "max_line_length": 82, "avg_line_length": 27.071428571428573, "alnum_prop": 0.7335092348284961, "repo_name": "darlingtonamz/webfaq", "id": "0f6da25427346b1e70aee13e9bbaf2fc1ed68c24", "size": "379", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "test/test_helper.rb", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "6767" }, { "name": "CoffeeScript", "bytes": "1266" }, { "name": "HTML", "bytes": "23604" }, { "name": "JavaScript", "bytes": "10205" }, { "name": "Ruby", "bytes": "50976" }, { "name": "Shell", "bytes": "383" } ], "symlink_target": "" }
'use strict'; /* Validators */ var validation = angular.module('validation', []); /* * Validation rules controller * When defining new rules add name and parameters into $scope.validationRules * When a special function is required for the validation define it under {name.fun} and has to return 1 if validation successful * otherwise define fun : function(){return null;} * * */ validation.controller('validationRules', function($scope, $http, $location) { //console.log($scope.product_accident.$valid+"?"); $scope.validationRules = { "required": { msg2 : function(){ return true; }, "regex": null, "msg": "You have to enter at least something", fun: function(param, $scope) { if (param !== "" && param !== undefined) { return true; } } }, "email": { msg2 : function(){ return true; }, "regex": /^(([^<>()[\]\\.,;:\s@\"]+(\.[^<>()[\]\\.,;:\s@\"]+)*)|(\".+\"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/, "msg": "You have to enter the whole email.", }, "fullname": { msg2 : function(){ return true; }, "regex": /^\s*\S+\s+\S+(\s*\S)*\s*$/, "msg": "You have to enter the whole name.", } }; }); //connected on blur event directive validation.directive('validate', function($location) { return { restrict: 'AC', require: 'ngModel', link: function(scope, element, attrs, ngModel) { ngModel.$error = null; var curr_form = $location.path().replace("/", ""); ngModel.$setValidity('form', false); element.bind('blur', function() { var error_msg = new Array(); var validation_a = attrs.validate.replace(" ", "").split(","); for (var i = 0; i < validation_a.length; i++) { var rule = scope.validationRules[validation_a[i]]; if (rule !== undefined && rule.regex === null) { if (!rule.fun(ngModel.$viewValue)) { error_msg.push(rule.msg); } } else if (rule !== undefined && rule.regex !== null) { if (!rule.regex.test(ngModel.$viewValue)) { error_msg.push(rule.msg); } } else { alert("You are trying to use a nonexistent validation rule!"); } } if (error_msg.length !== 0) { scope.$apply(function() { ngModel.$error = error_msg; ngModel.$setValidity('form', false); scope.main_model.validated = scope[curr_form].$valid; }); } else { scope.$apply(function() { ngModel.$error = null; ngModel.$setValidity('form', true); scope.main_model.validated = scope[curr_form].$valid; }); } }); }, controller: function($scope, $element, $attrs) { //var curr_form = $location.path().replace("/", ""); $scope.main_model.validated = false; } }; });
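// ---------------------------------------------------------------------------
// Illustrative sketch only, not part of the original file: the header comment
// above describes how new rules are added to $scope.validationRules. The
// hypothetical "minlength5" rule below follows the same shape (regex: null
// plus a fun() that returns a truthy value when validation succeeds); it is
// shown against a standalone rules object so the snippet is self-contained.
// The names exampleRules and exampleValidate are illustrative.
var exampleRules = {
  "minlength5": {
    msg2: function () { return true; },
    regex: null,
    msg: "You have to enter at least 5 characters.",
    fun: function (param) {
      if (param !== undefined && param.length >= 5) {
        return true;
      }
    }
  }
};

// Minimal check mirroring the blur handler's branching on regex vs. fun:
function exampleValidate(value, ruleName) {
  var rule = exampleRules[ruleName];
  if (rule.regex === null) {
    return !!rule.fun(value);
  }
  return rule.regex.test(value);
}

// exampleValidate('hello', 'minlength5') === true
// exampleValidate('hi', 'minlength5') === false
// ---------------------------------------------------------------------------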
{ "content_hash": "81394512ae097ecf213dba2e722d8d82", "timestamp": "", "source": "github", "line_count": 100, "max_line_length": 177, "avg_line_length": 36.63, "alnum_prop": 0.4283374283374283, "repo_name": "ma66ot/flow2.0-angular", "id": "ea645c2226290b5a7ac3cb70f276394590288f9e", "size": "3663", "binary": false, "copies": "1", "ref": "refs/heads/cutom-step", "path": "app/js/validation.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "73785" }, { "name": "JavaScript", "bytes": "82988" }, { "name": "Ruby", "bytes": "503" }, { "name": "Shell", "bytes": "5978" } ], "symlink_target": "" }
/** * Gossip-based set implementation. */ package io.atomix.protocols.gossip.set;
{ "content_hash": "5d04e5429aed882e973373cf1ac60685", "timestamp": "", "source": "github", "line_count": 6, "max_line_length": 39, "avg_line_length": 14.333333333333334, "alnum_prop": 0.7093023255813954, "repo_name": "atomix/atomix", "id": "7a3ec99f67cfb99bfb478e636d39c240e4cb56c0", "size": "699", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "protocols/gossip/src/main/java/io/atomix/protocols/gossip/set/package-info.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "6083318" }, { "name": "Shell", "bytes": "2233" } ], "symlink_target": "" }
/* Start of the app, we require everything that is needed */ define(function (require) { var activity = require("sugar-web/activity/activity"); window.PaintApp = require("activity/paint-app"); require("activity/paint-activity"); PaintApp.libs.activity = activity; /* Fetching and storing of the palettes */ PaintApp.palettes.presencePalette = require("sugar-web/graphics/presencepalette"); PaintApp.palettes.colorPalette = require("activity/palettes/color-palette"); PaintApp.palettes.stampPalette = require("activity/palettes/stamp-palette"); PaintApp.palettes.textPalette = require("activity/palettes/text-palette"); PaintApp.palettes.drawingsPalette = require("activity/palettes/drawings-palette"); PaintApp.palettes.filtersPalette = require("activity/palettes/filters-palette"); /* Fetching and storing of the buttons */ PaintApp.buttons.sizeButton = require("activity/buttons/size-button"); PaintApp.buttons.clearButton = require("activity/buttons/clear-button"); PaintApp.buttons.undoButton = require("activity/buttons/undo-button"); PaintApp.buttons.redoButton = require("activity/buttons/redo-button"); /* Fetching and storing of the modes */ PaintApp.modes.Pen = require("activity/modes/modes-pen"); PaintApp.modes.Eraser = require("activity/modes/modes-eraser"); PaintApp.modes.Bucket = require("activity/modes/modes-bucket"); PaintApp.modes.Text = require("activity/modes/modes-text"); PaintApp.modes.Stamp = require("activity/modes/modes-stamp"); PaintApp.modes.Copy = require("activity/modes/modes-copy"); PaintApp.modes.Paste = require("activity/modes/modes-paste"); PaintApp.collaboration = require("activity/collaboration"); require(['domReady!', 'sugar-web/datastore', 'paper-core', 'mustache', 'lzstring', 'humane'], function (doc, datastore, _paper, mustache, lzstring, humane) { /* Fetching and storing libraries */ PaintApp.libs.mustache = mustache; PaintApp.libs.humane = humane; PaintApp.libs.lzstring = lzstring; //Setup of the activity activity.setup(); /* Fetch and store UI elements */ initGui(); document.getElementById("stop-button").addEventListener('click', function (event) { var data = { width: PaintApp.elements.canvas.width / window.devicePixelRatio, height: PaintApp.elements.canvas.height / window.devicePixelRatio, src: PaintApp.collaboration.compress(PaintApp.elements.canvas.toDataURL()) } var jsonData = JSON.stringify(data); activity.getDatastoreObject().setDataAsText(jsonData); activity.getDatastoreObject().save(function (error) { }); }); //Fetch of the history if not starting shared if (!window.top.sugar || !window.top.sugar.environment || !window.top.sugar.environment.sharedId) { activity.getDatastoreObject().loadAsText(function (error, metadata, jsonData) { if (jsonData == null) { return; } var data = JSON.parse(jsonData); PaintApp.clearCanvas(); img = new Image(); img.onload = function () { PaintApp.elements.canvas.getContext('2d').drawImage(img, 0, 0, data.width, data.height); PaintApp.saveCanvas(); }; img.src = PaintApp.collaboration.decompress(data.src); //DISPLAY }); } // If starting in shared mode, we disable undo/redo if (window.top && window.top.sugar && window.top.sugar.environment && window.top.sugar.environment.sharedId) { PaintApp.data.isHost = false; PaintApp.buttons.undoButton.hideGui(); PaintApp.buttons.redoButton.hideGui(); PaintApp.displayUndoRedoButtons(); } }); });
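// ---------------------------------------------------------------------------
// Illustrative sketch only, not part of the original activity: the save handler
// above stores the canvas as a compressed data URL via
// PaintApp.collaboration.compress, and loadAsText reverses it with
// PaintApp.collaboration.decompress (both defined in activity/collaboration,
// outside this file). The exact lz-string variant those wrappers use is an
// assumption here; the round trip with the plain lz-string API looks like this.
require(['lzstring'], function (lzstring) {
  var dataUrl = document.createElement('canvas').toDataURL();
  var packed = lzstring.compress(dataUrl);     // what would be written to the datastore
  var restored = lzstring.decompress(packed);  // identical to dataUrl on load
  console.log(restored === dataUrl);           // true
});
// ---------------------------------------------------------------------------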
{ "content_hash": "7073dad4d94d9c8a13a8b05e23595e90", "timestamp": "", "source": "github", "line_count": 90, "max_line_length": 161, "avg_line_length": 44.422222222222224, "alnum_prop": 0.644072036018009, "repo_name": "mikklfr/Paint.Activity", "id": "c1a97d4975e4c7e830103e12fb230bd3e60e0341", "size": "3998", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "js/activity.js", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "39364" }, { "name": "HTML", "bytes": "2674" }, { "name": "JavaScript", "bytes": "578270" }, { "name": "Python", "bytes": "88" } ], "symlink_target": "" }
#ifndef COMPONENTS_CAST_RECEIVER_RENDERER_URL_REWRITE_RULES_PROVIDER_H_
#define COMPONENTS_CAST_RECEIVER_RENDERER_URL_REWRITE_RULES_PROVIDER_H_

namespace content {
class RenderFrame;
}  // namespace content

namespace cast_receiver {

// This class provides URL request rewrite rules by binding a
// UrlRequestRulesReceiver mojo interface and listening for updates from
// browser. The lifespan of provider and rules is tied to a RenderFrame. Owned
// by CastRuntimeContentRendererClient, this object will be destroyed on
// RenderFrame destruction, triggering the destruction of all of the objects it
// exposes.
class UrlRewriteRulesProvider final : public content::RenderFrameObserver {
 public:
  // |on_render_frame_deleted_callback| must delete |this|.
  UrlRewriteRulesProvider(
      content::RenderFrame* render_frame,
      base::OnceCallback<void(int)> on_render_frame_deleted_callback);
  ~UrlRewriteRulesProvider() override;

  UrlRewriteRulesProvider(const UrlRewriteRulesProvider&) = delete;
  UrlRewriteRulesProvider& operator=(const UrlRewriteRulesProvider&) = delete;

  const scoped_refptr<url_rewrite::UrlRequestRewriteRules>& GetCachedRules()
      const;

 private:
  // content::RenderFrameObserver implementation.
  void OnDestruct() override;

  url_rewrite::UrlRequestRulesReceiver url_request_rules_receiver_;

  base::OnceCallback<void(int)> on_render_frame_deleted_callback_;
};

}  // namespace cast_receiver

#endif  // COMPONENTS_CAST_RECEIVER_RENDERER_URL_REWRITE_RULES_PROVIDER_H_
{ "content_hash": "d9fb2e17b2d06b415d653d4aadde763d", "timestamp": "", "source": "github", "line_count": 37, "max_line_length": 79, "avg_line_length": 37.027027027027025, "alnum_prop": 0.7788321167883212, "repo_name": "chromium/chromium", "id": "5f713b7f2a5c71252b217fa973c4dd674cd78b56", "size": "1818", "binary": false, "copies": "5", "ref": "refs/heads/main", "path": "components/cast_receiver/renderer/url_rewrite_rules_provider.h", "mode": "33188", "license": "bsd-3-clause", "language": [], "symlink_target": "" }
JSIL.MakeEnum( "Microsoft.CSharp.RuntimeBinder.CSharpBinderFlags", true, { None: 0, CheckedContext: 1, InvokeSimpleName: 2, InvokeSpecialName: 4, BinaryOperationLogical: 8, ConvertExplicit: 16, ConvertArrayIndex: 32, ResultIndexed: 64, ValueFromCompoundAssignment: 128, ResultDiscarded: 256 }, true );
{ "content_hash": "d0504d0119ba6f2c40bf02871a1f1090", "timestamp": "", "source": "github", "line_count": 14, "max_line_length": 61, "avg_line_length": 24.714285714285715, "alnum_prop": 0.6994219653179191, "repo_name": "hach-que/JSIL", "id": "73868bac28ebef0b0263d2cad3ab7e0b2b618d3a", "size": "348", "binary": false, "copies": "4", "ref": "refs/heads/master", "path": "JSIL.Libraries/Includes/Bootstrap/Dynamic/Enums/Microsoft.CSharp.RuntimeBinder.CSharpBinderFlags.js", "mode": "33188", "license": "mit", "language": [ { "name": "ASP", "bytes": "554" }, { "name": "Batchfile", "bytes": "3258" }, { "name": "C#", "bytes": "3002307" }, { "name": "C++", "bytes": "4583" }, { "name": "CSS", "bytes": "21575" }, { "name": "F#", "bytes": "2354" }, { "name": "HTML", "bytes": "60654" }, { "name": "JavaScript", "bytes": "1866922" }, { "name": "Shell", "bytes": "1780" }, { "name": "Visual Basic", "bytes": "1386" } ], "symlink_target": "" }
using System; using System.Collections.Generic; using System.Linq; using EPiServer; using EPiServer.Framework.Cache; using EPiServer.Logging.Compatibility; using EPiServer.ServiceLocation; using EPiServer.Shell.ObjectEditing; using Geta.VippyWrapper.Responses; namespace Geta.VippyModule.SelectionQueries { [ServiceConfiguration(typeof(ISelectionQuery))] public class VippyVideoSelectionQuery : ISelectionQuery { private readonly VippyWrapper.VippyWrapper _vippyWrapper; readonly ILog _logger = LogManager.GetLogger(typeof(VippyVideoSelectionQuery)); public VippyVideoSelectionQuery(VippyWrapper.VippyWrapper vippyWrapper) { if (vippyWrapper == null) throw new ArgumentNullException("vippyWrapper"); _vippyWrapper = vippyWrapper; } public ISelectItem GetItemByValue(string value) { var video = GetVideoById(value); if (video != null) { return new SelectItem { Text = video.Title, Value = video.VideoId }; } return null; } public Video GetVideoById(string id) { return _vippyWrapper.GetVideo(id, false).Result; } public IEnumerable<ISelectItem> GetItems(string query) { var matches = GetVideos(); if (IsNotEmpty(query)) { matches = FilterByName(query, matches); } var list = matches .Take(20) .Select(x => new SelectItem { Text = x.Title, Value = x.VideoId }) .ToList(); list.Insert(0, new SelectItem { Text = string.Empty, Value = string.Empty }); return list; } private static bool IsNotEmpty(string query) { return !string.IsNullOrEmpty(query) && !string.Equals(query, "*", StringComparison.OrdinalIgnoreCase); } private static IEnumerable<Video> FilterByName(string name, IEnumerable<Video> matches) { //Remove * at the end of name var n = name.Substring(0, name.Length - 1); return matches.Where(e => e.Title.StartsWith(n, StringComparison.OrdinalIgnoreCase)); } private IEnumerable<Video> GetVideos() { string cacheKey = "vippyvideos"; var videos = CacheManager.Get(cacheKey) as IEnumerable<Video>; if (videos == null) { videos = _vippyWrapper.GetVideos().Result; CacheManager.Insert(cacheKey, videos, new CacheEvictionPolicy(null, null, null, TimeSpan.FromMinutes(2), CacheTimeoutType.Absolute)); } return videos; } } }
{ "content_hash": "47a0898685a1ffb2bd633388eb1ac720", "timestamp": "", "source": "github", "line_count": 101, "max_line_length": 149, "avg_line_length": 29.425742574257427, "alnum_prop": 0.5585464333781965, "repo_name": "Geta/vippy", "id": "4e22d8389b5c59c807cc5f7857f4f0028f6efc83", "size": "2974", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Geta.VippyModule/SelectionQueries/VippyVideoSelectionQuery.cs", "mode": "33188", "license": "mit", "language": [ { "name": "ASP", "bytes": "95" }, { "name": "C#", "bytes": "62385" }, { "name": "JavaScript", "bytes": "1800" } ], "symlink_target": "" }
using System; using System.Collections.Generic; using System.Linq; using System.Runtime.InteropServices; using OpenCvSharp.Internal.Util; namespace OpenCvSharp.Internal.Vectors; /// <summary> /// </summary> public class VectorOfPoint : DisposableCvObject, IStdVector<Point> { /// <summary> /// Constructor /// </summary> public VectorOfPoint() { ptr = NativeMethods.vector_Point2i_new1(); } /// <summary> /// Constructor /// </summary> /// <param name="size"></param> public VectorOfPoint(nuint size) { if (size < 0) throw new ArgumentOutOfRangeException(nameof(size)); ptr = NativeMethods.vector_Point2i_new2(size); } /// <summary> /// Constructor /// </summary> /// <param name="data"></param> public VectorOfPoint(IEnumerable<Point> data) { if (data == null) throw new ArgumentNullException(nameof(data)); var array = data.ToArray(); ptr = NativeMethods.vector_Point2i_new3(array, (nuint)array.Length); } /// <summary> /// Releases unmanaged resources /// </summary> protected override void DisposeUnmanaged() { NativeMethods.vector_Point2i_delete(ptr); base.DisposeUnmanaged(); } /// <summary> /// vector.size() /// </summary> public int Size { get { var res = NativeMethods.vector_Point2i_getSize(ptr); GC.KeepAlive(this); return (int)res; } } /// <summary> /// &amp;vector[0] /// </summary> public IntPtr ElemPtr { get { var res = NativeMethods.vector_Point2i_getPointer(ptr); GC.KeepAlive(this); return res; } } /// <summary> /// Converts std::vector to managed array /// </summary> /// <returns></returns> public Point[] ToArray() { var size = Size; if (size == 0) { return Array.Empty<Point>(); } var dst = new Point[size]; using (var dstPtr = new ArrayAddress1<Point>(dst)) { long bytesToCopy = Marshal.SizeOf<Point>() * dst.Length; unsafe { Buffer.MemoryCopy(ElemPtr.ToPointer(), dstPtr.Pointer.ToPointer(), bytesToCopy, bytesToCopy); } } GC.KeepAlive(this); // ElemPtr is IntPtr to memory held by this object, so // make sure we are not disposed until finished with copy. return dst; } }
{ "content_hash": "1c9f61bd9f63a68ec71a5ed9ecad494b", "timestamp": "", "source": "github", "line_count": 103, "max_line_length": 109, "avg_line_length": 24.87378640776699, "alnum_prop": 0.5562060889929742, "repo_name": "shimat/opencvsharp", "id": "6c67992739e7baec689ef1a3d9350d63cd78a202", "size": "2564", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/OpenCvSharp/Internal/Vectors/VectorOfPoint.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "167162" }, { "name": "C#", "bytes": "4363841" }, { "name": "C++", "bytes": "534952" }, { "name": "CMake", "bytes": "1931" }, { "name": "Dockerfile", "bytes": "35507" }, { "name": "PowerShell", "bytes": "2285" } ], "symlink_target": "" }
PyBaseConvert ============= Convert arbitrary numbers between integer bases from 2 to 64. Installation ------------ git clone https://github.com/TwoBitAlchemist/PyBaseConvert Requires Python 2.6+ or Python 3. Usage ----- ./base_convert.py NUM [from_base [to_base [round_to]]] Convert `NUM`, which is any real number, from `from_base` to `to_base`, rounding to `round_to` digits in case the result is non-terminating. If unspecified, both `from_base` and `to_base` are assumed to be 10 and results are rounded (`round_to`) to 5 places following the radix point. For convenient use in modules, the convert function can be imported and used the same way: from PyBaseConvert import base_convert or from PyBaseConvert.base_convert import convert Note that PyBaseConvert should be replaced with whatever directory you cloned into in case you changed the name. Once imported, the argument order is the same as given on the command line. In case the call is malformed, ValueError is raised. On successful conversions, a string is returned. Examples -------- >>> base_convert.convert(10, 2) '2' >>> base_convert.convert('a', 16) '10' >>> base_convert.convert('a', 16, 2) '1010' >>> base_convert.convert(0.1, from_base=2) '.5' >>> base_convert.convert(0.1, to_base=2) '.00011' >>> base_convert.convert(0.1, to_base=2, round_to=9) '.000110011' >>> base_convert.convert(-8, 10, 5) '-13' Warning About Notation ---------------------- This program uses the following notation for digits in every base (up to the amount of digits that base needs): * `0-9` for the digits 0 through 9. * `a-z` for the digits 10 through 35. * `A-Z` for the digits 36 through 61. * `+` and `/` for the digits 62 and 63. This may lead to unexpected results for users expecting conformity with accepted notations for certain commonly used bases. Specifically, this means that bases like hexadecimal are case sensitive and require lowercase letters (and that the program will complain that, e.g., `A` is not a valid hex digit). Likewise, there are several different implementations of base 64 with varying rules and inconsistencies with one another. If you need more standard behavior with respect to commonly used bases, please do not use this program in a production environment.
{ "content_hash": "f591964fcacfcd56363456804749e3ea", "timestamp": "", "source": "github", "line_count": 76, "max_line_length": 78, "avg_line_length": 30.842105263157894, "alnum_prop": 0.7090443686006825, "repo_name": "TwoBitAlchemist/PyBaseConvert", "id": "3e5c761868e9582953de8379e7d7de486fced6fe", "size": "2344", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "2899" } ], "symlink_target": "" }
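The "Warning About Notation" section above is easier to follow with concrete digit values. The snippet below is a minimal, self-contained sketch of integer conversion under the documented digit alphabet (0-9, a-z, A-Z, '+', '/'); it does not call PyBaseConvert itself, and the expected outputs in the comments are inferred from the README's digit table rather than from running the project.

# Minimal sketch of the digit alphabet documented above (0-9, a-z, A-Z, '+', '/').
# This is not PyBaseConvert's implementation, only an independent illustration of
# how integer digit values map to characters under that notation.
import string

DIGITS = string.digits + string.ascii_lowercase + string.ascii_uppercase + "+/"


def to_base(n, base):
    """Convert a non-negative integer to a string in the given base (2 to 64)."""
    if not 2 <= base <= 64:
        raise ValueError("base must be between 2 and 64")
    if n == 0:
        return "0"
    out = []
    while n:
        n, rem = divmod(n, base)
        out.append(DIGITS[rem])
    return "".join(reversed(out))


print(to_base(10, 2))    # '1010'
print(to_base(61, 64))   # 'Z', digit value 61 is uppercase Z under this notation
print(to_base(63, 64))   # '/', the highest digit value
print(to_base(255, 16))  # 'ff', lowercase, unlike the conventional 'FF'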
"""Helpers to build or extract data from feaLib AST objects.""" from __future__ import print_function, division, absolute_import, unicode_literals from fontTools.feaLib import ast from fontTools import unicodedata import collections import re # we re-export here all the feaLib AST classes so they can be used from # writer modules with a single `from ufo2ft.featureWriters import ast` import sys self = sys.modules[__name__] for name in getattr(ast, "__all__", dir(ast)): if isinstance(getattr(ast, name), type): setattr(self, name, getattr(ast, name)) del sys, self, name def getScriptLanguageSystems(feaFile): """Return dictionary keyed by Unicode script code containing lists of (OT_SCRIPT_TAG, [OT_LANGUAGE_TAG, ...]) tuples (excluding "DFLT"). """ languagesByScript = collections.OrderedDict() for ls in [ st for st in feaFile.statements if isinstance(st, ast.LanguageSystemStatement) ]: if ls.script == "DFLT": continue languagesByScript.setdefault(ls.script, []).append(ls.language) langSysMap = collections.OrderedDict() for script, languages in languagesByScript.items(): sc = unicodedata.ot_tag_to_script(script) langSysMap.setdefault(sc, []).append((script, languages)) return langSysMap def iterFeatureBlocks(feaFile, tag=None): for statement in feaFile.statements: if isinstance(statement, ast.FeatureBlock): if tag is not None and statement.name != tag: continue yield statement def findFeatureTags(feaFile): return {f.name for f in iterFeatureBlocks(feaFile)} def iterClassDefinitions(feaFile, featureTag=None): if featureTag is None: # start from top-level class definitions for s in feaFile.statements: if isinstance(s, ast.GlyphClassDefinition): yield s # then iterate over per-feature class definitions for fea in iterFeatureBlocks(feaFile, tag=featureTag): for s in fea.statements: if isinstance(s, ast.GlyphClassDefinition): yield s LOOKUP_FLAGS = { "RightToLeft": 1, "IgnoreBaseGlyphs": 2, "IgnoreLigatures": 4, "IgnoreMarks": 8, } def makeLookupFlag(name=None, markAttachment=None, markFilteringSet=None): value = 0 if name is None else LOOKUP_FLAGS[name] if markAttachment is not None: assert isinstance(markAttachment, ast.GlyphClassDefinition) markAttachment = ast.GlyphClassName(markAttachment) if markFilteringSet is not None: assert isinstance(markFilteringSet, ast.GlyphClassDefinition) markFilteringSet = ast.GlyphClassName(markFilteringSet) return ast.LookupFlagStatement( value, markAttachment=markAttachment, markFilteringSet=markFilteringSet ) def makeGlyphClassDefinitions(groups, feaFile=None, stripPrefix=""): """ Given a groups dictionary ({str: list[str]}), create feaLib GlyphClassDefinition objects for each group. Return a dict keyed by the original group name. If `stripPrefix` (str) is provided and a group name starts with it, the string will be stripped from the beginning of the class name. 
""" classDefs = {} if feaFile is not None: classNames = {cdef.name for cdef in iterClassDefinitions(feaFile)} else: classNames = set() lengthPrefix = len(stripPrefix) for groupName, members in sorted(groups.items()): originalGroupName = groupName if stripPrefix and groupName.startswith(stripPrefix): groupName = groupName[lengthPrefix:] className = makeFeaClassName(groupName, classNames) classNames.add(className) classDef = makeGlyphClassDefinition(className, members) classDefs[originalGroupName] = classDef return classDefs def makeGlyphClassDefinition(className, members): glyphNames = [ast.GlyphName(g) for g in members] glyphClass = ast.GlyphClass(glyphNames) classDef = ast.GlyphClassDefinition(className, glyphClass) return classDef def makeFeaClassName(name, existingClassNames=None): """Make a glyph class name which is legal to use in feature text. Ensures the name only includes characters in "A-Za-z0-9._", and isn't already defined. """ name = re.sub(r"[^A-Za-z0-9._]", r"", name) if existingClassNames is None: return name i = 1 origName = name while name in existingClassNames: name = "%s_%d" % (origName, i) i += 1 return name def addLookupReferences( feature, lookups, script=None, languages=None, exclude_dflt=False ): """Add references to named lookups to the feature's statements. If `script` (str) and `languages` (sequence of str) are provided, only register the lookup for the given script and languages, optionally with `exclude_dflt` directive. Otherwise add a global reference which will be registered for all the scripts and languages in the feature file's `languagesystems` statements. """ assert lookups if not script: for lookup in lookups: feature.statements.append(ast.LookupReferenceStatement(lookup)) return feature.statements.append(ast.ScriptStatement(script)) if exclude_dflt: for language in languages or ("dflt",): feature.statements.append( ast.LanguageStatement(language, include_default=False) ) for lookup in lookups: feature.statements.append(ast.LookupReferenceStatement(lookup)) else: feature.statements.append(ast.LanguageStatement("dflt", include_default=True)) for lookup in lookups: feature.statements.append(ast.LookupReferenceStatement(lookup)) for language in languages or (): if language == "dflt": continue feature.statements.append( ast.LanguageStatement(language, include_default=True) ) _GDEFGlyphClasses = collections.namedtuple( "_GDEFGlyphClasses", "base ligature mark component" ) def getGDEFGlyphClasses(feaLib): """Return GDEF GlyphClassDef base/mark/ligature/component glyphs, or None if no GDEF table is defined in the feature file. """ for st in feaLib.statements: if isinstance(st, ast.TableBlock) and st.name == "GDEF": for st in st.statements: if isinstance(st, ast.GlyphClassDefStatement): return _GDEFGlyphClasses( frozenset(st.baseGlyphs.glyphSet()) if st.baseGlyphs is not None else frozenset(), frozenset(st.ligatureGlyphs.glyphSet()) if st.ligatureGlyphs is not None else frozenset(), frozenset(st.markGlyphs.glyphSet()) if st.markGlyphs is not None else frozenset(), frozenset(st.componentGlyphs.glyphSet()) if st.componentGlyphs is not None else frozenset(), ) return _GDEFGlyphClasses(None, None, None, None)
{ "content_hash": "568db5a792d371e9286d2ee0202d139f", "timestamp": "", "source": "github", "line_count": 202, "max_line_length": 86, "avg_line_length": 35.87128712871287, "alnum_prop": 0.6577422025945349, "repo_name": "jamesgk/ufo2ft", "id": "f51c6247fd773e6f00c18988e892f08119bc7ebf", "size": "7246", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Lib/ufo2ft/featureWriters/ast.py", "mode": "33188", "license": "mit", "language": [ { "name": "Python", "bytes": "77326" } ], "symlink_target": "" }
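The helper module above ships without usage examples, so here is a small, hedged sketch of two of its pure helpers. It assumes ufo2ft (and its fontTools dependency) is installed so that the import shown in the module comment resolves; the group and glyph names are invented, and the outputs noted in the comments follow from reading the code above rather than from an authoritative reference.

# Usage sketch for makeFeaClassName and makeGlyphClassDefinition defined above.
# Assumes ufo2ft and fontTools are importable; names are invented for illustration.
from ufo2ft.featureWriters import ast

# makeFeaClassName strips characters outside "A-Za-z0-9._" and appends a numeric
# suffix when the sanitized name is already taken.
existing = {"grek"}
print(ast.makeFeaClassName("kern1.A-grave", existing))  # "kern1.Agrave" ('-' stripped)
print(ast.makeFeaClassName("grek", existing))           # "grek_1" (clash avoided)

# makeGlyphClassDefinition wraps a list of glyph names in a feaLib class definition.
classDef = ast.makeGlyphClassDefinition("kern1.Agrave", ["A", "Agrave", "Aacute"])
print(classDef.asFea())  # roughly: @kern1.Agrave = [A Agrave Aacute];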
// imported .dlls
using Newtonsoft.Json;

namespace TwitchLibrary.Models.Clients.PubSub.Response
{
    public class PubSubResponse
    {
        [JsonProperty("type")]
        public string type { get; internal set; }

        [JsonProperty("nonce")]
        public string nonce { get; internal set; }

        [JsonProperty("error")]
        public string error { get; internal set; }
    }
}
{ "content_hash": "4dd44ec5287a688ebb6d90c241cb6fac", "timestamp": "", "source": "github", "line_count": 17, "max_line_length": 54, "avg_line_length": 23.294117647058822, "alnum_prop": 0.6237373737373737, "repo_name": "RokuHodo/Twitch-Library", "id": "4fd08a57d5a8b01cf4c42ad5b109b503e9366b67", "size": "398", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Models/Clients/PubSub/Response/PubSubResponse.cs", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C#", "bytes": "519604" } ], "symlink_target": "" }
#import <Foundation/Foundation.h>

@interface KYUser : NSObject

// Username
@property(nonatomic,copy)NSString *userName;
// Password
@property(nonatomic,copy)NSString *password;

+ (instancetype)shareUser;

@end
{ "content_hash": "b9df11cc46b15f60db2eed5095673627", "timestamp": "", "source": "github", "line_count": 11, "max_line_length": 44, "avg_line_length": 15.272727272727273, "alnum_prop": 0.75, "repo_name": "stonesMonkey/kuaiYi", "id": "a80f126f99ba48029d7a0a8bfb077d055c9ac4a0", "size": "333", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "KuaiYiSuper/KuaiYiSuper/Classes/Main/Login/Model/KYUser.h", "mode": "33188", "license": "mit", "language": [ { "name": "Objective-C", "bytes": "1040671" }, { "name": "Ruby", "bytes": "357" }, { "name": "Shell", "bytes": "8303" } ], "symlink_target": "" }
<?xml version="1.0" encoding="UTF-8"?> <configuration> <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender"> <encoder> <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern> </encoder> </appender> <appender name="TEST_LOG" class="ch.qos.logback.core.FileAppender"> <file>target/test.log</file> <encoder> <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern> </encoder> </appender> <root level="WARN"> <appender-ref ref="TEST_LOG"/> </root> <logger name="JOB_MESSAGES" level="TRACE"> <appender-ref ref="STDOUT"/> </logger> </configuration>
{ "content_hash": "c2754d63fb772853bb6e2880f8a95be7", "timestamp": "", "source": "github", "line_count": 25, "max_line_length": 81, "avg_line_length": 26.76, "alnum_prop": 0.6158445440956651, "repo_name": "daisy/pipeline-issues", "id": "13076c43d6a53bf0367f242f3423fcb88c323427", "size": "669", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "modules/braille/liblouis-utils/src/test/resources/logback.xml", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
""" The classes in this module provide a property-like interface to widget instance variables in a class. These properties translate essential pieces of widget state into more convenient python objects (for example, the check state of a button to a bool). Example Use:: class Foo(object): bar = ButtonProperty('_button') def __init__(self): self._button = QtGui.QCheckBox() f = Foo() f.bar = True # equivalent to f._button.setChecked(True) assert f.bar == True """ from .qtutil import pretty_number class WidgetProperty(object): """ Base class for widget properties Subclasses implement, at a minimum, the "get" and "set" methods, which translate between widget states and python variables """ def __init__(self, att): """ :param att: The location, within a class instance, of the widget to wrap around. If the widget is nested inside another variable, normal '.' syntax can be used (e.g. 'sub_window.button') :type att: str""" self._att = att.split('.') def __get__(self, instance, type=None): widget = reduce(getattr, [instance] + self._att) return self.getter(widget) def __set__(self, instance, value): widget = reduce(getattr, [instance] + self._att) self.setter(widget, value) def getter(self, widget): """ Return the state of a widget. Depends on type of widget, and must be overridden""" raise NotImplementedError() def setter(self, widget, value): """ Set the state of a widget to a certain value""" raise NotImplementedError() class ButtonProperty(WidgetProperty): """Wrapper around the check state for QAbstractButton widgets""" def getter(self, widget): return widget.isChecked() def setter(self, widget, value): widget.setChecked(value) class FloatLineProperty(WidgetProperty): """Wrapper around the text state for QLineEdit widgets. Assumes that the text is a floating point number """ def getter(self, widget): try: return float(widget.text()) except ValueError: return 0 def setter(self, widget, value): widget.setText(pretty_number(value)) widget.editingFinished.emit()
{ "content_hash": "621f0d31c222fcf63b9965941649040b", "timestamp": "", "source": "github", "line_count": 79, "max_line_length": 72, "avg_line_length": 29.27848101265823, "alnum_prop": 0.6437527021184609, "repo_name": "glue-viz/glue-qt", "id": "df4c4e21e6653ecf6e5578be7048cfb1cc7b41d5", "size": "2313", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "glue/qt/widget_properties.py", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "4040" }, { "name": "Python", "bytes": "2472826" }, { "name": "Shell", "bytes": "63" } ], "symlink_target": "" }
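The widget_properties module above notes that a dotted attribute path such as 'sub_window.button' can reach a nested widget, but its docstring example only covers the flat case. The sketch below demonstrates the same descriptor pattern without Qt; FakeCheckBox, SubWindow and Dialog are invented stand-ins, and functools.reduce is imported explicitly so the snippet also runs outside Python 2.

# Qt-free sketch of the WidgetProperty/ButtonProperty pattern above.
# FakeCheckBox is an invented stand-in for QtGui.QCheckBox.
from functools import reduce


class FakeCheckBox(object):
    """Stand-in widget exposing the same check-state API as QCheckBox."""

    def __init__(self):
        self._checked = False

    def isChecked(self):
        return self._checked

    def setChecked(self, value):
        self._checked = bool(value)


class ButtonProperty(object):
    """Same descriptor logic as above, repeated to keep the sketch self-contained."""

    def __init__(self, att):
        self._att = att.split('.')

    def _widget(self, instance):
        # Walk the dotted attribute path, e.g. 'sub_window.button'
        return reduce(getattr, [instance] + self._att)

    def __get__(self, instance, type=None):
        return self._widget(instance).isChecked()

    def __set__(self, instance, value):
        self._widget(instance).setChecked(value)


class SubWindow(object):
    def __init__(self):
        self.button = FakeCheckBox()


class Dialog(object):
    # Nested widget reached through a dotted path, as the docstring above describes
    confirm = ButtonProperty('sub_window.button')

    def __init__(self):
        self.sub_window = SubWindow()


d = Dialog()
d.confirm = True           # equivalent to d.sub_window.button.setChecked(True)
assert d.confirm is True   # reads back through isChecked()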
require File.expand_path('../../test_helper', __FILE__) begin require 'oauth/request_proxy/rest_client_request' require 'rest-client' class RestlClientRequestProxyTest < Minitest::Test def test_that_proxy_simple_get_request_works request = ::RestClient::Request.new(method: :get, url: "http://example.com/test?key=value") request_proxy = OAuth::RequestProxy.proxy(request, {:uri => 'http://example.com/test?key=value'}) expected_parameters = {'key' => ['value']} assert_equal expected_parameters, request_proxy.parameters_for_signature assert_equal 'http://example.com/test', request_proxy.normalized_uri assert_equal 'GET', request_proxy.method end def test_that_proxy_simple_post_request_works_with_arguments request = ::RestClient::Request.new(method: :post, url: "http://example.com/test") params = {'key' => 'value'} request_proxy = OAuth::RequestProxy.proxy(request, {:uri => 'http://example.com/test', :parameters => params}) expected_parameters = {'key' => 'value'} assert_equal expected_parameters, request_proxy.parameters_for_signature assert_equal 'http://example.com/test', request_proxy.normalized_uri assert_equal 'POST', request_proxy.method end def test_that_proxy_simple_post_request_works_with_form_data request = ::RestClient::Request.new(method: :post, url: "http://example.com/test", payload: {'key' => 'value'}, headers: {'Content-Type' => 'application/x-www-form-urlencoded'}) request_proxy = OAuth::RequestProxy.proxy(request, {:uri => 'http://example.com/test'}) expected_parameters = {'key' => 'value'} assert_equal expected_parameters, request_proxy.parameters_for_signature assert_equal 'http://example.com/test', request_proxy.normalized_uri assert_equal 'POST', request_proxy.method end def test_that_proxy_simple_put_request_works_with_arguments request = ::RestClient::Request.new(method: :put, url: "http://example.com/test") params = {'key' => 'value'} request_proxy = OAuth::RequestProxy.proxy(request, {:uri => 'http://example.com/test', :parameters => params}) expected_parameters = {'key' => 'value'} assert_equal expected_parameters, request_proxy.parameters_for_signature assert_equal 'http://example.com/test', request_proxy.normalized_uri assert_equal 'PUT', request_proxy.method end def test_that_proxy_simple_put_request_works_with_form_data request = ::RestClient::Request.new(method: :put, url: "http://example.com/test", payload: {'key' => 'value'}, headers: {'Content-Type' => 'application/x-www-form-urlencoded'}) request_proxy = OAuth::RequestProxy.proxy(request, {:uri => 'http://example.com/test'}) expected_parameters = {'key' => 'value'} assert_equal expected_parameters, request_proxy.parameters_for_signature assert_equal 'http://example.com/test', request_proxy.normalized_uri assert_equal 'PUT', request_proxy.method end def test_that_proxy_post_request_works_with_mixed_parameter_sources request = ::RestClient::Request.new(url: 'http://example.com/test?key=value', method: :post, payload: {'key2' => 'value2'}, headers: {'Content-Type' => 'application/x-www-form-urlencoded'}) request_proxy = OAuth::RequestProxy.proxy(request, {:uri => 'http://example.com/test?key=value', :parameters => {'key3' => 'value3'}}) expected_parameters = {'key' => ['value'], 'key2' => 'value2', 'key3' => 'value3'} assert_equal expected_parameters, request_proxy.parameters_for_signature assert_equal 'http://example.com/test', request_proxy.normalized_uri assert_equal 'POST', request_proxy.method end end rescue LoadError => e warn "! problem loading rest-client, skipping these tests: #{e}" end
{ "content_hash": "23f6185e63fd83ee6291dae331036f4c", "timestamp": "", "source": "github", "line_count": 81, "max_line_length": 140, "avg_line_length": 48.123456790123456, "alnum_prop": 0.6703437660338635, "repo_name": "galois17/oauth-ruby", "id": "45115baafa5770d54fadb7f78a1d721925ec3647", "size": "3898", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "test/units/test_rest_client_request_proxy.rb", "mode": "33188", "license": "mit", "language": [ { "name": "Ruby", "bytes": "186143" } ], "symlink_target": "" }
template <class CK> void _test_circle_construct(CK ck) { typedef CGAL::Circle_2<CK> Circle_2; typedef CGAL::Circular_arc_2<CK> Circular_arc_2; typedef CGAL::Point_2<CK> Point_2; typedef CGAL::Line_2<CK> Line_2; typedef CGAL::Line_arc_2<CK> Line_arc_2; typedef CGAL::Circular_arc_point_2<CK> Circular_arc_point_2; typedef typename CK::RT RT; typedef typename CK::FT FT; typedef typename CK::Construct_circle_2 Construct_circle_2; typedef typename CK::Intersect_2 Intersect_2; typedef typename CK::Make_x_monotone_2 Make_x_monotone_2; typedef typename CK::Make_xy_monotone_2 Make_xy_monotone_2; typedef typename CK::Split_2 Split_2; typedef typename CK::Get_equation Get_equation; typedef typename CK::Compare_xy_2 Compare_xy_2; typedef typename CK::Do_intersect_2 Do_intersect_2; //fix warnings with g++-4.8 [-Wunused-local-typedefs] CGAL_USE_TYPE(Construct_circle_2); CGAL_USE_TYPE(Get_equation); CGAL::Random generatorOfgenerator; int random_seed = generatorOfgenerator.get_int(0, 123456); std::cout << "random_seed = " << random_seed << std::endl; CGAL::Random theRandom(random_seed); int random_max = 127; int random_min = -127; FT sqrt2 = std::sqrt(2.0)/2; //test of get_equation_object() int x_equation = theRandom.get_int(random_min,random_max); int y_equation = theRandom.get_int(random_min,random_max); int r_equation = theRandom.get_int(1,random_max); Point_2 center_circ_equation(x_equation,y_equation); Circle_2 circ_equation(center_circ_equation, r_equation); std::cout << "the circle used by the equation :" << circ_equation << std::endl; std::cout << "testing {x,y}_extremal_points" << std::endl; for(int i=0; i<20; i++) { int x1 = theRandom.get_int(random_min,random_max); int y1 = theRandom.get_int(random_min,random_max); int x2 = theRandom.get_int(random_min,random_max); int y2 = theRandom.get_int(random_min,random_max); int x3 = theRandom.get_int(random_min,random_max); int y3 = theRandom.get_int(random_min,random_max); if(x1 == x2 && y1 == y2) continue; if(x1 == x3 && y1 == y3) continue; if(x2 == x3 && y2 == y3) continue; if(CGAL::collinear(Point_2(x1,y1), Point_2(x2,y2), Point_2(x3,y3))) continue; Circular_arc_2 ca(Point_2(x1,y1), Point_2(x2,y2), Point_2(x3,y3)); Circle_2 c = ca.supporting_circle(); Circular_arc_point_2 cp_x_min = x_extremal_point(c, true); Circular_arc_point_2 cp_x_max = x_extremal_point(c, false); Circular_arc_point_2 cp_y_min = y_extremal_point(c, true); Circular_arc_point_2 cp_y_max = y_extremal_point(c, false); assert(CGAL_NTS square(cp_x_min.x() - c.center().x()) == c.squared_radius()); assert(CGAL_NTS square(cp_x_max.x() - c.center().x()) == c.squared_radius()); assert(CGAL_NTS square(cp_y_min.y() - c.center().y()) == c.squared_radius()); assert(CGAL_NTS square(cp_y_max.y() - c.center().y()) == c.squared_radius()); assert(cp_x_min.x() < cp_x_max.x()); assert(cp_y_min.y() < cp_y_max.y()); } //Constuct_intersections_2 with 2 intersection's points std::cout << std::endl << "construct_intersection_2" << std::endl; Do_intersect_2 theDo_intersect_2 = ck.do_intersect_2_object(); Intersect_2 theConstruct_intersect_2 = ck.intersect_2_object(); int center_circ_intersection_2_1_x = theRandom.get_int(random_min, random_max); int center_circ_intersection_2_1_y = theRandom.get_int(random_min, random_max); int circ_intersection_2_1_r = theRandom.get_int(1, random_max); Point_2 center_circ_intersections_2_1(center_circ_intersection_2_1_x, center_circ_intersection_2_1_y); Circle_2 circ_intersections_2_1(center_circ_intersections_2_1, circ_intersection_2_1_r * circ_intersection_2_1_r); 
Point_2 center_circ_intersections_2_2(center_circ_intersection_2_1_x + circ_intersection_2_1_r, center_circ_intersection_2_1_y); Circle_2 circ_intersections_2_2(center_circ_intersections_2_2, circ_intersection_2_1_r); std::vector< CGAL::Object > vector_for_intersection_1, vector_for_intersection_1l; theConstruct_intersect_2(circ_intersections_2_1, circ_intersections_2_2, std::back_inserter(vector_for_intersection_1)); intersection(circ_intersections_2_1, circ_intersections_2_2, std::back_inserter(vector_for_intersection_1l)); // there are 2 intersection's points assert(theDo_intersect_2(circ_intersections_2_1, circ_intersections_2_1)); assert(do_intersect(circ_intersections_2_1, circ_intersections_2_1)); std::pair<Circular_arc_point_2, unsigned > the_pair; assert(assign(the_pair, vector_for_intersection_1[0])); assert(assign(the_pair, vector_for_intersection_1l[0])); Circular_arc_point_2 first = the_pair.first; std::cout << first << std::endl; assert(assign(the_pair, vector_for_intersection_1[1])); assert(assign(the_pair, vector_for_intersection_1l[1])); Circular_arc_point_2 second = the_pair.first; std::cout << second << std::endl; Compare_xy_2 theCompare_xy_2 = ck.compare_xy_2_object(); assert(theCompare_xy_2(first, second) == CGAL::SMALLER); //Constuct_intersections_2 with 1 intersection's point Point_2 center_circ_intersections_2_3(center_circ_intersection_2_1_x + 2 * circ_intersection_2_1_r, center_circ_intersection_2_1_y); Circle_2 circ_intersections_2_3(center_circ_intersections_2_3, circ_intersection_2_1_r * circ_intersection_2_1_r); Circular_arc_point_2 the_intersection_point_1 = CGAL::circle_intersect<CK>(circ_intersections_2_1, circ_intersections_2_3, true); Circular_arc_point_2 the_intersection_point_2 = CGAL::circle_intersect<CK>(circ_intersections_2_1, circ_intersections_2_3, false); std::vector< CGAL::Object > vector_for_intersection_2; theConstruct_intersect_2(circ_intersections_2_1, circ_intersections_2_3, std::back_inserter(vector_for_intersection_2)); assert(theDo_intersect_2(circ_intersections_2_1, circ_intersections_2_3)); assert(vector_for_intersection_2.size() == 1); assign(the_pair, vector_for_intersection_2[0]); assert(the_pair.first == the_intersection_point_1); assert(the_pair.first == the_intersection_point_2); Point_2 center_circ_intersections_2_3_bis(center_circ_intersection_2_1_x + circ_intersection_2_1_r, center_circ_intersection_2_1_y); Circle_2 circ_intersections_2_3_bis(center_circ_intersections_2_3_bis, circ_intersection_2_1_r * circ_intersection_2_1_r * 4); std::vector< CGAL::Object > vector_for_intersection_2_bis; theConstruct_intersect_2(circ_intersections_2_1, circ_intersections_2_3_bis, std::back_inserter(vector_for_intersection_2_bis)); assert(theDo_intersect_2(circ_intersections_2_1, circ_intersections_2_3_bis)); assert(vector_for_intersection_2_bis.size() == 1); assign(the_pair, vector_for_intersection_2_bis[0]); assert(the_pair.second == 2u); //With circular arc Point_2 center_circ_intersections_2_1_low(center_circ_intersection_2_1_x, center_circ_intersection_2_1_y - circ_intersection_2_1_r); Circle_2 circ_intersections_2_1_low(center_circ_intersections_2_1_low, circ_intersection_2_1_r * circ_intersection_2_1_r); Line_2 line_horizontal_circ_2_1_low(center_circ_intersections_2_1_low, Point_2(center_circ_intersection_2_1_x - circ_intersection_2_1_r, center_circ_intersection_2_1_y - circ_intersection_2_1_r)); Circular_arc_2 circ_arc_2_1_low_part_high(circ_intersections_2_1_low, line_horizontal_circ_2_1_low, false, line_horizontal_circ_2_1_low, 
true); Line_2 line_horizontal(center_circ_intersections_2_1, Point_2(center_circ_intersection_2_1_x - circ_intersection_2_1_r, center_circ_intersection_2_1_y)); Circular_arc_2 circ_arc_2_1_part_low(circ_intersections_2_1, line_horizontal, true, line_horizontal, false); //////////////if(circ_arc_2_1_low_part_high.center() == ////////////// circ_arc_2_1_part_low.center()) { ////////////// std::cout << "OH NO!" << std::endl; //////////////} else std::cout << "OK" << std::endl; std::vector< CGAL::Object > vector_for_intersection_3; theConstruct_intersect_2(circ_arc_2_1_part_low, circ_arc_2_1_low_part_high, std::back_inserter(vector_for_intersection_3)); assert(theDo_intersect_2(circ_arc_2_1_part_low, circ_arc_2_1_low_part_high)); /////////////std::cout << "The size: " << vector_for_intersection_3.size() << std::endl; assert(vector_for_intersection_3.size() == 2); assign(the_pair, vector_for_intersection_3[0]); assert(the_pair.second == 1u); assert(the_pair.first == CGAL::circle_intersect<CK>(circ_intersections_2_1, circ_intersections_2_1_low, true)); assign(the_pair, vector_for_intersection_3[1]); assert(the_pair.second == 1u); assert(the_pair.first == CGAL::circle_intersect<CK>(circ_intersections_2_1, circ_intersections_2_1_low, false)); std::cout << "intersection with overlap arc_circle" << std::endl; Point_2 point_arc_overlap_left(center_circ_intersection_2_1_x - circ_intersection_2_1_r, center_circ_intersection_2_1_y); Point_2 point_arc_overlap_low_right(center_circ_intersection_2_1_x + circ_intersection_2_1_r , center_circ_intersection_2_1_y - circ_intersection_2_1_r); Point_2 point_arc_overlap_low_left(center_circ_intersection_2_1_x - circ_intersection_2_1_r , center_circ_intersection_2_1_y - circ_intersection_2_1_r); Line_2 line_arc_overlap_horizontal(center_circ_intersections_2_1, point_arc_overlap_left); Line_2 line_arc_overlap_low_right(center_circ_intersections_2_1, point_arc_overlap_low_right); Line_2 line_arc_overlap_low_left(center_circ_intersections_2_1, point_arc_overlap_low_left); //circ_arc_overlap_1 and circ_arc_overlap_2 are overlap on a circular_arc Circular_arc_2 circ_arc_overlap_1(circ_intersections_2_1, line_arc_overlap_horizontal, true, line_arc_overlap_low_right, false); Circular_arc_2 circ_arc_overlap_2(circ_intersections_2_1, line_arc_overlap_low_left, true, line_arc_overlap_horizontal, false); //circ_arc_overlap_1 and circ_arc_overlap_3 are overlap in one point Circular_arc_2 circ_arc_overlap_3(circ_intersections_2_1, line_arc_overlap_low_right, false, line_arc_overlap_horizontal, false); Circular_arc_2 circ_arc_overlap_upper_part(circ_intersections_2_1, line_arc_overlap_horizontal, false, line_arc_overlap_horizontal, true); Circular_arc_2 circ_arc_overlap_lower_part(circ_intersections_2_1, line_arc_overlap_horizontal, true, line_arc_overlap_horizontal, false); assert(circ_arc_overlap_1.is_x_monotone() && circ_arc_overlap_2.is_x_monotone() && circ_arc_overlap_3.is_x_monotone() && circ_arc_overlap_upper_part.is_x_monotone() && circ_arc_overlap_lower_part.is_x_monotone() ); std::cout << "Intersection : same circular arc" << std::endl; std::vector< CGAL::Object > vector_for_intersection_the_same_arc; theConstruct_intersect_2(circ_arc_overlap_1, circ_arc_overlap_1, std::back_inserter(vector_for_intersection_the_same_arc)); assert(theDo_intersect_2(circ_arc_overlap_1, circ_arc_overlap_1)); assert(vector_for_intersection_the_same_arc.size() == 1); Circular_arc_2 res_same; assert(assign(res_same, vector_for_intersection_the_same_arc[0])); assert(res_same.source() == 
circ_arc_overlap_1.source()); assert(res_same.target() == circ_arc_overlap_1.target()); std::cout << "Intersection : overlap on a circular arc" << std::endl; Circular_arc_2 circ_arc_in_overlap; Circular_arc_2 circ_arc_in_overlap_2; std::vector< CGAL::Object > vector_for_intersection_overlap_1_1; theConstruct_intersect_2(circ_arc_overlap_2, circ_arc_overlap_1, std::back_inserter(vector_for_intersection_overlap_1_1)); assert(theDo_intersect_2(circ_arc_overlap_2, circ_arc_overlap_1)); Circular_arc_2 circ_arc_overlap_result(circ_intersections_2_1, line_arc_overlap_low_left, true, line_arc_overlap_low_right, false); assert(vector_for_intersection_overlap_1_1.size() == 1); assign(circ_arc_in_overlap, vector_for_intersection_overlap_1_1[0]); assert(circ_arc_in_overlap.source() == circ_arc_overlap_result.source()); assert(circ_arc_in_overlap.target() == circ_arc_overlap_result.target()); std::vector< CGAL::Object > vector_for_intersection_overlap_1_2; theConstruct_intersect_2(circ_arc_overlap_2, circ_arc_overlap_1, std::back_inserter(vector_for_intersection_overlap_1_2)); assert(theDo_intersect_2(circ_arc_overlap_2, circ_arc_overlap_1)); assert(vector_for_intersection_overlap_1_2.size() == 1); assign(circ_arc_in_overlap, vector_for_intersection_overlap_1_2[0]); assert(circ_arc_in_overlap.source() == circ_arc_overlap_result.source()); assert(circ_arc_in_overlap.target() == circ_arc_overlap_result.target()); std::cout << "Intersection : overlap in one point" << std::endl; std::vector< CGAL::Object > vector_for_intersection_overlap_2_1; theConstruct_intersect_2(circ_arc_overlap_1, circ_arc_overlap_3, std::back_inserter(vector_for_intersection_overlap_2_1)); assert(theDo_intersect_2(circ_arc_overlap_1, circ_arc_overlap_3)); assert(vector_for_intersection_overlap_2_1.size() == 1); assign(the_pair, vector_for_intersection_overlap_2_1[0]); std::cout << "x = " << the_pair.first.x() << " the result must be = " << center_circ_intersection_2_1_x + circ_intersection_2_1_r * sqrt2 << std::endl; assert(the_pair.first.x() * (center_circ_intersection_2_1_x + circ_intersection_2_1_r * sqrt2) >= 0); assert(square(the_pair.first.x() - RT(center_circ_intersection_2_1_x)) == (circ_intersection_2_1_r * circ_intersection_2_1_r / typename CK::RT(2))); std::cout << "y = " << the_pair.first.y() << " the result must be = " << center_circ_intersection_2_1_y - circ_intersection_2_1_r * sqrt2 << std::endl; assert(the_pair.first.y() * (center_circ_intersection_2_1_y - circ_intersection_2_1_r * sqrt2) >= 0); assert(square(the_pair.first.y() - RT(center_circ_intersection_2_1_y)) == (circ_intersection_2_1_r * circ_intersection_2_1_r / typename CK::RT(2))); std::vector< CGAL::Object > vector_for_intersection_overlap_2_2; theConstruct_intersect_2(circ_arc_overlap_3, circ_arc_overlap_1, std::back_inserter(vector_for_intersection_overlap_2_2)); assert(theDo_intersect_2(circ_arc_overlap_3, circ_arc_overlap_1)); assert(vector_for_intersection_overlap_2_2.size() == 1); assign(the_pair, vector_for_intersection_overlap_2_2[0]); std::cout << "x = " << the_pair.first.x() << " the result must be = " << center_circ_intersection_2_1_x + circ_intersection_2_1_r * sqrt2 << std::endl; assert(the_pair.first.x() * (center_circ_intersection_2_1_x + circ_intersection_2_1_r * sqrt2) >= 0); assert(square(the_pair.first.x() - RT(center_circ_intersection_2_1_x)) == (circ_intersection_2_1_r * circ_intersection_2_1_r / typename CK::RT(2))); std::cout << "y = " << the_pair.first.y() << " the result must be = " << center_circ_intersection_2_1_y - 
circ_intersection_2_1_r * sqrt2 << std::endl; assert(the_pair.first.y() * (center_circ_intersection_2_1_y - circ_intersection_2_1_r * sqrt2) >= 0); assert(square(the_pair.first.y() - RT(center_circ_intersection_2_1_y)) == (circ_intersection_2_1_r * circ_intersection_2_1_r / typename CK::RT(2))); std::cout << "Intersection : overlap in two points: " << "lower_part_arc , upper_part_arc" << std::endl; std::vector< CGAL::Object > vector_for_intersection_overlap_3_1; theConstruct_intersect_2(circ_arc_overlap_upper_part, circ_arc_overlap_lower_part, std::back_inserter(vector_for_intersection_overlap_3_1)); assert(theDo_intersect_2(circ_arc_overlap_upper_part, circ_arc_overlap_lower_part)); assert(vector_for_intersection_overlap_3_1.size() == 2); assign(the_pair, vector_for_intersection_overlap_3_1[0]); assert(the_pair.first == circ_arc_overlap_lower_part.source()); //assert(the_pair.first.is_left()); assign(the_pair, vector_for_intersection_overlap_3_1[1]); assert(the_pair.first == circ_arc_overlap_lower_part.target()); //assert(!the_pair.first.is_left()); std::vector< CGAL::Object > vector_for_intersection_overlap_3_2; theConstruct_intersect_2(circ_arc_overlap_lower_part, circ_arc_overlap_upper_part, std::back_inserter(vector_for_intersection_overlap_3_2)); assert(theDo_intersect_2(circ_arc_overlap_lower_part, circ_arc_overlap_upper_part)); assert(vector_for_intersection_overlap_3_2.size() == 2); assign(the_pair, vector_for_intersection_overlap_3_2[0]); assert(the_pair.first == circ_arc_overlap_lower_part.source()); //assert(the_pair.first.is_left()); assign(the_pair, vector_for_intersection_overlap_3_2[1]); assert(the_pair.first == circ_arc_overlap_lower_part.target()); //assert(!the_pair.first.is_left()); //Intersection with 2 Circular_arc no x_monotone std::cout << "Intersection on two points of 2 Circular_arc no x_monotone" << std::endl; Circular_arc_2 circ_arc_no_x_monotone_1(circ_intersections_2_1, line_arc_overlap_low_right, true, line_arc_overlap_low_right, false); Point_2 center_circ_intersections_2_4(center_circ_intersection_2_1_x - circ_intersection_2_1_r, center_circ_intersection_2_1_y - circ_intersection_2_1_r); Circle_2 circ_intersections_2_4(center_circ_intersections_2_4, circ_intersection_2_1_r * circ_intersection_2_1_r); std::vector< CGAL::Object > vector_for_intersection_no_x_monotone_1_1; theConstruct_intersect_2(circ_arc_no_x_monotone_1, circ_intersections_2_4, std::back_inserter(vector_for_intersection_no_x_monotone_1_1)); assert(theDo_intersect_2(circ_arc_no_x_monotone_1, circ_intersections_2_4)); assert(vector_for_intersection_no_x_monotone_1_1.size() == 2); assert(assign(the_pair, vector_for_intersection_no_x_monotone_1_1[0])); assert(the_pair.first == CGAL::circle_intersect<CK>(circ_intersections_2_1, circ_intersections_2_4, true)); assert(the_pair.second == 1u); //assert(the_pair.first.is_left()); assert(assign(the_pair, vector_for_intersection_no_x_monotone_1_1[1])); assert(the_pair.first == CGAL::circle_intersect<CK>(circ_intersections_2_1, circ_intersections_2_4, false)); assert(the_pair.second == 1u); //assert(!the_pair.first.is_left()); std::cout << "Intersection on one points no tangent of 2 Circular_arc no x_monotone" << std::endl; Point_2 center_circ_intersections_2_5(center_circ_intersection_2_1_x - circ_intersection_2_1_r, center_circ_intersection_2_1_y ); Circle_2 circ_intersections_2_5(center_circ_intersections_2_5, circ_intersection_2_1_r * circ_intersection_2_1_r); std::vector< CGAL::Object > vector_for_intersection_no_x_monotone_1_2; 
theConstruct_intersect_2(circ_arc_no_x_monotone_1, circ_intersections_2_5, std::back_inserter(vector_for_intersection_no_x_monotone_1_2)); assert(theDo_intersect_2(circ_arc_no_x_monotone_1, circ_intersections_2_5)); assert(vector_for_intersection_no_x_monotone_1_2.size() == 1); assert(assign(the_pair, vector_for_intersection_no_x_monotone_1_2[0])); assert(the_pair.first == CGAL::circle_intersect<CK>(circ_intersections_2_1, circ_intersections_2_5, true)); assert(the_pair.second == 1u); //assert(the_pair.first.is_left()); std::cout << "Intersection on one points tangent of 2 Circular_arc no x_monotone" << std::endl; Point_2 center_circ_intersections_2_6(center_circ_intersection_2_1_x , center_circ_intersection_2_1_y - 2 * circ_intersection_2_1_r ); Circle_2 circ_intersections_2_6(center_circ_intersections_2_6, circ_intersection_2_1_r * circ_intersection_2_1_r); std::vector< CGAL::Object > vector_for_intersection_no_x_monotone_1_3; theConstruct_intersect_2(circ_arc_no_x_monotone_1, circ_intersections_2_6, std::back_inserter(vector_for_intersection_no_x_monotone_1_3)); assert(theDo_intersect_2(circ_arc_no_x_monotone_1, circ_intersections_2_6)); assert(vector_for_intersection_no_x_monotone_1_3.size() == 1); assert(assign(the_pair, vector_for_intersection_no_x_monotone_1_3[0])); assert(the_pair.first == CGAL::circle_intersect<CK>(circ_intersections_2_1, circ_intersections_2_6, true)); assert(the_pair.second == 2u); Point_2 center_circ_intersections_2_7(center_circ_intersection_2_1_x - 2 * circ_intersection_2_1_r, center_circ_intersection_2_1_y); Circle_2 circ_intersections_2_7(center_circ_intersections_2_7, circ_intersection_2_1_r * circ_intersection_2_1_r); std::vector< CGAL::Object > vector_for_intersection_no_x_monotone_1_4; theConstruct_intersect_2(circ_arc_no_x_monotone_1, circ_intersections_2_7, std::back_inserter(vector_for_intersection_no_x_monotone_1_4)); assert(theDo_intersect_2(circ_arc_no_x_monotone_1, circ_intersections_2_7)); assert(vector_for_intersection_no_x_monotone_1_4.size() == 1); assert(assign(the_pair, vector_for_intersection_no_x_monotone_1_4[0])); assert(the_pair.first == CGAL::circle_intersect<CK>(circ_intersections_2_1, circ_intersections_2_7, true)); assert(the_pair.second == 2u); Point_2 center_circ_intersections_2_8(center_circ_intersection_2_1_x + circ_intersection_2_1_r, center_circ_intersection_2_1_y); Circle_2 circ_intersections_2_8(center_circ_intersections_2_8, circ_intersection_2_1_r * circ_intersection_2_1_r * 4); std::vector< CGAL::Object > vector_for_intersection_no_x_monotone_1_5; theConstruct_intersect_2(circ_arc_no_x_monotone_1, circ_intersections_2_8, std::back_inserter(vector_for_intersection_no_x_monotone_1_5)); assert(theDo_intersect_2(circ_arc_no_x_monotone_1, circ_intersections_2_8)); assert(vector_for_intersection_no_x_monotone_1_5.size() == 1); assert(assign(the_pair, vector_for_intersection_no_x_monotone_1_5[0])); assert(the_pair.first == CGAL::circle_intersect<CK>(circ_intersections_2_1, circ_intersections_2_8, true)); assert(the_pair.second == 2u); Point_2 center_circ_intersections_2_9(center_circ_intersection_2_1_x, center_circ_intersection_2_1_y + circ_intersection_2_1_r); Circle_2 circ_intersections_2_9(center_circ_intersections_2_9, circ_intersection_2_1_r * circ_intersection_2_1_r * 4); std::vector< CGAL::Object > vector_for_intersection_no_x_monotone_1_6; theConstruct_intersect_2(circ_arc_no_x_monotone_1, circ_intersections_2_9, std::back_inserter(vector_for_intersection_no_x_monotone_1_6)); 
assert(theDo_intersect_2(circ_arc_no_x_monotone_1, circ_intersections_2_9)); assert(vector_for_intersection_no_x_monotone_1_6.size() == 1); assert(assign(the_pair, vector_for_intersection_no_x_monotone_1_6[0])); assert(the_pair.first == CGAL::circle_intersect<CK>(circ_intersections_2_1, circ_intersections_2_9, true)); assert(the_pair.second == 2u); std::cout << "Intersection of 2 Circular_arc no x_monotone" << " : overlap on a circular arc in bottom " << std::endl; Circular_arc_2 circ_arc_no_x_monotone_2(circ_intersections_2_1, line_arc_overlap_low_left, true, line_arc_overlap_low_left, false); std::vector< CGAL::Object > vector_for_intersection_no_x_monotone_2_1; theConstruct_intersect_2(circ_arc_no_x_monotone_1, circ_arc_no_x_monotone_2, std::back_inserter(vector_for_intersection_no_x_monotone_2_1)); assert(theDo_intersect_2(circ_arc_no_x_monotone_1, circ_arc_no_x_monotone_2)); assert(vector_for_intersection_no_x_monotone_2_1.size() == 1); assert(assign(circ_arc_in_overlap, vector_for_intersection_no_x_monotone_2_1[0])); assert(circ_arc_in_overlap.is_x_monotone()); assert(circ_arc_in_overlap.source() == circ_arc_overlap_result.source()); assert(circ_arc_in_overlap.target() == circ_arc_overlap_result.target()); std::cout << "Intersection of 2 Circular_arc no x_monotone" << " : overlap on a circular arc at left " << std::endl; Circular_arc_2 circ_arc_no_x_monotone_3(circ_intersections_2_1, line_arc_overlap_low_left, false, line_arc_overlap_low_left, true); std::vector< CGAL::Object > vector_for_intersection_no_x_monotone_2_2; theConstruct_intersect_2(circ_arc_no_x_monotone_1, circ_arc_no_x_monotone_3, std::back_inserter(vector_for_intersection_no_x_monotone_2_2)); assert(theDo_intersect_2(circ_arc_no_x_monotone_1, circ_arc_no_x_monotone_3)); assert(vector_for_intersection_no_x_monotone_2_2.size() == 1); assert(assign(circ_arc_in_overlap, vector_for_intersection_no_x_monotone_2_2[0])); assert(circ_arc_in_overlap.source() == circ_arc_no_x_monotone_1.source()); assert(circ_arc_in_overlap.target() == circ_arc_no_x_monotone_3.target()); std::cout << "Intersection of 2 Circular_arc no x_monotone" << " : overlap on a circular arc in bottom " << "and one endpoint" << std::endl; Circular_arc_2 circ_arc_no_x_monotone_4(circ_intersections_2_1, line_arc_overlap_low_left, true, line_arc_overlap_low_right, true); std::vector< CGAL::Object > vector_for_intersection_no_x_monotone_2_3; theConstruct_intersect_2(circ_arc_no_x_monotone_1, circ_arc_no_x_monotone_4, std::back_inserter(vector_for_intersection_no_x_monotone_2_3)); assert(theDo_intersect_2(circ_arc_no_x_monotone_1, circ_arc_no_x_monotone_4)); std::cout << vector_for_intersection_no_x_monotone_2_3.size() << std::endl; std::cout << vector_for_intersection_no_x_monotone_2_3.size() << std::endl; assert(vector_for_intersection_no_x_monotone_2_3.size() == 2); assert(assign(circ_arc_in_overlap, vector_for_intersection_no_x_monotone_2_3[0])); assert(assign(the_pair, vector_for_intersection_no_x_monotone_2_3[1])); assert(circ_arc_in_overlap.is_x_monotone()); assert(circ_arc_in_overlap.source() == circ_arc_overlap_result.source()); assert(circ_arc_in_overlap.target() == circ_arc_overlap_result.target()); assert(the_pair.first == circ_arc_no_x_monotone_4.target()); assert(the_pair.second == 1u); std::cout << "Intersection of 2 Circular_arc no x_monotone" << ": overlap in two points" << std::endl; Circular_arc_2 circ_arc_no_x_monotone_5(circ_intersections_2_1, line_arc_overlap_low_right, true, line_arc_overlap_low_left, true); std::vector< CGAL::Object > 
vector_for_intersection_no_x_monotone_2_4; theConstruct_intersect_2(circ_arc_no_x_monotone_4, circ_arc_no_x_monotone_5, std::back_inserter(vector_for_intersection_no_x_monotone_2_4)); assert(theDo_intersect_2(circ_arc_no_x_monotone_4, circ_arc_no_x_monotone_5)); std::cout << vector_for_intersection_no_x_monotone_2_4.size() << std::endl; assert(vector_for_intersection_no_x_monotone_2_4.size() == 2); assert(assign(the_pair, vector_for_intersection_no_x_monotone_2_4[0])); assert(the_pair.first == circ_arc_no_x_monotone_5.target()); assert(the_pair.second == 1u); assert(assign(the_pair, vector_for_intersection_no_x_monotone_2_4[1])); assert(the_pair.first == circ_arc_no_x_monotone_5.source()); assert(the_pair.second == 1u); std::cout << "Intersection of 2 Circular_arc no x_monotone" << ": overlap in one points" << std::endl; Circular_arc_2 circ_arc_no_x_monotone_6(circ_intersections_2_1, line_arc_overlap_low_right, false, line_arc_overlap_low_right, true); std::vector< CGAL::Object > vector_for_intersection_no_x_monotone_2_5; theConstruct_intersect_2(circ_arc_no_x_monotone_6, circ_arc_no_x_monotone_5, std::back_inserter(vector_for_intersection_no_x_monotone_2_5)); assert(theDo_intersect_2(circ_arc_no_x_monotone_6, circ_arc_no_x_monotone_5)); std::cout << vector_for_intersection_no_x_monotone_2_5.size() << std::endl; assert(vector_for_intersection_no_x_monotone_2_5.size() == 1); assert(assign(the_pair, vector_for_intersection_no_x_monotone_2_5[0])); assert(the_pair.first == circ_arc_no_x_monotone_5.source()); assert(the_pair.second == 1u); std::cout << "Intersection of 2 Circular_arc no x_monotone" << " : overlap on 2 circular arcs " << std::endl; Circular_arc_2 circ_arc_no_x_monotone_7(circ_intersections_2_1, line_arc_overlap_low_left, false, line_arc_overlap_low_right, false); std::vector< CGAL::Object > vector_for_intersection_no_x_monotone_2_6; theConstruct_intersect_2(circ_arc_no_x_monotone_7, circ_arc_no_x_monotone_4, std::back_inserter(vector_for_intersection_no_x_monotone_2_6)); assert(theDo_intersect_2(circ_arc_no_x_monotone_7, circ_arc_no_x_monotone_4)); std::cout << vector_for_intersection_no_x_monotone_2_6.size() << std::endl; assert(vector_for_intersection_no_x_monotone_2_6.size() == 2); assign(circ_arc_in_overlap,vector_for_intersection_no_x_monotone_2_6[0]); assign(circ_arc_in_overlap_2,vector_for_intersection_no_x_monotone_2_6[1]); assert((circ_arc_in_overlap.source() == circ_arc_no_x_monotone_7.source() && circ_arc_in_overlap.target() == circ_arc_no_x_monotone_4.target()) || (circ_arc_in_overlap_2.source() == circ_arc_no_x_monotone_7.source() && circ_arc_in_overlap_2.target() == circ_arc_no_x_monotone_4.target())); std::cout << "source4 : " << std::endl << circ_arc_no_x_monotone_4.source() << std::endl << "target4 : " << std::endl << circ_arc_no_x_monotone_4.target() << std::endl << "source7 : " << std::endl << circ_arc_no_x_monotone_7.source() << std::endl << "target7 : " << std::endl << circ_arc_no_x_monotone_7.target() << std::endl; std::cout << "res source : " << std::endl << circ_arc_in_overlap.source() << std::endl << "res target : " << std::endl << circ_arc_in_overlap.target() << std::endl; assert(circ_arc_in_overlap.is_x_monotone()); std::cout << "res source : " << std::endl << circ_arc_in_overlap.source() << std::endl << "res target : " << std::endl << circ_arc_in_overlap.target() << std::endl; if(circ_arc_in_overlap.source() == circ_arc_no_x_monotone_7.source() && circ_arc_in_overlap.target() == circ_arc_no_x_monotone_4.target()) { 
assert(circ_arc_in_overlap_2.source() == circ_arc_no_x_monotone_4.source()); assert(circ_arc_in_overlap_2.target() == circ_arc_no_x_monotone_7.target()); } else { assert(circ_arc_in_overlap.source() == circ_arc_no_x_monotone_4.source()); assert(circ_arc_in_overlap.target() == circ_arc_no_x_monotone_7.target()); } //Make_x_monotone_2 with a full circle Make_x_monotone_2 theMake_x_monotone = ck.make_x_monotone_2_object(); int x = theRandom.get_int(random_min,random_max); int y = theRandom.get_int(random_min,random_max); int r = theRandom.get_int(1,random_max); Point_2 center_circ_monotone(x,y); Circle_2 circ_monotone(center_circ_monotone, r*r); Circular_arc_2 theCircular_arc_2_full(circ_monotone); std::vector< CGAL::Object > outputIterator1, outputIterator1l; theMake_x_monotone(theCircular_arc_2_full, std::back_inserter(outputIterator1)); make_x_monotone(theCircular_arc_2_full, std::back_inserter(outputIterator1l)); std::cout << std::endl; std::cout << "x_monotone full circle : " << circ_monotone << std::endl; Circular_arc_2 circular_arc_2_full, circular_arc_2_fulll; for(std::size_t i = 0; i < outputIterator1.size(); i++){ assign(circular_arc_2_full, outputIterator1[i]); assign(circular_arc_2_fulll, outputIterator1l[i]); std::cout << "Circular_arc_2_" << i << " : " << circular_arc_2_full << std::endl; std::cout << "Circular_arc_2_" << i << "source : " << circular_arc_2_full.source() << std::endl; std::cout << "Circular_arc_2_" << i << "target : " << circular_arc_2_full.target() << std::endl; assert(circular_arc_2_full.is_x_monotone()); assert(circular_arc_2_full == circular_arc_2_fulll); } //Make_xy_monotone_2 with a full circle Make_xy_monotone_2 theMake_xy_monotone = ck.make_xy_monotone_2_object(); outputIterator1.clear(); outputIterator1l.clear(); theMake_xy_monotone(theCircular_arc_2_full, std::back_inserter(outputIterator1)); theMake_xy_monotone(theCircular_arc_2_full, std::back_inserter(outputIterator1l)); assert(outputIterator1.size() == 4); for(std::size_t i = 0; i < outputIterator1.size(); i++){ assign(circular_arc_2_full, outputIterator1[i]); assign(circular_arc_2_fulll, outputIterator1l[i]); assert(circular_arc_2_full.is_x_monotone()); assert(circular_arc_2_full.is_y_monotone()); assert(circular_arc_2_full == circular_arc_2_fulll); } //Make_xy_monotone_2 general test Point_2 ps[8]; ps[0] = Point_2(-5, 0); ps[1] = Point_2(-3, -4); ps[2] = Point_2(0, -5); ps[3] = Point_2(3, -4); ps[4] = Point_2(5, 0); ps[5] = Point_2(3, 4); ps[6] = Point_2(0, 5); ps[7] = Point_2(-3, 4); Circle_2 tc = Circle_2(Point_2(0,0),25); unsigned isize[2][8]; isize[0][1] = 1; isize[0][2] = 1; isize[0][3] = 2; isize[0][4] = 2; isize[0][5] = 3; isize[0][6] = 3; isize[0][7] = 4; isize[1][2] = 1; isize[1][3] = 2; isize[1][4] = 2; isize[1][5] = 3; isize[1][6] = 3; isize[1][7] = 4; isize[1][0] = 4; for(int i=0; i<2; i++) { for(int j=i+1; j!=i; j = (j+1)%8) { Circular_arc_2 ca; ca = Circular_arc_2(tc, ps[i], ps[j]); outputIterator1.clear(); theMake_xy_monotone(ca, std::back_inserter(outputIterator1)); std::cout << "T: " << i << " " << j << std::endl; assert(outputIterator1.size() == isize[i][j]); for(std::size_t k = 0; k < outputIterator1.size(); k++) { assign(circular_arc_2_full, outputIterator1[k]); assert(circular_arc_2_full.is_x_monotone()); assert(circular_arc_2_full.is_y_monotone()); } } } //Make_x_monotone_2 with a three quarter of last circle Point_2 pointLine_2_1(x,y+r); Line_2 theLine_2_1(center_circ_monotone, pointLine_2_1); Point_2 pointLine_2_2(x+r,y); Line_2 theLine_2_2(center_circ_monotone, 
pointLine_2_2); Circular_arc_2 theCircular_arc_2_quarter(circ_monotone, theLine_2_1, true, theLine_2_2, true); std::vector< CGAL::Object > vector_of_object_1; theMake_x_monotone(theCircular_arc_2_quarter, std::back_inserter(vector_of_object_1)); std::cout << std::endl; std::cout << "x_monotone a three quarter of last circle: " << circ_monotone << std::endl; std::cout << vector_of_object_1.size() << std::endl; Circular_arc_2 circular_arc_2_quarter; for(std::size_t i = 0; i < vector_of_object_1.size(); i++){ assign(circular_arc_2_quarter, vector_of_object_1[i]); std::cout << "Circular_arc_2_" << i << " : " << circular_arc_2_quarter << std::endl; assert(circular_arc_2_quarter.is_x_monotone()); } //Make_x_monotone_2 with half circle Circular_arc_2 theCircular_arc_2_half(circ_monotone, theLine_2_2, false, theLine_2_2, true); std::vector< CGAL::Object > vector_of_object_1_half; theMake_x_monotone(theCircular_arc_2_half, std::back_inserter(vector_of_object_1_half)); std::cout << std::endl; std::cout << "x_monotone a half circle" << std::endl; assert(vector_of_object_1_half.size() == 1); assign(circular_arc_2_quarter, vector_of_object_1_half[0]); assert(circular_arc_2_quarter.is_x_monotone()); assert(circular_arc_2_quarter.source() == theCircular_arc_2_half.source()); assert(circular_arc_2_quarter.target() == theCircular_arc_2_half.target()); //Make_x_monotone_2 with a random circular arc int pointLine_2_3_x = theRandom.get_int(random_min,random_max); int pointLine_2_3_y = theRandom.get_int(random_min,random_max); while((pointLine_2_3_x == x) && (pointLine_2_3_y == y)){ if(pointLine_2_3_x == x) pointLine_2_3_x = theRandom.get_int(random_min,random_max); else pointLine_2_3_y = theRandom.get_int(random_min,random_max); } Point_2 pointLine_2_3(pointLine_2_3_x, pointLine_2_3_y); Line_2 theLine_2_3(center_circ_monotone, pointLine_2_3); int pointLine_2_4_x = theRandom.get_int(random_min,random_max); int pointLine_2_4_y = theRandom.get_int(random_min,random_max); while((pointLine_2_4_x == x) && (pointLine_2_4_y == y)){ if(pointLine_2_4_x == x) pointLine_2_4_x = theRandom.get_int(random_min,random_max); else pointLine_2_4_y = theRandom.get_int(random_min,random_max); } Point_2 pointLine_2_4(pointLine_2_4_x, pointLine_2_4_y); Line_2 theLine_2_4(center_circ_monotone, pointLine_2_4); Circular_arc_2 theCircular_arc_2_random(circ_monotone, theLine_2_3, true, theLine_2_4, true); std::vector< CGAL::Object > vector_of_object_2; theMake_x_monotone(theCircular_arc_2_random, std::back_inserter(vector_of_object_2)); std::cout << std::endl; std::cout << "x_monotone random circular arc: " << circ_monotone << std::endl; Circular_arc_2 circular_arc_2_random; for(std::size_t i = 0; i < vector_of_object_2.size(); i++){ assign(circular_arc_2_random, vector_of_object_2[i]); std::cout << "Circular_arc_2_" << i << " : " << circular_arc_2_random << std::endl; assert(circular_arc_2_random.is_x_monotone()); } std::cout << "Split_2_object " << std::endl; //we make the circle1 int center1_x = theRandom.get_int(random_min, random_max); int center1_y = theRandom.get_int(random_min, random_max); Point_2 center1(center1_x,center1_y); int circ1_r = theRandom.get_int(1, random_max); Circle_2 circ1(center1, circ1_r * circ1_r); Point_2 center1_low_right(center1_x + circ1_r, center1_y - circ1_r); Circle_2 circ1_low_right(center1_low_right, circ1_r * circ1_r); Point_2 center1_low_left(center1_x - circ1_r, center1_y - circ1_r); Circle_2 circ1_low_left(center1_low_left, circ1_r * circ1_r); Point_2 point_2_left(center1_x - circ1_r, center1_y); 
Line_2 theLine_2_horizontal(center1, point_2_left); //The circ1_arc_high and circ1_arc_low are x_monotone Circular_arc_2 circ1_arc_low(circ1, theLine_2_horizontal,true, theLine_2_horizontal, false); //p1 is lefter and lower than p2 Circular_arc_point_2 circ1_arc_end_p1 = CGAL::circle_intersect<CK>(circ1, circ1_low_right, true); Split_2 theSplit_2 = ck.split_2_object(); Circular_arc_2 circ_arc_split_1; Circular_arc_2 circ_arc_split_2; theSplit_2(circ1_arc_low, circ1_arc_end_p1, circ_arc_split_1, circ_arc_split_2); assert(circ_arc_split_1.target() == circ1_arc_end_p1); assert(circ1_arc_low.source() == circ_arc_split_1.source()); assert(circ_arc_split_1.target() == circ_arc_split_2.source()); assert(circ1_arc_low.target() == circ_arc_split_2.target()); //We used a point created without the support circle Circular_arc_point_2 circ1_arc_end_p2 = CGAL::circle_intersect<CK>(circ1_low_left, circ1_low_right, true); theSplit_2(circ1_arc_low, circ1_arc_end_p2, circ_arc_split_1, circ_arc_split_2); assert(circ_arc_split_1.target() == circ1_arc_end_p2); assert(circ1_arc_low.source() == circ_arc_split_1.source()); assert(circ_arc_split_1.target() == circ_arc_split_2.source()); assert(circ1_arc_low.target() == circ_arc_split_2.target()); //The commented code in bottom must create an error ////We used a point which is not on the arc //Circular_arc_2 arc_aux(circ1_low_right, // Line_2(center1, center1_low_right),true, // Line_2(center1, center1_low_right),false); //theSplit_2(circ1_arc_low, arc_aux.source(), // circ_arc_split_1, circ_arc_split_2); // testing intersect_2(Line_2, Circular_arc_2) Line_2 lo1 = Line_2(Point_2(0,0), Point_2(0,10)); Circular_arc_2 cao1 = Circular_arc_2(Point_2(0,0), Point_2(-5,5), Point_2(0,10)); // = two intersection p/arc Circular_arc_2 cao2 = Circular_arc_2(Point_2(0,0), Point_2(5,5), Point_2(0,10)); Circular_arc_2 cao3 = Circular_arc_2(Point_2(-5,5), Point_2(0,0), Point_2(5,5)); // = one intersection p/ arc Circular_arc_2 cao4 = Circular_arc_2(Point_2(-5,5), Point_2(0,10), Point_2(5,5)); Circular_arc_2 cao5 = Circular_arc_2(Point_2(1,1), Point_2(-1,2), Point_2(1,4)); // = zero-two intersections Circular_arc_2 cao6 = Circular_arc_2(Point_2(1,4), Point_2(-1,2), Point_2(1,1)); Circular_arc_2 cao7 = Circular_arc_2(Point_2(10,10), Point_2(0,0), Point_2(10,-10)); // = tangency Circular_arc_2 cao8 = Circular_arc_2(Point_2(11,10), Point_2(1,0), Point_2(11,-10)); // = no intersection std::cout << "Testing intersect with lines" << std::endl; std::vector< CGAL::Object > v_ll1, v_ll2, v_ll3, v_ll4, v_ll5, v_ll6, v_ll7, v_ll8; theConstruct_intersect_2(lo1, cao1, std::back_inserter(v_ll1)); theConstruct_intersect_2(lo1, cao2, std::back_inserter(v_ll2)); theConstruct_intersect_2(lo1, cao3, std::back_inserter(v_ll3)); theConstruct_intersect_2(lo1, cao4, std::back_inserter(v_ll4)); theConstruct_intersect_2(lo1, cao5, std::back_inserter(v_ll5)); theConstruct_intersect_2(lo1, cao6, std::back_inserter(v_ll6)); theConstruct_intersect_2(lo1, cao7, std::back_inserter(v_ll7)); theConstruct_intersect_2(lo1, cao8, std::back_inserter(v_ll8)); assert(v_ll1.size() == 2); assert(theDo_intersect_2(lo1, cao1)); assert(v_ll2.size() == 2); assert(theDo_intersect_2(lo1, cao2)); assert(v_ll3.size() == 1); assert(theDo_intersect_2(lo1, cao3)); assert(v_ll4.size() == 1); assert(theDo_intersect_2(lo1, cao4)); assert(v_ll5.size() == 0); assert(!theDo_intersect_2(lo1, cao5)); assert(v_ll6.size() == 2); assert(theDo_intersect_2(lo1, cao6)); assert(v_ll7.size() == 1); assert(theDo_intersect_2(lo1, cao7)); 
assert(v_ll8.size() == 0); assert(!theDo_intersect_2(lo1, cao8)); Line_arc_2 llu1 = Line_arc_2(Point_2(-1,-1), Point_2(1,1)); Line_arc_2 llu2 = Line_arc_2(Point_2(-1,-1), Point_2(-1,1)); Line_arc_2 llu3 = Line_arc_2(Point_2(-2,-1), Point_2(-2,1)); Circle_2 ccu = Circle_2(Point_2(0,-1), Point_2(-1,0), Point_2(0,1)); std::vector< CGAL::Object > v_llc1, v_llc2, v_llc3; theConstruct_intersect_2(llu1, ccu, std::back_inserter(v_llc1)); theConstruct_intersect_2(llu2, ccu, std::back_inserter(v_llc2)); theConstruct_intersect_2(llu3, ccu, std::back_inserter(v_llc3)); assert(v_llc1.size() == 2); assert(theDo_intersect_2(llu1, ccu)); assert(CGAL::do_intersect(llu1, ccu)); assert(v_llc2.size() == 1); assert(theDo_intersect_2(llu2, ccu)); assert(CGAL::do_intersect(llu2, ccu)); assert(v_llc3.size() == 0); assert(!theDo_intersect_2(llu3, ccu)); assert(!CGAL::do_intersect(llu3, ccu)); std::vector< CGAL::Object > v_rllc1, v_rllc2, v_rllc3, v_rllc1l, v_rllc2l, v_rllc3l; theConstruct_intersect_2(llu1.supporting_line(), ccu, std::back_inserter(v_rllc1)); theConstruct_intersect_2(llu2.supporting_line(), ccu, std::back_inserter(v_rllc2)); theConstruct_intersect_2(llu3.supporting_line(), ccu, std::back_inserter(v_rllc3)); theConstruct_intersect_2(ccu, llu1.supporting_line(), std::back_inserter(v_rllc1l)); theConstruct_intersect_2(ccu, llu2.supporting_line(), std::back_inserter(v_rllc2l)); theConstruct_intersect_2(ccu, llu3.supporting_line(), std::back_inserter(v_rllc3l)); assert(v_rllc1.size() == 2); assert(theDo_intersect_2(llu1.supporting_line(), ccu)); assert(CGAL::do_intersect(llu1.supporting_line(), ccu)); assert(v_rllc1l.size() == 2); assert(theDo_intersect_2(ccu, llu1.supporting_line())); assert(CGAL::do_intersect(ccu, llu1.supporting_line())); assert(v_rllc2.size() == 1); assert(theDo_intersect_2(llu2.supporting_line(), ccu)); assert(CGAL::do_intersect(llu2.supporting_line(), ccu)); assert(v_rllc2l.size() == 1); assert(theDo_intersect_2(ccu, llu2.supporting_line())); assert(CGAL::do_intersect(ccu, llu2.supporting_line())); assert(v_rllc3.size() == 0); assert(!theDo_intersect_2(llu3.supporting_line(), ccu)); assert(!CGAL::do_intersect(llu3.supporting_line(), ccu)); assert(v_rllc3l.size() == 0); assert(!theDo_intersect_2(ccu, llu3.supporting_line())); assert(!CGAL::do_intersect(ccu, llu3.supporting_line())); // TEST THE FUNCTOR CALL (VC8 porting mainly reason) Circular_arc_2 ccaa = typename CK::Construct_circular_arc_2()(Point_2(1, 2), Point_2(2, 2), Point_2(3, 3)); Line_arc_2 llaa = typename CK::Construct_line_arc_2()(Point_2(1, 2), Point_2(2, 2)); Circular_arc_point_2 ccaapp = typename CK::Construct_circular_arc_point_2()(Point_2(1, 2)); typename CK::Construct_circular_min_vertex_2()(llaa); typename CK::Construct_circular_max_vertex_2()(llaa); typename CK::Construct_circular_source_vertex_2()(llaa); typename CK::Construct_circular_target_vertex_2()(llaa); #ifndef CGAL_NO_DEPRECATED_CODE // testing the deprecate stuffs typename CK::Construct_supporting_circle_2()(ccaa); typename CK::Construct_supporting_line_2()(llaa); #endif }
{ "content_hash": "2621f612ec62ff120d4ad87c8e65d18a", "timestamp": "", "source": "github", "line_count": 986, "max_line_length": 111, "avg_line_length": 46.36815415821501, "alnum_prop": 0.6618254992453904, "repo_name": "hlzz/dotfiles", "id": "863de4b674ee3e026d8e9ce5565f7c34f8ea842e", "size": "46862", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "graphics/cgal/Circular_kernel_2/test/Circular_kernel_2/include/CGAL/_test_circles_constructions.h", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "AppleScript", "bytes": "1240" }, { "name": "Arc", "bytes": "38" }, { "name": "Assembly", "bytes": "449468" }, { "name": "Batchfile", "bytes": "16152" }, { "name": "C", "bytes": "102303195" }, { "name": "C++", "bytes": "155056606" }, { "name": "CMake", "bytes": "7200627" }, { "name": "CSS", "bytes": "179330" }, { "name": "Cuda", "bytes": "30026" }, { "name": "D", "bytes": "2152" }, { "name": "Emacs Lisp", "bytes": "14892" }, { "name": "FORTRAN", "bytes": "5276" }, { "name": "Forth", "bytes": "3637" }, { "name": "GAP", "bytes": "14495" }, { "name": "GLSL", "bytes": "438205" }, { "name": "Gnuplot", "bytes": "327" }, { "name": "Groff", "bytes": "518260" }, { "name": "HLSL", "bytes": "965" }, { "name": "HTML", "bytes": "2003175" }, { "name": "Haskell", "bytes": "10370" }, { "name": "IDL", "bytes": "2466" }, { "name": "Java", "bytes": "219109" }, { "name": "JavaScript", "bytes": "1618007" }, { "name": "Lex", "bytes": "119058" }, { "name": "Lua", "bytes": "23167" }, { "name": "M", "bytes": "1080" }, { "name": "M4", "bytes": "292475" }, { "name": "Makefile", "bytes": "7112810" }, { "name": "Matlab", "bytes": "1582" }, { "name": "NSIS", "bytes": "34176" }, { "name": "Objective-C", "bytes": "65312" }, { "name": "Objective-C++", "bytes": "269995" }, { "name": "PAWN", "bytes": "4107117" }, { "name": "PHP", "bytes": "2690" }, { "name": "Pascal", "bytes": "5054" }, { "name": "Perl", "bytes": "485508" }, { "name": "Pike", "bytes": "1338" }, { "name": "Prolog", "bytes": "5284" }, { "name": "Python", "bytes": "16799659" }, { "name": "QMake", "bytes": "89858" }, { "name": "Rebol", "bytes": "291" }, { "name": "Ruby", "bytes": "21590" }, { "name": "Scilab", "bytes": "120244" }, { "name": "Shell", "bytes": "2266191" }, { "name": "Slash", "bytes": "1536" }, { "name": "Smarty", "bytes": "1368" }, { "name": "Swift", "bytes": "331" }, { "name": "Tcl", "bytes": "1911873" }, { "name": "TeX", "bytes": "11981" }, { "name": "Verilog", "bytes": "3893" }, { "name": "VimL", "bytes": "595114" }, { "name": "XSLT", "bytes": "62675" }, { "name": "Yacc", "bytes": "307000" }, { "name": "eC", "bytes": "366863" } ], "symlink_target": "" }
package tastytest object TargetNameAnnot { @annotation.targetName("doubleplus") def ++ : Unit = println("++") def foo = 23 }
{ "content_hash": "4f9c0db05a5eb39187875d39cc8455bb", "timestamp": "", "source": "github", "line_count": 9, "max_line_length": 68, "avg_line_length": 14.777777777777779, "alnum_prop": 0.6691729323308271, "repo_name": "lrytz/scala", "id": "157fd18095486b2ad75d78a70e5d2667e2ac8b84", "size": "133", "binary": false, "copies": "2", "ref": "refs/heads/2.13.x", "path": "test/tasty/neg/src-3/TargetNameAnnot.scala", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Awk", "bytes": "1380" }, { "name": "Batchfile", "bytes": "3041" }, { "name": "C", "bytes": "141" }, { "name": "CSS", "bytes": "61396" }, { "name": "HTML", "bytes": "11445" }, { "name": "Java", "bytes": "321503" }, { "name": "JavaScript", "bytes": "48777" }, { "name": "Ruby", "bytes": "142" }, { "name": "Scala", "bytes": "19517328" }, { "name": "Shell", "bytes": "27210" }, { "name": "XSLT", "bytes": "2266" } ], "symlink_target": "" }
package org.apache.beam.sdk.io; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem; import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.includesDisplayDataFor; import static org.hamcrest.MatcherAssert.assertThat; import java.io.IOException; import java.io.Serializable; import java.util.List; import javax.annotation.Nullable; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.StringUtf8Coder; import org.apache.beam.sdk.io.UnboundedSource.CheckpointMark; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.transforms.display.DisplayData; import org.joda.time.Duration; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** Tests for {@link Read}. */ @RunWith(JUnit4.class) public class ReadTest implements Serializable { @Rule public transient ExpectedException thrown = ExpectedException.none(); @Test public void failsWhenCustomBoundedSourceIsNotSerializable() { thrown.expect(IllegalArgumentException.class); Read.from(new NotSerializableBoundedSource()); } @Test public void succeedsWhenCustomBoundedSourceIsSerializable() { Read.from(new SerializableBoundedSource()); } @Test public void failsWhenCustomUnboundedSourceIsNotSerializable() { thrown.expect(IllegalArgumentException.class); Read.from(new NotSerializableUnboundedSource()); } @Test public void succeedsWhenCustomUnboundedSourceIsSerializable() { Read.from(new SerializableUnboundedSource()); } @Test public void testDisplayData() { SerializableBoundedSource boundedSource = new SerializableBoundedSource() { @Override public void populateDisplayData(DisplayData.Builder builder) { builder.add(DisplayData.item("foo", "bar")); } }; SerializableUnboundedSource unboundedSource = new SerializableUnboundedSource() { @Override public void populateDisplayData(DisplayData.Builder builder) { builder.add(DisplayData.item("foo", "bar")); } }; Duration maxReadTime = Duration.standardMinutes(2345); Read.Bounded<String> bounded = Read.from(boundedSource); BoundedReadFromUnboundedSource<String> unbounded = Read.from(unboundedSource).withMaxNumRecords(1234).withMaxReadTime(maxReadTime); DisplayData boundedDisplayData = DisplayData.from(bounded); assertThat(boundedDisplayData, hasDisplayItem("source", boundedSource.getClass())); assertThat(boundedDisplayData, includesDisplayDataFor("source", boundedSource)); DisplayData unboundedDisplayData = DisplayData.from(unbounded); assertThat(unboundedDisplayData, hasDisplayItem("source", unboundedSource.getClass())); assertThat(unboundedDisplayData, includesDisplayDataFor("source", unboundedSource)); assertThat(unboundedDisplayData, hasDisplayItem("maxRecords", 1234)); assertThat(unboundedDisplayData, hasDisplayItem("maxReadTime", maxReadTime)); } private abstract static class CustomBoundedSource extends BoundedSource<String> { @Override public List<? 
extends BoundedSource<String>> split( long desiredBundleSizeBytes, PipelineOptions options) throws Exception { return null; } @Override public long getEstimatedSizeBytes(PipelineOptions options) throws Exception { return 0; } @Override public BoundedReader<String> createReader(PipelineOptions options) throws IOException { return null; } @Override public Coder<String> getOutputCoder() { return StringUtf8Coder.of(); } } private static class NotSerializableBoundedSource extends CustomBoundedSource { @SuppressWarnings("unused") private final NotSerializableClass notSerializableClass = new NotSerializableClass(); } private static class SerializableBoundedSource extends CustomBoundedSource {} private abstract static class CustomUnboundedSource extends UnboundedSource<String, NoOpCheckpointMark> { @Override public List<? extends UnboundedSource<String, NoOpCheckpointMark>> split( int desiredNumSplits, PipelineOptions options) throws Exception { return null; } @Override public UnboundedReader<String> createReader( PipelineOptions options, NoOpCheckpointMark checkpointMark) { return null; } @Override @Nullable public Coder<NoOpCheckpointMark> getCheckpointMarkCoder() { return null; } @Override public boolean requiresDeduping() { return true; } @Override public Coder<String> getOutputCoder() { return StringUtf8Coder.of(); } } private static class NoOpCheckpointMark implements CheckpointMark { @Override public void finalizeCheckpoint() throws IOException {} } private static class NotSerializableUnboundedSource extends CustomUnboundedSource { @SuppressWarnings("unused") private final NotSerializableClass notSerializableClass = new NotSerializableClass(); } private static class SerializableUnboundedSource extends CustomUnboundedSource {} private static class NotSerializableClass {} }
{ "content_hash": "0c68fe9895c3f257c2ac32693f967e84", "timestamp": "", "source": "github", "line_count": 158, "max_line_length": 96, "avg_line_length": 33.46835443037975, "alnum_prop": 0.7530257186081695, "repo_name": "rangadi/beam", "id": "77da271109653e20caa458f03d02010e642f8f0e", "size": "6093", "binary": false, "copies": "7", "ref": "refs/heads/master", "path": "sdks/java/core/src/test/java/org/apache/beam/sdk/io/ReadTest.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "40964" }, { "name": "Dockerfile", "bytes": "22943" }, { "name": "FreeMarker", "bytes": "7428" }, { "name": "Go", "bytes": "2283335" }, { "name": "Groovy", "bytes": "250105" }, { "name": "HTML", "bytes": "51517" }, { "name": "Java", "bytes": "23676245" }, { "name": "JavaScript", "bytes": "16472" }, { "name": "Jupyter Notebook", "bytes": "54182" }, { "name": "Python", "bytes": "4204946" }, { "name": "Ruby", "bytes": "4227" }, { "name": "Shell", "bytes": "171756" } ], "symlink_target": "" }
package org.visallo.web; import com.google.common.base.Joiner; import org.visallo.core.config.Configuration; import org.visallo.core.model.thumbnails.ThumbnailRepository; import org.visallo.core.model.properties.VisalloProperties; import org.visallo.core.model.properties.types.MetadataVisalloProperty; import org.visallo.core.model.properties.types.VisalloPropertyBase; import java.util.HashMap; import java.util.Map; public class WebConfiguration { public static final String PREFIX = Configuration.WEB_CONFIGURATION_PREFIX; public static final String THROTTLE_MESSAGING_SECONDS = PREFIX + "throttle.messaging.seconds"; public static final String CACHE_VERTEX_LRU_EXPIRATION_SECONDS = PREFIX + "cache.vertex.lru.expiration.seconds"; public static final String CACHE_VERTEX_MAX_SIZE = PREFIX + "cache.vertex.max_size"; public static final String CACHE_EDGE_LRU_EXPIRATION_SECONDS = PREFIX + "cache.edge.lru.expiration.seconds"; public static final String CACHE_EDGE_MAX_SIZE = PREFIX + "cache.edge.max_size"; public static final String VERTEX_LOAD_RELATED_MAX_BEFORE_PROMPT = PREFIX + "vertex.loadRelatedMaxBeforePrompt"; public static final String VERTEX_LOAD_RELATED_MAX_FORCE_SEARCH = PREFIX + "vertex.loadRelatedMaxForceSearch"; public static final String VERTEX_RELATIONSHIPS_MAX_PER_SECTION = PREFIX + "vertex.relationships.maxPerSection"; public static final String DETAIL_HISTORY_STACK_MAX = PREFIX + "detail.history.stack.max"; public static final String MAX_SELECTION_PARAGRAPHS_FOR_TERM_POPOVER = PREFIX + "detail.text.popover.maxSelectionParagraphs"; public static final String MAX_TEXT_LENGTH = PREFIX + "detail.text.maxTextLength"; public static final String FIELD_JUSTIFICATION_VALIDATION = PREFIX + "field.justification.validation"; public static final String SEARCH_DISABLE_WILDCARD_SEARCH = PREFIX + "search.disableWildcardSearch"; public static final String SEARCH_EXACT_MATCH = PREFIX + "search.exactMatch"; public static final String NOTIFICATIONS_LOCAL_AUTO_DISMISS_SECONDS = PREFIX + "notifications.local.autoDismissSeconds"; public static final String NOTIFICATIONS_SYSTEM_AUTO_DISMISS_SECONDS = PREFIX + "notifications.system.autoDismissSeconds"; public static final String NOTIFICATIONS_USER_AUTO_DISMISS_SECONDS = PREFIX + "notifications.user.autoDismissSeconds"; public static final String TYPEAHEAD_PROPERTIES_MAX_ITEMS = PREFIX + "typeahead.properties.maxItems"; public static final String TYPEAHEAD_CONCEPTS_MAX_ITEMS = PREFIX + "typeahead.concepts.maxItems"; public static final String TYPEAHEAD_EDGE_LABELS_MAX_ITEMS = PREFIX + "typeahead.edgeLabels.maxItems"; public static final String PROPERTIES_MULTIVALUE_DEFAULT_VISIBLE_COUNT = PREFIX + "properties.multivalue.defaultVisibleCount"; public static final String PROPERTIES_METADATA_PROPERTY_NAMES = PREFIX + "properties.metadata.propertyNames"; public static final String PROPERTIES_METADATA_PROPERTY_NAMES_DISPLAY = PREFIX + "properties.metadata.propertyNamesDisplay"; public static final String PROPERTIES_METADATA_PROPERTY_NAMES_TYPE = PREFIX + "properties.metadata.propertyNamesType"; public static final String MAP_PROVIDER = PREFIX + "map.provider"; public static final String MAP_PROVIDER_OSM_URL = PREFIX + "map.provider.osm.url"; public static final String LOGIN_SHOW_POWERED_BY = PREFIX + "login.showPoweredBy"; public static final String FORMATS_DATE_DATEDISPLAY = PREFIX + "formats.date.dateDisplay"; public static final String FORMATS_DATE_TIMEDISPLAY = PREFIX + "formats.date.timeDisplay"; public static final String FORMATS_DATE_SHOW_TIMEZONE = PREFIX 
+ "formats.date.showTimezone"; public static final String SHOW_VERSION_COMMENTS = PREFIX + "showVersionComments"; public static final String SHOW_VISIBILITY_IN_DETAILS_PANE = PREFIX + "showVisibilityInDetailsPane"; public static final String TIMEZONE_DEFAULT_DETECT = PREFIX + "timezone.defaults.detect"; public static final String TIMEZONE_DEFAULT_TIMEZONE = PREFIX + "timezone.defaults.timezone"; public static final String DATE_DISPLAY = PREFIX + "date.default.display"; public static final PropertyMetadata PROPERTY_METADATA_SOURCE_TIMEZONE = new PropertyMetadata( "http://visallo.org#sourceTimezone", "properties.metadata.label.source_timezone", "timezone" ); public static final PropertyMetadata PROPERTY_METADATA_MODIFIED_DATE = new PropertyMetadata( VisalloProperties.MODIFIED_DATE, "properties.metadata.label.modified_date", "datetime" ); public static final PropertyMetadata PROPERTY_METADATA_MODIFIED_BY = new PropertyMetadata( VisalloProperties.MODIFIED_BY, "properties.metadata.label.modified_by", "user" ); public static final PropertyMetadata PROPERTY_METADATA_STATUS = new PropertyMetadata( "sandboxStatus", "properties.metadata.label.status", "sandboxStatus" ); public static final PropertyMetadata PROPERTY_METADATA_CONFIDENCE = new PropertyMetadata( VisalloProperties.CONFIDENCE_METADATA, "properties.metadata.label.confidence", "percent" ); public static final Map<String, String> DEFAULTS = new HashMap<>(); static { // To display exact date or relative date DEFAULTS.put(DATE_DISPLAY, "relative"); DEFAULTS.put(LOGIN_SHOW_POWERED_BY, "false"); DEFAULTS.put(SHOW_VERSION_COMMENTS, "true"); DEFAULTS.put(FORMATS_DATE_DATEDISPLAY, "YYYY-MM-DD"); DEFAULTS.put(FORMATS_DATE_TIMEDISPLAY, "HH:mm"); DEFAULTS.put(FORMATS_DATE_SHOW_TIMEZONE, "true"); DEFAULTS.put(SHOW_VISIBILITY_IN_DETAILS_PANE, "true"); DEFAULTS.put(THROTTLE_MESSAGING_SECONDS, "2"); // Local cache rules for vertices / edges (per workspace) DEFAULTS.put(CACHE_VERTEX_LRU_EXPIRATION_SECONDS, Integer.toString(10 * 60)); DEFAULTS.put(CACHE_VERTEX_MAX_SIZE, "500"); DEFAULTS.put(CACHE_EDGE_LRU_EXPIRATION_SECONDS, Integer.toString(10 * 60)); DEFAULTS.put(CACHE_EDGE_MAX_SIZE, "250"); // Load related vertices thresholds DEFAULTS.put(VERTEX_LOAD_RELATED_MAX_BEFORE_PROMPT, "50"); DEFAULTS.put(VERTEX_LOAD_RELATED_MAX_FORCE_SEARCH, "250"); DEFAULTS.put(VERTEX_RELATIONSHIPS_MAX_PER_SECTION, "5"); DEFAULTS.put(DETAIL_HISTORY_STACK_MAX, "5"); DEFAULTS.put(MAX_SELECTION_PARAGRAPHS_FOR_TERM_POPOVER, "5"); DEFAULTS.put(MAX_TEXT_LENGTH, "1500000"); DEFAULTS.put(Configuration.VIDEO_PREVIEW_FRAMES_COUNT, Integer.toString(ThumbnailRepository.DEFAULT_FRAMES_PER_PREVIEW)); // Justification field validation DEFAULTS.put(FIELD_JUSTIFICATION_VALIDATION, JustificationFieldValidation.OPTIONAL.toString()); // Search DEFAULTS.put(SEARCH_DISABLE_WILDCARD_SEARCH, "false"); DEFAULTS.put(SEARCH_EXACT_MATCH, "false"); // Notifications DEFAULTS.put(NOTIFICATIONS_LOCAL_AUTO_DISMISS_SECONDS, "2"); DEFAULTS.put(NOTIFICATIONS_SYSTEM_AUTO_DISMISS_SECONDS, "-1"); DEFAULTS.put(NOTIFICATIONS_USER_AUTO_DISMISS_SECONDS, "5"); DEFAULTS.put(TYPEAHEAD_CONCEPTS_MAX_ITEMS, "-1"); DEFAULTS.put(TYPEAHEAD_PROPERTIES_MAX_ITEMS, "-1"); DEFAULTS.put(TYPEAHEAD_EDGE_LABELS_MAX_ITEMS, "-1"); // Hide multivalue properties after this count DEFAULTS.put(PROPERTIES_MULTIVALUE_DEFAULT_VISIBLE_COUNT, "2"); // Property Metadata shown in info popover DEFAULTS.put(PROPERTIES_METADATA_PROPERTY_NAMES, Joiner.on(',').join( PROPERTY_METADATA_SOURCE_TIMEZONE.getName(), PROPERTY_METADATA_MODIFIED_DATE.getName(), 
PROPERTY_METADATA_MODIFIED_BY.getName(), PROPERTY_METADATA_STATUS.getName(), PROPERTY_METADATA_CONFIDENCE.getName() )); DEFAULTS.put(PROPERTIES_METADATA_PROPERTY_NAMES_DISPLAY, Joiner.on(',').join( PROPERTY_METADATA_SOURCE_TIMEZONE.getMessageKey(), PROPERTY_METADATA_MODIFIED_DATE.getMessageKey(), PROPERTY_METADATA_MODIFIED_BY.getMessageKey(), PROPERTY_METADATA_STATUS.getMessageKey(), PROPERTY_METADATA_CONFIDENCE.getMessageKey() )); DEFAULTS.put(PROPERTIES_METADATA_PROPERTY_NAMES_TYPE, Joiner.on(',').join( PROPERTY_METADATA_SOURCE_TIMEZONE.getDataType(), PROPERTY_METADATA_MODIFIED_DATE.getDataType(), PROPERTY_METADATA_MODIFIED_BY.getDataType(), PROPERTY_METADATA_STATUS.getDataType(), PROPERTY_METADATA_CONFIDENCE.getDataType() )); DEFAULTS.put(MAP_PROVIDER, MapProvider.OSM.toString()); DEFAULTS.put(MAP_PROVIDER_OSM_URL, "https://{a-c}.tile.openstreetmap.org/{z}/{x}/{y}.png"); } public static class PropertyMetadata { private String name; private String messageKey; private String dataType; public PropertyMetadata(VisalloPropertyBase visalloProperty, String messageKey, String dataType) { this(visalloProperty.getPropertyName(), messageKey, dataType); } public PropertyMetadata(MetadataVisalloProperty visalloProperty, String messageKey, String dataType) { this(visalloProperty.getMetadataKey(), messageKey, dataType); } public PropertyMetadata(String name, String messageKey, String dataType) { this.name = name; this.messageKey = messageKey; this.dataType = dataType; } public String getName() { return name; } public String getMessageKey() { return messageKey; } public String getDataType() { return dataType; } } public enum MapProvider { /** * @deprecated Google is not officially supported by OpenLayers, OpenStreetMap will be used. */ @Deprecated GOOGLE("google"), OSM("osm"), ARCGIS93REST("ArcGIS93Rest"); private String string; private MapProvider(String string) { this.string = string; } @Override public String toString() { return string; } } public enum JustificationFieldValidation { REQUIRED, OPTIONAL, NONE; } public static JustificationFieldValidation getJustificationFieldValidation(Configuration configuration) { return JustificationFieldValidation.valueOf(configuration.get(FIELD_JUSTIFICATION_VALIDATION, DEFAULTS.get(FIELD_JUSTIFICATION_VALIDATION))); } public static boolean justificationRequired(Configuration configuration) { return getJustificationFieldValidation(configuration).equals(JustificationFieldValidation.REQUIRED); } }
{ "content_hash": "dc67fda320e31e46f2e1a0b718c6c7eb", "timestamp": "", "source": "github", "line_count": 224, "max_line_length": 149, "avg_line_length": 49.151785714285715, "alnum_prop": 0.6992733878292461, "repo_name": "visallo/visallo", "id": "ce58bde51f791854ed6dde39adbef5ffeb28c724", "size": "11010", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "web/web-base/src/main/java/org/visallo/web/WebConfiguration.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "205371" }, { "name": "Go", "bytes": "2988" }, { "name": "HTML", "bytes": "66006" }, { "name": "Java", "bytes": "3686598" }, { "name": "JavaScript", "bytes": "4648797" }, { "name": "Makefile", "bytes": "623" }, { "name": "Shell", "bytes": "17212" } ], "symlink_target": "" }
// Replay console messages captured during server-side rendering in the browser:
// append a <script> block to the rendering result string that re-issues each
// captured call (level + arguments) on the client console.
(function (history) {
  if (history && history.length > 0) {
    // '<scr'+'ipt>' is split so this source cannot prematurely close an enclosing script tag.
    result += '\n<scr'+'ipt>';
    history.forEach(function (msg) {
      result += '\nconsole.' + msg.level + '.apply(console, ' + JSON.stringify(msg.arguments) + ');';
    });
    result += '\n</scr'+'ipt>';
  }
})(console.history);
{ "content_hash": "23d73c81969e27161f4bac6b5080fe17", "timestamp": "", "source": "github", "line_count": 9, "max_line_length": 101, "avg_line_length": 32.888888888888886, "alnum_prop": 0.5472972972972973, "repo_name": "reacuna/react-rails", "id": "e540558060bbb2b74b4f4a28d5b6d95d0a2a0a06", "size": "296", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "lib/react/server_rendering/sprockets_renderer/console_replay.js", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "546" }, { "name": "CoffeeScript", "bytes": "310" }, { "name": "HTML", "bytes": "9141" }, { "name": "JavaScript", "bytes": "497597" }, { "name": "Ruby", "bytes": "68658" } ], "symlink_target": "" }
package com.dreamchain.skeleton.service; import com.dreamchain.skeleton.model.User; import javax.servlet.http.HttpServletRequest; public interface AuthenticationService { void setSessionValue(HttpServletRequest request,User user); }
{ "content_hash": "1e2e32455fae4194c50c4b4163aa6723", "timestamp": "", "source": "github", "line_count": 11, "max_line_length": 63, "avg_line_length": 22, "alnum_prop": 0.8223140495867769, "repo_name": "abdul-kader138/projectguru", "id": "9831703689165fe58615f52b2d882ca9ffbcc564", "size": "242", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/com/dreamchain/skeleton/service/AuthenticationService.java", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "1233" }, { "name": "HTML", "bytes": "25246" }, { "name": "Java", "bytes": "820220" }, { "name": "JavaScript", "bytes": "1001" } ], "symlink_target": "" }
import { NgModule } from '@angular/core'; import { CommonModule } from '@angular/common'; import { FormsModule, ReactiveFormsModule } from '@angular/forms'; import { ForgetComponent } from './forget.component'; import { routing } from './forget.routing'; import { NgaModule } from '../../theme/nga.module'; import { Ui } from '../ui/ui.component'; import { AppTranslationModule } from '../../app.translation.module'; // import { DefaultModal } from '../ui/components/modals/default-modal/default-modal.component'; import { NgbDropdownModule, NgbModalModule } from '@ng-bootstrap/ng-bootstrap'; import { Modals } from '../ui/components/modals/modals.component'; @NgModule({ imports: [ CommonModule, FormsModule, ReactiveFormsModule, routing, AppTranslationModule, NgaModule, NgbDropdownModule, NgbModalModule, ], declarations: [ ForgetComponent, // DefaultModal, Modals, Ui ], // entryComponents: [ // DefaultModal // ], }) export class ForgetModule {}
{ "content_hash": "718d6a2bf3d8dc7588964a3023b02983", "timestamp": "", "source": "github", "line_count": 35, "max_line_length": 97, "avg_line_length": 29.228571428571428, "alnum_prop": 0.6842619745845552, "repo_name": "bajajpayal/ng2-admin", "id": "ad182b3405676207c47c64bc370642e9daabf93f", "size": "1023", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/app/pages/forget/forget.module.ts", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "143845" }, { "name": "HTML", "bytes": "103283" }, { "name": "JavaScript", "bytes": "2183" }, { "name": "Shell", "bytes": "153" }, { "name": "TypeScript", "bytes": "292263" } ], "symlink_target": "" }
 #include <aws/ec2/model/InstanceStorageEncryptionSupport.h> #include <aws/core/utils/HashingUtils.h> #include <aws/core/Globals.h> #include <aws/core/utils/EnumParseOverflowContainer.h> using namespace Aws::Utils; namespace Aws { namespace EC2 { namespace Model { namespace InstanceStorageEncryptionSupportMapper { static const int unsupported_HASH = HashingUtils::HashString("unsupported"); static const int required_HASH = HashingUtils::HashString("required"); InstanceStorageEncryptionSupport GetInstanceStorageEncryptionSupportForName(const Aws::String& name) { int hashCode = HashingUtils::HashString(name.c_str()); if (hashCode == unsupported_HASH) { return InstanceStorageEncryptionSupport::unsupported; } else if (hashCode == required_HASH) { return InstanceStorageEncryptionSupport::required; } EnumParseOverflowContainer* overflowContainer = Aws::GetEnumOverflowContainer(); if(overflowContainer) { overflowContainer->StoreOverflow(hashCode, name); return static_cast<InstanceStorageEncryptionSupport>(hashCode); } return InstanceStorageEncryptionSupport::NOT_SET; } Aws::String GetNameForInstanceStorageEncryptionSupport(InstanceStorageEncryptionSupport enumValue) { switch(enumValue) { case InstanceStorageEncryptionSupport::unsupported: return "unsupported"; case InstanceStorageEncryptionSupport::required: return "required"; default: EnumParseOverflowContainer* overflowContainer = Aws::GetEnumOverflowContainer(); if(overflowContainer) { return overflowContainer->RetrieveOverflow(static_cast<int>(enumValue)); } return {}; } } } // namespace InstanceStorageEncryptionSupportMapper } // namespace Model } // namespace EC2 } // namespace Aws
{ "content_hash": "cc2b495f3dbdfc8ad079a8be9462492f", "timestamp": "", "source": "github", "line_count": 67, "max_line_length": 108, "avg_line_length": 31.28358208955224, "alnum_prop": 0.6502862595419847, "repo_name": "aws/aws-sdk-cpp", "id": "c27755a0c07aafe5663432497eda9e259e6c4bbb", "size": "2215", "binary": false, "copies": "2", "ref": "refs/heads/main", "path": "aws-cpp-sdk-ec2/source/model/InstanceStorageEncryptionSupport.cpp", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "309797" }, { "name": "C++", "bytes": "476866144" }, { "name": "CMake", "bytes": "1245180" }, { "name": "Dockerfile", "bytes": "11688" }, { "name": "HTML", "bytes": "8056" }, { "name": "Java", "bytes": "413602" }, { "name": "Python", "bytes": "79245" }, { "name": "Shell", "bytes": "9246" } ], "symlink_target": "" }
layout: default title: How-to Guides i18n: en --- <h1>How-to Guides</h1> <p class="lead">These short how-to guides give you the simple steps to achieve a well defined goal.</p> <h2>Installation</h2> <ul> <li><a href="install_windows">How to install Mu on Windows with the official installer</a>.</li> <li><a href="install_macos">How to install Mu on Mac OSX with the official installer</a>.</li> <li><a href="use_portamu">How to use PortaMu to run Mu anywhere</a>.</li> <li><a href="install_with_python">How to install Mu with Python packaging on Windows, OSX and Linux</a>.</li> <li><a href="install_raspberry_pi">How to install Mu on a Raspberry Pi</a>.</li> </ul> <h2>Using Mu</h2> <ul> <li><a href="create_load_save">How to create, load and save files in Mu</a>.</li> <li><a href="read_logs">How to read the logs in Mu</a>.</li> <li><a href="copy_files_microbit">How to copy files on and off a micro:bit</a>.</li> <li><a href="python3_envars">How to use Environment Variables to Configure GPIOZero in Python3 Mode</a>.</li> <li><a href="microbit_settings">How to minify your MicroPython scripts and use a custom runtime on the BBC micro:bit</a>.</li> <li><a href="pgzero_sounds_images">How to add new images, fonts, sound and music to Pygame Zero</a>.</li> </ul> <h2>Problem Solving</h2> <ul> <li><a href="bugs">How to report a bug in Mu</a>.</li> <li><a href="fix_code">How to try to fix your own broken code</a>.</li> </ul>
{ "content_hash": "2af7e21c91d3bb8ad6dbb058f1f4f6a4", "timestamp": "", "source": "github", "line_count": 37, "max_line_length": 130, "avg_line_length": 40.270270270270274, "alnum_prop": 0.6671140939597315, "repo_name": "mu-editor/mu-editor.github.io", "id": "2b63fbb7af8f5fb991aa55c520279bdedf4e91c2", "size": "1494", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "en/howto/1.0/index.html", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "3164" }, { "name": "HTML", "bytes": "160684" }, { "name": "JavaScript", "bytes": "766" }, { "name": "Ruby", "bytes": "11847" } ], "symlink_target": "" }
[![Build Status](https://scrutinizer-ci.com/g/WildPHP/module-linksniffer/badges/build.png?b=master)](https://scrutinizer-ci.com/g/WildPHP/module-linksniffer/build-status/master) [![Scrutinizer Code Quality](https://scrutinizer-ci.com/g/WildPHP/module-linksniffer/badges/quality-score.png?b=master)](https://scrutinizer-ci.com/g/WildPHP/module-linksniffer/?branch=master) [![Latest Stable Version](https://poser.pugx.org/wildphp/module-linksniffer/v/stable)](https://packagist.org/packages/wildphp/module-linksniffer) [![Latest Unstable Version](https://poser.pugx.org/wildphp/module-linksniffer/v/unstable)](https://packagist.org/packages/wildphp/module-linksniffer) [![Total Downloads](https://poser.pugx.org/wildphp/module-linksniffer/downloads)](https://packagist.org/packages/wildphp/module-linksniffer) This module shows information about a link when it is posted in a channel. ## System Requirements This module requires the `json` php extension for various sub-modules to work. ## Installation To install this module, we will use `composer`: ```composer require wildphp/module-linksniffer``` That will install all required files for the module. In order to activate the module, add the following line to your modules array in `config.neon`: - WildPHP\Modules\LinkSniffer\LinkSniffer The bot will run the module the next time it is started. ## Configuration It is possible to blacklist this module in certain channels. For this, add the following snippet to your `config.neon`: ```neon disablelinksniffer: - '#channel1' - '#channel2' ``` ## Usage This module does not have additional usage information. ## License This module is licensed under the MIT license. Please see `LICENSE` to read it.
{ "content_hash": "0069e7c0c71142827a27ce8491cd73aa", "timestamp": "", "source": "github", "line_count": 36, "max_line_length": 192, "avg_line_length": 48.083333333333336, "alnum_prop": 0.7723859041016753, "repo_name": "WildPHP/module-linksniffer", "id": "e3868d422597b5cfa7de6d643a43f999164db438", "size": "1752", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33261", "license": "mit", "language": [ { "name": "PHP", "bytes": "17111" } ], "symlink_target": "" }
[![Build Status](https://travis-ci.org/graphnode/vpl-runner.svg?branch=master)](https://travis-ci.org/graphnode/vpl-runner)
[![Dependency Status](https://david-dm.org/graphnode/vpl-runner.svg)](https://david-dm.org/graphnode/vpl-runner)
[![devDependency Status](https://david-dm.org/graphnode/vpl-runner/dev-status.svg)](https://david-dm.org/graphnode/vpl-runner#info=devDependencies)
[![Coverage Status](https://coveralls.io/repos/github/graphnode/vpl-runner/badge.svg?branch=master)](https://coveralls.io/github/graphnode/vpl-runner?branch=master)
___

## What is it?

VPL-Runner is a small prototype of how a code graph, which connects single-responsibility processes, would run.

The graph is written in JSON, and its schema is based on the noflo schema seen [here](https://github.com/noflo/noflo/blob/master/graph-schema.json).

## How to install and run

Run `npm install` in the project directory, then run `node index graph_file`, where `graph_file` is the path to a graph file in JSON format (see the example sketch below).

## Is it any good?

Not yet. :worried:
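## Example graph (sketch)

The following is a minimal, hypothetical graph file written against the noflo graph schema linked above. The component names (`core/ReadFile`, `core/Output`) and port names are illustrative assumptions only; they are not components shipped with this project.

```json
{
  "properties": { "name": "hello-graph" },
  "processes": {
    "reader":  { "component": "core/ReadFile" },
    "printer": { "component": "core/Output" }
  },
  "connections": [
    { "data": "input.txt", "tgt": { "process": "reader", "port": "in" } },
    { "src": { "process": "reader", "port": "out" }, "tgt": { "process": "printer", "port": "in" } }
  ]
}
```

Assuming the file is saved as `hello-graph.json`, it would be run with `node index hello-graph.json`.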
{ "content_hash": "0b882ad2bd83b50574fe9928f72d5dd5", "timestamp": "", "source": "github", "line_count": 18, "max_line_length": 164, "avg_line_length": 57.72222222222222, "alnum_prop": 0.7593840230991338, "repo_name": "graphnode/vpl-runner", "id": "7b542e91af6b53fa61e3183ee3dadb6dfc32d953", "size": "1053", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "README.md", "mode": "33188", "license": "mit", "language": [ { "name": "JavaScript", "bytes": "8993" } ], "symlink_target": "" }
// // Copyright (c) Microsoft Corporation. All rights reserved. // namespace Microsoft.Zelig.Elf { using System; public class Symbol : IElfElement, IElfElementStatusPublisher<Symbol> { internal string m_name; internal UInt16 m_index; internal Elf32_Sym m_symbolDef; internal ElfSection m_sectionRef; internal SymbolTable m_parent; internal Symbol(SymbolTable parent, string name, UInt32 value, UInt32 size, SymbolType type, SymbolBinding binding, SymbolVisibility visibility, ElfSection sectionRef) { m_parent = parent; Name = name; m_symbolDef.st_value = value; m_symbolDef.st_size = size; m_symbolDef.st_info = (byte)(((byte)binding << 4) + (byte)type); m_symbolDef.st_other = (byte)visibility; if (sectionRef != null) { m_symbolDef.st_shndx = sectionRef.Index; m_sectionRef = sectionRef; sectionRef.ElementStatusChangedEvent += SectionReferenceStatusChanged; } } internal Symbol(SymbolTable parent, Elf32_Sym def, UInt16 index) { m_parent = parent; m_symbolDef = def; m_index = index; } public string Name { get { return m_name; } set { var tbl = m_parent.StringTable; if (tbl != null) { m_symbolDef.st_name = m_parent.m_stringTable.AddString(value); m_name = value; } else { throw new ElfConsistencyException("Missing string table"); } } } public UInt16 Index { get { return m_index; } set { m_index = value; // Update subcribers about index change if (ElementStatusChangedEvent != null) { ElementStatusChangedEvent(this, ElfElementStatus.IndexChanged); } } } public Elf32_Sym SymbolDef { get { return m_symbolDef; } } public SymbolType Type { get { return (SymbolType)(m_symbolDef.st_info & 0xf); } } public SymbolBinding Binding { get { return (SymbolBinding)(m_symbolDef.st_info >> 4); } } public SymbolVisibility Visibility { get { return (SymbolVisibility)(m_symbolDef.st_other & 0x3); } } public ElfSection ReferencedSection { get { return m_sectionRef; } } // // Interface Implementation Methods // public event Action<Symbol, ElfElementStatus> ElementStatusChangedEvent; public void BuildReferences() { // Set name var tbl = m_parent.StringTable; if (tbl != null) { m_name = tbl.GetString(m_symbolDef.st_name); } else { throw new ElfConsistencyException("Missing string table"); } // Set referenced section m_sectionRef = m_parent.m_parent[m_symbolDef.st_shndx]; if (m_sectionRef != null) { m_sectionRef.ElementStatusChangedEvent += SectionReferenceStatusChanged; } } // // Event Notification Methods // private void SectionReferenceStatusChanged(ElfSection section, ElfElementStatus status) { if (status == ElfElementStatus.IndexChanged) { m_symbolDef.st_shndx = section.Index; } } } }
{ "content_hash": "b4b2a590004c881d79cc614dc760bf46", "timestamp": "", "source": "github", "line_count": 167, "max_line_length": 95, "avg_line_length": 25.808383233532933, "alnum_prop": 0.4545243619489559, "repo_name": "jelin1/llilum", "id": "5fc8235ce521e875ea2d940915d9765ec8699b77", "size": "4312", "binary": false, "copies": "6", "ref": "refs/heads/dev", "path": "Zelig/Zelig/DebugTime/Elf/ElfLib/Symbol.cs", "mode": "33188", "license": "mit", "language": [ { "name": "Assembly", "bytes": "93636" }, { "name": "Batchfile", "bytes": "34786" }, { "name": "C", "bytes": "20007017" }, { "name": "C#", "bytes": "24168129" }, { "name": "C++", "bytes": "1934796" }, { "name": "Makefile", "bytes": "74189" }, { "name": "Objective-C", "bytes": "403" }, { "name": "Smalltalk", "bytes": "170596" } ], "symlink_target": "" }
<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1"> <title>fundamental-arithmetics: Not compatible 👼</title> <link rel="shortcut icon" type="image/png" href="../../../../../favicon.png" /> <link href="../../../../../bootstrap.min.css" rel="stylesheet"> <link href="../../../../../bootstrap-custom.css" rel="stylesheet"> <link href="//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/font-awesome.min.css" rel="stylesheet"> <script src="../../../../../moment.min.js"></script> <!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries --> <!-- WARNING: Respond.js doesn't work if you view the page via file:// --> <!--[if lt IE 9]> <script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script> <script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script> <![endif]--> </head> <body> <div class="container"> <div class="navbar navbar-default" role="navigation"> <div class="container-fluid"> <div class="navbar-header"> <a class="navbar-brand" href="../../../../.."><i class="fa fa-lg fa-flag-checkered"></i> Coq bench</a> </div> <div id="navbar" class="collapse navbar-collapse"> <ul class="nav navbar-nav"> <li><a href="../..">clean / released</a></li> <li class="active"><a href="">8.11.2 / fundamental-arithmetics - 8.9.0</a></li> </ul> </div> </div> </div> <div class="article"> <div class="row"> <div class="col-md-12"> <a href="../..">« Up</a> <h1> fundamental-arithmetics <small> 8.9.0 <span class="label label-info">Not compatible 👼</span> </small> </h1> <p>📅 <em><script>document.write(moment("2022-07-16 22:01:16 +0000", "YYYY-MM-DD HH:mm:ss Z").fromNow());</script> (2022-07-16 22:01:16 UTC)</em><p> <h2>Context</h2> <pre># Packages matching: installed # Name # Installed # Synopsis base-bigarray base base-threads base base-unix base conf-findutils 1 Virtual package relying on findutils coq 8.11.2 Formal proof management system num 1.4 The legacy Num library for arbitrary-precision integer and rational arithmetic ocaml 4.07.1 The OCaml compiler (virtual package) ocaml-base-compiler 4.07.1 Official release 4.07.1 ocaml-config 1 OCaml Switch Configuration ocamlfind 1.9.5 A library manager for OCaml # opam file: opam-version: &quot;2.0&quot; maintainer: &quot;[email protected]&quot; homepage: &quot;http://perso.ens-lyon.fr/sebastien.briais/tools/Arith_080201.tar.gz&quot; license: &quot;LGPL&quot; build: [make &quot;-j%{jobs}%&quot;] install: [make &quot;install&quot;] remove: [&quot;rm&quot; &quot;-R&quot; &quot;%{lib}%/coq/user-contrib/FundamentalArithmetics&quot;] depends: [ &quot;ocaml&quot; &quot;coq&quot; {&gt;= &quot;8.9&quot; &amp; &lt; &quot;8.10~&quot;} ] tags: [ &quot;keyword: arithmetic&quot; &quot;keyword: number theory&quot; &quot;category: Mathematics/Arithmetic and Number Theory/Miscellaneous&quot; &quot;date: 2008-02-1&quot; ] authors: [ &quot;Sébastien Briais &lt;sebastien.briais at ens-lyon.fr&gt; [http://perso.ens-lyon.fr/sebastien.briais/]&quot; ] bug-reports: &quot;https://github.com/coq-contribs/fundamental-arithmetics/issues&quot; dev-repo: &quot;git+https://github.com/coq-contribs/fundamental-arithmetics.git&quot; synopsis: &quot;Fundamental theorems of arithmetic&quot; flags: light-uninstall url { src: &quot;https://github.com/coq-contribs/fundamental-arithmetics/archive/v8.9.0.tar.gz&quot; checksum: &quot;md5=cf730613573d2738cfb63d9c1b887750&quot; } </pre> <h2>Lint</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return 
code</dt> <dd>0</dd> </dl> <h2>Dry install 🏜️</h2> <p>Dry install with the current Coq version:</p> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>opam install -y --show-action coq-fundamental-arithmetics.8.9.0 coq.8.11.2</code></dd> <dt>Return code</dt> <dd>5120</dd> <dt>Output</dt> <dd><pre>[NOTE] Package coq is already installed (current version is 8.11.2). The following dependencies couldn&#39;t be met: - coq-fundamental-arithmetics -&gt; coq &lt; 8.10~ -&gt; ocaml &lt; 4.06.0 base of this switch (use `--unlock-base&#39; to force) Your request can&#39;t be satisfied: - No available version of coq satisfies the constraints No solution found, exiting </pre></dd> </dl> <p>Dry install without Coq/switch base, to test if the problem was incompatibility with the current Coq/OCaml version:</p> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>opam remove -y coq; opam install -y --show-action --unlock-base coq-fundamental-arithmetics.8.9.0</code></dd> <dt>Return code</dt> <dd>0</dd> </dl> <h2>Install dependencies</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Duration</dt> <dd>0 s</dd> </dl> <h2>Install 🚀</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Duration</dt> <dd>0 s</dd> </dl> <h2>Installation size</h2> <p>No files were installed.</p> <h2>Uninstall 🧹</h2> <dl class="dl-horizontal"> <dt>Command</dt> <dd><code>true</code></dd> <dt>Return code</dt> <dd>0</dd> <dt>Missing removes</dt> <dd> none </dd> <dt>Wrong removes</dt> <dd> none </dd> </dl> </div> </div> </div> <hr/> <div class="footer"> <p class="text-center"> Sources are on <a href="https://github.com/coq-bench">GitHub</a> © Guillaume Claret 🐣 </p> </div> </div> <script src="https://ajax.googleapis.com/ajax/libs/jquery/1.11.1/jquery.min.js"></script> <script src="../../../../../bootstrap.min.js"></script> </body> </html>
{ "content_hash": "c7034b6f2c18178910bedd5938a05729", "timestamp": "", "source": "github", "line_count": 168, "max_line_length": 159, "avg_line_length": 41.220238095238095, "alnum_prop": 0.5481588447653429, "repo_name": "coq-bench/coq-bench.github.io", "id": "6f00d2bdf33b5095b5efbb8452dc1f0fc84b083d", "size": "6951", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "clean/Linux-x86_64-4.07.1-2.0.6/released/8.11.2/fundamental-arithmetics/8.9.0.html", "mode": "33188", "license": "mit", "language": [], "symlink_target": "" }
using Microsoft.Owin; using Owin; [assembly: OwinStartupAttribute(typeof(web.Startup))] namespace web { public partial class Startup { public void Configuration(IAppBuilder app) { ConfigureAuth(app); } } }
{ "content_hash": "890e1276020055e5e13a6ee63c7bf22c", "timestamp": "", "source": "github", "line_count": 14, "max_line_length": 53, "avg_line_length": 19.285714285714285, "alnum_prop": 0.5962962962962963, "repo_name": "stevebargelt/CrossoverCI", "id": "75ea81edb1e4194b009a01c6a60fa4384f8b4689", "size": "272", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "web/Startup.cs", "mode": "33188", "license": "mit", "language": [ { "name": "ASP", "bytes": "95" }, { "name": "C#", "bytes": "145024" }, { "name": "CSS", "bytes": "537" }, { "name": "HTML", "bytes": "5277" }, { "name": "JavaScript", "bytes": "10918" } ], "symlink_target": "" }
<TS language="uz@Cyrl" version="2.0"> <context> <name>AddressBookPage</name> <message> <source>Right-click to edit address or label</source> <translation>Манзил ёки ёрлиқни таҳрирлаш учун икки марта босинг</translation> </message> <message> <source>Create a new address</source> <translation>Янги манзил яратинг</translation> </message> <message> <source>&amp;New</source> <translation>&amp;Янги</translation> </message> <message> <source>Copy the currently selected address to the system clipboard</source> <translation>Жорий танланган манзилни тизим вақтинчалик хотирасига нусха кўчиринг</translation> </message> <message> <source>&amp;Copy</source> <translation>&amp;Нусха олиш</translation> </message> <message> <source>C&amp;lose</source> <translation>&amp;Ёпиш</translation> </message> <message> <source>&amp;Copy Address</source> <translation>Манзилдан &amp;нусха олиш</translation> </message> <message> <source>Delete the currently selected address from the list</source> <translation>Жорий танланган манзилни рўйхатдан ўчириш</translation> </message> <message> <source>Export the data in the current tab to a file</source> <translation>Жорий ички ойна ичидаги маълумотларни файлга экспорт қилиш</translation> </message> <message> <source>&amp;Export</source> <translation>&amp;Экспорт</translation> </message> <message> <source>&amp;Delete</source> <translation>&amp;Ўчириш</translation> </message> <message> <source>Choose the address to send coins to</source> <translation>Тангаларни жўнатиш учун манзилни танланг</translation> </message> <message> <source>Choose the address to receive coins with</source> <translation>Тангаларни қабул қилиш учун манзилни танланг</translation> </message> <message> <source>C&amp;hoose</source> <translation>&amp;Танлаш</translation> </message> <message> <source>Sending addresses</source> <translation>Жўнатиладиган манзиллар</translation> </message> <message> <source>Receiving addresses</source> <translation>Қабул қилинадиган манзиллар</translation> </message> <message> <source>These are your VeriCoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source> <translation>Улар тўловларни жўнатиш учун сизнинг VeriCoin манзилларингиз. Доимо тангаларни жўнатишдан олдин сумма ва қабул қилувчи манзилни текшириб кўринг. </translation> </message> <message> <source>These are your VeriCoin addresses for receiving payments. It is recommended to use a new receiving address for each transaction.</source> <translation>Улар тўловларни қабул қилиш учун сизнинг VeriCoin манзилларингиз. Ҳар бир ўтказма учун янги қабул қилувчи манзилдан фойдаланиш тавсия қилинади.</translation> </message> <message> <source>Copy &amp;Label</source> <translation>Нусха олиш ва ёрлиқ</translation> </message> <message> <source>&amp;Edit</source> <translation>&amp;Таҳрирлаш</translation> </message> <message> <source>Export Address List</source> <translation>Манзил рўйхатини экспорт қилиш</translation> </message> <message> <source>Comma separated file (*.csv)</source> <translation>Вергул билан ажратилган файл (*.csv)</translation> </message> <message> <source>Exporting Failed</source> <translation>Экспорт қилиб бўлмади</translation> </message> <message> <source>There was an error trying to save the address list to %1. Please try again.</source> <translation>Манзил рўйхатини %1.га сақлашда хатолик юз берди. 
Яна уриниб кўринг.</translation> </message> </context> <context> <name>AddressTableModel</name> <message> <source>Label</source> <translation>Ёрлиқ</translation> </message> <message> <source>Address</source> <translation>Манзил</translation> </message> <message> <source>(no label)</source> <translation>(Ёрлиқ мавжуд эмас)</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <source>Passphrase Dialog</source> <translation>Махфий сўз ойнаси</translation> </message> <message> <source>Enter passphrase</source> <translation>Махфий сузни киритинг</translation> </message> <message> <source>New passphrase</source> <translation>Янги махфий суз</translation> </message> <message> <source>Repeat new passphrase</source> <translation>Янги махфий сузни такрорланг</translation> </message> <message> <source>Encrypt wallet</source> <translation>Ҳамённи қодлаш</translation> </message> <message> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation>Ушбу операцияни амалга ошириш учун ҳамённи қулфдан чиқариш парол сўзини талаб қилади.</translation> </message> <message> <source>Unlock wallet</source> <translation>Ҳамённи қулфдан чиқариш</translation> </message> <message> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation>Ушбу операцияни амалга ошириш учун ҳамённи коддан чиқариш парол сўзини талаб қилади.</translation> </message> <message> <source>Decrypt wallet</source> <translation>Ҳамённи коддан чиқариш</translation> </message> <message> <source>Change passphrase</source> <translation>Махфий сузни узгартириш</translation> </message> <message> <source>Confirm wallet encryption</source> <translation>Ҳамённи кодлашни тасдиқлаш</translation> </message> <message> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR BITCOINS&lt;/b&gt;!</source> <translation>Диққат: Агар сиз ҳамёнингизни кодласангиз ва махфий сўзингизни унутсангиз, сиз &lt;b&gt;БАРЧА BITCOIN ПУЛЛАРИНГИЗНИ ЙЎҚОТАСИЗ&lt;/b&gt;!</translation> </message> <message> <source>Are you sure you wish to encrypt your wallet?</source> <translation>Ҳамёнингизни кодлашни ростдан хоҳлайсизми?</translation> </message> <message> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation>МУҲИМ: Сиз қилган олдинги ҳамён файли заҳиралари янги яратилган, кодланган ҳамён файли билан алмаштирилиши керак. 
Хавфсизлик сабабларига кўра олдинги кодланган ҳамён файли заҳираси янги кодланган ҳамёндан фойдаланишингиз билан яроқсиз ҳолга келади.</translation> </message> <message> <source>Warning: The Caps Lock key is on!</source> <translation>Диққат: Caps Lock тугмаси ёқилган!</translation> </message> <message> <source>Wallet encrypted</source> <translation>Ҳамёни кодланган</translation> </message> <message> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;ten or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation>Ҳамёнга янги махфий сўз киритинг.&lt;br/&gt;Илтимос, &lt;b&gt;ўнта ёки тасодифий белгили&lt;/b&gt; махфий сўздан фойдаланинг ёки &lt;b&gt;саккизта ёки кўпроқ сўзлар&lt;/b&gt;дан фойдаланинг.</translation> </message> <message> <source>Wallet encryption failed</source> <translation>Ҳамённи кодлаш амалга ошмади</translation> </message> <message> <source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source> <translation>Ҳамённи кодлаш ташқи хато туфайли амалга ошмади. Ҳамёнингиз кодланмади.</translation> </message> <message> <source>The supplied passphrases do not match.</source> <translation>Киритилган пароллар мос келмади.</translation> </message> <message> <source>Wallet unlock failed</source> <translation>Ҳамённи қулфдан чиқариш амалга ошмади</translation> </message> <message> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation>Ҳамённи коддан чиқариш учун киритилган парол нотўғри.</translation> </message> <message> <source>Wallet decryption failed</source> <translation>Ҳамённи коддан чиқариш амалга ошмади</translation> </message> <message> <source>Wallet passphrase was successfully changed.</source> <translation>Ҳамён пароли муваффақиятли алмаштирилди.</translation> </message> </context> <context> <name>VeriCoinGUI</name> <message> <source>Sign &amp;message...</source> <translation>&amp;Хабар ёзиш...</translation> </message> <message> <source>Synchronizing with network...</source> <translation>Тармоқ билан синхронланмоқда...</translation> </message> <message> <source>&amp;Overview</source> <translation>&amp;Кўриб чиқиш</translation> </message> <message> <source>Node</source> <translation>Улам</translation> </message> <message> <source>Show general overview of wallet</source> <translation>Ҳамённинг умумий кўринишини кўрсатиш</translation> </message> <message> <source>&amp;Transactions</source> <translation>&amp;Пул ўтказмалари</translation> </message> <message> <source>Browse transaction history</source> <translation>Пул ўтказмалари тарихини кўриш</translation> </message> <message> <source>E&amp;xit</source> <translation>Ч&amp;иқиш</translation> </message> <message> <source>Quit application</source> <translation>Иловадан чиқиш</translation> </message> <message> <source>About &amp;Qt</source> <translation>&amp;Qt ҳақида</translation> </message> <message> <source>Show information about Qt</source> <translation>Qt ҳақидаги маълумотларни кўрсатиш</translation> </message> <message> <source>&amp;Options...</source> <translation>&amp;Мосламалар...</translation> </message> <message> <source>&amp;Encrypt Wallet...</source> <translation>Ҳамённи &amp;кодлаш...</translation> </message> <message> <source>&amp;Backup Wallet...</source> <translation>Ҳамённи &amp;заҳиралаш...</translation> </message> <message> <source>&amp;Change Passphrase...</source> <translation>Махфий сўзни &amp;ўзгартириш...</translation> </message> <message> 
<source>&amp;Sending addresses...</source> <translation>&amp;Жўнатилувчи манзиллар...</translation> </message> <message> <source>&amp;Receiving addresses...</source> <translation>&amp;Қабул қилувчи манзиллар...</translation> </message> <message> <source>Open &amp;URI...</source> <translation>Интернет манзилни очиш</translation> </message> <message> <source>VeriCoin Core client</source> <translation>VeriCoin асос мижози</translation> </message> <message> <source>Importing blocks from disk...</source> <translation>Дискдан блоклар импорт қилинмоқда...</translation> </message> <message> <source>Reindexing blocks on disk...</source> <translation>Дискдаги блоклар қайта индексланмоқда...</translation> </message> <message> <source>Send coins to a VeriCoin address</source> <translation>Тангаларни VeriCoin манзилига жўнатиш</translation> </message> <message> <source>Backup wallet to another location</source> <translation>Ҳамённи бошқа манзилга заҳиралаш</translation> </message> <message> <source>Change the passphrase used for wallet encryption</source> <translation>Паролни ўзгартириш ҳамённи кодлашда фойдаланилади</translation> </message> <message> <source>&amp;Debug window</source> <translation>&amp;Носозликни ҳал қилиш ойнаси</translation> </message> <message> <source>Open debugging and diagnostic console</source> <translation>Носозликни ҳал қилиш ва ташхис терминали</translation> </message> <message> <source>&amp;Verify message...</source> <translation>Хабарни &amp;тасдиқлаш...</translation> </message> <message> <source>VeriCoin</source> <translation>VeriCoin</translation> </message> <message> <source>Wallet</source> <translation>Ҳамён</translation> </message> <message> <source>&amp;Send</source> <translation>&amp;Жўнатиш</translation> </message> <message> <source>&amp;Receive</source> <translation>&amp;Қабул қилиш</translation> </message> <message> <source>Show information about VeriCoin Core</source> <translation>VeriCoin Core ҳақидаги маълумотларни кўрсатиш</translation> </message> <message> <source>&amp;Show / Hide</source> <translation>&amp;Кўрсатиш / Яшириш</translation> </message> <message> <source>Show or hide the main Window</source> <translation>Асосий ойнани кўрсатиш ёки яшириш</translation> </message> <message> <source>Encrypt the private keys that belong to your wallet</source> <translation>Ҳамёнингизга тегишли махфий калитларни кодлаш</translation> </message> <message> <source>Sign messages with your VeriCoin addresses to prove you own them</source> <translation>VeriCoin манзилидан унинг эгаси эканлигингизни исботлаш учун хабарлар ёзинг</translation> </message> <message> <source>Verify messages to ensure they were signed with specified VeriCoin addresses</source> <translation>Хабарларни махсус VeriCoin манзилларингиз билан ёзилганлигига ишонч ҳосил қилиш учун уларни тасдиқланг</translation> </message> <message> <source>&amp;File</source> <translation>&amp;Файл</translation> </message> <message> <source>&amp;Settings</source> <translation>&amp; Созламалар</translation> </message> <message> <source>&amp;Help</source> <translation>&amp;Ёрдам</translation> </message> <message> <source>Tabs toolbar</source> <translation>Ички ойналар асбоблар панели</translation> </message> <message> <source>VeriCoin Core</source> <translation>VeriCoin Core</translation> </message> <message> <source>Request payments (generates QR codes and bitcoin: URIs)</source> <translation>Тўловлар (QR кодлари ва bitcoin ёрдамида яратишлар: URI’лар) сўраш</translation> </message> <message> <source>&amp;About VeriCoin 
Core</source> <translation>VeriCoin Core &amp;ҳақида</translation> </message> <message> <source>Show the list of used sending addresses and labels</source> <translation>Фойдаланилган жўнатилган манзиллар ва ёрлиқлар рўйхатини кўрсатиш</translation> </message> <message> <source>Show the list of used receiving addresses and labels</source> <translation>Фойдаланилган қабул қилинган манзиллар ва ёрлиқлар рўйхатини кўрсатиш</translation> </message> <message> <source>Open a bitcoin: URI or payment request</source> <translation>VeriCoin’ни очиш: URI ёки тўлов сўрови</translation> </message> <message> <source>&amp;Command-line options</source> <translation>&amp;Буйруқлар сатри мосламалари</translation> </message> <message> <source>Show the VeriCoin Core help message to get a list with possible VeriCoin command-line options</source> <translation>Мавжуд VeriCoin буйруқлар матни мосламалари билан VeriCoin Core ёрдам хабарларини олиш рўйхатини кўрсатиш</translation> </message> <message numerus="yes"> <source>%n active connection(s) to VeriCoin network</source> <translation><numerusform>%n та VeriCoin тармоғига фаол уланиш мавжуд</numerusform></translation> </message> <message> <source>No block source available...</source> <translation>Блок манбалари мавжуд эмас...</translation> </message> <message numerus="yes"> <source>%n hour(s)</source> <translation><numerusform>%n соат</numerusform></translation> </message> <message numerus="yes"> <source>%n day(s)</source> <translation><numerusform>%n кун</numerusform></translation> </message> <message numerus="yes"> <source>%n week(s)</source> <translation><numerusform>%n ҳафта</numerusform></translation> </message> <message> <source>%1 and %2</source> <translation>%1 ва %2</translation> </message> <message numerus="yes"> <source>%n year(s)</source> <translation><numerusform>%n йил</numerusform></translation> </message> <message> <source>%1 behind</source> <translation>%1 орқада</translation> </message> <message> <source>Last received block was generated %1 ago.</source> <translation>Сўнги қабул қилинган блок %1 олдин яратилган.</translation> </message> <message> <source>Transactions after this will not yet be visible.</source> <translation>Бундан кейинги пул ўтказмалари кўринмайдиган бўлади.</translation> </message> <message> <source>Error</source> <translation>Хатолик</translation> </message> <message> <source>Warning</source> <translation>Диққат</translation> </message> <message> <source>Information</source> <translation>Маълумот</translation> </message> <message> <source>Up to date</source> <translation>Янгиланган</translation> </message> <message> <source>Catching up...</source> <translation>Банд қилинмоқда...</translation> </message> <message> <source>Sent transaction</source> <translation>Жўнатилган операция</translation> </message> <message> <source>Incoming transaction</source> <translation>Кирувчи операция</translation> </message> <message> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation>Ҳамён &lt;b&gt;кодланган&lt;/b&gt; ва вақтинча &lt;b&gt;қулфдан чиқарилган&lt;/b&gt;</translation> </message> <message> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation>Ҳамён &lt;b&gt;кодланган&lt;/b&gt; ва вақтинча &lt;b&gt;қулфланган&lt;/b&gt;</translation> </message> </context> <context> <name>ClientModel</name> <message> <source>Network Alert</source> <translation>Тармоқ огоҳлантиргичи</translation> </message> </context> <context> 
<name>CoinControlDialog</name> <message> <source>Quantity:</source> <translation>Сони:</translation> </message> <message> <source>Bytes:</source> <translation>Байт:</translation> </message> <message> <source>Amount:</source> <translation>Миқдори:</translation> </message> <message> <source>Priority:</source> <translation>Муҳимлиги:</translation> </message> <message> <source>Fee:</source> <translation>Солиқ:</translation> </message> <message> <source>Dust:</source> <translation>Ахлат қутиси:</translation> </message> <message> <source>After Fee:</source> <translation>Солиқдан сўнг:</translation> </message> <message> <source>Change:</source> <translation>Ўзгартириш:</translation> </message> <message> <source>(un)select all</source> <translation>барчасини танаш (бекор қилиш)</translation> </message> <message> <source>Tree mode</source> <translation>Дарахт усулида</translation> </message> <message> <source>List mode</source> <translation>Рўйхат усулида</translation> </message> <message> <source>Amount</source> <translation>Миқдори</translation> </message> <message> <source>Date</source> <translation>Сана</translation> </message> <message> <source>Confirmations</source> <translation>Тасдиқлашлар</translation> </message> <message> <source>Confirmed</source> <translation>Тасдиқланди</translation> </message> <message> <source>Priority</source> <translation>Муҳимлиги</translation> </message> <message> <source>Copy address</source> <translation>Манзилни нусхалаш</translation> </message> <message> <source>Copy label</source> <translation>Ёрликни нусхала</translation> </message> <message> <source>Copy amount</source> <translation>Кийматни нусхала</translation> </message> <message> <source>Copy transaction ID</source> <translation>Ўтказам рақамидан нусха олиш</translation> </message> <message> <source>Lock unspent</source> <translation>Сарфланмаганларни қулфлаш</translation> </message> <message> <source>Unlock unspent</source> <translation>Сарфланмаганларни қулфдан чиқариш</translation> </message> <message> <source>Copy quantity</source> <translation>Нусха сони</translation> </message> <message> <source>Copy fee</source> <translation>Нусха солиғи</translation> </message> <message> <source>Copy after fee</source> <translation>Нусха солиқдан сўнг</translation> </message> <message> <source>Copy bytes</source> <translation>Нусха байти</translation> </message> <message> <source>Copy priority</source> <translation>Нусха муҳимлиги</translation> </message> <message> <source>Copy dust</source> <translation>Нусха чангги</translation> </message> <message> <source>Copy change</source> <translation>Нусха қайтими</translation> </message> <message> <source>highest</source> <translation>энг юқори</translation> </message> <message> <source>higher</source> <translation>юқорирок</translation> </message> <message> <source>high</source> <translation>юқори</translation> </message> <message> <source>medium-high</source> <translation>ўртача-юқори</translation> </message> <message> <source>medium</source> <translation>ўрта</translation> </message> <message> <source>low-medium</source> <translation>паст-юқори</translation> </message> <message> <source>low</source> <translation>паст</translation> </message> <message> <source>lower</source> <translation>пастроқ</translation> </message> <message> <source>lowest</source> <translation>энг паст</translation> </message> <message> <source>(%1 locked)</source> <translation>(%1 қулфланган)</translation> </message> <message> <source>none</source> <translation>йўқ</translation> </message> 
<message> <source>Can vary +/- %1 satoshi(s) per input.</source> <translation>Ҳар бир кирим +/- %1 сатоши(лар) билан ўзгариши мумкин.</translation> </message> <message> <source>yes</source> <translation>ҳа</translation> </message> <message> <source>no</source> <translation>йўқ</translation> </message> <message> <source>This means a fee of at least %1 per kB is required.</source> <translation>Бу дегани солиқ ҳар кб учун камида %1 талаб қилинади.</translation> </message> <message> <source>Can vary +/- 1 byte per input.</source> <translation>Ҳар бир кирим +/- 1 байт билан ўзгариши мумкин.</translation> </message> <message> <source>Transactions with higher priority are more likely to get included into a block.</source> <translation>Юқори муҳимликка эга бўлган ўтказмалар тезда блокнинг ичига қўшимча олади.</translation> </message> <message> <source>(no label)</source> <translation>(Ёрлик мавжуд эмас)</translation> </message> <message> <source>change from %1 (%2)</source> <translation>%1 (%2)дан ўзгартириш</translation> </message> <message> <source>(change)</source> <translation>(ўзгартириш)</translation> </message> </context> <context> <name>EditAddressDialog</name> <message> <source>Edit Address</source> <translation>Манзилларни таҳрирлаш</translation> </message> <message> <source>&amp;Label</source> <translation>&amp;Ёрлик</translation> </message> <message> <source>The label associated with this address list entry</source> <translation>Ёрлиқ ушбу манзилар рўйхати ёзуви билан боғланган</translation> </message> <message> <source>The address associated with this address list entry. This can only be modified for sending addresses.</source> <translation>Манзил ушбу манзиллар рўйхати ёзуви билан боғланган. Уни фақат жўнатиладиган манзиллар учун ўзгартирса бўлади.</translation> </message> <message> <source>&amp;Address</source> <translation>&amp;Манзил</translation> </message> <message> <source>New receiving address</source> <translation>Янги кабул килувчи манзил</translation> </message> <message> <source>New sending address</source> <translation>Янги жунатилувчи манзил</translation> </message> <message> <source>Edit receiving address</source> <translation>Кабул килувчи манзилни тахрирлаш</translation> </message> <message> <source>Edit sending address</source> <translation>Жунатилувчи манзилни тахрирлаш</translation> </message> <message> <source>The entered address "%1" is already in the address book.</source> <translation>Киритилган "%1" манзили аллақачон манзил китобида.</translation> </message> <message> <source>The entered address "%1" is not a valid VeriCoin address.</source> <translation>Киритилган "%1" манзили тўғри VeriCoin манзили эмас.</translation> </message> <message> <source>Could not unlock wallet.</source> <translation>Ҳамён қулфдан чиқмади.</translation> </message> <message> <source>New key generation failed.</source> <translation>Янги калит яратиш амалга ошмади.</translation> </message> </context> <context> <name>FreespaceChecker</name> <message> <source>A new data directory will be created.</source> <translation>Янги маълумотлар директорияси яратилади.</translation> </message> <message> <source>name</source> <translation>номи</translation> </message> <message> <source>Directory already exists. Add %1 if you intend to create a new directory here.</source> <translation>Директория аллақачон мавжуд. 
Агар бу ерда янги директория яратмоқчи бўлсангиз, %1 қўшинг.</translation> </message> <message> <source>Path already exists, and is not a directory.</source> <translation>Йўл аллақачон мавжуд. У директория эмас.</translation> </message> <message> <source>Cannot create data directory here.</source> <translation>Маълумотлар директориясини бу ерда яратиб бўлмайди..</translation> </message> </context> <context> <name>HelpMessageDialog</name> <message> <source>VeriCoin Core</source> <translation>VeriCoin Core</translation> </message> <message> <source>version</source> <translation>версияси</translation> </message> <message> <source>(%1-bit)</source> <translation>(%1-bit)</translation> </message> <message> <source>About VeriCoin Core</source> <translation>VeriCoin Core ҳақида</translation> </message> <message> <source>Usage:</source> <translation>Фойдаланиш:</translation> </message> <message> <source>command-line options</source> <translation>буйруқлар қатори орқали мослаш</translation> </message> </context> <context> <name>Intro</name> <message> <source>Welcome</source> <translation>Хуш келибсиз</translation> </message> <message> <source>Welcome to VeriCoin Core.</source> <translation>"VeriCoin Core"га хуш келибсиз.</translation> </message> <message> <source>As this is the first time the program is launched, you can choose where VeriCoin Core will store its data.</source> <translation>Биринчи марта дастур ишга тушгани каби сиз VeriCoin Core маълумотларини жойлаштирадиган жойни танлашингиз мумкин.</translation> </message> <message> <source>VeriCoin Core will download and store a copy of the VeriCoin block chain. At least %1GB of data will be stored in this directory, and it will grow over time. The wallet will also be stored in this directory.</source> <translation>VeriCoin Core юклаб олинади ва VeriCoin блок занжири нусхаси жойлаштирилади. Камида %1GB маълумот ушбу директорияга жойлаштирилади ва вақт давомида ўсиб боради. 
Ҳамён ҳам ушбу директорияда жойлашади.</translation> </message> <message> <source>Use the default data directory</source> <translation>Стандарт маълумотлар директориясидан фойдаланиш</translation> </message> <message> <source>Use a custom data directory:</source> <translation>Бошқа маълумотлар директориясида фойдаланинг:</translation> </message> <message> <source>VeriCoin Core</source> <translation>VeriCoin Core</translation> </message> <message> <source>Error: Specified data directory "%1" cannot be created.</source> <translation>Хато: кўрсатилган "%1" маълумотлар директориясини яратиб бўлмайди.</translation> </message> <message> <source>Error</source> <translation>Хатолик</translation> </message> </context> <context> <name>OpenURIDialog</name> <message> <source>Open URI</source> <translation>URI ни очиш</translation> </message> <message> <source>Open payment request from URI or file</source> <translation>URL файлдан тўлов сўровларини очиш</translation> </message> <message> <source>URI:</source> <translation>URI:</translation> </message> <message> <source>Select payment request file</source> <translation>Тўлов сўрови файлини танлаш</translation> </message> <message> <source>Select payment request file to open</source> <translation>Очиш учун тўлов сўрови файлини танлаш</translation> </message> </context> <context> <name>OptionsDialog</name> <message> <source>Options</source> <translation>Танламалар</translation> </message> <message> <source>&amp;Main</source> <translation>&amp;Асосий</translation> </message> <message> <source>Size of &amp;database cache</source> <translation>&amp;Маълумотлар базаси кеши</translation> </message> <message> <source>MB</source> <translation>МБ</translation> </message> <message> <source>Number of script &amp;verification threads</source> <translation>Мавзуларни &amp;тўғрилаш скрипти миқдори</translation> </message> <message> <source>Accept connections from outside</source> <translation>Ташқаридан уланишларга рози бўлиш</translation> </message> <message> <source>Allow incoming connections</source> <translation>Кирувчи уланишларга рухсат бериш</translation> </message> <message> <source>IP address of the proxy (e.g. IPv4: 127.0.0.1 / IPv6: ::1)</source> <translation>Прокси IP манзили (масалан: IPv4: 127.0.0.1 / IPv6: ::1)</translation> </message> <message> <source>Third party transaction URLs</source> <translation>Бегона тараф ўтказмалари URL манзиллари</translation> </message> <message> <source>Proxy &amp;IP:</source> <translation>Прокси &amp;IP рақами:</translation> </message> <message> <source>&amp;Port:</source> <translation>&amp;Порт:</translation> </message> <message> <source>Port of the proxy (e.g. 9050)</source> <translation>Прокси порти (e.g. 
9050)</translation> </message> <message> <source>&amp;Window</source> <translation>&amp;Ойна</translation> </message> <message> <source>Show only a tray icon after minimizing the window.</source> <translation>Ойна йиғилгандан сўнг фақат трэй нишончаси кўрсатилсин.</translation> </message> <message> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation>Манзиллар панели ўрнига трэйни &amp;йиғиш</translation> </message> <message> <source>M&amp;inimize on close</source> <translation>Ёпишда й&amp;иғиш</translation> </message> <message> <source>&amp;Display</source> <translation>&amp;Кўрсатиш</translation> </message> <message> <source>User Interface &amp;language:</source> <translation>Фойдаланувчи интерфейси &amp;тили:</translation> </message> <message> <source>&amp;Unit to show amounts in:</source> <translation>Миқдорларни кўрсатиш учун &amp;қисм:</translation> </message> <message> <source>&amp;OK</source> <translation>&amp;OK</translation> </message> <message> <source>&amp;Cancel</source> <translation>&amp;Бекор қилиш</translation> </message> <message> <source>default</source> <translation>стандарт</translation> </message> <message> <source>none</source> <translation>йўқ</translation> </message> <message> <source>Confirm options reset</source> <translation>Тасдиқлаш танловларини рад қилиш</translation> </message> <message> <source>Client restart required to activate changes.</source> <translation>Ўзгаришлар амалга ошиши учун мижозни қайта ишга тушириш талаб қилинади.</translation> </message> <message> <source>This change would require a client restart.</source> <translation>Ушбу ўзгариш мижозни қайтадан ишга туширишни талаб қилади.</translation> </message> <message> <source>The supplied proxy address is invalid.</source> <translation>Келтирилган прокси манзили ишламайди.</translation> </message> </context> <context> <name>OverviewPage</name> <message> <source>Form</source> <translation>Шакл</translation> </message> <message> <source>The displayed information may be out of date. Your wallet automatically synchronizes with the VeriCoin network after a connection is established, but this process has not completed yet.</source> <translation>Кўрсатилган маълумот эскирган бўлиши мумкин. 
Ҳамёнингиз алоқа ўрнатилгандан сўнг VeriCoin тармоқ билан автоматик тарзда синхронланади, аммо жараён ҳалигача тугалланмади.</translation> </message> <message> <source>Watch-only:</source> <translation>Фақат кўришга</translation> </message> <message> <source>Available:</source> <translation>Мавжуд:</translation> </message> <message> <source>Your current spendable balance</source> <translation>Жорий сарфланадиган балансингиз</translation> </message> <message> <source>Pending:</source> <translation>Кутилмоқда:</translation> </message> <message> <source>Total of transactions that have yet to be confirmed, and do not yet count toward the spendable balance</source> <translation>Жами ўтказмалар ҳозиргача тасдиқланган ва сафланадиган баланс томонга ҳали ҳам ҳисобланмади</translation> </message> <message> <source>Immature:</source> <translation>Тайёр эмас:</translation> </message> <message> <source>Mined balance that has not yet matured</source> <translation>Миналаштирилган баланс ҳалигача тайёр эмас</translation> </message> <message> <source>Balances</source> <translation>Баланслар</translation> </message> <message> <source>Total:</source> <translation>Жами:</translation> </message> <message> <source>Your current total balance</source> <translation>Жорий умумий балансингиз</translation> </message> <message> <source>Your current balance in watch-only addresses</source> <translation>Жорий балансингиз фақат кўринадиган манзилларда</translation> </message> <message> <source>Spendable:</source> <translation>Сарфланадиган:</translation> </message> <message> <source>Recent transactions</source> <translation>Сўнгги пул ўтказмалари</translation> </message> </context> <context> <name>PaymentServer</name> </context> <context> <name>PeerTableModel</name> <message> <source>Ping Time</source> <translation>Ping вақти</translation> </message> </context> <context> <name>QObject</name> <message> <source>Amount</source> <translation>Миқдори</translation> </message> <message> <source>%1 m</source> <translation>%1 д</translation> </message> <message> <source>%1 s</source> <translation>%1 с</translation> </message> <message> <source>None</source> <translation>Йўқ</translation> </message> <message> <source>N/A</source> <translation>Тўғри келмайди</translation> </message> <message> <source>%1 ms</source> <translation>%1 мс</translation> </message> </context> <context> <name>QRImageWidget</name> <message> <source>&amp;Save Image...</source> <translation>Расмни &amp;сақлаш</translation> </message> <message> <source>&amp;Copy Image</source> <translation>Расмдан &amp;нусха олиш</translation> </message> <message> <source>Save QR Code</source> <translation>QR кодни сақлаш</translation> </message> <message> <source>PNG Image (*.png)</source> <translation>PNG расм (*.png)</translation> </message> </context> <context> <name>RPCConsole</name> <message> <source>Client name</source> <translation>Мижоз номи</translation> </message> <message> <source>N/A</source> <translation>Тўғри келмайди</translation> </message> <message> <source>Client version</source> <translation>Мижоз номи</translation> </message> <message> <source>&amp;Information</source> <translation>&amp;Маълумот</translation> </message> <message> <source>Debug window</source> <translation>Тузатиш ойнаси</translation> </message> <message> <source>General</source> <translation>Асосий</translation> </message> <message> <source>Using OpenSSL version</source> <translation>Фойдаланилаётган OpenSSL версияси</translation> </message> <message> <source>Using BerkeleyDB 
version</source> <translation>Фойдаланилаётган BerkeleyDB версияси</translation> </message> <message> <source>Startup time</source> <translation>Бошланиш вақти</translation> </message> <message> <source>Network</source> <translation>Тармоқ</translation> </message> <message> <source>Name</source> <translation>Ном</translation> </message> <message> <source>&amp;Peers</source> <translation>&amp;Уламлар</translation> </message> <message> <source>Select a peer to view detailed information.</source> <translation>Батафсил маълумотларни кўриш учун уламни танланг.</translation> </message> <message> <source>Starting Height</source> <translation>Узунликнинг бошланиши</translation> </message> <message> <source>Sync Height</source> <translation>Узунликни синхронлаш</translation> </message> <message> <source>Ban Score</source> <translation>Тезликни бан қилиш</translation> </message> <message> <source>Connection Time</source> <translation>Уланиш вақти</translation> </message> <message> <source>Last Send</source> <translation>Сўнгги жўнатилган</translation> </message> <message> <source>Last Receive</source> <translation>Сўнгги қабул қилинган</translation> </message> <message> <source>Bytes Sent</source> <translation>Жўнатилган байтлар</translation> </message> <message> <source>Bytes Received</source> <translation>Қабул қилинган байтлар</translation> </message> <message> <source>Ping Time</source> <translation>Ping вақти</translation> </message> <message> <source>Last block time</source> <translation>Сўнгги блок вақти</translation> </message> <message> <source>&amp;Open</source> <translation>&amp;Очиш</translation> </message> <message> <source>&amp;Console</source> <translation>&amp;Терминал</translation> </message> <message> <source>&amp;Network Traffic</source> <translation>&amp;Тармоқ трафиги</translation> </message> <message> <source>&amp;Clear</source> <translation>&amp;Тозалаш</translation> </message> <message> <source>Totals</source> <translation>Жами</translation> </message> <message> <source>In:</source> <translation>Ичига:</translation> </message> <message> <source>Out:</source> <translation>Ташқарига:</translation> </message> <message> <source>Build date</source> <translation>Тузилган санаси</translation> </message> <message> <source>Debug log file</source> <translation>Тузатиш журнали файли</translation> </message> <message> <source>Clear console</source> <translation>Терминални тозалаш</translation> </message> <message> <source>Use up and down arrows to navigate history, and &lt;b&gt;Ctrl-L&lt;/b&gt; to clear screen.</source> <translation>Тарихни кўриш учун тепага ва пастга кўрсаткичларидан фойдаланинг, экранни тозалаш учун &lt;b&gt;Ctrl-L&lt;/b&gt; тугмалар бирикмасидан фойдаланинг.</translation> </message> <message> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation>Мавжуд буйруқларни кўриш учун &lt;b&gt;help&lt;/b&gt; деб ёзинг.</translation> </message> <message> <source>%1 B</source> <translation>%1 Б</translation> </message> <message> <source>%1 KB</source> <translation>%1 КБ</translation> </message> <message> <source>%1 MB</source> <translation>%1 МБ</translation> </message> <message> <source>%1 GB</source> <translation>%1 ГБ</translation> </message> <message> <source>via %1</source> <translation>%1 орқали</translation> </message> <message> <source>never</source> <translation>ҳеч қачон</translation> </message> <message> <source>Unknown</source> <translation>Номаълум</translation> </message> <message> <source>Fetching...</source> 
<translation>Олинмоқда...</translation> </message> </context> <context> <name>ReceiveCoinsDialog</name> <message> <source>&amp;Amount:</source> <translation>&amp;Миқдор:</translation> </message> <message> <source>&amp;Label:</source> <translation>&amp;Ёрлиқ:</translation> </message> <message> <source>&amp;Message:</source> <translation>&amp;Хабар:</translation> </message> <message> <source>Reuse one of the previously used receiving addresses. Reusing addresses has security and privacy issues. Do not use this unless re-generating a payment request made before.</source> <translation>Олдинги фойдаланилган қабул қилинган манзиллардан биридан қайта фойдаланилсин. Хавсизлик ва махфийлик муаммолар мавжуд манзиллардан қайта фойдаланилмоқда. Бундан тўлов сўров қайта яратилмагунича фойдаланманг.</translation> </message> <message> <source>An optional label to associate with the new receiving address.</source> <translation>Янги қабул қилинаётган манзил билан боғланган танланадиган ёрлиқ.</translation> </message> <message> <source>Use this form to request payments. All fields are &lt;b&gt;optional&lt;/b&gt;.</source> <translation>Ушбу сўровдан тўловларни сўраш учун фойдаланинг. Барча майдонлар &lt;b&gt;мажбурий эмас&lt;/b&gt;.</translation> </message> <message> <source>An optional amount to request. Leave this empty or zero to not request a specific amount.</source> <translation>Хоҳланган миқдор сўрови. Кўрсатилган миқдорни сўраш учун буни бўш ёки ноль қолдиринг.</translation> </message> <message> <source>Clear all fields of the form.</source> <translation>Шаклнинг барча майдончаларини тозалаш</translation> </message> <message> <source>Clear</source> <translation>Тозалаш</translation> </message> <message> <source>Requested payments history</source> <translation>Сўралган тўлов тарихи</translation> </message> <message> <source>&amp;Request payment</source> <translation>Тўловни &amp;сўраш</translation> </message> <message> <source>Show the selected request (does the same as double clicking an entry)</source> <translation>Танланган сўровни кўрсатиш (икки марта босилганда ҳам бир хил амал бажарилсин)</translation> </message> <message> <source>Show</source> <translation>Кўрсатиш</translation> </message> <message> <source>Remove the selected entries from the list</source> <translation>Танланганларни рўйхатдан ўчириш</translation> </message> <message> <source>Remove</source> <translation>Ўчириш</translation> </message> <message> <source>Copy label</source> <translation>Ёрликни нусхала</translation> </message> <message> <source>Copy message</source> <translation>Хабарни нусхала</translation> </message> <message> <source>Copy amount</source> <translation>Кийматни нусхала</translation> </message> </context> <context> <name>ReceiveRequestDialog</name> <message> <source>&amp;Save Image...</source> <translation>Расмни &amp;сақлаш</translation> </message> <message> <source>Address</source> <translation>Манзил</translation> </message> <message> <source>Amount</source> <translation>Миқдори</translation> </message> <message> <source>Label</source> <translation>Ёрлик</translation> </message> </context> <context> <name>RecentRequestsTableModel</name> <message> <source>Date</source> <translation>Сана</translation> </message> <message> <source>Label</source> <translation>Ёрлик</translation> </message> <message> <source>Amount</source> <translation>Миқдори</translation> </message> <message> <source>(no label)</source> <translation>(Ёрлик мавжуд эмас)</translation> </message> </context> <context> <name>SendCoinsDialog</name> 
<message> <source>Send Coins</source> <translation>Тангаларни жунат</translation> </message> <message> <source>Quantity:</source> <translation>Сони:</translation> </message> <message> <source>Bytes:</source> <translation>Байт:</translation> </message> <message> <source>Amount:</source> <translation>Миқдори:</translation> </message> <message> <source>Priority:</source> <translation>Муҳимлиги:</translation> </message> <message> <source>Fee:</source> <translation>Солиқ:</translation> </message> <message> <source>After Fee:</source> <translation>Солиқдан сўнг:</translation> </message> <message> <source>Change:</source> <translation>Ўзгартириш:</translation> </message> <message> <source>If this is activated, but the change address is empty or invalid, change will be sent to a newly generated address.</source> <translation>Агар бу фаоллаштирилса, аммо ўзгартирилган манзил бўл ёки нотўғри бўлса, ўзгариш янги яратилган манзилга жўнатилади.</translation> </message> <message> <source>Custom change address</source> <translation>Бошқа ўзгартирилган манзил</translation> </message> <message> <source>Send to multiple recipients at once</source> <translation>Бирданига бир нечта қабул қилувчиларга жўнатиш</translation> </message> <message> <source>Clear all fields of the form.</source> <translation>Шаклнинг барча майдончаларини тозалаш</translation> </message> <message> <source>Dust:</source> <translation>Ахлат қутиси:</translation> </message> <message> <source>Balance:</source> <translation>Баланс</translation> </message> <message> <source>Confirm the send action</source> <translation>Жўнатиш амалини тасдиқлаш</translation> </message> <message> <source>Confirm send coins</source> <translation>Тангалар жўнаишни тасдиқлаш</translation> </message> <message> <source>Copy quantity</source> <translation>Нусха сони</translation> </message> <message> <source>Copy amount</source> <translation>Кийматни нусхала</translation> </message> <message> <source>Copy fee</source> <translation>Нусха солиғи</translation> </message> <message> <source>Copy after fee</source> <translation>Нусха солиқдан сўнг</translation> </message> <message> <source>Copy bytes</source> <translation>Нусха байти</translation> </message> <message> <source>Copy priority</source> <translation>Нусха муҳимлиги</translation> </message> <message> <source>Copy change</source> <translation>Нусха қайтими</translation> </message> <message> <source>The amount to pay must be larger than 0.</source> <translation>Тўлов миқдори 0. дан катта бўлиши керак. 
</translation> </message> <message> <source>Warning: Invalid VeriCoin address</source> <translation>Диққат: Нотўғр VeriCoin манзили</translation> </message> <message> <source>(no label)</source> <translation>(Ёрлик мавжуд эмас)</translation> </message> <message> <source>Warning: Unknown change address</source> <translation>Диққат: Номаълум ўзгариш манзили</translation> </message> <message> <source>Copy dust</source> <translation>Нусха чангги</translation> </message> <message> <source>Are you sure you want to send?</source> <translation>Жўнатишни хоҳлашингизга ишончингиз комилми?</translation> </message> <message> <source>added as transaction fee</source> <translation>ўтказма солиғи қўшилди</translation> </message> </context> <context> <name>SendCoinsEntry</name> <message> <source>A&amp;mount:</source> <translation>&amp;Миқдори:</translation> </message> <message> <source>Pay &amp;To:</source> <translation>&amp;Тўлов олувчи:</translation> </message> <message> <source>Enter a label for this address to add it to your address book</source> <translation>Манзил китобингизга қўшиш учун ушбу манзил учун ёрлиқ киритинг</translation> </message> <message> <source>&amp;Label:</source> <translation>&amp;Ёрлиқ:</translation> </message> <message> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <source>Paste address from clipboard</source> <translation>Клипбоарддан манзилни қўйиш</translation> </message> <message> <source>Alt+P</source> <translation>Alt+P</translation> </message> </context> <context> <name>ShutdownWindow</name> </context> <context> <name>SignVerifyMessageDialog</name> <message> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <source>Paste address from clipboard</source> <translation>Клипбоарддан манзилни қўйиш</translation> </message> <message> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <source>Signature</source> <translation>Имзо</translation> </message> </context> <context> <name>SplashScreen</name> <message> <source>VeriCoin Core</source> <translation>VeriCoin Core</translation> </message> <message> <source>The VeriCoin Core developers</source> <translation>VeriCoin Core дастурчилари</translation> </message> <message> <source>[testnet]</source> <translation>[testnet]</translation> </message> </context> <context> <name>TrafficGraphWidget</name> </context> <context> <name>TransactionDesc</name> <message> <source>Open until %1</source> <translation>%1 гача очиш</translation> </message> <message> <source>%1/unconfirmed</source> <translation>%1/тасдиқланмади</translation> </message> <message> <source>%1 confirmations</source> <translation>%1 тасдиқлашлар</translation> </message> <message> <source>Date</source> <translation>Сана</translation> </message> <message> <source>Transaction ID</source> <translation>ID</translation> </message> <message> <source>Amount</source> <translation>Миқдори</translation> </message> <message> <source>, has not been successfully broadcast yet</source> <translation>, ҳалигача трансляция қилингани йўқ</translation> </message> <message> <source>unknown</source> <translation>Номаълум</translation> </message> </context> <context> <name>TransactionDescDialog</name> <message> <source>Transaction details</source> <translation>Операция тафсилотлари</translation> </message> <message> <source>This pane shows a detailed description of the transaction</source> <translation>Ушбу ойна операциянинг батафсил таърифини кўрсатади</translation> </message> </context> <context> 
<name>TransactionTableModel</name> <message> <source>Date</source> <translation>Сана</translation> </message> <message> <source>Type</source> <translation>Тури</translation> </message> <message> <source>Open until %1</source> <translation>%1 гача очиш</translation> </message> <message> <source>Confirmed (%1 confirmations)</source> <translation>Тасдиқланди (%1 та тасдиқ)</translation> </message> <message> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation>Ушбу тўсиқ бирорта бошқа уланишлар томонидан қабул қилинмаган ва тасдиқланмаган!</translation> </message> <message> <source>Generated but not accepted</source> <translation>Яратилди, аммо қабул қилинмади</translation> </message> <message> <source>Label</source> <translation>Ёрлиқ</translation> </message> <message> <source>Received with</source> <translation>Ёрдамида қабул қилиш</translation> </message> <message> <source>Sent to</source> <translation>Жўнатиш</translation> </message> <message> <source>Payment to yourself</source> <translation>Ўзингизга тўлов</translation> </message> <message> <source>Mined</source> <translation>Фойда</translation> </message> <message> <source>(n/a)</source> <translation>(қ/қ)</translation> </message> <message> <source>Transaction status. Hover over this field to show number of confirmations.</source> <translation>Ўтказма ҳолати. Ушбу майдон бўйлаб тасдиқлашлар сонини кўрсатиш.</translation> </message> <message> <source>Date and time that the transaction was received.</source> <translation>Ўтказма қабул қилинган сана ва вақт.</translation> </message> <message> <source>Type of transaction.</source> <translation>Пул ўтказмаси тури</translation> </message> <message> <source>Amount removed from or added to balance.</source> <translation>Миқдор ўчирилган ёки балансга қўшилган.</translation> </message> </context> <context> <name>TransactionView</name> <message> <source>All</source> <translation>Барча</translation> </message> <message> <source>Today</source> <translation>Бугун</translation> </message> <message> <source>This week</source> <translation>Шу ҳафта</translation> </message> <message> <source>This month</source> <translation>Шу ой</translation> </message> <message> <source>Last month</source> <translation>Ўтган хафта</translation> </message> <message> <source>This year</source> <translation>Шу йил</translation> </message> <message> <source>Range...</source> <translation>Оралиқ...</translation> </message> <message> <source>Received with</source> <translation>Ёрдамида қабул қилинган</translation> </message> <message> <source>Sent to</source> <translation>Жўнатиш</translation> </message> <message> <source>To yourself</source> <translation>Ўзингизга</translation> </message> <message> <source>Mined</source> <translation>Фойда</translation> </message> <message> <source>Other</source> <translation>Бошка</translation> </message> <message> <source>Enter address or label to search</source> <translation>Излаш учун манзил ёки ёрлиқни киритинг</translation> </message> <message> <source>Min amount</source> <translation>Мин қиймат</translation> </message> <message> <source>Copy address</source> <translation>Манзилни нусхалаш</translation> </message> <message> <source>Copy label</source> <translation>Ёрликни нусхалаш</translation> </message> <message> <source>Copy amount</source> <translation>Кийматни нусхала</translation> </message> <message> <source>Copy transaction ID</source> <translation>Ўтказам рақамидан нусха олиш</translation> </message> <message> 
<source>Edit label</source> <translation>Ёрликни тахрирлаш</translation> </message> <message> <source>Exporting Failed</source> <translation>Экспорт қилиб бўлмади</translation> </message> <message> <source>Comma separated file (*.csv)</source> <translation>Вергул билан ажратилган файл (*.csv)</translation> </message> <message> <source>Confirmed</source> <translation>Тасдиқланди</translation> </message> <message> <source>Date</source> <translation>Сана</translation> </message> <message> <source>Type</source> <translation>Туркум</translation> </message> <message> <source>Label</source> <translation>Ёрлик</translation> </message> <message> <source>Address</source> <translation>Манзил</translation> </message> <message> <source>ID</source> <translation>ID</translation> </message> <message> <source>Range:</source> <translation>Оралиқ:</translation> </message> <message> <source>to</source> <translation>Кимга</translation> </message> </context> <context> <name>UnitDisplayStatusBarControl</name> </context> <context> <name>WalletFrame</name> </context> <context> <name>WalletModel</name> <message> <source>Send Coins</source> <translation>Тангаларни жунат</translation> </message> </context> <context> <name>WalletView</name> <message> <source>&amp;Export</source> <translation>&amp;Экспорт</translation> </message> <message> <source>Export the data in the current tab to a file</source> <translation>Жорий ички ойна ичидаги маълумотларни файлга экспорт қилиш</translation> </message> </context> <context> <name>bitcoin-core</name> <message> <source>Options:</source> <translation>Танламалар:</translation> </message> <message> <source>Specify data directory</source> <translation>Маълумотлар директориясини кўрсатинг</translation> </message> <message> <source>Accept command line and JSON-RPC commands</source> <translation>Буйруқлар сатри ва JSON-RPC буйруқларига рози бўлинг</translation> </message> <message> <source>Run in the background as a daemon and accept commands</source> <translation>Демон сифатида орқа фонда ишга туширинг ва буйруқларга рози бўлинг</translation> </message> <message> <source>Use the test network</source> <translation>Синов тармоғидан фойдаланинг</translation> </message> <message> <source>Choose data directory on startup (default: 0)</source> <translation>Ишга тушиш вақтида маълумотлар директориясини танлаш (стандарт: 0)</translation> </message> <message> <source>Information</source> <translation>Маълумот</translation> </message> <message> <source>Set SSL root certificates for payment request (default: -system-)</source> <translation>Тўлов сўровлари учун SSL асос сертификатларини ўрнатиш (стандарт: -system-)</translation> </message> <message> <source>Start minimized</source> <translation>Йиғилганларни бошлаш</translation> </message> <message> <source>Username for JSON-RPC connections</source> <translation>JSON-RPC уланишлари учун фойдаланувчи номи</translation> </message> <message> <source>Warning</source> <translation>Диққат</translation> </message> <message> <source>Password for JSON-RPC connections</source> <translation>JSON-RPC уланишлари учун парол</translation> </message> <message> <source>Use OpenSSL (https) for JSON-RPC connections</source> <translation>JSON-RPC уланишлари учун OpenSSL (https)дан фойдаланиш</translation> </message> <message> <source>This help message</source> <translation>Бу ёрдам хабари</translation> </message> <message> <source>Loading addresses...</source> <translation>Манзиллар юкланмоқда...</translation> </message> <message> <source>Loading block 
index...</source> <translation>Тўсиқ индекси юкланмоқда...</translation> </message> <message> <source>Loading wallet...</source> <translation>Ҳамён юкланмоқда...</translation> </message> <message> <source>Rescanning...</source> <translation>Қайта текшириб чиқилмоқда...</translation> </message> <message> <source>Done loading</source> <translation>Юклаш тайёр</translation> </message> <message> <source>Error</source> <translation>Хатолик</translation> </message> </context> </TS>
{ "content_hash": "0dfbc741cfac90c3228694eeae185a97", "timestamp": "", "source": "github", "line_count": 1914, "max_line_length": 302, "avg_line_length": 34.40282131661442, "alnum_prop": 0.6426260877488724, "repo_name": "vericoin/vericoin-core", "id": "feb239a1cfc494efea159f77325d549773ea02f3", "size": "75417", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/qt/locale/[email protected]", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "415522" }, { "name": "C++", "bytes": "3775139" }, { "name": "CSS", "bytes": "1127" }, { "name": "HTML", "bytes": "50621" }, { "name": "Java", "bytes": "2100" }, { "name": "M4", "bytes": "134402" }, { "name": "Makefile", "bytes": "89021" }, { "name": "Objective-C", "bytes": "2023" }, { "name": "Objective-C++", "bytes": "7243" }, { "name": "Protocol Buffer", "bytes": "2309" }, { "name": "Python", "bytes": "453747" }, { "name": "QMake", "bytes": "2019" }, { "name": "Roff", "bytes": "19809" }, { "name": "Shell", "bytes": "38074" } ], "symlink_target": "" }
python manage.py dumpdata auth.User --format json > $1/user.json
python manage.py dumpdata arbeitsplan --format json > $1/arbeitsplan.json
python manage.py dumpdata post_office --format json > $1/po.json
python manage.py dumpdata auth.Group --format json > $1/groups.json

# pretty-print each dump as syntax-highlighted HTML:
for d in $1/*.json ; do
    echo $d
    echo $1/`basename -s .json $d`.html
    python -m json.tool $d | pygmentize -O full,style=emacs -l javascript -f html -o $1/`basename -s .json $d`.html
done

# and cycle back the previous database
mv old.sq svpb.sq
{ "content_hash": "9ba31d2ceccf99da97e43d894ff71728", "timestamp": "", "source": "github", "line_count": 14, "max_line_length": 118, "avg_line_length": 39.785714285714285, "alnum_prop": 0.6947935368043088, "repo_name": "hkarl/svpb", "id": "5eb37ac796500149cf69d1b9c19415fa07710970", "size": "586", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "scripts/dumptest.sh", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "109065" }, { "name": "HTML", "bytes": "136016" }, { "name": "JavaScript", "bytes": "13819" }, { "name": "Python", "bytes": "341582" }, { "name": "Shell", "bytes": "1908" } ], "symlink_target": "" }
package org.apache.hadoop.hdfs.server.namenode; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.DataInput; import java.io.DataInputStream; import java.io.DataOutput; import java.io.DataOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.InputStream; import java.io.IOException; import java.io.OutputStream; import java.io.InterruptedIOException; import java.io.RandomAccessFile; import java.nio.ByteBuffer; import java.security.DigestInputStream; import java.security.DigestOutputStream; import java.security.MessageDigest; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Random; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.fs.permission.PermissionStatus; import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.protocol.Block; import org.apache.hadoop.hdfs.protocol.DatanodeID; import org.apache.hadoop.hdfs.protocol.FSConstants; import org.apache.hadoop.hdfs.protocol.QuotaExceededException; import org.apache.hadoop.hdfs.server.common.HdfsConstants; import org.apache.hadoop.hdfs.server.common.InconsistentFSStateException; import org.apache.hadoop.hdfs.server.common.Storage; import org.apache.hadoop.hdfs.server.common.StorageInfo; import org.apache.hadoop.hdfs.server.common.UpgradeManager; import org.apache.hadoop.hdfs.server.common.HdfsConstants.NodeType; import org.apache.hadoop.hdfs.server.common.HdfsConstants.StartupOption; import org.apache.hadoop.hdfs.server.namenode.BlocksMap.BlockInfo; import org.apache.hadoop.hdfs.server.namenode.FSEditLog.EditLogFileInputStream; import org.apache.hadoop.hdfs.util.DataTransferThrottler; import org.apache.hadoop.io.MD5Hash; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.UTF8; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.compress.CompressionCodec; import org.apache.hadoop.io.compress.CompressionCodecFactory; /** * FSImage handles checkpointing and logging of the namespace edits. 
* */ public class FSImage extends Storage { private static final SimpleDateFormat DATE_FORM = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); static final String MESSAGE_DIGEST_PROPERTY = "imageMD5Digest"; // // The filenames used for storing the images // enum NameNodeFile { IMAGE ("fsimage"), TIME ("fstime"), EDITS ("edits"), IMAGE_NEW ("fsimage.ckpt"), EDITS_NEW ("edits.new"); private String fileName = null; private NameNodeFile(String name) {this.fileName = name;} String getName() {return fileName;} } // checkpoint states enum CheckpointStates { START(0), ROLLED_EDITS(1), UPLOAD_START(2), UPLOAD_DONE(3); private final int code; CheckpointStates(int code) { this.code = code; } public int serialize() { return this.code; } public static CheckpointStates deserialize(int code) { switch(code) { case 0: return CheckpointStates.START; case 1: return CheckpointStates.ROLLED_EDITS; case 2: return CheckpointStates.UPLOAD_START; case 3: return CheckpointStates.UPLOAD_DONE; default: // illegal return null; } } } /** * Implementation of StorageDirType specific to namenode storage * A Storage directory could be of type IMAGE which stores only fsimage, * or of type EDITS which stores edits or of type IMAGE_AND_EDITS which * stores both fsimage and edits. */ static enum NameNodeDirType implements StorageDirType { UNDEFINED, IMAGE, EDITS, IMAGE_AND_EDITS; public StorageDirType getStorageDirType() { return this; } public boolean isOfType(StorageDirType type) { if ((this == IMAGE_AND_EDITS) && (type == IMAGE || type == EDITS)) return true; return this == type; } } protected FSNamesystem namesystem = null; protected long checkpointTime = -1L; FSEditLog editLog = null; private boolean isUpgradeFinalized = false; MD5Hash imageDigest = new MD5Hash(); private boolean newImageDigest = true; MD5Hash checkpointImageDigest = null; /** * flag that controls if we try to restore failed storages */ private boolean restoreFailedStorage = false; public void setRestoreFailedStorage(boolean val) { LOG.info("enabled failed storage replicas restore"); restoreFailedStorage=val; } public boolean getRestoreFailedStorage() { return restoreFailedStorage; } /** * list of failed (and thus removed) storages */ protected List<StorageDirectory> removedStorageDirs = new ArrayList<StorageDirectory>(); /** * Directories for importing an image from a checkpoint. */ private Collection<File> checkpointDirs; private Collection<File> checkpointEditsDirs; /** * Image compression related fields */ private boolean compressImage = false; // if image should be compressed private CompressionCodec saveCodec; // the compression codec private CompressionCodecFactory codecFac; // all the supported codecs private boolean saveOnStartup; // Should the namenode save image on startup or not DataTransferThrottler imageTransferThrottler = null; // throttle image transfer /** * Can fs-image be rolled? 
*/ volatile CheckpointStates ckptState = FSImage.CheckpointStates.START; /** * Used for saving the image to disk */ static private final ThreadLocal<FsPermission> FILE_PERM = new ThreadLocal<FsPermission>() { @Override protected FsPermission initialValue() { return new FsPermission((short) 0); } }; static private final byte[] PATH_SEPARATOR = DFSUtil.string2Bytes(Path.SEPARATOR); /* * stores a temporary string used to serailize/deserialize objects to fsimage */ private static final ThreadLocal<UTF8> U_STR = new ThreadLocal<UTF8>() { protected synchronized UTF8 initialValue() { return new UTF8(); } }; /** */ FSImage() { this((FSNamesystem)null); } FSImage(FSNamesystem ns) { super(NodeType.NAME_NODE); this.editLog = new FSEditLog(this); setFSNamesystem(ns); } /** * Constructor * @param conf Configuration */ FSImage(Configuration conf) throws IOException { this(); setCheckpointDirectories(FSImage.getCheckpointDirs(conf, null), FSImage.getCheckpointEditsDirs(conf, null)); this.compressImage = conf.getBoolean( HdfsConstants.DFS_IMAGE_COMPRESS_KEY, HdfsConstants.DFS_IMAGE_COMPRESS_DEFAULT); this.codecFac = new CompressionCodecFactory(conf); this.saveOnStartup = conf.getBoolean( HdfsConstants.DFS_IMAGE_SAVE_ON_START_KEY, HdfsConstants.DFS_IMAGE_SAVE_ON_START_DEFAULT); if (this.compressImage) { String codecClassName = conf.get( HdfsConstants.DFS_IMAGE_COMPRESSION_CODEC_KEY, HdfsConstants.DFS_IMAGE_COMPRESSION_CODEC_DEFAULT); this.saveCodec = codecFac.getCodecByClassName(codecClassName); if (this.saveCodec == null) { throw new IOException("Not supported codec: " + codecClassName); } } long transferBandwidth = conf.getLong( HdfsConstants.DFS_IMAGE_TRANSFER_RATE_KEY, HdfsConstants.DFS_IMAGE_TRANSFER_RATE_DEFAULT); if (transferBandwidth > 0) { this.imageTransferThrottler = new DataTransferThrottler(transferBandwidth); } } /** */ FSImage(Collection<File> fsDirs, Collection<File> fsEditsDirs) throws IOException { this(); setStorageDirectories(fsDirs, fsEditsDirs); } public FSImage(StorageInfo storageInfo) { super(NodeType.NAME_NODE, storageInfo); } /** * Represents an Image (image and edit file). */ public FSImage(File imageDir) throws IOException { this(); ArrayList<File> dirs = new ArrayList<File>(1); ArrayList<File> editsDirs = new ArrayList<File>(1); dirs.add(imageDir); editsDirs.add(imageDir); setStorageDirectories(dirs, editsDirs); } protected FSNamesystem getFSNamesystem() { return namesystem; } protected void setFSNamesystem(FSNamesystem ns) { namesystem = ns; } void setStorageDirectories(Collection<File> fsNameDirs, Collection<File> fsEditsDirs ) throws IOException { this.storageDirs = new ArrayList<StorageDirectory>(); this.removedStorageDirs = new ArrayList<StorageDirectory>(); // Add all name dirs with appropriate NameNodeDirType for (File dirName : fsNameDirs) { boolean isAlsoEdits = false; for (File editsDirName : fsEditsDirs) { if (editsDirName.compareTo(dirName) == 0) { isAlsoEdits = true; fsEditsDirs.remove(editsDirName); break; } } NameNodeDirType dirType = (isAlsoEdits) ? 
NameNodeDirType.IMAGE_AND_EDITS : NameNodeDirType.IMAGE; this.addStorageDir(new StorageDirectory(dirName, dirType)); } // Add edits dirs if they are different from name dirs for (File dirName : fsEditsDirs) { this.addStorageDir(new StorageDirectory(dirName, NameNodeDirType.EDITS)); } } void setCheckpointDirectories(Collection<File> dirs, Collection<File> editsDirs) { checkpointDirs = dirs; checkpointEditsDirs = editsDirs; } static File getImageFile(StorageDirectory sd, NameNodeFile type) { return new File(sd.getCurrentDir(), type.getName()); } List<StorageDirectory> getRemovedStorageDirs() { return this.removedStorageDirs; } File getEditFile(StorageDirectory sd) { return getImageFile(sd, NameNodeFile.EDITS); } File getEditNewFile(StorageDirectory sd) { return getImageFile(sd, NameNodeFile.EDITS_NEW); } File[] getFileNames(NameNodeFile type, NameNodeDirType dirType) { ArrayList<File> list = new ArrayList<File>(); Iterator<StorageDirectory> it = (dirType == null) ? dirIterator() : dirIterator(dirType); for ( ;it.hasNext(); ) { list.add(getImageFile(it.next(), type)); } return list.toArray(new File[list.size()]); } File[] getImageFiles() { return getFileNames(NameNodeFile.IMAGE, NameNodeDirType.IMAGE); } File[] getEditsFiles() { return getFileNames(NameNodeFile.EDITS, NameNodeDirType.EDITS); } File[] getEditsNewFiles() { return getFileNames(NameNodeFile.EDITS_NEW, NameNodeDirType.EDITS); } File[] getTimeFiles() { return getFileNames(NameNodeFile.TIME, null); } /** * Get the MD5 digest of the current image * @return the MD5 digest of the current image */ MD5Hash getImageDigest() { return imageDigest; } void setImageDigest(MD5Hash imageDigest) { newImageDigest = false; this.imageDigest.set(imageDigest); } /** * Analyze storage directories. * Recover from previous transitions if required. * Perform fs state transition if necessary depending on the namespace info. * Read storage info. * * @param dataDirs * @param startOpt startup option * @throws IOException * @return true if the image needs to be saved or false otherwise */ boolean recoverTransitionRead(Collection<File> dataDirs, Collection<File> editsDirs, StartupOption startOpt ) throws IOException { assert startOpt != StartupOption.FORMAT : "NameNode formatting should be performed before reading the image"; // none of the data dirs exist if (dataDirs.size() == 0 || editsDirs.size() == 0) throw new IOException( "All specified directories are not accessible or do not exist."); if(startOpt == StartupOption.IMPORT && (checkpointDirs == null || checkpointDirs.isEmpty())) throw new IOException("Cannot import image from a checkpoint. " + "\"fs.checkpoint.dir\" is not set." ); if(startOpt == StartupOption.IMPORT && (checkpointEditsDirs == null || checkpointEditsDirs.isEmpty())) throw new IOException("Cannot import image from a checkpoint. " + "\"fs.checkpoint.edits.dir\" is not set." ); setStorageDirectories(dataDirs, editsDirs); // 1. For each data directory calculate its state and // check whether all is consistent before transitioning. 
Map<StorageDirectory, StorageState> dataDirStates = new HashMap<StorageDirectory, StorageState>(); boolean isFormatted = false; for (Iterator<StorageDirectory> it = dirIterator(); it.hasNext();) { StorageDirectory sd = it.next(); StorageState curState; try { curState = sd.analyzeStorage(startOpt); // sd is locked but not opened switch(curState) { case NON_EXISTENT: // name-node fails if any of the configured storage dirs are missing throw new InconsistentFSStateException(sd.getRoot(), "storage directory does not exist or is not accessible."); case NOT_FORMATTED: break; case NORMAL: break; default: // recovery is possible sd.doRecover(curState); } if (curState != StorageState.NOT_FORMATTED && startOpt != StartupOption.ROLLBACK) { sd.read(); // read and verify consistency with other directories isFormatted = true; } if (startOpt == StartupOption.IMPORT && isFormatted) // import of a checkpoint is allowed only into empty image directories throw new IOException("Cannot import image from a checkpoint. " + " NameNode already contains an image in " + sd.getRoot()); } catch (IOException ioe) { sd.unlock(); throw ioe; } dataDirStates.put(sd,curState); } if (!isFormatted && startOpt != StartupOption.ROLLBACK && startOpt != StartupOption.IMPORT) throw new IOException("NameNode is not formatted."); if (layoutVersion < LAST_PRE_UPGRADE_LAYOUT_VERSION) { checkVersionUpgradable(layoutVersion); } if (startOpt != StartupOption.UPGRADE && layoutVersion < LAST_PRE_UPGRADE_LAYOUT_VERSION && layoutVersion != FSConstants.LAYOUT_VERSION) throw new IOException( "\nFile system image contains an old layout version " + layoutVersion + ".\nAn upgrade to version " + FSConstants.LAYOUT_VERSION + " is required.\nPlease restart NameNode with -upgrade option."); // check whether distributed upgrade is reguired and/or should be continued verifyDistributedUpgradeProgress(startOpt); // 2. Format unformatted dirs. this.checkpointTime = 0L; for (Iterator<StorageDirectory> it = dirIterator(); it.hasNext();) { StorageDirectory sd = it.next(); StorageState curState = dataDirStates.get(sd); switch(curState) { case NON_EXISTENT: assert false : StorageState.NON_EXISTENT + " state cannot be here"; case NOT_FORMATTED: LOG.info("Storage directory " + sd.getRoot() + " is not formatted."); LOG.info("Formatting ..."); sd.clearDirectory(); // create empty currrent dir break; default: break; } } // 3. Do transitions switch(startOpt) { case UPGRADE: doUpgrade(); return false; // upgrade saved image already case IMPORT: doImportCheckpoint(); return true; case ROLLBACK: doRollback(); break; case REGULAR: // just load the image } return loadFSImage(); } private void doUpgrade() throws IOException { if(getDistributedUpgradeState()) { // only distributed upgrade need to continue // don't do version upgrade this.loadFSImage(); initializeDistributedUpgrade(); return; } // Upgrade is allowed only if there are // no previous fs states in any of the directories for (Iterator<StorageDirectory> it = dirIterator(); it.hasNext();) { StorageDirectory sd = it.next(); if (sd.getPreviousDir().exists()) throw new InconsistentFSStateException(sd.getRoot(), "previous fs state should not exist during upgrade. 
" + "Finalize or rollback first."); } // load the latest image this.loadFSImage(); // Do upgrade for each directory long oldCTime = this.getCTime(); this.cTime = FSNamesystem.now(); // generate new cTime for the state int oldLV = this.getLayoutVersion(); this.layoutVersion = FSConstants.LAYOUT_VERSION; this.checkpointTime = FSNamesystem.now(); List<Thread> savers = new ArrayList<Thread>(); List<StorageDirectory> errorSDs = Collections.synchronizedList(new ArrayList<StorageDirectory>()); for (Iterator<StorageDirectory> it = dirIterator(); it.hasNext();) { StorageDirectory sd = it.next(); LOG.info("Upgrading image directory " + sd.getRoot() + ".\n old LV = " + oldLV + "; old CTime = " + oldCTime + ".\n new LV = " + this.getLayoutVersion() + "; new CTime = " + this.getCTime()); File curDir = sd.getCurrentDir(); File prevDir = sd.getPreviousDir(); File tmpDir = sd.getPreviousTmp(); try { assert curDir.exists() : "Current directory must exist."; assert !prevDir.exists() : "prvious directory must not exist."; assert !tmpDir.exists() : "prvious.tmp directory must not exist."; // rename current to tmp rename(curDir, tmpDir); // save new image if (!curDir.mkdir()) throw new IOException("Cannot create directory " + curDir); } catch (Exception e) { LOG.error("Error upgrading " + sd.getRoot(), e); errorSDs.add(sd); continue; } } for (Iterator<StorageDirectory> it = dirIterator(NameNodeDirType.IMAGE); it.hasNext();) { StorageDirectory sd = it.next(); // save image to image directory FSImageSaver saver = new FSImageSaver(sd, errorSDs, false, NameNodeFile.IMAGE); Thread saverThread = new Thread(saver, saver.toString()); savers.add(saverThread); saverThread.start(); } // wait until all images are saved for (Thread saver : savers) { try { saver.join(); } catch (InterruptedException iex) { LOG.info("Caught exception while waiting for thread " + saver.getName() + " to finish. Retrying join"); throw (IOException)new InterruptedIOException().initCause(iex); } } for (Iterator<StorageDirectory> it = dirIterator(NameNodeDirType.EDITS); it.hasNext();) { // create empty edit log file StorageDirectory sd = it.next(); editLog.createEditLogFile(getImageFile(sd, NameNodeFile.EDITS)); } for (Iterator<StorageDirectory> it = dirIterator(); it.hasNext();) { StorageDirectory sd = it.next(); if (errorSDs.contains(sd)) continue; try { // write version and time files sd.write(); // rename tmp to previous rename(sd.getPreviousTmp(), sd.getPreviousDir()); } catch (IOException ioe) { LOG.error("Error upgrading " + sd.getRoot(), ioe); errorSDs.add(sd); continue; } isUpgradeFinalized = false; LOG.info("Upgrade of " + sd.getRoot() + " is complete."); } processIOError(errorSDs); initializeDistributedUpgrade(); editLog.open(); } private void doRollback() throws IOException { // Rollback is allowed only if there is // a previous fs states in at least one of the storage directories. 
// Directories that don't have previous state do not rollback boolean canRollback = false; FSImage prevState = new FSImage(getFSNamesystem()); prevState.layoutVersion = FSConstants.LAYOUT_VERSION; for (Iterator<StorageDirectory> it = dirIterator(); it.hasNext();) { StorageDirectory sd = it.next(); File prevDir = sd.getPreviousDir(); if (!prevDir.exists()) { // use current directory then LOG.info("Storage directory " + sd.getRoot() + " does not contain previous fs state."); sd.read(); // read and verify consistency with other directories continue; } StorageDirectory sdPrev = prevState.new StorageDirectory(sd.getRoot()); sdPrev.read(sdPrev.getPreviousVersionFile()); // read and verify consistency of the prev dir canRollback = true; } if (!canRollback) throw new IOException("Cannot rollback. " + "None of the storage directories contain previous fs state."); // Now that we know all directories are going to be consistent // Do rollback for each directory containing previous state for (Iterator<StorageDirectory> it = dirIterator(); it.hasNext();) { StorageDirectory sd = it.next(); File prevDir = sd.getPreviousDir(); if (!prevDir.exists()) continue; LOG.info("Rolling back storage directory " + sd.getRoot() + ".\n new LV = " + prevState.getLayoutVersion() + "; new CTime = " + prevState.getCTime()); File tmpDir = sd.getRemovedTmp(); assert !tmpDir.exists() : "removed.tmp directory must not exist."; // rename current to tmp File curDir = sd.getCurrentDir(); assert curDir.exists() : "Current directory must exist."; rename(curDir, tmpDir); // rename previous to current rename(prevDir, curDir); // delete tmp dir deleteDir(tmpDir); LOG.info("Rollback of " + sd.getRoot()+ " is complete."); } isUpgradeFinalized = true; // check whether name-node can start in regular mode verifyDistributedUpgradeProgress(StartupOption.REGULAR); } private void doFinalize(StorageDirectory sd) throws IOException { File prevDir = sd.getPreviousDir(); if (!prevDir.exists()) { // already discarded LOG.info("Directory " + prevDir + " does not exist."); LOG.info("Finalize upgrade for " + sd.getRoot()+ " is not required."); return; } LOG.info("Finalizing upgrade for storage directory " + sd.getRoot() + "." + (getLayoutVersion()==0 ? "" : "\n cur LV = " + this.getLayoutVersion() + "; cur CTime = " + this.getCTime())); assert sd.getCurrentDir().exists() : "Current directory must exist."; final File tmpDir = sd.getFinalizedTmp(); // rename previous to tmp and remove rename(prevDir, tmpDir); deleteDir(tmpDir); isUpgradeFinalized = true; LOG.info("Finalize upgrade for " + sd.getRoot()+ " is complete."); } /** * Load image from a checkpoint directory and save it into the current one. 
* @throws IOException */ void doImportCheckpoint() throws IOException { FSNamesystem fsNamesys = getFSNamesystem(); FSImage ckptImage = new FSImage(fsNamesys); // replace real image with the checkpoint image FSImage realImage = fsNamesys.getFSImage(); assert realImage == this; ckptImage.codecFac = realImage.codecFac; fsNamesys.dir.fsImage = ckptImage; // load from the checkpoint dirs try { ckptImage.recoverTransitionRead(checkpointDirs, checkpointEditsDirs, StartupOption.REGULAR); } finally { ckptImage.close(); } // return back the real image realImage.setStorageInfo(ckptImage); fsNamesys.dir.fsImage = realImage; // and save it saveNamespace(false); } void finalizeUpgrade() throws IOException { for (Iterator<StorageDirectory> it = dirIterator(); it.hasNext();) { doFinalize(it.next()); } } boolean isUpgradeFinalized() { return isUpgradeFinalized; } protected void getFields(Properties props, StorageDirectory sd ) throws IOException { super.getFields(props, sd); if (layoutVersion == 0) throw new IOException("NameNode directory " + sd.getRoot() + " is not formatted."); String sDUS, sDUV; sDUS = props.getProperty("distributedUpgradeState"); sDUV = props.getProperty("distributedUpgradeVersion"); setDistributedUpgradeState( sDUS == null? false : Boolean.parseBoolean(sDUS), sDUV == null? getLayoutVersion() : Integer.parseInt(sDUV)); String sMd5 = props.getProperty(MESSAGE_DIGEST_PROPERTY); if (layoutVersion <= -26) { if (sMd5 == null) { throw new InconsistentFSStateException(sd.getRoot(), "file " + STORAGE_FILE_VERSION + " does not have MD5 image digest."); } this.setImageDigest(new MD5Hash(sMd5)); } else if (sMd5 != null) { throw new InconsistentFSStateException(sd.getRoot(), "file " + STORAGE_FILE_VERSION + " has image MD5 digest when version is " + layoutVersion); } this.checkpointTime = readCheckpointTime(sd); } long readCheckpointTime(StorageDirectory sd) throws IOException { File timeFile = getImageFile(sd, NameNodeFile.TIME); long timeStamp = 0L; if (timeFile.exists() && timeFile.canRead()) { DataInputStream in = new DataInputStream(new FileInputStream(timeFile)); try { timeStamp = in.readLong(); } finally { in.close(); } } return timeStamp; } /** * Write last checkpoint time and version file into the storage directory. * * The version file should always be written last. * Missing or corrupted version file indicates that * the checkpoint is not valid. * * @param sd storage directory * @throws IOException */ protected void setFields(Properties props, StorageDirectory sd ) throws IOException { super.setFields(props, sd); boolean uState = getDistributedUpgradeState(); int uVersion = getDistributedUpgradeVersion(); if(uState && uVersion != getLayoutVersion()) { props.setProperty("distributedUpgradeState", Boolean.toString(uState)); props.setProperty("distributedUpgradeVersion", Integer.toString(uVersion)); } if (this.newImageDigest) { this.setImageDigest(MD5Hash.digest( new FileInputStream(getImageFile(sd, NameNodeFile.IMAGE)))); } props.setProperty(MESSAGE_DIGEST_PROPERTY, this.getImageDigest().toString()); writeCheckpointTime(sd); } /** * Write last checkpoint time into a separate file. 
* * @param sd * @throws IOException */ void writeCheckpointTime(StorageDirectory sd) throws IOException { if (checkpointTime < 0L) return; // do not write negative time File timeFile = getImageFile(sd, NameNodeFile.TIME); if (timeFile.exists()) { timeFile.delete(); } DataOutputStream out = new DataOutputStream( new FileOutputStream(timeFile)); try { out.writeLong(checkpointTime); } finally { out.close(); } } /** * Record new checkpoint time in order to * distinguish healthy directories from the removed ones. * If there is an error writing new checkpoint time, the corresponding * storage directory is removed from the list. */ void incrementCheckpointTime() { this.checkpointTime++; // Write new checkpoint time in all storage directories for(Iterator<StorageDirectory> it = dirIterator(); it.hasNext();) { StorageDirectory sd = it.next(); try { writeCheckpointTime(sd); } catch(IOException e) { // Close any edits stream associated with this dir and remove directory LOG.warn("incrementCheckpointTime failed on " + sd.getRoot().getPath() + ";type="+sd.getStorageDirType()); if (sd.getStorageDirType().isOfType(NameNodeDirType.EDITS)) editLog.processIOError(sd); //add storage to the removed list removedStorageDirs.add(sd); it.remove(); } } } /** * Remove storage directory given directory */ void processIOError(File dirName) { for (Iterator<StorageDirectory> it = dirIterator(); it.hasNext();) { StorageDirectory sd = it.next(); if (sd.getRoot().getPath().equals(dirName.getPath())) { //add storage to the removed list LOG.warn("FSImage:processIOError: removing storage: " + dirName.getPath()); try { sd.unlock(); //try to unlock before removing (in case it is restored) } catch (Exception e) { LOG.info("Unable to unlock bad storage directory : " + dirName.getPath()); } removedStorageDirs.add(sd); it.remove(); } } } /** * @param sds - array of SDs to process */ void processIOError(List<StorageDirectory> sds) { ArrayList<EditLogOutputStream> al = null; synchronized (sds) { for (StorageDirectory sd : sds) { for (Iterator<StorageDirectory> it = dirIterator(); it.hasNext();) { StorageDirectory sd1 = it.next(); if (sd.equals(sd1)) { // add storage to the removed list LOG.warn("FSImage:processIOError: removing storage: " + sd.getRoot().getPath()); try { sd1.unlock(); // unlock before removing (in case it will be // restored) } catch (Exception e) { LOG.info("Unable to unlock bad storage directory : " + sd.getRoot().getPath()); } removedStorageDirs.add(sd1); it.remove(); break; } } } } } public FSEditLog getEditLog() { return editLog; } public boolean isConversionNeeded(StorageDirectory sd) throws IOException { File oldImageDir = new File(sd.getRoot(), "image"); if (!oldImageDir.exists()) { if(sd.getVersionFile().exists()) throw new InconsistentFSStateException(sd.getRoot(), oldImageDir + " does not exist."); return false; } // check the layout version inside the image file File oldF = new File(oldImageDir, "fsimage"); RandomAccessFile oldFile = new RandomAccessFile(oldF, "rws"); try { oldFile.seek(0); int odlVersion = oldFile.readInt(); if (odlVersion < LAST_PRE_UPGRADE_LAYOUT_VERSION) return false; } finally { oldFile.close(); } return true; } // // Atomic move sequence, to recover from interrupted checkpoint // boolean recoverInterruptedCheckpoint(StorageDirectory nameSD, StorageDirectory editsSD) throws IOException { boolean needToSave = false; File curFile = getImageFile(nameSD, NameNodeFile.IMAGE); File ckptFile = getImageFile(nameSD, NameNodeFile.IMAGE_NEW); // // If we were in the midst of a checkpoint // if 
(ckptFile.exists()) { needToSave = true; if (getImageFile(editsSD, NameNodeFile.EDITS_NEW).exists()) { // // checkpointing migth have uploaded a new // merged image, but we discard it here because we are // not sure whether the entire merged image was uploaded // before the namenode crashed. // if (!ckptFile.delete()) { throw new IOException("Unable to delete " + ckptFile); } } else { // // checkpointing was in progress when the namenode // shutdown. The fsimage.ckpt was created and the edits.new // file was moved to edits. We complete that checkpoint by // moving fsimage.new to fsimage. There is no need to // update the fstime file here. renameTo fails on Windows // if the destination file already exists. // if (!ckptFile.renameTo(curFile)) { if (!curFile.delete()) LOG.warn("Unable to delete dir " + curFile + " before rename"); if (!ckptFile.renameTo(curFile)) { throw new IOException("Unable to rename " + ckptFile + " to " + curFile); } } } } return needToSave; } /** * Choose latest image from one of the directories, * load it and merge with the edits from that directory. * * @return whether the image should be saved * @throws IOException */ boolean loadFSImage() throws IOException { // Now check all curFiles and see which is the newest long latestNameCheckpointTime = Long.MIN_VALUE; long latestEditsCheckpointTime = Long.MIN_VALUE; StorageDirectory latestNameSD = null; StorageDirectory latestEditsSD = null; boolean needToSave = false; isUpgradeFinalized = true; Collection<String> imageDirs = new ArrayList<String>(); Collection<String> editsDirs = new ArrayList<String>(); for (Iterator<StorageDirectory> it = dirIterator(); it.hasNext();) { StorageDirectory sd = it.next(); if (!sd.getVersionFile().exists()) { needToSave |= true; continue; // some of them might have just been formatted } boolean imageExists = false, editsExists = false; if (sd.getStorageDirType().isOfType(NameNodeDirType.IMAGE)) { imageExists = getImageFile(sd, NameNodeFile.IMAGE).exists(); imageDirs.add(sd.getRoot().getCanonicalPath()); } if (sd.getStorageDirType().isOfType(NameNodeDirType.EDITS)) { editsExists = getImageFile(sd, NameNodeFile.EDITS).exists(); editsDirs.add(sd.getRoot().getCanonicalPath()); } checkpointTime = readCheckpointTime(sd); if ((checkpointTime != latestNameCheckpointTime && latestNameCheckpointTime != Long.MIN_VALUE) || (checkpointTime != latestEditsCheckpointTime && latestEditsCheckpointTime != Long.MIN_VALUE)) { // Force saving of new image if checkpoint time // is not same in all of the storage directories. 
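      // (checkpointTime is persisted in each directory's fstime file, so a
      // mismatch means at least one directory missed an update and the image
      // should be rewritten to bring them back into sync.)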
needToSave |= true; } if (sd.getStorageDirType().isOfType(NameNodeDirType.IMAGE) && (latestNameCheckpointTime < checkpointTime) && imageExists) { latestNameCheckpointTime = checkpointTime; latestNameSD = sd; } if (sd.getStorageDirType().isOfType(NameNodeDirType.EDITS) && (latestEditsCheckpointTime < checkpointTime) && editsExists) { latestEditsCheckpointTime = checkpointTime; latestEditsSD = sd; } if (checkpointTime <= 0L) needToSave |= true; // set finalized flag isUpgradeFinalized = isUpgradeFinalized && !sd.getPreviousDir().exists(); } // We should have at least one image and one edits dirs if (latestNameSD == null) throw new IOException("Image file is not found in " + imageDirs); if (latestEditsSD == null) throw new IOException("Edits file is not found in " + editsDirs); // Make sure we are loading image and edits from same checkpoint if (latestNameCheckpointTime > latestEditsCheckpointTime && latestNameSD != latestEditsSD && latestNameSD.getStorageDirType() == NameNodeDirType.IMAGE && latestEditsSD.getStorageDirType() == NameNodeDirType.EDITS) { // This is a rare failure when NN has image-only and edits-only // storage directories, and fails right after saving images, // in some of the storage directories, but before purging edits. // See -NOTE- in saveNamespace(). LOG.error("This is a rare failure scenario!!!"); LOG.error("Image checkpoint time " + latestNameCheckpointTime + " > edits checkpoint time " + latestEditsCheckpointTime); LOG.error("Name-node will treat the image as the latest state of " + "the namespace. Old edits will be discarded."); } else if (latestNameCheckpointTime != latestEditsCheckpointTime) throw new IOException("Inconsistent storage detected, " + "image and edits checkpoint times do not match. " + "image checkpoint time = " + latestNameCheckpointTime + "edits checkpoint time = " + latestEditsCheckpointTime); // Recover from previous interrrupted checkpoint if any needToSave |= recoverInterruptedCheckpoint(latestNameSD, latestEditsSD); // // Load in bits // latestNameSD.read(); needToSave |= loadFSImage(getImageFile(latestNameSD, NameNodeFile.IMAGE)); if (latestNameCheckpointTime > latestEditsCheckpointTime) { // the image is already current, discard edits needToSave |= true; } else { // latestNameCheckpointTime == latestEditsCheckpointTime needToSave |= (loadFSEdits(latestEditsSD) > 0); } return needToSave; } /** * Load in the filesystem imagefrom file. It's a big list of * filenames and blocks. Return whether we should * "re-save" and consolidate the edit-logs */ boolean loadFSImage(File curFile) throws IOException { assert curFile != null : "curFile is null"; long startTime = FSNamesystem.now(); boolean needToSave = loadFSImage(curFile.getCanonicalPath(), new FileInputStream(curFile)); LOG.info("Image file of size " + curFile.length() + " loaded in " + (FSNamesystem.now() - startTime)/1000 + " seconds."); return needToSave; } boolean loadFSImage(String src, InputStream fstream) throws IOException { assert this.getLayoutVersion() < 0 : "Negative layout version is expected."; FSNamesystem fsNamesys = getFSNamesystem(); // // Load in bits // boolean needToSave = true; MessageDigest digester = MD5Hash.getDigester(); DigestInputStream fin = new DigestInputStream(fstream, digester); DataInputStream in = new DataInputStream(fin); try { /* * Note: Remove any checks for version earlier than * Storage.LAST_UPGRADABLE_LAYOUT_VERSION since we should never get * to here with older images. 
*/ /* * TODO we need to change format of the image file * it should not contain version and namespace fields */ // read image version: first appeared in version -1 int imgVersion = in.readInt(); needToSave = (imgVersion != FSConstants.LAYOUT_VERSION); // read namespaceID: first appeared in version -2 this.namespaceID = in.readInt(); // read number of files long numFiles; if (imgVersion <= -16) { numFiles = in.readLong(); } else { numFiles = in.readInt(); } this.layoutVersion = imgVersion; // read in the last generation stamp. if (imgVersion <= -12) { long genstamp = in.readLong(); fsNamesys.setGenerationStamp(genstamp); } // read compression related info boolean isCompressed = false; if (imgVersion <= -25) { // -25: 1st version providing compression option isCompressed = in.readBoolean(); if (isCompressed) { String codecClassName = Text.readString(in); CompressionCodec loadCodec = codecFac.getCodecByClassName(codecClassName); if (loadCodec == null) { throw new IOException("Image compression codec not supported: " + codecClassName); } in = new DataInputStream(loadCodec.createInputStream(fin)); LOG.info("Loading image file " + src + " compressed using codec " + codecClassName); } } if (!isCompressed) { in = new DataInputStream(new BufferedInputStream(fin)); } // load all inodes LOG.info("Number of files = " + numFiles); if (imgVersion <= -30) { loadLocalNameINodes(imgVersion, numFiles, in); } else { loadFullNameINodes(imgVersion, numFiles, in); } // load Files Under Construction this.loadFilesUnderConstruction(imgVersion, in, fsNamesys); // make sure to read to the end of file int eof = in.read(); assert eof == -1 : "Should have reached the end of image file " + src; } finally { in.close(); } // verify checksum MD5Hash readImageMd5 = new MD5Hash(digester.digest()); if (this.newImageDigest) { this.setImageDigest(readImageMd5); // set this fsimage's checksum } else if (!this.getImageDigest().equals(readImageMd5)) { throw new IOException("Image file " + src + " is corrupt!"); } return needToSave; } /** Update the root node's attributes * @throws QuotaExceededException */ private void updateRootAttr(INode root, FSNamesystem namesystem) throws QuotaExceededException { long nsQuota = root.getNsQuota(); long dsQuota = root.getDsQuota(); FSDirectory fsDir = namesystem.dir; if (nsQuota != -1 || dsQuota != -1) { fsDir.rootDir.setQuota(nsQuota, dsQuota); } fsDir.rootDir.setModificationTime(root.getModificationTime()); fsDir.rootDir.setPermissionStatus(root.getPermissionStatus()); } private static int printProgress(long numOfFilesProcessed, long totalFiles, int percentDone) { return printProgress(numOfFilesProcessed, totalFiles, percentDone, "Loaded"); } private static int printProgress(long numOfFilesProcessed, long totalFiles, int percentDone, String message) { int newPercentDone = (int)(numOfFilesProcessed * 100 / totalFiles); if (newPercentDone > percentDone) { LOG.info(message + " " + newPercentDone + "% of the image"); } return newPercentDone; } /** * load fsimage files assuming only local names are stored * * @param imageVersion the image version * @param numFiles number of files expected to be read * @param in image input stream * @throws IOException */ private void loadLocalNameINodes(long imageVersion, long numFiles, DataInputStream in) throws IOException { assert imageVersion <= -30; assert numFiles > 0; long filesLoaded = 0; // load root if( in.readShort() != 0) { throw new IOException("First node is not root"); } FSNamesystem namesystem = getFSNamesystem(); INode root = 
loadINode(imageVersion, namesystem, in); // update the root's attributes updateRootAttr(root, namesystem); filesLoaded++; // load rest of the nodes directory by directory int percentDone = 0; while (filesLoaded < numFiles) { filesLoaded += loadDirectory(imageVersion, namesystem, in); percentDone = printProgress(filesLoaded, numFiles, percentDone); } if (numFiles != filesLoaded) { throw new IOException("Read unexpect number of files: " + filesLoaded); } } /** * Load all children of a directory * * @param imageVersion the image version * @param namesystem the name system * @param in * @return number of child inodes read * @throws IOException */ private int loadDirectory(long imageVersion, FSNamesystem namesystem, DataInputStream in) throws IOException { String parentPath = readString(in); FSDirectory fsDir = namesystem.dir; INode parent = fsDir.rootDir.getNode(parentPath); if (parent == null || !parent.isDirectory()) { throw new IOException("Path " + parentPath + "is not a directory."); } int numChildren = in.readInt(); for(int i=0; i<numChildren; i++) { // load single inode byte[] localName = new byte[in.readShort()]; in.readFully(localName); // read local name INode newNode = loadINode(imageVersion, namesystem, in); // read rest of inode // add to parent fsDir.addToParent(localName, (INodeDirectory)parent, newNode, false); } return numChildren; } /** * load fsimage files assuming full path names are stored * * @param imgVersion image version number * @param numFiles total number of files to load * @param in data input stream * @throws IOException if any error occurs */ private void loadFullNameINodes(long imgVersion, long numFiles, DataInputStream in) throws IOException { FSNamesystem fsNamesys = getFSNamesystem(); FSDirectory fsDir = fsNamesys.dir; byte[][] pathComponents; byte[][] parentPath = {{}}; INodeDirectory parentINode = fsDir.rootDir; int percentDone = 0; for (long i = 0; i < numFiles; i++) { percentDone = printProgress(i, numFiles, percentDone); pathComponents = readPathComponents(in); INode newNode = loadINode(imgVersion, fsNamesys, in); if (isRoot(pathComponents)) { // it is the root // update the root's attributes updateRootAttr(newNode, fsNamesys); continue; } // check if the new inode belongs to the same parent if(!isParent(pathComponents, parentPath)) { parentINode = fsDir.getParent(pathComponents); parentPath = getParent(pathComponents); } // add new inode parentINode = fsDir.addToParent(pathComponents[pathComponents.length-1], parentINode, newNode, false); } } /** * load an inode from fsimage except for its name * * @param imgVersion image version number * @param fsNamesystem namesystem * @param in data input stream from which image is read * @return an inode */ private INode loadINode(long imgVersion, FSNamesystem fsNamesys, DataInputStream in) throws IOException { long modificationTime = 0; long atime = 0; long blockSize = 0; short replication = in.readShort(); replication = editLog.adjustReplication(replication); modificationTime = in.readLong(); if (imgVersion <= -17) { atime = in.readLong(); } if (imgVersion <= -8) { blockSize = in.readLong(); } int numBlocks = in.readInt(); BlockInfo blocks[] = null; // for older versions, a blocklist of size 0 // indicates a directory. 
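    // Layout versions are negative and become more negative as the format
    // evolves, so a check such as "imgVersion <= -16" in the code below reads
    // as "written by layout version -16 or anything newer".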
if ((-9 <= imgVersion && numBlocks > 0) || (imgVersion < -9 && numBlocks >= 0)) { blocks = new BlockInfo[numBlocks]; for (int j = 0; j < numBlocks; j++) { blocks[j] = new BlockInfo(replication); if (-14 < imgVersion) { blocks[j].set(in.readLong(), in.readLong(), Block.GRANDFATHER_GENERATION_STAMP); } else { blocks[j].readFields(in); } } } // Older versions of HDFS does not store the block size in inode. // If the file has more than one block, use the size of the // first block as the blocksize. Otherwise use the default block size. // if (-8 <= imgVersion && blockSize == 0) { if (numBlocks > 1) { blockSize = blocks[0].getNumBytes(); } else { long first = ((numBlocks == 1) ? blocks[0].getNumBytes(): 0); blockSize = Math.max(fsNamesys.getDefaultBlockSize(), first); } } // get quota only when the node is a directory long nsQuota = -1L; if (imgVersion <= -16 && blocks == null) { nsQuota = in.readLong(); } long dsQuota = -1L; if (imgVersion <= -18 && blocks == null) { dsQuota = in.readLong(); } PermissionStatus permissions = fsNamesys.getUpgradePermission(); if (imgVersion <= -11) { permissions = PermissionStatus.read(in); } return INode.newINode(permissions, blocks, replication, modificationTime, atime, nsQuota, dsQuota, blockSize); } /** * Return string representing the parent of the given path. */ String getParent(String path) { return path.substring(0, path.lastIndexOf(Path.SEPARATOR)); } byte[][] getParent(byte[][] path) { byte[][] result = new byte[path.length - 1][]; for (int i = 0; i < result.length; i++) { result[i] = new byte[path[i].length]; System.arraycopy(path[i], 0, result[i], 0, path[i].length); } return result; } private boolean isRoot(byte[][] path) { return path.length == 1 && path[0] == null; } private boolean isParent(byte[][] path, byte[][] parent) { if (path == null || parent == null) return false; if (parent.length == 0 || path.length != parent.length + 1) return false; boolean isParent = true; for (int i = 0; i < parent.length; i++) { isParent = isParent && Arrays.equals(path[i], parent[i]); } return isParent; } /** * Load and merge edits from two edits files * * @param sd storage directory * @return number of edits loaded * @throws IOException */ int loadFSEdits(StorageDirectory sd) throws IOException { int numEdits = 0; EditLogFileInputStream edits = new EditLogFileInputStream(getImageFile(sd, NameNodeFile.EDITS)); numEdits = editLog.loadFSEdits(edits); edits.close(); File editsNew = getImageFile(sd, NameNodeFile.EDITS_NEW); if (editsNew.exists() && editsNew.length() > 0) { edits = new EditLogFileInputStream(editsNew); numEdits += editLog.loadFSEdits(edits); edits.close(); } // update the counts. getFSNamesystem().dir.updateCountForINodeWithQuota(); return numEdits; } /** * Save the contents of the FS image to the file. */ void saveFSImage(File newFile) throws IOException { saveFSImage(newFile, false); } /** * Save the contents of the FS image to the file. * If forceUncompressed, the image will be saved uncompressed regardless of * the fsimage compression configuration. 
*/ void saveFSImage(File newFile, boolean forceUncompressed) throws IOException { long startTime = FSNamesystem.now(); FileOutputStream fstream = new FileOutputStream(newFile); saveFSImage(newFile.getCanonicalPath(), fstream, forceUncompressed); LOG.info("Image file of size " + newFile.length() + " saved in " + (FSNamesystem.now() - startTime)/1000 + " seconds."); } void saveFSImage(String dest, OutputStream fstream) throws IOException { saveFSImage(dest, fstream, false); } void saveFSImage(String dest, OutputStream fstream, boolean forceUncompressed) throws IOException { FSNamesystem fsNamesys = getFSNamesystem(); FSDirectory fsDir = fsNamesys.dir; // // Write out data // MessageDigest digester = MD5Hash.getDigester(); DigestOutputStream fout = new DigestOutputStream(fstream, digester); DataOutputStream out = new DataOutputStream(fout); try { out.writeInt(FSConstants.LAYOUT_VERSION); out.writeInt(namespaceID); out.writeLong(fsDir.rootDir.numItemsInTree()); out.writeLong(fsNamesys.getGenerationStamp()); if (forceUncompressed) { out.writeBoolean(false); } else { out.writeBoolean(compressImage); } if (!forceUncompressed && compressImage) { String codecClassName = saveCodec.getClass().getCanonicalName(); Text.writeString(out, codecClassName); out = new DataOutputStream(saveCodec.createOutputStream(fout)); LOG.info("Saving image file " + dest + " compressed using codec " + codecClassName); } else { out = new DataOutputStream(new BufferedOutputStream(fout)); } byte[] byteStore = new byte[4*FSConstants.MAX_PATH_LENGTH]; ByteBuffer strbuf = ByteBuffer.wrap(byteStore); // save the root saveINode2Image(fsDir.rootDir, out); // save the rest of the nodes saveImage(strbuf, fsDir.rootDir, out, fsDir.totalInodes()); // save files under construction fsNamesys.saveFilesUnderConstruction(out); strbuf = null; out.flush(); if (fstream instanceof FileOutputStream) { ((FileOutputStream)fstream).getChannel().force(true); } } finally { out.close(); } // set md5 of the saved image this.setImageDigest(new MD5Hash(digester.digest())); } private class FSImageSaver implements Runnable { private StorageDirectory sd; private File imageFile; private List<StorageDirectory> errorSDs; private boolean forceUncompressed; FSImageSaver(StorageDirectory sd, List<StorageDirectory> errorSDs,boolean forceUncompressed) { this(sd, errorSDs, forceUncompressed, NameNodeFile.IMAGE_NEW); } FSImageSaver(StorageDirectory sd, List<StorageDirectory> errorSDs,boolean forceUncompressed, NameNodeFile type) { this.sd = sd; this.errorSDs = errorSDs; this.imageFile = getImageFile(sd, type); this.forceUncompressed = forceUncompressed; } public String toString() { return "FSImage saver for " + imageFile.getAbsolutePath(); } public void run() { try { saveCurrent(sd, forceUncompressed); } catch (IOException ex) { LOG.error("Unable to write image to " + imageFile.getAbsolutePath()); errorSDs.add(sd); } } } /** * Save the contents of the FS image * and create empty edits. */ public void saveNamespace(boolean renewCheckpointTime) throws IOException { saveNamespace(false, renewCheckpointTime); } /** * Save the contents of the FS image * and create empty edits. * If forceUncompressed, the image will be saved uncompressed regardless of * the fsimage compression configuration. 
*/ public void saveNamespace(boolean forUncompressed, boolean renewCheckpointTime) throws IOException { // try to restore all failed edit logs here assert editLog != null : "editLog must be initialized"; attemptRestoreRemovedStorage(); List<StorageDirectory> errorSDs = Collections.synchronizedList(new ArrayList<StorageDirectory>()); editLog.close(); // close all open streams before truncating if (renewCheckpointTime) this.checkpointTime = FSNamesystem.now(); // mv current -> lastcheckpoint.tmp for (Iterator<StorageDirectory> it = dirIterator(); it.hasNext();) { StorageDirectory sd = it.next(); try { moveCurrent(sd); } catch (IOException ex) { LOG.error("Unable to move current for " + sd.getRoot(), ex); processIOError(sd.getRoot()); } } // Save image into current using parallel threads for saving List<Thread> savers = new ArrayList<Thread>(); for (Iterator<StorageDirectory> it = dirIterator(NameNodeDirType.IMAGE); it.hasNext();) { StorageDirectory sd = it.next(); FSImageSaver saver = new FSImageSaver(sd, errorSDs, forUncompressed); Thread saverThread = new Thread(saver, saver.toString()); savers.add(saverThread); saverThread.start(); } for (Thread saver : savers) { while (saver.isAlive()) { try { saver.join(); } catch (InterruptedException iex) { LOG.error("Caught exception while waiting for thread " + saver.getName() + " to finish. Retrying join"); } } } // -NOTE- // If NN has image-only and edits-only storage directories and fails here // the image will have the latest namespace state. // During startup the image-only directories will recover by discarding // lastcheckpoint.tmp, while // the edits-only directories will recover by falling back // to the old state contained in their lastcheckpoint.tmp. // The edits directories should be discarded during startup because their // checkpointTime is older than that of image directories. // recreate edits in current for (Iterator<StorageDirectory> it = dirIterator(NameNodeDirType.EDITS); it.hasNext();) { StorageDirectory sd = it.next(); try { saveCurrent(sd, forUncompressed); } catch (IOException ex) { LOG.error("Unable to save edits for " + sd.getRoot(), ex); processIOError(sd.getRoot()); } } // mv lastcheckpoint.tmp -> previous.checkpoint for (Iterator<StorageDirectory> it = dirIterator(); it.hasNext();) { StorageDirectory sd = it.next(); try { moveLastCheckpoint(sd); } catch (IOException ex) { LOG.error("Unable to move last checkpoint for " + sd.getRoot(), ex); processIOError(sd.getRoot()); } } if (!editLog.isOpen()) editLog.open(); processIOError(errorSDs); ckptState = CheckpointStates.UPLOAD_DONE; } /** * Save current image and empty journal into {@code current} directory. */ protected void saveCurrent(StorageDirectory sd, boolean forceUncompressed) throws IOException { File curDir = sd.getCurrentDir(); LOG.info("Saving image to: " + sd.getRoot().getAbsolutePath()); NameNodeDirType dirType = (NameNodeDirType) sd.getStorageDirType(); // save new image or new edits if (!curDir.exists() && !curDir.mkdir()) throw new IOException("Cannot create directory " + curDir); if (dirType.isOfType(NameNodeDirType.IMAGE)) saveFSImage(getImageFile(sd, NameNodeFile.IMAGE), forceUncompressed); if (dirType.isOfType(NameNodeDirType.EDITS)) editLog.createEditLogFile(getImageFile(sd, NameNodeFile.EDITS)); // write version and time files sd.write(); } /* * Move {@code current} to {@code lastcheckpoint.tmp} and recreate empty * {@code current}. {@code current} is moved only if it is well formatted, * that is contains VERSION file. 
* * @see org.apache.hadoop.hdfs.server.common.Storage.StorageDirectory# * getLastCheckpointTmp() * * @see org.apache.hadoop.hdfs.server.common.Storage.StorageDirectory# * getPreviousCheckpoint() */ protected void moveCurrent(StorageDirectory sd) throws IOException { File curDir = sd.getCurrentDir(); File tmpCkptDir = sd.getLastCheckpointTmp(); // mv current -> lastcheckpoint.tmp // only if current is formatted - has VERSION file if (sd.getVersionFile().exists()) { assert curDir.exists() : curDir + " directory must exist."; assert !tmpCkptDir.exists() : tmpCkptDir + " directory must not exist."; rename(curDir, tmpCkptDir); } // recreate current if (!curDir.exists() && !curDir.mkdir()) throw new IOException("Cannot create directory " + curDir); } /** * Move {@code lastcheckpoint.tmp} to {@code previous.checkpoint} * * @see org.apache.hadoop.hdfs.server.common.Storage.StorageDirectory# * getPreviousCheckpoint() * @see org.apache.hadoop.hdfs.server.common.Storage.StorageDirectory# * getLastCheckpointTmp() */ protected void moveLastCheckpoint(StorageDirectory sd) throws IOException { File tmpCkptDir = sd.getLastCheckpointTmp(); File prevCkptDir = sd.getPreviousCheckpoint(); // remove previous.checkpoint if (prevCkptDir.exists()) deleteDir(prevCkptDir); // rename lastcheckpoint.tmp -> previous.checkpoint if (tmpCkptDir.exists()) rename(tmpCkptDir, prevCkptDir); } /** * Generate new namespaceID. * * namespaceID is a persistent attribute of the namespace. * It is generated when the namenode is formatted and remains the same * during the life cycle of the namenode. * When a datanodes register they receive it as the registrationID, * which is checked every time the datanode is communicating with the * namenode. Datanodes that do not 'know' the namespaceID are rejected. * * @return new namespaceID */ private int newNamespaceID() { Random r = new Random(); r.setSeed(FSNamesystem.now()); int newID = 0; while(newID == 0) newID = r.nextInt(0x7FFFFFFF); // use 31 bits only return newID; } /** Create new dfs name directory. Caution: this destroys all files * in this filesystem. */ void format(StorageDirectory sd) throws IOException { sd.clearDirectory(); // create currrent dir sd.lock(); try { saveCurrent(sd, false); } finally { sd.unlock(); } LOG.info("Storage directory " + sd.getRoot() + " has been successfully formatted."); } public void format() throws IOException { this.layoutVersion = FSConstants.LAYOUT_VERSION; this.namespaceID = newNamespaceID(); this.cTime = 0L; this.checkpointTime = FSNamesystem.now(); for (Iterator<StorageDirectory> it = dirIterator(); it.hasNext();) { StorageDirectory sd = it.next(); format(sd); } } /* * Save one inode's attributes to the image. 
*/ private static void saveINode2Image(INode node, DataOutputStream out) throws IOException { byte[] name = node.getLocalNameBytes(); out.writeShort(name.length); out.write(name); FsPermission filePerm = FILE_PERM.get(); if (!node.isDirectory()) { // write file inode INodeFile fileINode = (INodeFile)node; out.writeShort(fileINode.getReplication()); out.writeLong(fileINode.getModificationTime()); out.writeLong(fileINode.getAccessTime()); out.writeLong(fileINode.getPreferredBlockSize()); Block[] blocks = fileINode.getBlocks(); out.writeInt(blocks.length); for (Block blk : blocks) blk.write(out); filePerm.fromShort(fileINode.getFsPermissionShort()); PermissionStatus.write(out, fileINode.getUserName(), fileINode.getGroupName(), filePerm); } else { // write directory inode out.writeShort(0); // replication out.writeLong(node.getModificationTime()); out.writeLong(0); // access time out.writeLong(0); // preferred block size out.writeInt(-1); // # of blocks out.writeLong(node.getNsQuota()); out.writeLong(node.getDsQuota()); filePerm.fromShort(node.getFsPermissionShort()); PermissionStatus.write(out, node.getUserName(), node.getGroupName(), filePerm); } } /** * Save file tree image starting from the given root. * This is a recursive procedure, which first saves all children of * a current directory and then moves inside the sub-directories. */ private static void saveImage(ByteBuffer currentDirName, INodeDirectory current, DataOutputStream out, long inodesTotal) throws IOException { saveImage(currentDirName, current, out, inodesTotal, 0); } private static long saveImage(ByteBuffer currentDirName, INodeDirectory current, DataOutputStream out, long inodesTotal, long inodesProcessed) throws IOException { List<INode> children = current.getChildrenRaw(); if (children == null || children.isEmpty()) // empty directory return inodesProcessed; // print prefix (parent directory name) int prefixLen = currentDirName.position(); if (prefixLen == 0) { // root out.writeShort(PATH_SEPARATOR.length); out.write(PATH_SEPARATOR); } else { // non-root directories out.writeShort(prefixLen); out.write(currentDirName.array(), 0, prefixLen); } // print all children first out.writeInt(children.size()); int percentDone = (int)(inodesProcessed * 100 / inodesTotal); for(INode child : children) { percentDone = printProgress(++inodesProcessed, inodesTotal, percentDone, "Saved"); saveINode2Image(child, out); } // print sub-directories for(INode child : children) { if(!child.isDirectory()) continue; currentDirName.put(PATH_SEPARATOR).put(child.getLocalNameBytes()); inodesProcessed = saveImage(currentDirName, (INodeDirectory)child, out, inodesTotal, inodesProcessed); currentDirName.position(prefixLen); } return inodesProcessed; } void loadDatanodes(int version, DataInputStream in) throws IOException { if (version > -3) // pre datanode image version return; if (version <= -12) { return; // new versions do not store the datanodes any more. } int size = in.readInt(); for(int i = 0; i < size; i++) { DatanodeImage nodeImage = new DatanodeImage(); nodeImage.readFields(in); // We don't need to add these descriptors any more. 
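      // Since layout version -12 the image no longer persists datanode
      // records; this loop only skips over (and discards) entries found in
      // very old images.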
} } private void loadFilesUnderConstruction(int version, DataInputStream in, FSNamesystem fs) throws IOException { FSDirectory fsDir = fs.dir; if (version > -13) // pre lease image version return; int size = in.readInt(); LOG.info("Number of files under construction = " + size); for (int i = 0; i < size; i++) { INodeFileUnderConstruction cons = readINodeUnderConstruction(in); // verify that file exists in namespace String path = cons.getLocalName(); INode old = fsDir.getFileINode(path); if (old == null) { throw new IOException("Found lease for non-existent file " + path); } if (old.isDirectory()) { throw new IOException("Found lease for directory " + path); } INodeFile oldnode = (INodeFile) old; fsDir.replaceNode(path, oldnode, cons); fs.leaseManager.addLease(cons.clientName, path); } } // Helper function that reads in an INodeUnderConstruction // from the input stream // static INodeFileUnderConstruction readINodeUnderConstruction( DataInputStream in) throws IOException { byte[] name = readBytes(in); short blockReplication = in.readShort(); long modificationTime = in.readLong(); long preferredBlockSize = in.readLong(); int numBlocks = in.readInt(); BlockInfo[] blocks = new BlockInfo[numBlocks]; Block blk = new Block(); for (int i = 0; i < numBlocks; i++) { blk.readFields(in); blocks[i] = new BlockInfo(blk, blockReplication); } PermissionStatus perm = PermissionStatus.read(in); String clientName = readString(in); String clientMachine = readString(in); // These locations are not used at all int numLocs = in.readInt(); DatanodeDescriptor[] locations = new DatanodeDescriptor[numLocs]; for (int i = 0; i < numLocs; i++) { locations[i] = new DatanodeDescriptor(); locations[i].readFields(in); } return new INodeFileUnderConstruction(name, blockReplication, modificationTime, preferredBlockSize, blocks, perm, clientName, clientMachine, null); } // Helper function that writes an INodeUnderConstruction // into the input stream // static void writeINodeUnderConstruction(DataOutputStream out, INodeFileUnderConstruction cons, String path) throws IOException { writeString(path, out); out.writeShort(cons.getReplication()); out.writeLong(cons.getModificationTime()); out.writeLong(cons.getPreferredBlockSize()); int nrBlocks = cons.getBlocks().length; out.writeInt(nrBlocks); for (int i = 0; i < nrBlocks; i++) { cons.getBlocks()[i].write(out); } cons.getPermissionStatus().write(out); writeString(cons.getClientName(), out); writeString(cons.getClientMachine(), out); out.writeInt(0); // do not store locations of last block } /** * Moves fsimage.ckpt to fsImage and edits.new to edits * Reopens the new edits file. * * @param newImageSignature the signature of the new image */ void rollFSImage(CheckpointSignature newImageSignature) throws IOException { MD5Hash newImageDigest = newImageSignature.getImageDigest(); if (!newImageDigest.equals(checkpointImageDigest)) { throw new IOException( "Checkpoint image is corrupt: expecting an MD5 checksum of" + newImageDigest + " but is " + checkpointImageDigest); } rollFSImage(newImageSignature.getImageDigest()); } private void rollFSImage(MD5Hash newImageDigest) throws IOException { if (ckptState != CheckpointStates.UPLOAD_DONE) { throw new IOException("Cannot roll fsImage before rolling edits log."); } // // First, verify that edits.new and fsimage.ckpt exists in all // checkpoint directories. 
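    // The overall sequence is: check that both checkpoint artifacts exist,
    // purge edits.new into edits, rename fsimage.ckpt to fsimage in every
    // image directory, and finally rewrite the fstime and VERSION files.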
// if (!editLog.existsNew()) { throw new IOException("New Edits file does not exist"); } for (Iterator<StorageDirectory> it = dirIterator(NameNodeDirType.IMAGE); it.hasNext();) { StorageDirectory sd = it.next(); File ckpt = getImageFile(sd, NameNodeFile.IMAGE_NEW); if (!ckpt.exists()) { throw new IOException("Checkpoint file " + ckpt + " does not exist"); } } editLog.purgeEditLog(); // renamed edits.new to edits LOG.debug("rollFSImage after purgeEditLog: storageList=" + listStorageDirectories()); // // Renames new image // for (Iterator<StorageDirectory> it = dirIterator(NameNodeDirType.IMAGE); it.hasNext();) { StorageDirectory sd = it.next(); File ckpt = getImageFile(sd, NameNodeFile.IMAGE_NEW); File curFile = getImageFile(sd, NameNodeFile.IMAGE); // renameTo fails on Windows if the destination file // already exists. LOG.debug("renaming " + ckpt.getAbsolutePath() + " to " + curFile.getAbsolutePath()); if (!ckpt.renameTo(curFile)) { curFile.delete(); if (!ckpt.renameTo(curFile)) { LOG.warn("renaming " + ckpt.getAbsolutePath() + " to " + curFile.getAbsolutePath() + " FAILED"); // Close edit stream, if this directory is also used for edits if (sd.getStorageDirType().isOfType(NameNodeDirType.EDITS)) editLog.processIOError(sd); // add storage to the removed list removedStorageDirs.add(sd); it.remove(); } } } // // Updates the fstime file on all directories (fsimage and edits) // and write version file // this.layoutVersion = FSConstants.LAYOUT_VERSION; this.checkpointTime = FSNamesystem.now(); this.setImageDigest(newImageDigest); for (Iterator<StorageDirectory> it = dirIterator(); it.hasNext();) { StorageDirectory sd = it.next(); // delete old edits if sd is the image only the directory if (!sd.getStorageDirType().isOfType(NameNodeDirType.EDITS)) { File editsFile = getImageFile(sd, NameNodeFile.EDITS); editsFile.delete(); } // delete old fsimage if sd is the edits only the directory if (!sd.getStorageDirType().isOfType(NameNodeDirType.IMAGE)) { File imageFile = getImageFile(sd, NameNodeFile.IMAGE); imageFile.delete(); } try { sd.write(); } catch (IOException e) { LOG.error("Cannot write file " + sd.getRoot(), e); // Close edit stream, if this directory is also used for edits if (sd.getStorageDirType().isOfType(NameNodeDirType.EDITS)) editLog.processIOError(sd); //add storage to the removed list removedStorageDirs.add(sd); it.remove(); } } ckptState = FSImage.CheckpointStates.START; } CheckpointSignature rollEditLog() throws IOException { getEditLog().rollEditLog(); ckptState = CheckpointStates.ROLLED_EDITS; return new CheckpointSignature(this); } /** * This is called just before a new checkpoint is uploaded to the * namenode. */ void validateCheckpointUpload(CheckpointSignature sig) throws IOException { if (ckptState != CheckpointStates.ROLLED_EDITS) { throw new IOException("Namenode is not expecting an new image " + ckptState); } // verify token long modtime = getEditLog().getFsEditTime(); if (sig.editsTime != modtime) { throw new IOException("Namenode has an edit log with timestamp of " + DATE_FORM.format(new Date(modtime)) + " but new checkpoint was created using editlog " + " with timestamp " + DATE_FORM.format(new Date(sig.editsTime)) + ". Checkpoint Aborted."); } sig.validateStorageInfo(this); ckptState = FSImage.CheckpointStates.UPLOAD_START; } /** * This is called when a checkpoint upload finishes successfully. 
*/
  synchronized void checkpointUploadDone(MD5Hash checkpointImageMd5) {
    checkpointImageDigest = checkpointImageMd5;
    ckptState = CheckpointStates.UPLOAD_DONE;
  }

  void close() throws IOException {
    getEditLog().close(true);
    unlockAll();
  }

  /**
   * Return the name of the image file.
   */
  File getFsImageName() {
    StorageDirectory sd = null;
    for (Iterator<StorageDirectory> it =
        dirIterator(NameNodeDirType.IMAGE); it.hasNext();) {
      sd = it.next();
      File fsImage = getImageFile(sd, NameNodeFile.IMAGE);
      if (sd.getRoot().canRead() && fsImage.exists()) {
        return fsImage;
      }
    }
    return null;
  }

  /**
   * See if any of the removed storage directories are "writable" again, and
   * can be returned into service.
   */
  void attemptRestoreRemovedStorage() {
    // if directory is "alive" - copy the images there...
    if(!restoreFailedStorage || removedStorageDirs.size() == 0)
      return; //nothing to restore

    LOG.info("FSImage.attemptRestoreRemovedStorage: check removed(failed) " +
             "storage. removedStorages size = " + removedStorageDirs.size());
    for(Iterator<StorageDirectory> it = this.removedStorageDirs.iterator();
        it.hasNext();) {
      StorageDirectory sd = it.next();
      File root = sd.getRoot();
      LOG.info("currently disabled dir " + root.getAbsolutePath() +
               "; type="+sd.getStorageDirType() + ";canwrite="+root.canWrite());
      try {
        if(root.exists() && root.canWrite()) {
          // when we try to restore we just need to remove all the data
          // without saving current in-memory state (which could've changed).
          sd.clearDirectory();
          LOG.info("restoring dir " + sd.getRoot().getAbsolutePath());
          this.addStorageDir(sd); // restore
          it.remove();
        }
      } catch(IOException e) {
        LOG.warn("failed to restore " + sd.getRoot().getAbsolutePath(), e);
      }
    }
  }

  public File getFsEditName() throws IOException {
    return getEditLog().getFsEditName();
  }

  File getFsTimeName() {
    StorageDirectory sd = null;
    // NameNodeFile.TIME should be same on all directories
    for (Iterator<StorageDirectory> it = dirIterator(); it.hasNext();)
      sd = it.next();
    return getImageFile(sd, NameNodeFile.TIME);
  }

  /**
   * Return the name of the image file that is uploaded by periodic
   * checkpointing.
   */
  File[] getFsImageNameCheckpoint() {
    ArrayList<File> list = new ArrayList<File>();
    for (Iterator<StorageDirectory> it =
        dirIterator(NameNodeDirType.IMAGE); it.hasNext();) {
      list.add(getImageFile(it.next(), NameNodeFile.IMAGE_NEW));
    }
    return list.toArray(new File[list.size()]);
  }

  /**
   * DatanodeImage is used to store persistent information
   * about datanodes into the fsImage.
   */
  static class DatanodeImage implements Writable {
    DatanodeDescriptor node = new DatanodeDescriptor();

    /////////////////////////////////////////////////
    // Writable
    /////////////////////////////////////////////////
    /**
     * Public method that serializes the information about a
     * Datanode to be stored in the fsImage.
     */
    public void write(DataOutput out) throws IOException {
      new DatanodeID(node).write(out);
      out.writeLong(node.getCapacity());
      out.writeLong(node.getRemaining());
      out.writeLong(node.getLastUpdate());
      out.writeInt(node.getXceiverCount());
    }

    /**
     * Public method that reads a serialized Datanode
     * from the fsImage.
*/ public void readFields(DataInput in) throws IOException { DatanodeID id = new DatanodeID(); id.readFields(in); long capacity = in.readLong(); long remaining = in.readLong(); long lastUpdate = in.readLong(); int xceiverCount = in.readInt(); // update the DatanodeDescriptor with the data we read in node.updateRegInfo(id); node.setStorageID(id.getStorageID()); node.setCapacity(capacity); node.setRemaining(remaining); node.setLastUpdate(lastUpdate); node.setXceiverCount(xceiverCount); } } protected void corruptPreUpgradeStorage(File rootDir) throws IOException { File oldImageDir = new File(rootDir, "image"); if (!oldImageDir.exists()) if (!oldImageDir.mkdir()) throw new IOException("Cannot create directory " + oldImageDir); File oldImage = new File(oldImageDir, "fsimage"); if (!oldImage.exists()) // recreate old image file to let pre-upgrade versions fail if (!oldImage.createNewFile()) throw new IOException("Cannot create file " + oldImage); RandomAccessFile oldFile = new RandomAccessFile(oldImage, "rws"); // write new version into old image file try { writeCorruptedData(oldFile); } finally { oldFile.close(); } } private boolean getDistributedUpgradeState() { FSNamesystem ns = getFSNamesystem(); return ns == null ? false : ns.getDistributedUpgradeState(); } private int getDistributedUpgradeVersion() { FSNamesystem ns = getFSNamesystem(); return ns == null ? 0 : ns.getDistributedUpgradeVersion(); } private void setDistributedUpgradeState(boolean uState, int uVersion) { getFSNamesystem().upgradeManager.setUpgradeState(uState, uVersion); } private void verifyDistributedUpgradeProgress(StartupOption startOpt ) throws IOException { if(startOpt == StartupOption.ROLLBACK || startOpt == StartupOption.IMPORT) return; UpgradeManager um = getFSNamesystem().upgradeManager; assert um != null : "FSNameSystem.upgradeManager is null."; if(startOpt != StartupOption.UPGRADE) { if(um.getUpgradeState()) throw new IOException( "\n Previous distributed upgrade was not completed. " + "\n Please restart NameNode with -upgrade option."); if(um.getDistributedUpgrades() != null) throw new IOException("\n Distributed upgrade for NameNode version " + um.getUpgradeVersion() + " to current LV " + FSConstants.LAYOUT_VERSION + " is required.\n Please restart NameNode with -upgrade option."); } } private void initializeDistributedUpgrade() throws IOException { UpgradeManagerNamenode um = getFSNamesystem().upgradeManager; if(! 
um.initializeUpgrade()) return; // write new upgrade state into disk writeAll(); NameNode.LOG.info("\n Distributed upgrade for NameNode version " + um.getUpgradeVersion() + " to current LV " + FSConstants.LAYOUT_VERSION + " is initialized."); } static Collection<File> getCheckpointDirs(Configuration conf, String defaultName) { Collection<String> dirNames = conf.getStringCollection("fs.checkpoint.dir"); if (dirNames.size() == 0 && defaultName != null) { dirNames.add(defaultName); } Collection<File> dirs = new ArrayList<File>(dirNames.size()); for(String name : dirNames) { dirs.add(new File(name)); } return dirs; } static Collection<File> getCheckpointEditsDirs(Configuration conf, String defaultName) { Collection<String> dirNames = conf.getStringCollection("fs.checkpoint.edits.dir"); if (dirNames.size() == 0 && defaultName != null) { dirNames.add(defaultName); } Collection<File> dirs = new ArrayList<File>(dirNames.size()); for(String name : dirNames) { dirs.add(new File(name)); } return dirs; } public static String readString(DataInputStream in) throws IOException { U_STR.get().readFields(in); return U_STR.get().toString(); } /** * Reading the path from the image and converting it to byte[][] directly this * saves us an array copy and conversions to and from String * * @param in * @return the array each element of which is a byte[] representation of a * path component * @throws IOException */ public static byte[][] readPathComponents(DataInputStream in) throws IOException { U_STR.get().readFields(in); return DFSUtil.bytes2byteArray(U_STR.get().getBytes(), U_STR.get().getLength(), (byte) Path.SEPARATOR_CHAR); } static String readString_EmptyAsNull(DataInputStream in) throws IOException { final String s = readString(in); return s.isEmpty()? null: s; } public static byte[] readBytes(DataInputStream in) throws IOException { U_STR.get().readFields(in); int len = U_STR.get().getLength(); byte[] bytes = new byte[len]; System.arraycopy(U_STR.get().getBytes(), 0, bytes, 0, len); return bytes; } static void writeString(String str, DataOutputStream out) throws IOException { U_STR.get().set(str); U_STR.get().write(out); } }
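/*
 * Illustrative configuration sketch (values are hypothetical): the checkpoint
 * directories consumed by getCheckpointDirs()/getCheckpointEditsDirs() above
 * are normally supplied through the Hadoop configuration, e.g.
 *
 *   <property>
 *     <name>fs.checkpoint.dir</name>
 *     <value>/data/namesecondary</value>
 *   </property>
 *   <property>
 *     <name>fs.checkpoint.edits.dir</name>
 *     <value>/data/namesecondary</value>
 *   </property>
 *
 * If neither key is set, an import of a checkpoint is refused, as the error
 * messages in recoverTransitionRead() indicate.
 */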
{ "content_hash": "dc45051141e604a4f45491410af66623", "timestamp": "", "source": "github", "line_count": 2339, "max_line_length": 117, "avg_line_length": 35.5143223599829, "alnum_prop": 0.6353108296817066, "repo_name": "rvadali/fb-raid-refactoring", "id": "8254bd21674b1af4f1347cd155db26960cff379c", "size": "83874", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/hdfs/org/apache/hadoop/hdfs/server/namenode/FSImage.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "451987" }, { "name": "C++", "bytes": "419590" }, { "name": "Java", "bytes": "17173220" }, { "name": "JavaScript", "bytes": "10513" }, { "name": "Objective-C", "bytes": "118273" }, { "name": "PHP", "bytes": "152555" }, { "name": "Perl", "bytes": "140392" }, { "name": "Python", "bytes": "621619" }, { "name": "Ruby", "bytes": "28485" }, { "name": "Shell", "bytes": "4077329" }, { "name": "Smalltalk", "bytes": "56562" } ], "symlink_target": "" }
layout: default
---

<section class="collection-head small geopattern" data-pattern-id="{{ page.title | truncate: 15}}">
    <div class="container">
        <div class="collection-title">
            <h1 class="collection-header">{{ page.title }}</h1>
            {% if page.subtitle %}
            <div class="collection-info">
                <span class="meta-info">
                    {{ page.subtitle}}
                </span>
            </div>
            {% endif %}
        </div>
    </div>
</section>
<!-- / .banner -->

<section class="container content">
    <div class="columns">
        <div class="column two-thirds" >
            <article class="article-content markdown-body">
                {{ content }}
            </article>
            <div class="comment">
                {% include comments.html %}
            </div>
        </div>
        <div class="column one-third">
            {% include sidebar-search.html %}
            <!-- {% include sidebar-popular-repo.html %} -->
        </div>
    </div>
</section>
<!-- /section.content -->
{ "content_hash": "8dbf314bb05fb465bc1f3ac20ec82794", "timestamp": "", "source": "github", "line_count": 35, "max_line_length": 99, "avg_line_length": 32.94285714285714, "alnum_prop": 0.44752818733738076, "repo_name": "jssx100/jssx100.github.io", "id": "a56f76458a6c3812f7c9737a16f348596222e686", "size": "1157", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "_layouts/page.html", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "23262" }, { "name": "HTML", "bytes": "22236" }, { "name": "JavaScript", "bytes": "8727" }, { "name": "Ruby", "bytes": "73" } ], "symlink_target": "" }
/*
 * A simple server to demonstrate TCP sockets implementation. The server
 * takes input from a client and shows a hex dump of the data.
 */

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/inet.h>

#define DATA 50
#define S_PORT 7890

void dump(char *, const unsigned int);

int main(void)
{
	/* server socket variables */
	int sockfd, new_sockfd;
	struct sockaddr_in host_addr, client_addr;
	socklen_t sin_size = sizeof(struct sockaddr_in);
	int recv_length = 1, yes = 1;

	/* data holding */
	char buffer[1024];

	/* defining listening socket descriptor */
	if ((sockfd = socket(AF_INET, SOCK_STREAM, 0)) == -1)
		perror("in socket");

	if (setsockopt(sockfd, SOL_SOCKET, SO_REUSEADDR, &yes, sizeof(int)) == -1)
		perror("setting socket option SO_REUSEADDR");

	/* initializing server address socket structure */
	host_addr.sin_family = AF_INET;		/* host byte order */
	host_addr.sin_port = htons(S_PORT);	/* port, network byte order */
	host_addr.sin_addr.s_addr = 0;		/* 0 = INADDR_ANY: accept on any local address */

	/* zero the rest of struct */
	memset(&(host_addr.sin_zero), '\0', sizeof(host_addr.sin_zero));

	/* bind socket to IP address */
	if (bind(sockfd, (struct sockaddr *)&host_addr, sizeof(struct sockaddr)) == -1)
		perror("binding to socket");

	/* listen on socket */
	if (listen(sockfd, 5) == -1)
		perror("listening on socket");

	/* main procedure */
	while (1) {
		new_sockfd = accept(sockfd, (struct sockaddr *) &client_addr, &sin_size);
		if (new_sockfd == -1)
			perror("accepting connection");
		printf("server: got connection from %s port %d\n",
				inet_ntoa(client_addr.sin_addr), ntohs(client_addr.sin_port));
		recv_length = recv(new_sockfd, buffer, DATA, 0);
		while (recv_length > 0) {
			printf("RECV: %d bytes\n", recv_length);
			dump(buffer, recv_length);
			recv_length = recv(new_sockfd, buffer, DATA, 0);
		}
		close(new_sockfd);
	}
	return 0;
}

/* dumps raw memory in hex byte and printable split format */
void dump(char *data_buffer, const unsigned int length)
{
	unsigned char byte;
	unsigned int i, j;
	for (i = 0; i < length; i++) {
		byte = data_buffer[i];
		printf("%02x ", byte);	/* display the byte in hex (unsigned, avoids sign extension) */
		if (((i % 16) == 15) || (i == length - 1)) {
			for (j = 0; j < 15 - (i % 16); j++)
				printf("   ");
			printf("| ");

			/* display printable bytes from the line */
			for (j = (i - (i % 16)); j <= i; j++) {
				byte = data_buffer[j];
				/* print printable ASCII characters, a dot for everything else */
				if ((byte > 31) && (byte < 127))
					printf("%c", byte);
				else
					printf(".");
			}

			/* end of the dump line (each line 16 bytes) */
			printf("\n");
		}
	}
}
{ "content_hash": "96c3e193cc3fed19cfd8d5e017e9120c", "timestamp": "", "source": "github", "line_count": 103, "max_line_length": 73, "avg_line_length": 26.29126213592233, "alnum_prop": 0.619645494830133, "repo_name": "jarrocha/sockets", "id": "802529448de41ac5f53adf21dabf9fd9c09e5d33", "size": "2708", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "tcp_server/simple_server.c", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "8179" }, { "name": "Makefile", "bytes": "558" } ], "symlink_target": "" }
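A quick way to exercise the dump server above is to push a few bytes at it from another process. The following client is only an illustrative sketch (it is not part of the sockets repository) and assumes the server is already running locally on S_PORT, i.e. port 7890.

# Minimal client for the TCP dump server above (illustrative sketch).
# Assumes the server is listening locally on S_PORT = 7890.
import socket

def send_to_dump_server(payload: bytes, host: str = "127.0.0.1", port: int = 7890) -> None:
    with socket.create_connection((host, port)) as sock:
        sock.sendall(payload)  # the server prints a hex/ASCII dump of whatever arrives

if __name__ == "__main__":
    send_to_dump_server(b"Hello, dump server!\x00\x01\x02")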
package db import ( "database/sql" "encoding/json" "errors" "fmt" "strings" ) const teamContainerJoins = containerJoins + "\nLEFT JOIN teams t ON c.team_id = t.id" func (db *teamDB) FindContainersByDescriptors(id Container) ([]SavedContainer, error) { err := db.deleteExpiredContainers() if err != nil { return nil, err } var whereCriteria []string var params []interface{} if id.ResourceName != "" { whereCriteria = append(whereCriteria, fmt.Sprintf("r.name = $%d", len(params)+1)) params = append(params, id.ResourceName) } if id.StepName != "" { whereCriteria = append(whereCriteria, fmt.Sprintf("c.step_name = $%d", len(params)+1)) params = append(params, id.StepName) } if id.JobName != "" { whereCriteria = append(whereCriteria, fmt.Sprintf("j.name = $%d", len(params)+1)) params = append(params, id.JobName) } if id.PipelineName != "" { whereCriteria = append(whereCriteria, fmt.Sprintf("p.name = $%d", len(params)+1)) params = append(params, id.PipelineName) } if id.BuildID != 0 { whereCriteria = append(whereCriteria, fmt.Sprintf("build_id = $%d", len(params)+1)) params = append(params, id.BuildID) } if id.Type != "" { whereCriteria = append(whereCriteria, fmt.Sprintf("type = $%d", len(params)+1)) params = append(params, id.Type.String()) } if id.WorkerName != "" { whereCriteria = append(whereCriteria, fmt.Sprintf("worker_name = $%d", len(params)+1)) params = append(params, id.WorkerName) } if id.CheckType != "" { whereCriteria = append(whereCriteria, fmt.Sprintf("check_type = $%d", len(params)+1)) params = append(params, id.CheckType) } if id.BuildName != "" { whereCriteria = append(whereCriteria, fmt.Sprintf("b.name = $%d", len(params)+1)) params = append(params, id.BuildName) } var checkSourceBlob []byte if id.CheckSource != nil { checkSourceBlob, err = json.Marshal(id.CheckSource) if err != nil { return nil, err } whereCriteria = append(whereCriteria, fmt.Sprintf("check_source = $%d", len(params)+1)) params = append(params, checkSourceBlob) } if len(id.Attempts) > 0 { attemptsBlob, err := json.Marshal(id.Attempts) if err != nil { return nil, err } whereCriteria = append(whereCriteria, fmt.Sprintf("attempts = $%d", len(params)+1)) params = append(params, attemptsBlob) } var rows *sql.Rows team, found, err := db.GetTeam() if err != nil { return nil, err } if !found { return nil, errors.New("team-not-found") } selectQuery := fmt.Sprintf(` SELECT `+containerColumns+` FROM containers c `+teamContainerJoins+` WHERE c.team_id = %d `, team.ID) if len(whereCriteria) > 0 { selectQuery += fmt.Sprintf(" AND %s", strings.Join(whereCriteria, " AND ")) } rows, err = db.conn.Query(selectQuery, params...) 
if err != nil { return nil, err } defer rows.Close() infos := []SavedContainer{} for rows.Next() { info, err := scanContainer(rows) if err != nil { return nil, err } infos = append(infos, info) } return infos, nil } func (db *teamDB) GetContainer(handle string) (SavedContainer, bool, error) { err := db.deleteExpiredContainers() if err != nil { return SavedContainer{}, false, err } team, found, err := db.GetTeam() if err != nil { return SavedContainer{}, false, err } if !found { return SavedContainer{}, false, errors.New("team-not-found") } container, err := scanContainer(db.conn.QueryRow(fmt.Sprintf(` SELECT `+containerColumns+` FROM containers c `+teamContainerJoins+` WHERE c.handle = $1 AND c.team_id = %d `, team.ID), handle)) if err != nil { if err == sql.ErrNoRows { return SavedContainer{}, false, nil } return SavedContainer{}, false, err } return container, true, nil } func (db *teamDB) deleteExpiredContainers() error { _, err := db.conn.Exec(` DELETE FROM containers WHERE expires_at IS NOT NULL AND expires_at < NOW() `) if err != nil { return err } return nil }
{ "content_hash": "77f9c4a39c8c1d1348518fef45911d0d", "timestamp": "", "source": "github", "line_count": 172, "max_line_length": 89, "avg_line_length": 22.930232558139537, "alnum_prop": 0.6592292089249493, "repo_name": "homedepot/github-webhook", "id": "4ca3ed56181e65e29d4543b116e8a3a09951fbbc", "size": "3944", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "vendor/github.com/concourse/atc/db/team_db_containers.go", "mode": "33261", "license": "apache-2.0", "language": [ { "name": "Go", "bytes": "32379" }, { "name": "JavaScript", "bytes": "1033" }, { "name": "Ruby", "bytes": "3027" }, { "name": "Shell", "bytes": "20114" } ], "symlink_target": "" }
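FindContainersByDescriptors above builds its WHERE clause incrementally: every descriptor that is set appends one numbered placeholder plus one parameter, and the fragments are joined with AND. The sketch below shows the same pattern in a self-contained form; it is illustrative only, uses sqlite3-style ? placeholders, and its table and column names are simplified stand-ins rather than the Concourse schema.

# Illustrative sketch of the dynamic-filter pattern used above (not project code).
import sqlite3

def find_containers(conn: sqlite3.Connection, *, step_name=None, worker_name=None, build_id=None):
    criteria, params = [], []
    if step_name is not None:
        criteria.append("step_name = ?")
        params.append(step_name)
    if worker_name is not None:
        criteria.append("worker_name = ?")
        params.append(worker_name)
    if build_id is not None:
        criteria.append("build_id = ?")
        params.append(build_id)

    query = "SELECT handle FROM containers"
    if criteria:  # only add WHERE when at least one filter is set
        query += " WHERE " + " AND ".join(criteria)
    return conn.execute(query, params).fetchall()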
echo "Listing available backups" echo "-------------------------" ls /backups/
{ "content_hash": "0bc4dfc953decf27df300b2e5d78d8b4", "timestamp": "", "source": "github", "line_count": 3, "max_line_length": 32, "avg_line_length": 26.333333333333332, "alnum_prop": 0.5063291139240507, "repo_name": "ameistad/django-template", "id": "c88614b12f61125aecbad19bed1269e2e7cc7563", "size": "91", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "{{cookiecutter.repo_name}}/docker/postgres/list-backups.sh", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "166" }, { "name": "Python", "bytes": "11223" }, { "name": "Shell", "bytes": "3329" } ], "symlink_target": "" }
package com.github.sunnysuperman.pim.client; import io.netty.channel.ChannelHandlerContext; import com.github.sunnysuperman.pim.protocol.ClientID; import com.github.sunnysuperman.pim.protocol.Packet; public interface ClientPacketRouter { public static final int ROUTE_NONE = 0; public static final int ROUTE_LOCAL = (1 << 0); public static final int ROUTE_CLUSTER = (1 << 1); public static final int ROUTE_GLOBAL = (1 << 2); int route(Packet packet, ClientID clientID); int route(Packet packet, ClientID clientID, int routeType); boolean write(Packet packet, ChannelHandlerContext channel); int getCompressThreshold(); void setCompressThreshold(int compressThreshold); }
{ "content_hash": "ed04dd495b740256c69f4fb35d46d1cd", "timestamp": "", "source": "github", "line_count": 24, "max_line_length": 64, "avg_line_length": 29.833333333333332, "alnum_prop": 0.7486033519553073, "repo_name": "sunnysuperman/pim-server", "id": "2c3e8d82145e69d331e5a8ee48a3c2040c500c61", "size": "716", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/com/github/sunnysuperman/pim/client/ClientPacketRouter.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "204041" } ], "symlink_target": "" }
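The ROUTE_* constants in the interface above are bit flags, so routing scopes can be combined with bitwise OR and tested with bitwise AND. A small illustrative sketch of that flag arithmetic (the values mirror the Java constants; the helper function is made up for the example):

# Bit-flag routing scopes mirroring the Java constants above (illustrative only).
ROUTE_NONE    = 0
ROUTE_LOCAL   = 1 << 0
ROUTE_CLUSTER = 1 << 1
ROUTE_GLOBAL  = 1 << 2

def routes_locally(route_type: int) -> bool:
    return bool(route_type & ROUTE_LOCAL)

route_type = ROUTE_LOCAL | ROUTE_CLUSTER   # route locally and to the cluster, but not globally
assert routes_locally(route_type)
assert not routes_locally(ROUTE_GLOBAL)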
package org.openapitools.server.api.model; import java.util.Objects; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; import org.openapitools.server.api.model.GithubRepositories; import org.openapitools.server.api.model.GithubRespositoryContainerlinks; @JsonInclude(JsonInclude.Include.NON_NULL) public class GithubRespositoryContainer { private String propertyClass; private GithubRespositoryContainerlinks links; private GithubRepositories repositories; public GithubRespositoryContainer () { } public GithubRespositoryContainer (String propertyClass, GithubRespositoryContainerlinks links, GithubRepositories repositories) { this.propertyClass = propertyClass; this.links = links; this.repositories = repositories; } @JsonProperty("_class") public String getPropertyClass() { return propertyClass; } public void setPropertyClass(String propertyClass) { this.propertyClass = propertyClass; } @JsonProperty("_links") public GithubRespositoryContainerlinks getLinks() { return links; } public void setLinks(GithubRespositoryContainerlinks links) { this.links = links; } @JsonProperty("repositories") public GithubRepositories getRepositories() { return repositories; } public void setRepositories(GithubRepositories repositories) { this.repositories = repositories; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } GithubRespositoryContainer githubRespositoryContainer = (GithubRespositoryContainer) o; return Objects.equals(propertyClass, githubRespositoryContainer.propertyClass) && Objects.equals(links, githubRespositoryContainer.links) && Objects.equals(repositories, githubRespositoryContainer.repositories); } @Override public int hashCode() { return Objects.hash(propertyClass, links, repositories); } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class GithubRespositoryContainer {\n"); sb.append(" propertyClass: ").append(toIndentedString(propertyClass)).append("\n"); sb.append(" links: ").append(toIndentedString(links)).append("\n"); sb.append(" repositories: ").append(toIndentedString(repositories)).append("\n"); sb.append("}"); return sb.toString(); } /** * Convert the given object to string with each line indented by 4 spaces * (except the first line). */ private String toIndentedString(Object o) { if (o == null) { return "null"; } return o.toString().replace("\n", "\n "); } }
{ "content_hash": "c6d6e9248f6d7e4d2835fbd07a9bbd9b", "timestamp": "", "source": "github", "line_count": 95, "max_line_length": 132, "avg_line_length": 29.03157894736842, "alnum_prop": 0.7179115300942712, "repo_name": "cliffano/swaggy-jenkins", "id": "c5e732720f1e5aa8460645d57bd3d8025fd41f03", "size": "2758", "binary": false, "copies": "1", "ref": "refs/heads/main", "path": "clients/java-vertx/generated/src/main/java/org/openapitools/server/api/model/GithubRespositoryContainer.java", "mode": "33188", "license": "mit", "language": [ { "name": "Ada", "bytes": "569823" }, { "name": "Apex", "bytes": "741346" }, { "name": "Batchfile", "bytes": "14792" }, { "name": "C", "bytes": "971274" }, { "name": "C#", "bytes": "5131336" }, { "name": "C++", "bytes": "7799032" }, { "name": "CMake", "bytes": "20609" }, { "name": "CSS", "bytes": "4873" }, { "name": "Clojure", "bytes": "129018" }, { "name": "Crystal", "bytes": "864941" }, { "name": "Dart", "bytes": "876777" }, { "name": "Dockerfile", "bytes": "7385" }, { "name": "Eiffel", "bytes": "424642" }, { "name": "Elixir", "bytes": "139252" }, { "name": "Elm", "bytes": "187067" }, { "name": "Emacs Lisp", "bytes": "191" }, { "name": "Erlang", "bytes": "373074" }, { "name": "F#", "bytes": "556012" }, { "name": "Gherkin", "bytes": "951" }, { "name": "Go", "bytes": "345227" }, { "name": "Groovy", "bytes": "89524" }, { "name": "HTML", "bytes": "2367424" }, { "name": "Haskell", "bytes": "680841" }, { "name": "Java", "bytes": "12164874" }, { "name": "JavaScript", "bytes": "1959006" }, { "name": "Kotlin", "bytes": "1280953" }, { "name": "Lua", "bytes": "322316" }, { "name": "Makefile", "bytes": "11882" }, { "name": "Nim", "bytes": "65818" }, { "name": "OCaml", "bytes": "94665" }, { "name": "Objective-C", "bytes": "464903" }, { "name": "PHP", "bytes": "4383673" }, { "name": "Perl", "bytes": "743304" }, { "name": "PowerShell", "bytes": "678274" }, { "name": "Python", "bytes": "5529523" }, { "name": "QMake", "bytes": "6915" }, { "name": "R", "bytes": "840841" }, { "name": "Raku", "bytes": "10945" }, { "name": "Ruby", "bytes": "328360" }, { "name": "Rust", "bytes": "1735375" }, { "name": "Scala", "bytes": "1387368" }, { "name": "Shell", "bytes": "407167" }, { "name": "Swift", "bytes": "342562" }, { "name": "TypeScript", "bytes": "3060093" } ], "symlink_target": "" }
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. #pragma once #include <vespa/searchcommon/attribute/i_multi_value_read_view.h> #include <vespa/vespalib/stllike/allocator.h> namespace search::attribute { /** * Read view for the data stored in an extendable multi-value string * array attribute vector (used by streaming visitor) that handles * optional addition of weight. * @tparam MultiValueType The multi-value type of the data to access. */ template <typename MultiValueType> class ExtendableStringArrayMultiValueReadView : public attribute::IMultiValueReadView<MultiValueType> { using Offsets = std::vector<uint32_t, vespalib::allocator_large<uint32_t>>; const std::vector<char>& _buffer; const Offsets & _offsets; const std::vector<uint32_t>& _idx; mutable std::vector<MultiValueType> _copy; public: ExtendableStringArrayMultiValueReadView(const std::vector<char>& buffer, const Offsets & offsets, const std::vector<uint32_t>& idx); ~ExtendableStringArrayMultiValueReadView() override; vespalib::ConstArrayRef<MultiValueType> get_values(uint32_t doc_id) const override; }; }
{ "content_hash": "55a2f52cee953ad7d8892d3f5b837d35", "timestamp": "", "source": "github", "line_count": 30, "max_line_length": 136, "avg_line_length": 40.56666666666667, "alnum_prop": 0.7378800328677074, "repo_name": "vespa-engine/vespa", "id": "d83398b5568d78d6fcf74546c52e5314fb00bf19", "size": "1217", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "searchlib/src/vespa/searchlib/attribute/extendable_string_array_multi_value_read_view.h", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ANTLR", "bytes": "8130" }, { "name": "C", "bytes": "60315" }, { "name": "C++", "bytes": "29580035" }, { "name": "CMake", "bytes": "593981" }, { "name": "Emacs Lisp", "bytes": "91" }, { "name": "GAP", "bytes": "3312" }, { "name": "Go", "bytes": "560664" }, { "name": "HTML", "bytes": "54520" }, { "name": "Java", "bytes": "40814190" }, { "name": "JavaScript", "bytes": "73436" }, { "name": "LLVM", "bytes": "6152" }, { "name": "Lex", "bytes": "11499" }, { "name": "Makefile", "bytes": "5553" }, { "name": "Objective-C", "bytes": "12369" }, { "name": "Perl", "bytes": "23134" }, { "name": "Python", "bytes": "52392" }, { "name": "Roff", "bytes": "17506" }, { "name": "Ruby", "bytes": "10690" }, { "name": "Shell", "bytes": "268737" }, { "name": "Yacc", "bytes": "14735" } ], "symlink_target": "" }
import matplotlib from getdist import plots, MCSamples import getdist import numpy as np import matplotlib.pyplot as plt import astropy from loadMontePython import load as loadMCMC import glob basedir = '../chains/nonzero_model/' #"/home/zequnl/Projects/isocurvature_2017/analysis/plot_triangle/nonzero/" burnin = 1000 data1 = loadMCMC('../chains/planckdata/r1.txt', '../chains/planckdata/param') data2 = loadMCMC('../chains/planckdata/r2.txt', '../chains/planckdata/param') data = astropy.table.vstack( [data1[burnin:], data2[burnin:]]) data_planck = data[:] weights_planck = data['acceptance'][:] for col in ['likelihood', 'acceptance','omega_b','omega_cdm','100theta_s','tau_reio']: data.remove_column(col) nparr_planck = np.array(data.as_array().tolist()[:]) planck = MCSamples(samples=nparr_planck,names = data.colnames, labels = data.colnames, name_tag='Planck') ## C folder = basedir + 'fC/' files = glob.glob(folder + "*__*.txt") params = glob.glob(folder + "*_.paramnames") datalist = [] for f in files: datalist.append( loadMCMC(f, params[0]) ) data = astropy.table.vstack( datalist ) data_sim = data[:] weights_act = data['acceptance'][:] for col in ['likelihood', 'acceptance','omega_b','omega_cdm','100theta_s','tau_reio']: data.remove_column(col) nparr_act = np.array(data.as_array().tolist()[:]) planck_s4 = MCSamples(samples=nparr_act,names = data.colnames, labels = data.colnames, name_tag='Planck low_l + S4') ## E folder = basedir + 'fE/' files = glob.glob(folder + "*__*.txt") params = glob.glob(folder + "*_.paramnames") datalist = [] for f in files: datalist.append( loadMCMC(f, params[0]) ) data = astropy.table.vstack( datalist ) data_sim = data[:] weights_act = data['acceptance'][:] for col in ['likelihood', 'acceptance','omega_b','omega_cdm','100theta_s','tau_reio']: data.remove_column(col) nparr_act = np.array(data.as_array().tolist()[:]) pixie_planck = MCSamples(samples=nparr_act,names = data.colnames, labels = data.colnames, name_tag='PIXIE low_l + Planck high_l') ## F folder = basedir + 'fF/' files = glob.glob(folder + "*__*.txt") params = glob.glob(folder + "*_.paramnames") datalist = [] for f in files: datalist.append( loadMCMC(f, params[0]) ) data = astropy.table.vstack( datalist ) data_sim = data[:] weights_act = data['acceptance'][:] for col in ['likelihood', 'acceptance','omega_b','omega_cdm','100theta_s','tau_reio']: data.remove_column(col) nparr_act = np.array(data.as_array().tolist()[:]) pixie_s4 = MCSamples(samples=nparr_act,names = data.colnames, labels = data.colnames, name_tag='PIXIE low_l + S4') #Triangle plot g = plots.getSubplotPlotter() g.triangle_plot([ planck_s4, pixie_planck, pixie_s4], filled=True) # now we add some boundaries # P_II^1 for ax in g.subplots[:,2]: if ax != None: ax.set_xlim(0,ax.get_xlim()[1]) for ax in g.subplots[2,:]: if ax != None: ax.set_ylim(0,ax.get_ylim()[1]) # P_II^2 for ax in g.subplots[:,3]: if ax != None: ax.set_xlim(0,ax.get_xlim()[1]) for ax in g.subplots[3,:]: if ax != None: ax.set_ylim(0,ax.get_ylim()[1]) plt.savefig('../../figures/nonzero_forecast_all_overplotted.pdf') plt.show()
{ "content_hash": "c70b71895cdfb7789698d127fa82dca1", "timestamp": "", "source": "github", "line_count": 107, "max_line_length": 129, "avg_line_length": 29.30841121495327, "alnum_prop": 0.6817602040816326, "repo_name": "xzackli/isocurvature_2017", "id": "089788876591bee16482005ee77f8c5744794229", "size": "3138", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "analysis/plot_triangle/make_only_nonzero_forecasts.py", "mode": "33188", "license": "mit", "language": [ { "name": "Jupyter Notebook", "bytes": "300659" }, { "name": "Python", "bytes": "69029" }, { "name": "Shell", "bytes": "213" }, { "name": "TeX", "bytes": "119428" } ], "symlink_target": "" }
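The three per-folder blocks (fC, fE, fF) in the script above repeat the same glob/stack/strip/MCSamples sequence, so they could be folded into one helper. The sketch below is only a possible refactoring; it assumes loadMCMC and the dropped column names exactly as used in the script.

# Sketch of a helper that factors out the repeated per-folder loading above.
import glob
import numpy as np
import astropy.table
from getdist import MCSamples
from loadMontePython import load as loadMCMC  # same helper the script imports

DROP_COLS = ['likelihood', 'acceptance', 'omega_b', 'omega_cdm', '100theta_s', 'tau_reio']

def load_folder(folder, name_tag):
    # Mirrors the fC/fE/fF blocks: glob the chains, stack them, drop the
    # nuisance columns, and wrap the rest in an MCSamples object.
    files = glob.glob(folder + "*__*.txt")
    params = glob.glob(folder + "*_.paramnames")
    data = astropy.table.vstack([loadMCMC(f, params[0]) for f in files])
    for col in DROP_COLS:
        data.remove_column(col)
    nparr = np.array(data.as_array().tolist())
    return MCSamples(samples=nparr, names=data.colnames, labels=data.colnames, name_tag=name_tag)

# e.g.: planck_s4 = load_folder(basedir + 'fC/', 'Planck low_l + S4')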
package pacs import ( "encoding/xml" "github.com/fgrid/iso20022" ) type Document00700103 struct { XMLName xml.Name `xml:"urn:iso:std:iso:20022:tech:xsd:pacs.007.001.03 Document"` Message *FIToFIPaymentReversalV03 `xml:"FIToFIPmtRvsl"` } func (d *Document00700103) AddMessage() *FIToFIPaymentReversalV03 { d.Message = new(FIToFIPaymentReversalV03) return d.Message } // Scope // The FinancialInstitutionToFinancialInstitutionPaymentReversal message is sent by an agent to the next party in the payment chain. It is used to reverse a payment previously executed. // Usage // The FIToFIPaymentReversal message is exchanged between agents to reverse a FIToFICustomerDirectDebit message that has been settled. The result will be a credit on the debtor account. // The FIToFIPaymentReversal message may or may not be the follow-up of a CustomerDirectDebitInitiation message. // The FIToFIPaymentReversal message refers to the original FIToFICustomerDirectDebit message by means of references only or by means of references and a set of elements from the original instruction. // The FIToFIPaymentReversal message can be used in domestic and cross-border scenarios. type FIToFIPaymentReversalV03 struct { // Set of characteristics shared by all individual transactions included in the message. GroupHeader *iso20022.GroupHeader57 `xml:"GrpHdr"` // Information concerning the original group of transactions, to which the message refers. OriginalGroupInformation *iso20022.OriginalGroupHeader3 `xml:"OrgnlGrpInf"` // Information concerning the original transactions, to which the reversal message refers. TransactionInformation []*iso20022.PaymentTransaction36 `xml:"TxInf,omitempty"` // Additional information that cannot be captured in the structured elements and/or any other specific block. SupplementaryData []*iso20022.SupplementaryData1 `xml:"SplmtryData,omitempty"` } func (f *FIToFIPaymentReversalV03) AddGroupHeader() *iso20022.GroupHeader57 { f.GroupHeader = new(iso20022.GroupHeader57) return f.GroupHeader } func (f *FIToFIPaymentReversalV03) AddOriginalGroupInformation() *iso20022.OriginalGroupHeader3 { f.OriginalGroupInformation = new(iso20022.OriginalGroupHeader3) return f.OriginalGroupInformation } func (f *FIToFIPaymentReversalV03) AddTransactionInformation() *iso20022.PaymentTransaction36 { newValue := new(iso20022.PaymentTransaction36) f.TransactionInformation = append(f.TransactionInformation, newValue) return newValue } func (f *FIToFIPaymentReversalV03) AddSupplementaryData() *iso20022.SupplementaryData1 { newValue := new(iso20022.SupplementaryData1) f.SupplementaryData = append(f.SupplementaryData, newValue) return newValue }
{ "content_hash": "3ca7f4f0245288092462a6c15aaa8326", "timestamp": "", "source": "github", "line_count": 61, "max_line_length": 200, "avg_line_length": 44.377049180327866, "alnum_prop": 0.8093830809013668, "repo_name": "fgrid/iso20022", "id": "bf4f84ed159a65fb44d740278c80a0903a31c865", "size": "2707", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "pacs/FIToFIPaymentReversalV03.go", "mode": "33188", "license": "mit", "language": [ { "name": "Go", "bytes": "21383920" } ], "symlink_target": "" }
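When marshalled with Go's encoding/xml, the struct tags above yield a pacs.007.001.03 envelope whose outer shape is sketched below with Python's ElementTree. Only the element names visible in the struct tags are used; a valid message would carry many additional mandatory child elements that are omitted here.

# Skeleton of the XML envelope implied by the struct tags above (illustrative only).
import xml.etree.ElementTree as ET

NS = "urn:iso:std:iso:20022:tech:xsd:pacs.007.001.03"
ET.register_namespace("", NS)

doc = ET.Element(f"{{{NS}}}Document")
rvsl = ET.SubElement(doc, f"{{{NS}}}FIToFIPmtRvsl")
ET.SubElement(rvsl, f"{{{NS}}}GrpHdr")       # GroupHeader57
ET.SubElement(rvsl, f"{{{NS}}}OrgnlGrpInf")  # OriginalGroupHeader3
ET.SubElement(rvsl, f"{{{NS}}}TxInf")        # zero or more PaymentTransaction36

print(ET.tostring(doc, encoding="unicode"))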
// // DropboxRemoveFolderMemberArg.h // Pods // // Created by Михаил Мотыженков on 26.04.16. // // #import <Foundation/Foundation.h> #import "DropboxMemberSelector.h" @interface DropboxRemoveFolderMemberArg : NSObject /** * The ID for the shared folder. */ @property (nonatomic, nonnull) NSString *sharedFolderID; /** * The member to remove from the folder. */ @property (nonatomic, nonnull) DropboxMemberSelector *member; /** * If YES, the removed user will keep their copy of the folder after it's unshared, assuming it was mounted. Otherwise, it will be removed from their Dropbox. Also, this must be set to NO when kicking a group. */ @property (nonatomic) BOOL leaveACopy; - (nonnull instancetype)initWithSharedFolderID:(NSString * _Nonnull)sharedFolderID member:(DropboxMemberSelector * _Nonnull)member leaveACopy:(BOOL)leaveACopy; @end
{ "content_hash": "f0f08d300040c7d281d358f4178d6e34", "timestamp": "", "source": "github", "line_count": 32, "max_line_length": 210, "avg_line_length": 26.96875, "alnum_prop": 0.7473928157589803, "repo_name": "zlib/ObjectiveDropbox", "id": "8b5c4e72a17b9b0aa97d6b3a83bce5d3c8febe10", "size": "879", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "Pod/Classes/Args/DropboxRemoveFolderMemberArg.h", "mode": "33188", "license": "mit", "language": [ { "name": "C++", "bytes": "679" }, { "name": "Objective-C", "bytes": "958389" }, { "name": "Ruby", "bytes": "1879" }, { "name": "Shell", "bytes": "17600" } ], "symlink_target": "" }
name: Enhancement Request about: Suggest an enhancement to the CoreDNS project labels: enhancement --- <!-- Please only use this template for submitting enhancement requests --> **What would you like to be added**: **Why is this needed**:
{ "content_hash": "cc609846cbefce7b7e4ace7d3bc7596a", "timestamp": "", "source": "github", "line_count": 10, "max_line_length": 74, "avg_line_length": 24.2, "alnum_prop": 0.743801652892562, "repo_name": "miekg/coredns", "id": "d39c37c010932d5c04f2d8a9ce0864bad1882728", "size": "246", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": ".github/ISSUE_TEMPLATE/enhancement.md", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Go", "bytes": "607128" }, { "name": "Makefile", "bytes": "1819" }, { "name": "Protocol Buffer", "bytes": "174" }, { "name": "Shell", "bytes": "554" } ], "symlink_target": "" }
#include <boost/interprocess/detail/config_begin.hpp> #include <boost/interprocess/allocators/allocator.hpp> #include <boost/interprocess/containers/vector.hpp> #include <boost/interprocess/detail/file_wrapper.hpp> #include <boost/interprocess/file_mapping.hpp> #include <boost/interprocess/detail/managed_open_or_create_impl.hpp> #include "named_creation_template.hpp" #include <cstdio> #include <cstring> #include <string> #include <boost/interprocess/detail/os_file_functions.hpp> #include "get_process_id_name.hpp" using namespace boost::interprocess; static const std::size_t FileSize = 1000; inline std::string get_filename() { std::string ret (ipcdetail::get_temporary_path()); ret += "/"; ret += test::get_process_id_name(); return ret; } struct file_destroyer { ~file_destroyer() { //The last destructor will destroy the file file_mapping::remove(get_filename().c_str()); } }; //This wrapper is necessary to have a common constructor //in generic named_creation_template functions class mapped_file_creation_test_wrapper : public file_destroyer , public boost::interprocess::ipcdetail::managed_open_or_create_impl <boost::interprocess::ipcdetail::file_wrapper, 0, true, false> { typedef boost::interprocess::ipcdetail::managed_open_or_create_impl <boost::interprocess::ipcdetail::file_wrapper, 0, true, false> mapped_file; public: mapped_file_creation_test_wrapper(boost::interprocess::create_only_t) : mapped_file(boost::interprocess::create_only, get_filename().c_str(), FileSize, read_write, 0, permissions()) {} mapped_file_creation_test_wrapper(boost::interprocess::open_only_t) : mapped_file(boost::interprocess::open_only, get_filename().c_str(), read_write, 0) {} mapped_file_creation_test_wrapper(boost::interprocess::open_or_create_t) : mapped_file(boost::interprocess::open_or_create, get_filename().c_str(), FileSize, read_write, 0, permissions()) {} }; int main () { typedef boost::interprocess::ipcdetail::managed_open_or_create_impl <boost::interprocess::ipcdetail::file_wrapper, 0, true, false> mapped_file; file_mapping::remove(get_filename().c_str()); test::test_named_creation<mapped_file_creation_test_wrapper>(); //Create and get name, size and address { mapped_file file1(create_only, get_filename().c_str(), FileSize, read_write, 0, permissions()); //Overwrite all memory std::memset(file1.get_user_address(), 0, file1.get_user_size()); //Now test move semantics mapped_file move_ctor(boost::move(file1)); mapped_file move_assign; move_assign = boost::move(move_ctor); } // file_mapping::remove(get_filename().c_str()); return 0; } #include <boost/interprocess/detail/config_end.hpp> #else //#if !defined(BOOST_INTERPROCESS_MAPPED_FILES) int main() { return 0; } #endif//#if !defined(BOOST_INTERPROCESS_MAPPED_FILES)
{ "content_hash": "7b0ba4feecfaf290e95fd8d15e35f77d", "timestamp": "", "source": "github", "line_count": 90, "max_line_length": 121, "avg_line_length": 33.6, "alnum_prop": 0.6845238095238095, "repo_name": "ycsoft/FatCat-Server", "id": "8b0ed20d1b3925881ea697a5e7ca6777e9d785f2", "size": "3506", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "LIBS/boost_1_58_0/libs/interprocess/test/mapped_file_test.cpp", "mode": "33188", "license": "mit", "language": [ { "name": "Assembly", "bytes": "195345" }, { "name": "Batchfile", "bytes": "32367" }, { "name": "C", "bytes": "9529739" }, { "name": "C#", "bytes": "41850" }, { "name": "C++", "bytes": "175536080" }, { "name": "CMake", "bytes": "14812" }, { "name": "CSS", "bytes": "282447" }, { "name": "Cuda", "bytes": "26521" }, { "name": "FORTRAN", "bytes": "1856" }, { "name": "Groff", "bytes": "6163" }, { "name": "HTML", "bytes": "148956564" }, { "name": "JavaScript", "bytes": "174868" }, { "name": "Lex", "bytes": "1290" }, { "name": "Makefile", "bytes": "1045258" }, { "name": "Max", "bytes": "37424" }, { "name": "Objective-C", "bytes": "34644" }, { "name": "Objective-C++", "bytes": "246" }, { "name": "PHP", "bytes": "60249" }, { "name": "Perl", "bytes": "37297" }, { "name": "Perl6", "bytes": "2130" }, { "name": "Python", "bytes": "1717781" }, { "name": "QML", "bytes": "613" }, { "name": "QMake", "bytes": "9450" }, { "name": "Rebol", "bytes": "372" }, { "name": "Shell", "bytes": "372652" }, { "name": "Tcl", "bytes": "1205" }, { "name": "TeX", "bytes": "13819" }, { "name": "XSLT", "bytes": "564356" }, { "name": "Yacc", "bytes": "19612" } ], "symlink_target": "" }
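The test above exercises the create/open semantics of a file-backed mapping and then overwrites the whole mapped region. The same basic idea — pre-size a backing file, map it, write through the mapping, then remove the file — can be sketched with Python's mmap module; this is an illustrative analogue only and has nothing to do with Boost.Interprocess internals.

# Rough Python analogue of the mapped-file usage in the test above (illustrative only).
import mmap
import os
import tempfile

FILE_SIZE = 1000  # mirrors FileSize in the C++ test

path = os.path.join(tempfile.gettempdir(), "mapped_file_demo.bin")
with open(path, "wb") as f:
    f.truncate(FILE_SIZE)                # pre-size the backing file

with open(path, "r+b") as f, mmap.mmap(f.fileno(), FILE_SIZE) as m:
    m[:] = b"\x00" * FILE_SIZE           # overwrite the mapped memory, like the memset

os.remove(path)                          # the test removes its file when done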
dojo.provide("dijit.layout.AccordionContainer"); dojo.require("dojo.fx"); dojo.require("dijit._Container"); dojo.require("dijit._Templated"); dojo.require("dijit._CssStateMixin"); dojo.require("dijit.layout.StackContainer"); dojo.require("dijit.layout.ContentPane"); dojo.require("dijit.layout.AccordionPane"); // for back compat, remove for 2.0 dojo.declare( "dijit.layout.AccordionContainer", dijit.layout.StackContainer, { // summary: // Holds a set of panes where every pane's title is visible, but only one pane's content is visible at a time, // and switching between panes is visualized by sliding the other panes up/down. // example: // | <div dojoType="dijit.layout.AccordionContainer"> // | <div dojoType="dijit.layout.ContentPane" title="pane 1"> // | </div> // | <div dojoType="dijit.layout.ContentPane" title="pane 2"> // | <p>This is some text</p> // | </div> // | </div> // duration: Integer // Amount of time (in ms) it takes to slide panes duration: dijit.defaultDuration, // buttonWidget: [const] String // The name of the widget used to display the title of each pane buttonWidget: "dijit.layout._AccordionButton", // _verticalSpace: Number // Pixels of space available for the open pane // (my content box size minus the cumulative size of all the title bars) _verticalSpace: 0, baseClass: "dijitAccordionContainer", postCreate: function(){ this.domNode.style.overflow = "hidden"; this.inherited(arguments); dijit.setWaiRole(this.domNode, "tablist"); }, startup: function(){ if(this._started){ return; } this.inherited(arguments); if(this.selectedChildWidget){ var style = this.selectedChildWidget.containerNode.style; style.display = ""; style.overflow = "auto"; this.selectedChildWidget._wrapperWidget.set("selected", true); } }, _getTargetHeight: function(/* Node */ node){ // summary: // For the given node, returns the height that should be // set to achieve our vertical space (subtract any padding // we may have). // // This is used by the animations. // // TODO: I don't think this works correctly in IE quirks when an elements // style.height including padding and borders var cs = dojo.getComputedStyle(node); return Math.max(this._verticalSpace - dojo._getPadBorderExtents(node, cs).h - dojo._getMarginExtents(node, cs).h, 0); }, layout: function(){ // Implement _LayoutWidget.layout() virtual method. // Set the height of the open pane based on what room remains. var openPane = this.selectedChildWidget; if(!openPane){ return;} var openPaneContainer = openPane._wrapperWidget.domNode, openPaneContainerMargin = dojo._getMarginExtents(openPaneContainer), openPaneContainerPadBorder = dojo._getPadBorderExtents(openPaneContainer), mySize = this._contentBox; // get cumulative height of all the unselected title bars var totalCollapsedHeight = 0; dojo.forEach(this.getChildren(), function(child){ if(child != openPane){ totalCollapsedHeight += dojo.marginBox(child._wrapperWidget.domNode).h; } }); this._verticalSpace = mySize.h - totalCollapsedHeight - openPaneContainerMargin.h - openPaneContainerPadBorder.h - openPane._buttonWidget.getTitleHeight(); // Memo size to make displayed child this._containerContentBox = { h: this._verticalSpace, w: this._contentBox.w - openPaneContainerMargin.w - openPaneContainerPadBorder.w }; if(openPane){ openPane.resize(this._containerContentBox); } }, _setupChild: function(child){ // Overrides _LayoutWidget._setupChild(). 
// Put wrapper widget around the child widget, showing title child._wrapperWidget = new dijit.layout._AccordionInnerContainer({ contentWidget: child, buttonWidget: this.buttonWidget, id: child.id + "_wrapper", dir: child.dir, lang: child.lang, parent: this }); this.inherited(arguments); }, addChild: function(/*dijit._Widget*/ child, /*Integer?*/ insertIndex){ if(this._started){ // Adding a child to a started Accordion is complicated because children have // wrapper widgets. Default code path (calling this.inherited()) would add // the new child inside another child's wrapper. // First add in child as a direct child of this AccordionContainer dojo.place(child.domNode, this.containerNode, insertIndex); if(!child._started){ child.startup(); } // Then stick the wrapper widget around the child widget this._setupChild(child); // Code below copied from StackContainer dojo.publish(this.id+"-addChild", [child, insertIndex]); this.layout(); if(!this.selectedChildWidget){ this.selectChild(child); } }else{ // We haven't been started yet so just add in the child widget directly, // and the wrapper will be created on startup() this.inherited(arguments); } }, removeChild: function(child){ // Overrides _LayoutWidget.removeChild(). // Destroy wrapper widget first, before StackContainer.getChildren() call. // Replace wrapper widget with true child widget (ContentPane etc.) dojo.place(child.domNode, child._wrapperWidget.domNode, "after"); child._wrapperWidget.destroy(); delete child._wrapperWidget; dojo.removeClass(child.domNode, "dijitHidden"); this.inherited(arguments); }, getChildren: function(){ // Overrides _Container.getChildren() to return content panes rather than internal AccordionInnerContainer panes return dojo.map(this.inherited(arguments), function(child){ return child.declaredClass == "dijit.layout._AccordionInnerContainer" ? child.contentWidget : child; }, this); }, destroy: function(){ dojo.forEach(this.getChildren(), function(child){ child._wrapperWidget.destroy(); }); this.inherited(arguments); }, _transition: function(/*dijit._Widget?*/newWidget, /*dijit._Widget?*/oldWidget, /*Boolean*/ animate){ // Overrides StackContainer._transition() to provide sliding of title bars etc. //TODO: should be able to replace this with calls to slideIn/slideOut if(this._inTransition){ return; } var animations = []; var paneHeight = this._verticalSpace; if(newWidget){ newWidget._wrapperWidget.set("selected", true); this._showChild(newWidget); // prepare widget to be slid in // Size the new widget, in case this is the first time it's being shown, // or I have been resized since the last time it was shown. // Note that page must be visible for resizing to work. 
if(this.doLayout && newWidget.resize){ newWidget.resize(this._containerContentBox); } var newContents = newWidget.domNode; dojo.addClass(newContents, "dijitVisible"); dojo.removeClass(newContents, "dijitHidden"); if(animate){ var newContentsOverflow = newContents.style.overflow; newContents.style.overflow = "hidden"; animations.push(dojo.animateProperty({ node: newContents, duration: this.duration, properties: { height: { start: 1, end: this._getTargetHeight(newContents) } }, onEnd: function(){ newContents.style.overflow = newContentsOverflow; // Kick IE to workaround layout bug, see #11415 if(dojo.isIE){ setTimeout(function(){ dojo.removeClass(newContents.parentNode, "dijitAccordionInnerContainerFocused"); setTimeout(function(){ dojo.addClass(newContents.parentNode, "dijitAccordionInnerContainerFocused"); }, 0); }, 0); } } })); } } if(oldWidget){ oldWidget._wrapperWidget.set("selected", false); var oldContents = oldWidget.domNode; if(animate){ var oldContentsOverflow = oldContents.style.overflow; oldContents.style.overflow = "hidden"; animations.push(dojo.animateProperty({ node: oldContents, duration: this.duration, properties: { height: { start: this._getTargetHeight(oldContents), end: 1 } }, onEnd: function(){ dojo.addClass(oldContents, "dijitHidden"); dojo.removeClass(oldContents, "dijitVisible"); oldContents.style.overflow = oldContentsOverflow; if(oldWidget.onHide){ oldWidget.onHide(); } } })); }else{ dojo.addClass(oldContents, "dijitHidden"); dojo.removeClass(oldContents, "dijitVisible"); if(oldWidget.onHide){ oldWidget.onHide(); } } } if(animate){ this._inTransition = true; var combined = dojo.fx.combine(animations); combined.onEnd = dojo.hitch(this, function(){ delete this._inTransition; }); combined.play(); } }, // note: we are treating the container as controller here _onKeyPress: function(/*Event*/ e, /*dijit._Widget*/ fromTitle){ // summary: // Handle keypress events // description: // This is called from a handler on AccordionContainer.domNode // (setup in StackContainer), and is also called directly from // the click handler for accordion labels if(this._inTransition || this.disabled || e.altKey || !(fromTitle || e.ctrlKey)){ if(this._inTransition){ dojo.stopEvent(e); } return; } var k = dojo.keys, c = e.charOrCode; if((fromTitle && (c == k.LEFT_ARROW || c == k.UP_ARROW)) || (e.ctrlKey && c == k.PAGE_UP)){ this._adjacent(false)._buttonWidget._onTitleClick(); dojo.stopEvent(e); }else if((fromTitle && (c == k.RIGHT_ARROW || c == k.DOWN_ARROW)) || (e.ctrlKey && (c == k.PAGE_DOWN || c == k.TAB))){ this._adjacent(true)._buttonWidget._onTitleClick(); dojo.stopEvent(e); } } } ); dojo.declare("dijit.layout._AccordionInnerContainer", [dijit._Widget, dijit._CssStateMixin], { // summary: // Internal widget placed as direct child of AccordionContainer.containerNode. // When other widgets are added as children to an AccordionContainer they are wrapped in // this widget. // buttonWidget: String // Name of class to use to instantiate title // (Wish we didn't have a separate widget for just the title but maintaining it // for backwards compatibility, is it worth it?) 
/*===== buttonWidget: null, =====*/ // contentWidget: dijit._Widget // Pointer to the real child widget /*===== contentWidget: null, =====*/ baseClass: "dijitAccordionInnerContainer", // tell nested layout widget that we will take care of sizing isContainer: true, isLayoutContainer: true, buildRendering: function(){ // Create wrapper div, placed where the child is now this.domNode = dojo.place("<div class='" + this.baseClass + "'>", this.contentWidget.domNode, "after"); // wrapper div's first child is the button widget (ie, the title bar) var child = this.contentWidget, cls = dojo.getObject(this.buttonWidget); this.button = child._buttonWidget = (new cls({ contentWidget: child, label: child.title, title: child.tooltip, dir: child.dir, lang: child.lang, iconClass: child.iconClass, id: child.id + "_button", parent: this.parent })).placeAt(this.domNode); // and then the actual content widget (changing it from prior-sibling to last-child) dojo.place(this.contentWidget.domNode, this.domNode); }, postCreate: function(){ this.inherited(arguments); this.connect(this.contentWidget, 'set', function(name, value){ var mappedName = {title: "label", tooltip: "title", iconClass: "iconClass"}[name]; if(mappedName){ this.button.set(mappedName, value); } }, this); }, _setSelectedAttr: function(/*Boolean*/ isSelected){ this.selected = isSelected; this.button.set("selected", isSelected); if(isSelected){ var cw = this.contentWidget; if(cw.onSelected){ cw.onSelected(); } } }, startup: function(){ // Called by _Container.addChild() this.contentWidget.startup(); }, destroy: function(){ this.button.destroyRecursive(); delete this.contentWidget._buttonWidget; delete this.contentWidget._wrapperWidget; this.inherited(arguments); }, destroyDescendants: function(){ // since getChildren isn't working for me, have to code this manually this.contentWidget.destroyRecursive(); } }); dojo.declare("dijit.layout._AccordionButton", [dijit._Widget, dijit._Templated, dijit._CssStateMixin], { // summary: // The title bar to click to open up an accordion pane. // Internal widget used by AccordionContainer. // tags: // private templateString: dojo.cache("dijit.layout", "templates/AccordionButton.html"), attributeMap: dojo.mixin(dojo.clone(dijit.layout.ContentPane.prototype.attributeMap), { label: {node: "titleTextNode", type: "innerHTML" }, title: {node: "titleTextNode", type: "attribute", attribute: "title"}, iconClass: { node: "iconNode", type: "class" } }), baseClass: "dijitAccordionTitle", getParent: function(){ // summary: // Returns the AccordionContainer parent. // tags: // private return this.parent; }, postCreate: function(){ this.inherited(arguments); dojo.setSelectable(this.domNode, false); var titleTextNodeId = dojo.attr(this.domNode,'id').replace(' ','_'); dojo.attr(this.titleTextNode, "id", titleTextNodeId+"_title"); dijit.setWaiState(this.focusNode, "labelledby", dojo.attr(this.titleTextNode, "id")); }, getTitleHeight: function(){ // summary: // Returns the height of the title dom node. return dojo.marginBox(this.domNode).h; // Integer }, // TODO: maybe the parent should set these methods directly rather than forcing the code // into the button widget? _onTitleClick: function(){ // summary: // Callback when someone clicks my title. 
var parent = this.getParent(); if(!parent._inTransition){ parent.selectChild(this.contentWidget, true); dijit.focus(this.focusNode); } }, _onTitleKeyPress: function(/*Event*/ evt){ return this.getParent()._onKeyPress(evt, this.contentWidget); }, _setSelectedAttr: function(/*Boolean*/ isSelected){ this.selected = isSelected; dijit.setWaiState(this.focusNode, "expanded", isSelected); dijit.setWaiState(this.focusNode, "selected", isSelected); this.focusNode.setAttribute("tabIndex", isSelected ? "0" : "-1"); } });
{ "content_hash": "002eac0be327c909432bbbf7a2c1bd2c", "timestamp": "", "source": "github", "line_count": 447, "max_line_length": 120, "avg_line_length": 32.02460850111857, "alnum_prop": 0.678239608801956, "repo_name": "cappadona/Suma", "id": "f7ff1b3896c23bc2059173662850d63aaf2bc47c", "size": "14315", "binary": false, "copies": "24", "ref": "refs/heads/master", "path": "service/lib/zend/externals/dojo/dijit/layout/AccordionContainer.js", "mode": "33188", "license": "mit", "language": [ { "name": "ActionScript", "bytes": "19954" }, { "name": "Batchfile", "bytes": "4801" }, { "name": "CSS", "bytes": "593544" }, { "name": "Groff", "bytes": "310" }, { "name": "HTML", "bytes": "543716" }, { "name": "Java", "bytes": "123492" }, { "name": "JavaScript", "bytes": "11493140" }, { "name": "PHP", "bytes": "32483591" }, { "name": "PowerShell", "bytes": "1028" }, { "name": "Puppet", "bytes": "770" }, { "name": "Ruby", "bytes": "4905" }, { "name": "Shell", "bytes": "24561" }, { "name": "TypeScript", "bytes": "3445" }, { "name": "XSLT", "bytes": "104232" } ], "symlink_target": "" }
package org.cluj.bus.logging; import org.apache.log4j.*; import java.util.Enumeration; public class LogInitializer { public static final String DEFAULT_LOG_LAYOUT = "[%20.20t] %40.40c [%5.5p] (%d{yyyy-MM-dd HH:mm:ss.SSS}) %m%n"; public static void configureLOG4J() { configureLOG4J(DEFAULT_LOG_LAYOUT); } public static void configureLOG4J(String layout) { final Enumeration loggers = LogManager.getCurrentLoggers(); while (loggers.hasMoreElements()) { final Logger logger = (Logger) loggers.nextElement(); logger.removeAllAppenders(); } Logger.getRootLogger().removeAllAppenders(); Logger.getRootLogger().addAppender(new ConsoleAppender(new PatternLayout(layout))); Logger.getRootLogger().setLevel(Level.INFO); Logger.getRootLogger().getLoggerRepository().setThreshold(Level.DEBUG); Logger.getLogger("org.hibernate").setLevel(Level.WARN); } }
{ "content_hash": "398f408477a1bd48797124e4304a3e3e", "timestamp": "", "source": "github", "line_count": 37, "max_line_length": 115, "avg_line_length": 26.675675675675677, "alnum_prop": 0.6656534954407295, "repo_name": "abotos/BusTracker", "id": "b2bef12ede6885d0d29283de9692e20e88350fc0", "size": "1511", "binary": false, "copies": "2", "ref": "refs/heads/master", "path": "code/common/src/java/org/cluj/bus/logging/LogInitializer.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "CSS", "bytes": "5576" }, { "name": "Java", "bytes": "282453" }, { "name": "Shell", "bytes": "77905" }, { "name": "XSLT", "bytes": "4598" } ], "symlink_target": "" }
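The same idea as configureLOG4J above — drop any handlers already attached to the root logger, install a single console handler with an explicit pattern, and quiet one chatty third-party logger — translates roughly into Python's standard logging module as follows. This is an illustrative sketch, not part of the project, and the quieted logger name is just a placeholder.

# Rough Python-logging equivalent of configureLOG4J above (illustrative only).
import logging

DEFAULT_FORMAT = "[%(threadName)20.20s] %(name)40.40s [%(levelname)5.5s] (%(asctime)s) %(message)s"

def configure_logging(fmt: str = DEFAULT_FORMAT) -> None:
    root = logging.getLogger()
    for handler in list(root.handlers):   # remove previously attached handlers
        root.removeHandler(handler)
    console = logging.StreamHandler()
    console.setFormatter(logging.Formatter(fmt, datefmt="%Y-%m-%d %H:%M:%S"))
    root.addHandler(console)
    root.setLevel(logging.INFO)
    # analogous to lowering org.hibernate to WARN; the logger name is a placeholder
    logging.getLogger("some.noisy.library").setLevel(logging.WARNING)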
package xworker.dataObject.proxy;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;

import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.ParseException;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.util.EntityUtils;
import org.xmeta.ActionContext;
import org.xmeta.Thing;
import org.xmeta.World;

import ognl.OgnlException;
import xworker.dataObject.DataObject;
import xworker.dataObject.DataObjectConstants;
import xworker.dataObject.DataObjectException;
import xworker.dataObject.PageInfo;
import xworker.util.JacksonFormator;

public class HttpProxyDataObjectAction {
	/**
	 * Load the data object.
	 *
	 * @param actionContext
	 * @throws OgnlException
	 * @throws IOException
	 * @throws ParseException
	 */
	public static Object doLoad(ActionContext actionContext) throws Exception{
		return doAction("load", actionContext);
	}

	public static Object doCreate(ActionContext actionContext) throws Exception{
		return doAction("create", actionContext);
	}

	public static Object doUpdate(ActionContext actionContext) throws Exception{
		return doAction("update", actionContext);
	}

	public static Object doDelete(ActionContext actionContext) throws Exception{
		return doAction("delete", actionContext);
	}

	@SuppressWarnings("unchecked")
	public static Object doQuery(ActionContext actionContext) throws Exception{
		Thing self = (Thing) actionContext.get("self");

		//Get the url and httpClient through self, so that overridden versions of these methods are used
		String url = (String) self.doAction("getUrl", actionContext);
		HttpClient client = (HttpClient) self.doAction("getHttpClient", actionContext);
		if(url == null){
			throw new DataObjectException("Url is null, thing=" + self);
		}
		if(client == null){
			throw new DataObjectException("HttpClient is null, thing=" + self);
		}

		//Submit the data to the server
		HttpPost httpPost = new HttpPost(url);
		List<NameValuePair> formparams = new ArrayList<NameValuePair>();
		formparams.add(new BasicNameValuePair("dataObject", self.getString("dataObject")));
		formparams.add(new BasicNameValuePair("actionName", "query"));
		Object pageInfo = actionContext.get(DataObjectConstants.PAGEINFO_PAGEINFO);
		if(pageInfo != null){
			PageInfo tpageInfo = PageInfo.getPageInfo(actionContext);
			if(tpageInfo != null){
				//Clear datas if it is set, otherwise it would be submitted to the server unnecessarily
				if(tpageInfo.getDatas() != null){
					tpageInfo.setDatas(null);
				}
				if(tpageInfo.getLimit() == 0){
					tpageInfo.setLimit(self.getInt(DataObjectConstants.PAGEINFO_PAGESIZE));
				}
				if(tpageInfo.getDir() == null){
					tpageInfo.setDir(self.getString(DataObjectConstants.SORT_DIR));
				}
				if(tpageInfo.getSort() == null || DataObjectConstants.PAGEINFO_SORT_EXTEND.equals(tpageInfo.getSort())){
					tpageInfo.setSort(self.getString(DataObjectConstants.SORT_FIELD));
				}
			}
			formparams.add(new BasicNameValuePair(DataObjectConstants.PAGEINFO_PAGEINFO, JacksonFormator.formatObject(pageInfo)));
		}
		Object conditionData = actionContext.get(DataObjectConstants.CONDITION_DATA);
		if(conditionData != null){
			formparams.add(new BasicNameValuePair(DataObjectConstants.CONDITION_DATA, JacksonFormator.formatObject(conditionData)));
		}
		Object conditionConfig = actionContext.get(DataObjectConstants.CONDITION_CONFIG);
		if(conditionConfig != null){
			formparams.add(new BasicNameValuePair(DataObjectConstants.CONDITION_CONFIG, JacksonFormator.formatObject(conditionConfig)));
		}
		UrlEncodedFormEntity entity = new UrlEncodedFormEntity(formparams, "UTF-8");
		httpPost.setEntity(entity);
		HttpResponse response = client.execute(httpPost);
		try{
			if(response.getStatusLine().getStatusCode() == 200){
				String body = EntityUtils.toString(response.getEntity());
				Map<String, Object> result = (Map<String, Object>) JacksonFormator.parseObject(body);
				if((Boolean) result.get("success") == true){
					Object data = result.get("data");
					if(data == null){
						data = result.get("rows");
					}
					if(data instanceof Map){
						//Copy the pageInfo returned by the remote side into the local pageInfo
						Map<String, Object> page = (Map<String, Object>) data;
						PageInfo srcPageInfo = PageInfo.getPageInfo(actionContext);
						PageInfo rpageInfo = PageInfo.getPageInfo(data);
						srcPageInfo.setDatas(listToDataObjectList((List<Map<String, Object>>) page.get("datas"), self.getMetadata().getPath()));
						srcPageInfo.setDir(rpageInfo.getDir());
						srcPageInfo.setLimit(rpageInfo.getLimit());
						srcPageInfo.setMsg(rpageInfo.getMsg());
						srcPageInfo.setSort(rpageInfo.getSort());
						srcPageInfo.setStart(rpageInfo.getStart());
						srcPageInfo.setSuccess(rpageInfo.isSuccess());
						srcPageInfo.setTotalCount(rpageInfo.getTotalCount());
						return srcPageInfo.getDatas();
					}else{
						return listToDataObjectList((List<Map<String, Object>>) data, self.getMetadata().getPath());
					}
				}else{
					throw new DataObjectException((String) result.get("msg"));
				}
			}
		}finally{
			EntityUtils.consume(response.getEntity());
		}

		return null;
	}

	public static List<DataObject> listToDataObjectList(List<Map<String, Object>> datas, String dataObject){
		if(datas == null){
			return Collections.emptyList();
		}else{
			List<DataObject> dataObjectList = new ArrayList<DataObject>();
			for(Map<String, Object> data : datas){
				DataObject obj = new DataObject(dataObject);
				obj.setInited(false);
				obj.putAll(data);
				obj.setInited(true);
				dataObjectList.add(obj);
			}

			return dataObjectList;
		}
	}

	@SuppressWarnings("unchecked")
	public static Object doAction(String actionName, ActionContext actionContext) throws Exception{
		Thing self = (Thing) actionContext.get("self");

		//Get the url and httpClient through self, so that overridden versions of these methods are used
		String url = (String) self.doAction("getUrl", actionContext);
		HttpClient client = (HttpClient) self.doAction("getHttpClient", actionContext);
		if(url == null){
			throw new DataObjectException("Url is null, thing=" + self);
		}
		if(client == null){
			throw new DataObjectException("HttpClient is null, thing=" + self);
		}

		//Convert the data object to JSON
		DataObject theData = (DataObject) actionContext.get("theData");
		if("update".equals(actionName)){
			//For an update only the dirty fields and the key values are needed
			DataObject newData = new DataObject(theData.getMetadata().getDescriptor());
			for(Object[] key : theData.getKeyAndDatas()){
				Thing attr = (Thing) key[0];
				newData.put(attr.getString("name"), key[1]);
			}
			for(String dirty : theData.getMetadata().getDirtyFields()){
				newData.put(dirty, theData.get(dirty));
			}
			theData = newData;
		}
		String json = JacksonFormator.formatObject(theData);

		//Submit the data to the server
		HttpPost httpPost = new HttpPost(url);
		List<NameValuePair> formparams = new ArrayList<NameValuePair>();
		formparams.add(new BasicNameValuePair("dataObject", self.getString("dataObject")));
		formparams.add(new BasicNameValuePair("actionName", actionName));
		formparams.add(new BasicNameValuePair(DataObjectConstants.THEDATA, json));
		UrlEncodedFormEntity entity = new UrlEncodedFormEntity(formparams, "UTF-8");
		httpPost.setEntity(entity);
		HttpResponse response = client.execute(httpPost);
		try{
			if(response.getStatusLine().getStatusCode() == 200){
				String body = EntityUtils.toString(response.getEntity());
				Map<String, Object> result = (Map<String, Object>) JacksonFormator.parseObject(body);
				if((Boolean) result.get("success") == true){
					if("update".equals(actionName) || "delete".equals(actionName)){
						Boolean r = (Boolean) result.get("data");
						if("update".equals(actionName) && r){
							theData.getMetadata().cleanDirty();
						}
						return r;
					}

					Map<String, Object> data = (Map<String, Object>) result.get("data");
					theData.putAll(data);
					return theData;
				}else{
					throw new DataObjectException((String) result.get("msg"));
				}
			}
		}finally{
			EntityUtils.consume(response.getEntity());
		}

		return null;
	}

	/**
	 * Get the URL used to access the server.
	 *
	 * @param actionContext
	 * @return
	 */
	public static String getUrl(ActionContext actionContext){
		Thing self = (Thing) actionContext.get("self");
		Thing httpProxyConfiger = World.getInstance().getThing(self.getString("httpProxyConfiger"));
		if(httpProxyConfiger == null){
			throw new DataObjectException("Please set up httpProxyConfiger, thing=" + self);
		}

		return httpProxyConfiger.doAction("getBaseUrl", actionContext) + self.getString("url");
	}

	/**
	 * Get the HttpClient.
	 *
	 * @param actionContext
	 * @return
	 */
	public static HttpClient getHttpClient(ActionContext actionContext){
		Thing self = (Thing) actionContext.get("self");
		Thing httpProxyConfiger = World.getInstance().getThing(self.getString("httpProxyConfiger"));
		if(httpProxyConfiger == null){
			throw new DataObjectException("Please set up httpProxyConfiger, thing=" + self);
		}

		return (HttpClient) httpProxyConfiger.doAction("getHttpClient", actionContext);
	}
}
{ "content_hash": "3006e1b64fb766c16def9641ab5ea3d5", "timestamp": "", "source": "github", "line_count": 263, "max_line_length": 129, "avg_line_length": 35.82509505703422, "alnum_prop": 0.7028231797919762, "repo_name": "x-meta/xworker", "id": "aeecb27681df97699440512e875006afc96215a1", "size": "10452", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "xworker_dataobject/src/main/java/xworker/dataObject/proxy/HttpProxyDataObjectAction.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "13302" }, { "name": "CSS", "bytes": "608135" }, { "name": "Fluent", "bytes": "1420" }, { "name": "FreeMarker", "bytes": "245430" }, { "name": "Groovy", "bytes": "2259" }, { "name": "HTML", "bytes": "228023" }, { "name": "Java", "bytes": "12283468" }, { "name": "JavaScript", "bytes": "804030" }, { "name": "Lex", "bytes": "5606489" }, { "name": "PHP", "bytes": "2232" }, { "name": "Python", "bytes": "572" }, { "name": "Ruby", "bytes": "302" }, { "name": "SCSS", "bytes": "16447" }, { "name": "Shell", "bytes": "13983" } ], "symlink_target": "" }
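The proxy above always POSTs a small url-encoded form (dataObject, actionName and, for CRUD actions, the JSON-serialized record) and expects a JSON reply carrying success, data and msg fields. A rough client-side sketch of that exchange is given below; the target URL is hypothetical and the exact value of DataObjectConstants.THEDATA is not shown in the code, so the "theData" field name used here is an assumption.

# Rough sketch of the form POST the proxy above performs (illustrative only).
# The "theData" field name is an assumption based on DataObjectConstants.THEDATA.
import json
import urllib.parse
import urllib.request

def call_data_object_service(url: str, data_object: str, action_name: str, record: dict) -> object:
    form = urllib.parse.urlencode({
        "dataObject": data_object,
        "actionName": action_name,
        "theData": json.dumps(record),   # assumed field name, see note above
    }).encode("utf-8")
    with urllib.request.urlopen(urllib.request.Request(url, data=form)) as resp:
        result = json.loads(resp.read().decode("utf-8"))
    if not result.get("success"):
        raise RuntimeError(result.get("msg"))
    return result.get("data")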
define(function(require) { 'use strict'; var modules = require('modules'); var _ = require('lodash'); var d3Tip = require('d3-tip'); var d3 = require('d3'); require('scripts/common-graph/services/runtime_color_service'); require('scripts/common-graph/services/svg_service'); require('scripts/topology/services/workflow_shapes'); require('scripts/topology/services/workflow_render'); modules.get('a4c-topology-editor', [ 'a4c-common', 'a4c-common-graph', 'ui.bootstrap', 'a4c-styles' ]).directive('topologyPlan', ['$filter', '$http', '$modal', '$interval', '$translate', 'svgServiceFactory', 'runtimeColorsService', 'listToMapService', 'workflowShapes', 'planRender', function($filter, $http, $modal, $interval, $translate, svgServiceFactory, runtimeColorsService, listToMapService, workflowShapes, planRender) { return { restrict : 'E', link : function(scope) { // Default parent svg markup to render the workflow var containerElement = d3.select('#plan-graph-container'); var contextContainer = d3.select('#editor-context-container'); contextContainer.html(''); var svgGraph = svgServiceFactory.create(containerElement,'plan-svg', 'plan-svg', contextContainer); var svgGroup = svgGraph.svgGroup; // add markers for arrows workflowShapes.initMarkers(svgGraph.svg); scope.$watch('triggerRefresh', function() { scope.workflows.topologyChanged(); }); scope.$watch('visualDimensions', function(visualDimensions) { onResize(visualDimensions.width, visualDimensions.height); }); function onResize(width, height) { svgGraph.onResize(width, height); } function centerGraph() { svgGraph.controls.reset(); } // Create the input graph var g = planRender.createGraph(); function render(layout) { planRender.render(svgGroup, g, layout); svgGraph.controls.updateBBox(planRender.bbox); } // Add our custom shapes workflowShapes.scope = scope; // the hosts (graph clusters) var hosts = []; // the steps var steps = []; // data used to render errors var errorRenderingData = {cycles: {}, errorSteps: {}}; function appendStepNode(g, stepName, step, hostId) { var shortActivityType = scope.workflows.getStepActivityType(step); var width, height; if (shortActivityType === 'OperationCallActivity' || shortActivityType === 'DelegateWorkflowActivity') { width = 80; height = 60; } else if (shortActivityType === 'SetStateActivity') { if (scope.wfViewMode === 'simple') { width = 32; height = 24; } else { width = 60; height = 45; } } else { // Display an alert message as this should be considered as an error. 
console.error('Unexpected activity type encountered', shortActivityType); } if(_.defined(width)) { g.setNode(stepName, { label : '', width: width, height: height, shape: 'operationStep', parent: hostId }); } } function appendEdge(g, from, to) { var style = { lineInterpolate: 'basis', arrowhead: 'vee', style: 'stroke: black; stroke-width: 1.5px;', pinnedStyle: 'stroke: black; stroke-width: 5px;', marker: 'arrow-standard' }; if (errorRenderingData.cycles[from] && _.contains(errorRenderingData.cycles[from], to)) { // the edge is in a cycle, make it red style = { lineInterpolate: 'basis', arrowhead: 'vee', style: 'stroke: #f66; stroke-width: 1.5px;', pinnedStyle: 'stroke: black; stroke-width: 5px;', marker: 'arrow-error' }; } g.setEdge(from, to, style); } function refresh() { // remove remaining popups d3.selectAll('.d3-tip').remove(); g.nodes().forEach(function(node) { g.removeNode(node); }); if (!scope.currentWorkflowName || !scope.topology.topology.workflows || !scope.topology.topology.workflows[scope.currentWorkflowName]) { // TODO clear SVG return; } errorRenderingData = scope.workflows.getErrorRenderingData(); workflowShapes.errorRenderingData = errorRenderingData; hosts = scope.topology.topology.workflows[scope.currentWorkflowName].hosts; steps = scope.topology.topology.workflows[scope.currentWorkflowName].steps; workflowShapes.steps = steps; var hostsMap = {}; // add the hosts if (hosts) { for (var i = 0; i < hosts.length; i++) { var host = hosts[i]; hostsMap[hosts[i]] = host; g.setNode(host, {label : host, clusterLabelPos : 'top'}); } } g.nodes().forEach(function(nodeKey) { // if the node doesn't exists anymore let's remove it if(_.undefined(hostsMap[nodeKey]) && _.undefined(steps[nodeKey]) && nodeKey !== 'start' && nodeKey !== 'end') { g.removeNode(nodeKey); } }); g.setNode('start', {label : '', width: 20, height: 20, shape: 'start'}); g.setNode('end', {label : '', width: 20, height: 20, shape: 'stop'}); var hasSteps = false; if (steps) { for (var stepName in steps) { hasSteps = true; var step = steps[stepName]; appendStepNode(g, stepName, step, step.hostId); if (step.hostId) { g.setParent(stepName, step.hostId); } if (!step.precedingSteps || step.precedingSteps.length === 0) { appendEdge(g, 'start', stepName); } if (!step.followingSteps || step.followingSteps.length === 0) { appendEdge(g, stepName, 'end'); } else { for (var j = 0; j < step.followingSteps.length; j++) { appendEdge(g, stepName, step.followingSteps[j]); } } } } if (!hasSteps) { appendEdge(g, 'start', 'end'); } // Rendering render(true); // tooltip var tip = d3Tip().attr('class', 'd3-tip wf-tip').offset([-10, 0]).html(function(d) { return styleTooltip(d.id); }); svgGroup.call(tip); d3.selectAll('g.node').on('mouseover', tip.show).on('mouseout', tip.hide); } // render an styled html tool tip for a given step var styleTooltip = function(nodeId) { var step = steps[nodeId]; if (!step) { return nodeId; } var html = '<div>'; html += '<h5 class="pull-left">' + step.name + '</h5>'; html += '<i class="fa pull-right">' + scope.workflows.getStepActivityTypeIcon(step) + '</i>'; html += '<span class="clearfix"></span>'; html += '<div class="row"><div class="col-md-3">Node' + ': </div><div class="col-md-9"><b>' + step.nodeId + '</b></div></div>'; html += '<div class="row"><div class="col-md-3">Host' + ': </div><div class="col-md-9"><b>' + step.hostId + '</b></div></div>'; html += '<div class="row"><div class="col-md-3">' + $translate.instant('APPLICATIONS.WF.activity') + ': </div>'; html += '<div class="col-md-9"><b>' + 
            $translate.instant('APPLICATIONS.WF.' + scope.workflows.getStepActivityType(step)) + '</b></div></div>';
            var activityDetails = scope.workflows.getStepActivityDetails(step);
            for (var propName in activityDetails) {
              html += '<div class="row"><div class="col-md-3">';
              html += $translate.instant('APPLICATIONS.WF.' + propName) + ': </div><div class="col-md-9 wfActivityDetail"><b>' + _.startTrunc(activityDetails[propName], 25) + '</b></div></div>';
            }
            html += '</div>';
            return html;
          };

          scope.$on('WfRefresh', function (event, args) {
            if(args.layout){
              refresh();
            } else {
              render(false);
            }
            if(args.center) {
              centerGraph();
            }
          });

          // preview events registering
          function setPreviewEdge(g, from , to) {
            g.setEdge(from, to, {
              lineInterpolate: 'basis',
              style: 'stroke: blue; stroke-width: 3px; stroke-dasharray: 5, 5;',
              marker: 'arrow-preview'
            });
          }

          function setPreviewNode(g) {
            g.setNode('a4cPreviewNewStep', { style: 'stroke: blue', shape: 'operationPreviewStep', labelStyle: 'fill: blue; font-weight: bold; font-size: 2em', width: 60, height: 45 });
          }

          scope.$on('WfRemoveEdgePreview', function (event, from, to) {
            console.debug('WfRemoveEdgePreview event received : ' + event + ', from:' + from + ', to:' + to);
            g.removeEdge(from, to);
            if (steps[from].followingSteps.length === 1) {
              setPreviewEdge(g, from, 'end');
            }
            if (steps[to].precedingSteps.length === 1) {
              setPreviewEdge(g, 'start', to);
            }
            render(true);
          });

          scope.$on('WfResetPreview', function (event) {
            console.debug('WfResetPreview event received : ' + event);
            refresh();
          });

          scope.$on('WfConnectPreview', function (event, from, to) {
            console.debug('WfConnectPreview event received : ' + event + ', from:' + from + ', to:' + to);
            for (var i = 0; i < from.length; i++) {
              g.removeEdge(from[i], 'end');
              for (var j = 0; j < to.length; j++) {
                g.removeEdge('start', to[j]);
                setPreviewEdge(g, from[i], to[j]);
              }
            }
            render(true);
          });

          scope.$on('WfAddStepPreview', function () {
            setPreviewNode(g);
            setPreviewEdge(g, 'start', 'a4cPreviewNewStep');
            setPreviewEdge(g, 'a4cPreviewNewStep', 'end');
            if (_.size(steps) === 0) {
              g.removeEdge('start', 'end');
            }
            render(true);
          });

          scope.$on('WfInsertStepPreview', function (event, stepId) {
            console.debug('WfInsertStepPreview event received : ' + event + ', stepId:' + stepId);
            setPreviewNode(g);
            var precedingStep;
            if (steps[stepId].precedingSteps.length === 0) {
              precedingStep = 'start';
            } else if (steps[stepId].precedingSteps.length === 1) {
              precedingStep = steps[stepId].precedingSteps[0];
            }
            if (precedingStep) {
              g.removeEdge(precedingStep, stepId);
              setPreviewEdge(g, precedingStep, 'a4cPreviewNewStep');
            }
            setPreviewEdge(g, 'a4cPreviewNewStep', stepId);
            render(true);
          });

          scope.$on('WfAppendStepPreview', function (event, stepId) {
            console.debug('WfAppendStepPreview event received : ' + event + ', stepId:' + stepId);
            setPreviewNode(g);
            var followingStep;
            if (steps[stepId].followingSteps.length === 0) {
              followingStep = 'end';
            } else if (steps[stepId].followingSteps.length === 1) {
              followingStep = steps[stepId].followingSteps[0];
            }
            if (followingStep) {
              g.removeEdge(stepId, followingStep);
              setPreviewEdge(g, 'a4cPreviewNewStep', followingStep);
            }
            setPreviewEdge(g, stepId, 'a4cPreviewNewStep');
            render(true);
          });

          scope.$on('WfRemoveStepPreview', function (event, stepId) {
            console.debug('WfRemoveStepPreview event received : ' + event + ', stepId:' + stepId);
            g.removeNode(stepId);
            var precedingSteps;
            if (!steps[stepId].precedingSteps || steps[stepId].precedingSteps.length === 0) {
              precedingSteps = ['start'];
            } else {
              precedingSteps = steps[stepId].precedingSteps;
            }
            var followingSteps;
            if (!steps[stepId].followingSteps || steps[stepId].followingSteps.length === 0) {
              followingSteps = ['end'];
            } else {
              followingSteps = steps[stepId].followingSteps;
            }
            for (var i = 0; i < precedingSteps.length; i++) {
              for (var j = 0; j < followingSteps.length; j++) {
                if (precedingSteps[i] === 'start' && followingSteps[j] === 'end') {
                  continue;
                }
                setPreviewEdge(g, precedingSteps[i], followingSteps[j]);
              }
            }
            render(true);
          });

          function swapLinks(from, to) {
            // from's preceding steps become preceding steps of 'to'
            var precedingSteps;
            if (!steps[from].precedingSteps || steps[from].precedingSteps.length === 0) {
              precedingSteps = ['start'];
            } else {
              precedingSteps = steps[from].precedingSteps;
            }
            for (var i = 0; i < precedingSteps.length; i++) {
              g.removeEdge(precedingSteps[i], from);
              if (precedingSteps[i] !== to) {
                setPreviewEdge(g, precedingSteps[i], to);
              }
            }
            // from's following steps become following steps of 'to' (except 'to' itself)
            var followingSteps;
            if (!steps[from].followingSteps || steps[from].followingSteps.length === 0) {
              followingSteps = ['end'];
            } else {
              followingSteps = steps[from].followingSteps;
            }
            for (var j = 0; j < followingSteps.length; j++) {
              g.removeEdge(from, followingSteps[j]);
              if (followingSteps[j] !== to) {
                setPreviewEdge(g, to, followingSteps[j]);
              }
            }
          }

          // swap steps: the connection between the two steps is inverted and all of their other connections are swapped
          scope.$on('WfSwapPreview', function (event, from, to) {
            console.debug('WfSwapPreview event received : ' + event + ', from:' + from + ', to:' + to);
            g.removeEdge(from, to);
            swapLinks(from, to);
            swapLinks(to, from);
            setPreviewEdge(g, to, from);
            render(true);
          });
        }
      };
    }
  ]); // factory
}); // define
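// ---------------------------------------------------------------------------
// Editor's note: the sketch below is NOT part of the original
// workflow_rendering.js; it is a minimal, illustrative usage example. It
// assumes the directive is placed in a template as a <topology-plan> element
// (it is registered with restrict: 'E') next to the #plan-graph-container and
// #editor-context-container elements it selects with d3, and that the
// enclosing controller scope exposes the members watched above (topology,
// workflows, currentWorkflowName, triggerRefresh, visualDimensions,
// wfViewMode). The controller and function names are hypothetical; only the
// event names and payloads come from the handlers defined above. The function
// is not registered with Angular here, so including it has no side effects.
function WorkflowPlanUsageSketchCtrl($scope) {
  // Ask the directive to re-run the layout and re-center the view; the
  // 'WfRefresh' handler reads args.layout and args.center.
  $scope.refreshWorkflow = function() {
    $scope.$broadcast('WfRefresh', { layout: true, center: true });
  };

  // Show the blue dashed preview edges for inserting a new step before an
  // existing one; 'WfInsertStepPreview' expects the target step id.
  $scope.previewInsertBefore = function(stepId) {
    $scope.$broadcast('WfInsertStepPreview', stepId);
  };

  // Discard any pending preview and redraw from the current topology data.
  $scope.cancelPreview = function() {
    $scope.$broadcast('WfResetPreview');
  };
}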
{ "content_hash": "f2f8951783e2ee929ed96ea2937a4638", "timestamp": "", "source": "github", "line_count": 369, "max_line_length": 194, "avg_line_length": 41.983739837398375, "alnum_prop": 0.5104570100697134, "repo_name": "broly-git/alien4cloud", "id": "7cc360e8771c3bf07ca8841aa984c5b6d54441c0", "size": "15549", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "alien4cloud-ui/src/main/webapp/scripts/topology/directives/workflow_rendering.js", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "ApacheConf", "bytes": "59321" }, { "name": "Batchfile", "bytes": "526" }, { "name": "CSS", "bytes": "48747" }, { "name": "Cucumber", "bytes": "709222" }, { "name": "Groovy", "bytes": "108257" }, { "name": "HTML", "bytes": "419764" }, { "name": "Java", "bytes": "3722736" }, { "name": "JavaScript", "bytes": "1290754" }, { "name": "Shell", "bytes": "41378" } ], "symlink_target": "" }
package org.apache.zookeeper.server; import org.apache.jute.BinaryOutputArchive; import org.apache.jute.Record; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.ZooDefs; import org.apache.zookeeper.data.ACL; import org.apache.zookeeper.data.Id; import org.apache.zookeeper.data.Stat; import org.apache.zookeeper.proto.GetACLRequest; import org.apache.zookeeper.proto.GetACLResponse; import org.apache.zookeeper.proto.ReplyHeader; import org.junit.Before; import org.junit.Test; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import static org.hamcrest.Matchers.equalTo; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; public class FinalRequestProcessorTest { private List<ACL> testACLs = new ArrayList<ACL>(); private final Record[] responseRecord = new Record[1]; private final ReplyHeader[] replyHeaders = new ReplyHeader[1]; private ServerCnxn cnxn; private ByteBuffer bb; private FinalRequestProcessor processor; @Before public void setUp() throws KeeperException.NoNodeException, IOException { testACLs.clear(); testACLs.addAll(Arrays.asList( new ACL(ZooDefs.Perms.ALL, new Id("digest", "user:secrethash")), new ACL(ZooDefs.Perms.ADMIN, new Id("digest", "adminuser:adminsecret")), new ACL(ZooDefs.Perms.READ, new Id("world", "anyone")) )); ZooKeeperServer zks = new ZooKeeperServer(); ZKDatabase db = mock(ZKDatabase.class); String testPath = "/testPath"; when(db.getNode(eq(testPath))).thenReturn(new DataNode()); when(db.getACL(eq(testPath), any(Stat.class))).thenReturn(testACLs); when(db.aclForNode(any(DataNode.class))).thenReturn(testACLs); zks.setZKDatabase(db); processor = new FinalRequestProcessor(zks); cnxn = mock(ServerCnxn.class); doAnswer(new Answer() { @Override public Object answer(InvocationOnMock invocationOnMock) { replyHeaders[0] = invocationOnMock.getArgument(0); responseRecord[0] = invocationOnMock.getArgument(1); return null; } }).when(cnxn).sendResponse(any(), any(), anyString()); GetACLRequest getACLRequest = new GetACLRequest(); getACLRequest.setPath(testPath); ByteArrayOutputStream baos = new ByteArrayOutputStream(); BinaryOutputArchive boa = BinaryOutputArchive.getArchive(baos); getACLRequest.serialize(boa, "request"); baos.close(); bb = ByteBuffer.wrap(baos.toByteArray()); } @Test public void testACLDigestHashHiding_NoAuth_WorldCanRead() { // Arrange // Act Request r = new Request(cnxn, 0, 0, ZooDefs.OpCode.getACL, bb, new ArrayList<Id>()); processor.processRequest(r); // Assert assertMasked(true); } @Test public void testACLDigestHashHiding_NoAuth_NoWorld() { // Arrange testACLs.remove(2); // Act Request r = new Request(cnxn, 0, 0, ZooDefs.OpCode.getACL, bb, new ArrayList<Id>()); processor.processRequest(r); // Assert assertThat(KeeperException.Code.get(replyHeaders[0].getErr()), equalTo(KeeperException.Code.NOAUTH)); } @Test public void testACLDigestHashHiding_UserCanRead() { // Arrange List<Id> authInfo = new ArrayList<Id>(); authInfo.add(new Id("digest", "otheruser:somesecrethash")); // Act Request r = new Request(cnxn, 0, 0, 
ZooDefs.OpCode.getACL, bb, authInfo); processor.processRequest(r); // Assert assertMasked(true); } @Test public void testACLDigestHashHiding_UserCanAll() { // Arrange List<Id> authInfo = new ArrayList<Id>(); authInfo.add(new Id("digest", "user:secrethash")); // Act Request r = new Request(cnxn, 0, 0, ZooDefs.OpCode.getACL, bb, authInfo); processor.processRequest(r); // Assert assertMasked(false); } @Test public void testACLDigestHashHiding_AdminUser() { // Arrange List<Id> authInfo = new ArrayList<Id>(); authInfo.add(new Id("digest", "adminuser:adminsecret")); // Act Request r = new Request(cnxn, 0, 0, ZooDefs.OpCode.getACL, bb, authInfo); processor.processRequest(r); // Assert assertMasked(false); } @Test public void testACLDigestHashHiding_OnlyAdmin() { // Arrange testACLs.clear(); testACLs.addAll(Arrays.asList( new ACL(ZooDefs.Perms.READ, new Id("digest", "user:secrethash")), new ACL(ZooDefs.Perms.ADMIN, new Id("digest", "adminuser:adminsecret")) )); List<Id> authInfo = new ArrayList<Id>(); authInfo.add(new Id("digest", "adminuser:adminsecret")); // Act Request r = new Request(cnxn, 0, 0, ZooDefs.OpCode.getACL, bb, authInfo); processor.processRequest(r); // Assert assertTrue("Not a GetACL response. Auth failed?", responseRecord[0] instanceof GetACLResponse); GetACLResponse rsp = (GetACLResponse)responseRecord[0]; assertThat("Number of ACLs in the response are different", rsp.getAcl().size(), equalTo(2)); // Verify ACLs in the response assertThat("Password hash mismatch in the response", rsp.getAcl().get(0).getId().getId(), equalTo("user:secrethash")); assertThat("Password hash mismatch in the response", rsp.getAcl().get(1).getId().getId(), equalTo("adminuser:adminsecret")); } private void assertMasked(boolean masked) { assertTrue("Not a GetACL response. 
Auth failed?", responseRecord[0] instanceof GetACLResponse); GetACLResponse rsp = (GetACLResponse)responseRecord[0]; assertThat("Number of ACLs in the response are different", rsp.getAcl().size(), equalTo(3)); // Verify ACLs in the response assertThat("Invalid ACL list in the response", rsp.getAcl().get(0).getPerms(), equalTo(ZooDefs.Perms.ALL)); assertThat("Invalid ACL list in the response", rsp.getAcl().get(0).getId().getScheme(), equalTo("digest")); if (masked) { assertThat("Password hash is not masked in the response", rsp.getAcl().get(0).getId().getId(), equalTo("user:x")); } else { assertThat("Password hash mismatch in the response", rsp.getAcl().get(0).getId().getId(), equalTo("user:secrethash")); } assertThat("Invalid ACL list in the response", rsp.getAcl().get(1).getPerms(), equalTo(ZooDefs.Perms.ADMIN)); assertThat("Invalid ACL list in the response", rsp.getAcl().get(1).getId().getScheme(), equalTo("digest")); if (masked) { assertThat("Password hash is not masked in the response", rsp.getAcl().get(1).getId().getId(), equalTo("adminuser:x")); } else { assertThat("Password hash mismatch in the response", rsp.getAcl().get(1).getId().getId(), equalTo("adminuser:adminsecret")); } assertThat("Invalid ACL list in the response", rsp.getAcl().get(2).getPerms(), equalTo(ZooDefs.Perms.READ)); assertThat("Invalid ACL list in the response", rsp.getAcl().get(2).getId().getScheme(), equalTo("world")); assertThat("Invalid ACL list in the response", rsp.getAcl().get(2).getId().getId(), equalTo("anyone")); // Verify that FinalRequestProcessor hasn't changed the original ACL objects assertThat("Original ACL list has been modified", testACLs.get(0).getPerms(), equalTo(ZooDefs.Perms.ALL)); assertThat("Original ACL list has been modified", testACLs.get(0).getId().getScheme(), equalTo("digest")); assertThat("Original ACL list has been modified", testACLs.get(0).getId().getId(), equalTo("user:secrethash")); assertThat("Original ACL list has been modified", testACLs.get(1).getPerms(), equalTo(ZooDefs.Perms.ADMIN)); assertThat("Original ACL list has been modified", testACLs.get(1).getId().getScheme(), equalTo("digest")); assertThat("Original ACL list has been modified", testACLs.get(1).getId().getId(), equalTo("adminuser:adminsecret")); assertThat("Original ACL list has been modified", testACLs.get(2).getPerms(), equalTo(ZooDefs.Perms.READ)); assertThat("Original ACL list has been modified", testACLs.get(2).getId().getScheme(), equalTo("world")); assertThat("Original ACL list has been modified", testACLs.get(2).getId().getId(), equalTo("anyone")); } }
{ "content_hash": "804d788794c3b6e2f8e741878849586c", "timestamp": "", "source": "github", "line_count": 214, "max_line_length": 136, "avg_line_length": 42.40654205607477, "alnum_prop": 0.6635812672176309, "repo_name": "naver/arcus-zookeeper", "id": "dc56f831bdf78d046ba3f9f7516150a11cf825de", "size": "9881", "binary": false, "copies": "1", "ref": "refs/heads/arcus-3.5.9", "path": "zookeeper-server/src/test/java/org/apache/zookeeper/server/FinalRequestProcessorTest.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "7073" }, { "name": "C", "bytes": "562640" }, { "name": "C++", "bytes": "689022" }, { "name": "CMake", "bytes": "6863" }, { "name": "CSS", "bytes": "22915" }, { "name": "Dockerfile", "bytes": "899" }, { "name": "HTML", "bytes": "46637" }, { "name": "Java", "bytes": "5173613" }, { "name": "JavaScript", "bytes": "246638" }, { "name": "M4", "bytes": "48728" }, { "name": "Makefile", "bytes": "11092" }, { "name": "Mako", "bytes": "13678" }, { "name": "Perl", "bytes": "82819" }, { "name": "Python", "bytes": "137860" }, { "name": "Raku", "bytes": "66615" }, { "name": "Shell", "bytes": "102911" }, { "name": "XS", "bytes": "66352" }, { "name": "XSLT", "bytes": "6024" } ], "symlink_target": "" }
ACCEPTED #### According to Index Fungorum #### Published in null #### Original name Gnomonia perfidiosa P. Karst. ### Remarks null
{ "content_hash": "c46ac9b1f80cddb0626e956b7a942719", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 29, "avg_line_length": 10.307692307692308, "alnum_prop": 0.7014925373134329, "repo_name": "mdoering/backbone", "id": "361ddfd6033f964b6320f71e034efa6fc5362c3c", "size": "187", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Fungi/Ascomycota/Sordariomycetes/Diaporthales/Gnomoniaceae/Gnomonia/Gnomonia perfidiosa/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
<#
    DO NOT EDIT THIS FILE WHEN IT IS LOCATED AT THE DEFAULT c:\scripts\CognosDownloader\CognosDefaults.ps1.
    You need to copy this template to c:\scripts\CognosDefaults.ps1.

    Per the suggested install instructions you should use the path c:\scripts\CognosDownload.ps1 for calling the CognosDownloader.
    It will use the CognosDefaults.ps1 file from the same folder.

    This file overrides ANY variable you set on the commandline for CognosDownload.ps1

    Uncomment any sections you need below.
#>

#Only set this if no other users in your domain need the script
#$username = '0000username'

#Your DSN for your district.
#$espdsn = 'schoolsms' #Recommend to set this

#Override password file path.
#$passwordfile = 'c:\scripts\mysavedpassword.txt'

#If you always want to save to the same path.
#$savepath = 'c:\scripts\files'

#eFinance DSN
#$efpdsn = 'schoolfms' #Recommend to set this

#eFinance User
#$efpuser = 'efinanceusername'

#Email Configuration so you don't have to put it on the command line. Still need to specify -SendMail on command line.
#$mailfrom = "[email protected]"
#$mailto="[email protected]"
#$smtpserver="smtp-relay.gmail.com"
#$smtpport="587"
#$smtppasswordfile="C:\Scripts\emailpw.txt" #change to a file path for the email server password; not needed if you use smtp-relay and auth your public IPs

<#
    Example for multiuser environment where the scripts are running under the same Windows Accounts.
    All of these password files would be decryptable under the same account.

    You can specify any variable from above in the switch statement. No Default needed as it will be
    specified at the command line, above, or default in CognosDownload.ps1
#>

# switch($username){
#     '0401cmillsap' {
#         $username = '0403cmillsap'
#         $efpuser = 'cmillsap'
#         $passwordfile = 'c:\scripts\0403cmillsap-password.txt'
#         $SendMail = $True
#     }
#     '0402cweber' {
#         $efpuser = $username;
#         $passwordfile = 'c:\scripts\0402cweber-password.txt';
#         $savepath = "c:\scripts\ImportFiles"
#     }
#     'SSOusername' {
#         $efpuser =''
#         $passwordfile = 'c:\scripts\importfiles\scripts\userpw1.txt'
#         $savepath = "c:\users\cmillsap\Downloads"
#     }
#     'SSOusername2' {
#         $efpuser = ''
#         $passwordfile = 'c:\scripts\importfiles\scripts\userpw1.txt'
#     }
# }

<#
    Example of a multiple user environment where the scripts are running under different Windows Accounts.
    You can specify the individual Windows Accounts and their eSchool username, dsn, password path, etc.
#>

# switch($env:username) {
#     'xyz_automation' { $username = '0401cmillsap'; $passwordfile = 'c:\script\cmillsappw.txt'; $espdsn = 'gentrysms' }
#     'abc_domain_admin' { $username = '0401jmillsap'; $passwordfile = 'c:\scripts\jmillsappw.txt'; $espdsn = 'gentrysms' }
# }

<#
    Empty CSV files fail the CSV Verification as they don't contain any data to verify. You can add reports
    by name to this variable and it will bypass the CSV verification process.
#>
# $ignoreEmptyCSV = @('Report Name 1','Report_Name_2')
# if ($ignoreEmptyCSV -contains $report) { $DisableCSVVerification = $True }

<#
    Do not download/process these reports during the summer months (June and July). Modify as you need.
#>
# if (@(6,7) -contains ([int](Get-Date -Format MM))) {
#     $SkipTheseReports = @('Report Name 1','Report_Name_2')
#     Write-Host "Info: Skip downloading this file per the CognosDefaults.ps1" -ForegroundColor Green
#     if ($SkipTheseReports -contains $report) { $SkipDownloadingFile = $True }
# }

<#
    Linux set path
#>
# if (-Not($IsWindows)) {
#     $savepath = '/scripts/download'
#     $passwordfile = '/scripts/mypassword.txt'
# }
{ "content_hash": "bdca379a1ce9570ce5f6669e6da7aa55", "timestamp": "", "source": "github", "line_count": 102, "max_line_length": 177, "avg_line_length": 36.42156862745098, "alnum_prop": 0.7044414535666218, "repo_name": "AR-k12code/CognosDownloader", "id": "4a3b21cbbb3e1f2792740b68f1a9f2b583b86331", "size": "3715", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "CognosDefaults-SAMPLE.ps1", "mode": "33188", "license": "mit", "language": [ { "name": "PowerShell", "bytes": "52350" } ], "symlink_target": "" }
/** * @file * simple media prober based on the FFmpeg libraries */ #include "config.h" #include "libavutil/ffversion.h" #include <string.h> #include "libavformat/avformat.h" #include "libavcodec/avcodec.h" #include "libavutil/avassert.h" #include "libavutil/avstring.h" #include "libavutil/bprint.h" #include "libavutil/hash.h" #include "libavutil/opt.h" #include "libavutil/pixdesc.h" #include "libavutil/dict.h" #include "libavutil/libm.h" #include "libavutil/parseutils.h" #include "libavutil/timecode.h" #include "libavutil/timestamp.h" #include "libavdevice/avdevice.h" #include "libswscale/swscale.h" #include "libswresample/swresample.h" #include "libpostproc/postprocess.h" #include "cmdutils.h" const char program_name[] = "ffprobe"; const int program_birth_year = 2007; static int do_bitexact = 0; static int do_count_frames = 0; static int do_count_packets = 0; static int do_read_frames = 0; static int do_read_packets = 0; static int do_show_chapters = 0; static int do_show_error = 0; static int do_show_format = 0; static int do_show_frames = 0; static int do_show_packets = 0; static int do_show_programs = 0; static int do_show_streams = 0; static int do_show_stream_disposition = 0; static int do_show_data = 0; static int do_show_program_version = 0; static int do_show_library_versions = 0; static int do_show_pixel_formats = 0; static int do_show_pixel_format_flags = 0; static int do_show_pixel_format_components = 0; static int do_show_chapter_tags = 0; static int do_show_format_tags = 0; static int do_show_frame_tags = 0; static int do_show_program_tags = 0; static int do_show_stream_tags = 0; static int show_value_unit = 0; static int use_value_prefix = 0; static int use_byte_value_binary_prefix = 0; static int use_value_sexagesimal_format = 0; static int show_private_data = 1; static char *print_format; static char *stream_specifier; static char *show_data_hash; typedef struct ReadInterval { int id; ///< identifier int64_t start, end; ///< start, end in second/AV_TIME_BASE units int has_start, has_end; int start_is_offset, end_is_offset; int duration_frames; } ReadInterval; static ReadInterval *read_intervals; static int read_intervals_nb = 0; /* section structure definition */ #define SECTION_MAX_NB_CHILDREN 10 struct section { int id; ///< unique id identifying a section const char *name; #define SECTION_FLAG_IS_WRAPPER 1 ///< the section only contains other sections, but has no data at its own level #define SECTION_FLAG_IS_ARRAY 2 ///< the section contains an array of elements of the same type #define SECTION_FLAG_HAS_VARIABLE_FIELDS 4 ///< the section may contain a variable number of fields with variable keys. /// For these sections the element_name field is mandatory. 
int flags; int children_ids[SECTION_MAX_NB_CHILDREN+1]; ///< list of children section IDS, terminated by -1 const char *element_name; ///< name of the contained element, if provided const char *unique_name; ///< unique section name, in case the name is ambiguous AVDictionary *entries_to_show; int show_all_entries; }; typedef enum { SECTION_ID_NONE = -1, SECTION_ID_CHAPTER, SECTION_ID_CHAPTER_TAGS, SECTION_ID_CHAPTERS, SECTION_ID_ERROR, SECTION_ID_FORMAT, SECTION_ID_FORMAT_TAGS, SECTION_ID_FRAME, SECTION_ID_FRAMES, SECTION_ID_FRAME_TAGS, SECTION_ID_FRAME_SIDE_DATA_LIST, SECTION_ID_FRAME_SIDE_DATA, SECTION_ID_LIBRARY_VERSION, SECTION_ID_LIBRARY_VERSIONS, SECTION_ID_PACKET, SECTION_ID_PACKETS, SECTION_ID_PACKETS_AND_FRAMES, SECTION_ID_PIXEL_FORMAT, SECTION_ID_PIXEL_FORMAT_FLAGS, SECTION_ID_PIXEL_FORMAT_COMPONENT, SECTION_ID_PIXEL_FORMAT_COMPONENTS, SECTION_ID_PIXEL_FORMATS, SECTION_ID_PROGRAM_STREAM_DISPOSITION, SECTION_ID_PROGRAM_STREAM_TAGS, SECTION_ID_PROGRAM, SECTION_ID_PROGRAM_STREAMS, SECTION_ID_PROGRAM_STREAM, SECTION_ID_PROGRAM_TAGS, SECTION_ID_PROGRAM_VERSION, SECTION_ID_PROGRAMS, SECTION_ID_ROOT, SECTION_ID_STREAM, SECTION_ID_STREAM_DISPOSITION, SECTION_ID_STREAMS, SECTION_ID_STREAM_TAGS, SECTION_ID_SUBTITLE, } SectionID; static struct section sections[] = { [SECTION_ID_CHAPTERS] = { SECTION_ID_CHAPTERS, "chapters", SECTION_FLAG_IS_ARRAY, { SECTION_ID_CHAPTER, -1 } }, [SECTION_ID_CHAPTER] = { SECTION_ID_CHAPTER, "chapter", 0, { SECTION_ID_CHAPTER_TAGS, -1 } }, [SECTION_ID_CHAPTER_TAGS] = { SECTION_ID_CHAPTER_TAGS, "tags", SECTION_FLAG_HAS_VARIABLE_FIELDS, { -1 }, .element_name = "tag", .unique_name = "chapter_tags" }, [SECTION_ID_ERROR] = { SECTION_ID_ERROR, "error", 0, { -1 } }, [SECTION_ID_FORMAT] = { SECTION_ID_FORMAT, "format", 0, { SECTION_ID_FORMAT_TAGS, -1 } }, [SECTION_ID_FORMAT_TAGS] = { SECTION_ID_FORMAT_TAGS, "tags", SECTION_FLAG_HAS_VARIABLE_FIELDS, { -1 }, .element_name = "tag", .unique_name = "format_tags" }, [SECTION_ID_FRAMES] = { SECTION_ID_FRAMES, "frames", SECTION_FLAG_IS_ARRAY, { SECTION_ID_FRAME, SECTION_ID_SUBTITLE, -1 } }, [SECTION_ID_FRAME] = { SECTION_ID_FRAME, "frame", 0, { SECTION_ID_FRAME_TAGS, SECTION_ID_FRAME_SIDE_DATA_LIST, -1 } }, [SECTION_ID_FRAME_TAGS] = { SECTION_ID_FRAME_TAGS, "tags", SECTION_FLAG_HAS_VARIABLE_FIELDS, { -1 }, .element_name = "tag", .unique_name = "frame_tags" }, [SECTION_ID_FRAME_SIDE_DATA_LIST] ={ SECTION_ID_FRAME_SIDE_DATA_LIST, "side_data_list", SECTION_FLAG_IS_ARRAY, { SECTION_ID_FRAME_SIDE_DATA, -1 } }, [SECTION_ID_FRAME_SIDE_DATA] = { SECTION_ID_FRAME_SIDE_DATA, "side_data", 0, { -1 } }, [SECTION_ID_LIBRARY_VERSIONS] = { SECTION_ID_LIBRARY_VERSIONS, "library_versions", SECTION_FLAG_IS_ARRAY, { SECTION_ID_LIBRARY_VERSION, -1 } }, [SECTION_ID_LIBRARY_VERSION] = { SECTION_ID_LIBRARY_VERSION, "library_version", 0, { -1 } }, [SECTION_ID_PACKETS] = { SECTION_ID_PACKETS, "packets", SECTION_FLAG_IS_ARRAY, { SECTION_ID_PACKET, -1} }, [SECTION_ID_PACKETS_AND_FRAMES] = { SECTION_ID_PACKETS_AND_FRAMES, "packets_and_frames", SECTION_FLAG_IS_ARRAY, { SECTION_ID_PACKET, -1} }, [SECTION_ID_PACKET] = { SECTION_ID_PACKET, "packet", 0, { -1 } }, [SECTION_ID_PIXEL_FORMATS] = { SECTION_ID_PIXEL_FORMATS, "pixel_formats", SECTION_FLAG_IS_ARRAY, { SECTION_ID_PIXEL_FORMAT, -1 } }, [SECTION_ID_PIXEL_FORMAT] = { SECTION_ID_PIXEL_FORMAT, "pixel_format", 0, { SECTION_ID_PIXEL_FORMAT_FLAGS, SECTION_ID_PIXEL_FORMAT_COMPONENTS, -1 } }, [SECTION_ID_PIXEL_FORMAT_FLAGS] = { SECTION_ID_PIXEL_FORMAT_FLAGS, "flags", 0, { -1 }, .unique_name = "pixel_format_flags" 
}, [SECTION_ID_PIXEL_FORMAT_COMPONENTS] = { SECTION_ID_PIXEL_FORMAT_COMPONENTS, "components", SECTION_FLAG_IS_ARRAY, {SECTION_ID_PIXEL_FORMAT_COMPONENT, -1 }, .unique_name = "pixel_format_components" }, [SECTION_ID_PIXEL_FORMAT_COMPONENT] = { SECTION_ID_PIXEL_FORMAT_COMPONENT, "component", 0, { -1 } }, [SECTION_ID_PROGRAM_STREAM_DISPOSITION] = { SECTION_ID_PROGRAM_STREAM_DISPOSITION, "disposition", 0, { -1 }, .unique_name = "program_stream_disposition" }, [SECTION_ID_PROGRAM_STREAM_TAGS] = { SECTION_ID_PROGRAM_STREAM_TAGS, "tags", SECTION_FLAG_HAS_VARIABLE_FIELDS, { -1 }, .element_name = "tag", .unique_name = "program_stream_tags" }, [SECTION_ID_PROGRAM] = { SECTION_ID_PROGRAM, "program", 0, { SECTION_ID_PROGRAM_TAGS, SECTION_ID_PROGRAM_STREAMS, -1 } }, [SECTION_ID_PROGRAM_STREAMS] = { SECTION_ID_PROGRAM_STREAMS, "streams", SECTION_FLAG_IS_ARRAY, { SECTION_ID_PROGRAM_STREAM, -1 }, .unique_name = "program_streams" }, [SECTION_ID_PROGRAM_STREAM] = { SECTION_ID_PROGRAM_STREAM, "stream", 0, { SECTION_ID_PROGRAM_STREAM_DISPOSITION, SECTION_ID_PROGRAM_STREAM_TAGS, -1 }, .unique_name = "program_stream" }, [SECTION_ID_PROGRAM_TAGS] = { SECTION_ID_PROGRAM_TAGS, "tags", SECTION_FLAG_HAS_VARIABLE_FIELDS, { -1 }, .element_name = "tag", .unique_name = "program_tags" }, [SECTION_ID_PROGRAM_VERSION] = { SECTION_ID_PROGRAM_VERSION, "program_version", 0, { -1 } }, [SECTION_ID_PROGRAMS] = { SECTION_ID_PROGRAMS, "programs", SECTION_FLAG_IS_ARRAY, { SECTION_ID_PROGRAM, -1 } }, [SECTION_ID_ROOT] = { SECTION_ID_ROOT, "root", SECTION_FLAG_IS_WRAPPER, { SECTION_ID_CHAPTERS, SECTION_ID_FORMAT, SECTION_ID_FRAMES, SECTION_ID_PROGRAMS, SECTION_ID_STREAMS, SECTION_ID_PACKETS, SECTION_ID_ERROR, SECTION_ID_PROGRAM_VERSION, SECTION_ID_LIBRARY_VERSIONS, SECTION_ID_PIXEL_FORMATS, -1} }, [SECTION_ID_STREAMS] = { SECTION_ID_STREAMS, "streams", SECTION_FLAG_IS_ARRAY, { SECTION_ID_STREAM, -1 } }, [SECTION_ID_STREAM] = { SECTION_ID_STREAM, "stream", 0, { SECTION_ID_STREAM_DISPOSITION, SECTION_ID_STREAM_TAGS, -1 } }, [SECTION_ID_STREAM_DISPOSITION] = { SECTION_ID_STREAM_DISPOSITION, "disposition", 0, { -1 }, .unique_name = "stream_disposition" }, [SECTION_ID_STREAM_TAGS] = { SECTION_ID_STREAM_TAGS, "tags", SECTION_FLAG_HAS_VARIABLE_FIELDS, { -1 }, .element_name = "tag", .unique_name = "stream_tags" }, [SECTION_ID_SUBTITLE] = { SECTION_ID_SUBTITLE, "subtitle", 0, { -1 } }, }; static const OptionDef *options; /* FFprobe context */ static const char *input_filename; static AVInputFormat *iformat = NULL; static struct AVHashContext *hash; static const char *const binary_unit_prefixes [] = { "", "Ki", "Mi", "Gi", "Ti", "Pi" }; static const char *const decimal_unit_prefixes[] = { "", "K" , "M" , "G" , "T" , "P" }; static const char unit_second_str[] = "s" ; static const char unit_hertz_str[] = "Hz" ; static const char unit_byte_str[] = "byte" ; static const char unit_bit_per_second_str[] = "bit/s"; static int nb_streams; static uint64_t *nb_streams_packets; static uint64_t *nb_streams_frames; static int *selected_streams; static void ffprobe_cleanup(int ret) { int i; for (i = 0; i < FF_ARRAY_ELEMS(sections); i++) av_dict_free(&(sections[i].entries_to_show)); } struct unit_value { union { double d; long long int i; } val; const char *unit; }; static char *value_string(char *buf, int buf_size, struct unit_value uv) { double vald; long long int vali; int show_float = 0; if (uv.unit == unit_second_str) { vald = uv.val.d; show_float = 1; } else { vald = vali = uv.val.i; } if (uv.unit == unit_second_str && use_value_sexagesimal_format) { 
double secs; int hours, mins; secs = vald; mins = (int)secs / 60; secs = secs - mins * 60; hours = mins / 60; mins %= 60; snprintf(buf, buf_size, "%d:%02d:%09.6f", hours, mins, secs); } else { const char *prefix_string = ""; if (use_value_prefix && vald > 1) { long long int index; if (uv.unit == unit_byte_str && use_byte_value_binary_prefix) { index = (long long int) (log2(vald)) / 10; index = av_clip(index, 0, FF_ARRAY_ELEMS(binary_unit_prefixes) - 1); vald /= exp2(index * 10); prefix_string = binary_unit_prefixes[index]; } else { index = (long long int) (log10(vald)) / 3; index = av_clip(index, 0, FF_ARRAY_ELEMS(decimal_unit_prefixes) - 1); vald /= pow(10, index * 3); prefix_string = decimal_unit_prefixes[index]; } vali = vald; } if (show_float || (use_value_prefix && vald != (long long int)vald)) snprintf(buf, buf_size, "%f", vald); else snprintf(buf, buf_size, "%lld", vali); av_strlcatf(buf, buf_size, "%s%s%s", *prefix_string || show_value_unit ? " " : "", prefix_string, show_value_unit ? uv.unit : ""); } return buf; } /* WRITERS API */ typedef struct WriterContext WriterContext; #define WRITER_FLAG_DISPLAY_OPTIONAL_FIELDS 1 #define WRITER_FLAG_PUT_PACKETS_AND_FRAMES_IN_SAME_CHAPTER 2 typedef enum { WRITER_STRING_VALIDATION_FAIL, WRITER_STRING_VALIDATION_REPLACE, WRITER_STRING_VALIDATION_IGNORE, WRITER_STRING_VALIDATION_NB } StringValidation; typedef struct Writer { const AVClass *priv_class; ///< private class of the writer, if any int priv_size; ///< private size for the writer context const char *name; int (*init) (WriterContext *wctx); void (*uninit)(WriterContext *wctx); void (*print_section_header)(WriterContext *wctx); void (*print_section_footer)(WriterContext *wctx); void (*print_integer) (WriterContext *wctx, const char *, long long int); void (*print_rational) (WriterContext *wctx, AVRational *q, char *sep); void (*print_string) (WriterContext *wctx, const char *, const char *); int flags; ///< a combination or WRITER_FLAG_* } Writer; #define SECTION_MAX_NB_LEVELS 10 struct WriterContext { const AVClass *class; ///< class of the writer const Writer *writer; ///< the Writer of which this is an instance char *name; ///< name of this writer instance void *priv; ///< private data for use by the filter const struct section *sections; ///< array containing all sections int nb_sections; ///< number of sections int level; ///< current level, starting from 0 /** number of the item printed in the given section, starting from 0 */ unsigned int nb_item[SECTION_MAX_NB_LEVELS]; /** section per each level */ const struct section *section[SECTION_MAX_NB_LEVELS]; AVBPrint section_pbuf[SECTION_MAX_NB_LEVELS]; ///< generic print buffer dedicated to each section, /// used by various writers unsigned int nb_section_packet; ///< number of the packet section in case we are in "packets_and_frames" section unsigned int nb_section_frame; ///< number of the frame section in case we are in "packets_and_frames" section unsigned int nb_section_packet_frame; ///< nb_section_packet or nb_section_frame according if is_packets_and_frames int string_validation; char *string_validation_replacement; unsigned int string_validation_utf8_flags; }; static const char *writer_get_name(void *p) { WriterContext *wctx = p; return wctx->writer->name; } #define OFFSET(x) offsetof(WriterContext, x) static const AVOption writer_options[] = { { "string_validation", "set string validation mode", OFFSET(string_validation), AV_OPT_TYPE_INT, {.i64=WRITER_STRING_VALIDATION_REPLACE}, 0, WRITER_STRING_VALIDATION_NB-1, .unit = "sv" 
}, { "sv", "set string validation mode", OFFSET(string_validation), AV_OPT_TYPE_INT, {.i64=WRITER_STRING_VALIDATION_REPLACE}, 0, WRITER_STRING_VALIDATION_NB-1, .unit = "sv" }, { "ignore", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = WRITER_STRING_VALIDATION_IGNORE}, .unit = "sv" }, { "replace", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = WRITER_STRING_VALIDATION_REPLACE}, .unit = "sv" }, { "fail", NULL, 0, AV_OPT_TYPE_CONST, {.i64 = WRITER_STRING_VALIDATION_FAIL}, .unit = "sv" }, { "string_validation_replacement", "set string validation replacement string", OFFSET(string_validation_replacement), AV_OPT_TYPE_STRING, {.str=""}}, { "svr", "set string validation replacement string", OFFSET(string_validation_replacement), AV_OPT_TYPE_STRING, {.str="\xEF\xBF\xBD"}}, { NULL } }; static void *writer_child_next(void *obj, void *prev) { WriterContext *ctx = obj; if (!prev && ctx->writer && ctx->writer->priv_class && ctx->priv) return ctx->priv; return NULL; } static const AVClass writer_class = { .class_name = "Writer", .item_name = writer_get_name, .option = writer_options, .version = LIBAVUTIL_VERSION_INT, .child_next = writer_child_next, }; static void writer_close(WriterContext **wctx) { int i; if (!*wctx) return; if ((*wctx)->writer->uninit) (*wctx)->writer->uninit(*wctx); for (i = 0; i < SECTION_MAX_NB_LEVELS; i++) av_bprint_finalize(&(*wctx)->section_pbuf[i], NULL); if ((*wctx)->writer->priv_class) av_opt_free((*wctx)->priv); av_freep(&((*wctx)->priv)); av_opt_free(*wctx); av_freep(wctx); } static void bprint_bytes(AVBPrint *bp, const uint8_t *ubuf, size_t ubuf_size) { int i; av_bprintf(bp, "0X"); for (i = 0; i < ubuf_size; i++) av_bprintf(bp, "%02X", ubuf[i]); } static int writer_open(WriterContext **wctx, const Writer *writer, const char *args, const struct section *sections, int nb_sections) { int i, ret = 0; if (!(*wctx = av_mallocz(sizeof(WriterContext)))) { ret = AVERROR(ENOMEM); goto fail; } if (!((*wctx)->priv = av_mallocz(writer->priv_size))) { ret = AVERROR(ENOMEM); goto fail; } (*wctx)->class = &writer_class; (*wctx)->writer = writer; (*wctx)->level = -1; (*wctx)->sections = sections; (*wctx)->nb_sections = nb_sections; av_opt_set_defaults(*wctx); if (writer->priv_class) { void *priv_ctx = (*wctx)->priv; *((const AVClass **)priv_ctx) = writer->priv_class; av_opt_set_defaults(priv_ctx); } /* convert options to dictionary */ if (args) { AVDictionary *opts = NULL; AVDictionaryEntry *opt = NULL; if ((ret = av_dict_parse_string(&opts, args, "=", ":", 0)) < 0) { av_log(*wctx, AV_LOG_ERROR, "Failed to parse option string '%s' provided to writer context\n", args); av_dict_free(&opts); goto fail; } while ((opt = av_dict_get(opts, "", opt, AV_DICT_IGNORE_SUFFIX))) { if ((ret = av_opt_set(*wctx, opt->key, opt->value, AV_OPT_SEARCH_CHILDREN)) < 0) { av_log(*wctx, AV_LOG_ERROR, "Failed to set option '%s' with value '%s' provided to writer context\n", opt->key, opt->value); av_dict_free(&opts); goto fail; } } av_dict_free(&opts); } /* validate replace string */ { const uint8_t *p = (*wctx)->string_validation_replacement; const uint8_t *endp = p + strlen(p); while (*p) { const uint8_t *p0 = p; int32_t code; ret = av_utf8_decode(&code, &p, endp, (*wctx)->string_validation_utf8_flags); if (ret < 0) { AVBPrint bp; av_bprint_init(&bp, 0, AV_BPRINT_SIZE_AUTOMATIC); bprint_bytes(&bp, p0, p-p0), av_log(wctx, AV_LOG_ERROR, "Invalid UTF8 sequence %s found in string validation replace '%s'\n", bp.str, (*wctx)->string_validation_replacement); return ret; } } } for (i = 0; i < SECTION_MAX_NB_LEVELS; i++) 
av_bprint_init(&(*wctx)->section_pbuf[i], 1, AV_BPRINT_SIZE_UNLIMITED); if ((*wctx)->writer->init) ret = (*wctx)->writer->init(*wctx); if (ret < 0) goto fail; return 0; fail: writer_close(wctx); return ret; } static inline void writer_print_section_header(WriterContext *wctx, int section_id) { int parent_section_id; wctx->level++; av_assert0(wctx->level < SECTION_MAX_NB_LEVELS); parent_section_id = wctx->level ? (wctx->section[wctx->level-1])->id : SECTION_ID_NONE; wctx->nb_item[wctx->level] = 0; wctx->section[wctx->level] = &wctx->sections[section_id]; if (section_id == SECTION_ID_PACKETS_AND_FRAMES) { wctx->nb_section_packet = wctx->nb_section_frame = wctx->nb_section_packet_frame = 0; } else if (parent_section_id == SECTION_ID_PACKETS_AND_FRAMES) { wctx->nb_section_packet_frame = section_id == SECTION_ID_PACKET ? wctx->nb_section_packet : wctx->nb_section_frame; } if (wctx->writer->print_section_header) wctx->writer->print_section_header(wctx); } static inline void writer_print_section_footer(WriterContext *wctx) { int section_id = wctx->section[wctx->level]->id; int parent_section_id = wctx->level ? wctx->section[wctx->level-1]->id : SECTION_ID_NONE; if (parent_section_id != SECTION_ID_NONE) wctx->nb_item[wctx->level-1]++; if (parent_section_id == SECTION_ID_PACKETS_AND_FRAMES) { if (section_id == SECTION_ID_PACKET) wctx->nb_section_packet++; else wctx->nb_section_frame++; } if (wctx->writer->print_section_footer) wctx->writer->print_section_footer(wctx); wctx->level--; } static inline void writer_print_integer(WriterContext *wctx, const char *key, long long int val) { const struct section *section = wctx->section[wctx->level]; if (section->show_all_entries || av_dict_get(section->entries_to_show, key, NULL, 0)) { wctx->writer->print_integer(wctx, key, val); wctx->nb_item[wctx->level]++; } } static inline int validate_string(WriterContext *wctx, char **dstp, const char *src) { const uint8_t *p, *endp; AVBPrint dstbuf; int invalid_chars_nb = 0, ret = 0; av_bprint_init(&dstbuf, 0, AV_BPRINT_SIZE_UNLIMITED); endp = src + strlen(src); for (p = (uint8_t *)src; *p;) { uint32_t code; int invalid = 0; const uint8_t *p0 = p; if (av_utf8_decode(&code, &p, endp, wctx->string_validation_utf8_flags) < 0) { AVBPrint bp; av_bprint_init(&bp, 0, AV_BPRINT_SIZE_AUTOMATIC); bprint_bytes(&bp, p0, p-p0); av_log(wctx, AV_LOG_DEBUG, "Invalid UTF-8 sequence %s found in string '%s'\n", bp.str, src); invalid = 1; } if (invalid) { invalid_chars_nb++; switch (wctx->string_validation) { case WRITER_STRING_VALIDATION_FAIL: av_log(wctx, AV_LOG_ERROR, "Invalid UTF-8 sequence found in string '%s'\n", src); ret = AVERROR_INVALIDDATA; goto end; break; case WRITER_STRING_VALIDATION_REPLACE: av_bprintf(&dstbuf, "%s", wctx->string_validation_replacement); break; } } if (!invalid || wctx->string_validation == WRITER_STRING_VALIDATION_IGNORE) av_bprint_append_data(&dstbuf, p0, p-p0); } if (invalid_chars_nb && wctx->string_validation == WRITER_STRING_VALIDATION_REPLACE) { av_log(wctx, AV_LOG_WARNING, "%d invalid UTF-8 sequence(s) found in string '%s', replaced with '%s'\n", invalid_chars_nb, src, wctx->string_validation_replacement); } end: av_bprint_finalize(&dstbuf, dstp); return ret; } #define PRINT_STRING_OPT 1 #define PRINT_STRING_VALIDATE 2 static inline int writer_print_string(WriterContext *wctx, const char *key, const char *val, int flags) { const struct section *section = wctx->section[wctx->level]; int ret = 0; if ((flags & PRINT_STRING_OPT) && !(wctx->writer->flags & WRITER_FLAG_DISPLAY_OPTIONAL_FIELDS)) return 0; 
if (section->show_all_entries || av_dict_get(section->entries_to_show, key, NULL, 0)) { if (flags & PRINT_STRING_VALIDATE) { char *key1 = NULL, *val1 = NULL; ret = validate_string(wctx, &key1, key); if (ret < 0) goto end; ret = validate_string(wctx, &val1, val); if (ret < 0) goto end; wctx->writer->print_string(wctx, key1, val1); end: if (ret < 0) { av_log(wctx, AV_LOG_ERROR, "Invalid key=value string combination %s=%s in section %s\n", key, val, section->unique_name); } av_free(key1); av_free(val1); } else { wctx->writer->print_string(wctx, key, val); } wctx->nb_item[wctx->level]++; } return ret; } static inline void writer_print_rational(WriterContext *wctx, const char *key, AVRational q, char sep) { AVBPrint buf; av_bprint_init(&buf, 0, AV_BPRINT_SIZE_AUTOMATIC); av_bprintf(&buf, "%d%c%d", q.num, sep, q.den); writer_print_string(wctx, key, buf.str, 0); } static void writer_print_time(WriterContext *wctx, const char *key, int64_t ts, const AVRational *time_base, int is_duration) { char buf[128]; if ((!is_duration && ts == AV_NOPTS_VALUE) || (is_duration && ts == 0)) { writer_print_string(wctx, key, "N/A", PRINT_STRING_OPT); } else { double d = ts * av_q2d(*time_base); struct unit_value uv; uv.val.d = d; uv.unit = unit_second_str; value_string(buf, sizeof(buf), uv); writer_print_string(wctx, key, buf, 0); } } static void writer_print_ts(WriterContext *wctx, const char *key, int64_t ts, int is_duration) { if ((!is_duration && ts == AV_NOPTS_VALUE) || (is_duration && ts == 0)) { writer_print_string(wctx, key, "N/A", PRINT_STRING_OPT); } else { writer_print_integer(wctx, key, ts); } } static void writer_print_data(WriterContext *wctx, const char *name, uint8_t *data, int size) { AVBPrint bp; int offset = 0, l, i; av_bprint_init(&bp, 0, AV_BPRINT_SIZE_UNLIMITED); av_bprintf(&bp, "\n"); while (size) { av_bprintf(&bp, "%08x: ", offset); l = FFMIN(size, 16); for (i = 0; i < l; i++) { av_bprintf(&bp, "%02x", data[i]); if (i & 1) av_bprintf(&bp, " "); } av_bprint_chars(&bp, ' ', 41 - 2 * i - i / 2); for (i = 0; i < l; i++) av_bprint_chars(&bp, data[i] - 32U < 95 ? 
data[i] : '.', 1); av_bprintf(&bp, "\n"); offset += l; data += l; size -= l; } writer_print_string(wctx, name, bp.str, 0); av_bprint_finalize(&bp, NULL); } static void writer_print_data_hash(WriterContext *wctx, const char *name, uint8_t *data, int size) { char *p, buf[AV_HASH_MAX_SIZE * 2 + 64] = { 0 }; if (!hash) return; av_hash_init(hash); av_hash_update(hash, data, size); snprintf(buf, sizeof(buf), "%s:", av_hash_get_name(hash)); p = buf + strlen(buf); av_hash_final_hex(hash, p, buf + sizeof(buf) - p); writer_print_string(wctx, name, buf, 0); } #define MAX_REGISTERED_WRITERS_NB 64 static const Writer *registered_writers[MAX_REGISTERED_WRITERS_NB + 1]; static int writer_register(const Writer *writer) { static int next_registered_writer_idx = 0; if (next_registered_writer_idx == MAX_REGISTERED_WRITERS_NB) return AVERROR(ENOMEM); registered_writers[next_registered_writer_idx++] = writer; return 0; } static const Writer *writer_get_by_name(const char *name) { int i; for (i = 0; registered_writers[i]; i++) if (!strcmp(registered_writers[i]->name, name)) return registered_writers[i]; return NULL; } /* WRITERS */ #define DEFINE_WRITER_CLASS(name) \ static const char *name##_get_name(void *ctx) \ { \ return #name ; \ } \ static const AVClass name##_class = { \ .class_name = #name, \ .item_name = name##_get_name, \ .option = name##_options \ } /* Default output */ typedef struct DefaultContext { const AVClass *class; int nokey; int noprint_wrappers; int nested_section[SECTION_MAX_NB_LEVELS]; } DefaultContext; #undef OFFSET #define OFFSET(x) offsetof(DefaultContext, x) static const AVOption default_options[] = { { "noprint_wrappers", "do not print headers and footers", OFFSET(noprint_wrappers), AV_OPT_TYPE_INT, {.i64=0}, 0, 1 }, { "nw", "do not print headers and footers", OFFSET(noprint_wrappers), AV_OPT_TYPE_INT, {.i64=0}, 0, 1 }, { "nokey", "force no key printing", OFFSET(nokey), AV_OPT_TYPE_INT, {.i64=0}, 0, 1 }, { "nk", "force no key printing", OFFSET(nokey), AV_OPT_TYPE_INT, {.i64=0}, 0, 1 }, {NULL}, }; DEFINE_WRITER_CLASS(default); /* lame uppercasing routine, assumes the string is lower case ASCII */ static inline char *upcase_string(char *dst, size_t dst_size, const char *src) { int i; for (i = 0; src[i] && i < dst_size-1; i++) dst[i] = av_toupper(src[i]); dst[i] = 0; return dst; } static void default_print_section_header(WriterContext *wctx) { DefaultContext *def = wctx->priv; char buf[32]; const struct section *section = wctx->section[wctx->level]; const struct section *parent_section = wctx->level ? 
wctx->section[wctx->level-1] : NULL; av_bprint_clear(&wctx->section_pbuf[wctx->level]); if (parent_section && !(parent_section->flags & (SECTION_FLAG_IS_WRAPPER|SECTION_FLAG_IS_ARRAY))) { def->nested_section[wctx->level] = 1; av_bprintf(&wctx->section_pbuf[wctx->level], "%s%s:", wctx->section_pbuf[wctx->level-1].str, upcase_string(buf, sizeof(buf), av_x_if_null(section->element_name, section->name))); } if (def->noprint_wrappers || def->nested_section[wctx->level]) return; if (!(section->flags & (SECTION_FLAG_IS_WRAPPER|SECTION_FLAG_IS_ARRAY))) printf("[%s]\n", upcase_string(buf, sizeof(buf), section->name)); } static void default_print_section_footer(WriterContext *wctx) { DefaultContext *def = wctx->priv; const struct section *section = wctx->section[wctx->level]; char buf[32]; if (def->noprint_wrappers || def->nested_section[wctx->level]) return; if (!(section->flags & (SECTION_FLAG_IS_WRAPPER|SECTION_FLAG_IS_ARRAY))) printf("[/%s]\n", upcase_string(buf, sizeof(buf), section->name)); } static void default_print_str(WriterContext *wctx, const char *key, const char *value) { DefaultContext *def = wctx->priv; if (!def->nokey) printf("%s%s=", wctx->section_pbuf[wctx->level].str, key); printf("%s\n", value); } static void default_print_int(WriterContext *wctx, const char *key, long long int value) { DefaultContext *def = wctx->priv; if (!def->nokey) printf("%s%s=", wctx->section_pbuf[wctx->level].str, key); printf("%lld\n", value); } static const Writer default_writer = { .name = "default", .priv_size = sizeof(DefaultContext), .print_section_header = default_print_section_header, .print_section_footer = default_print_section_footer, .print_integer = default_print_int, .print_string = default_print_str, .flags = WRITER_FLAG_DISPLAY_OPTIONAL_FIELDS, .priv_class = &default_class, }; /* Compact output */ /** * Apply C-language-like string escaping. */ static const char *c_escape_str(AVBPrint *dst, const char *src, const char sep, void *log_ctx) { const char *p; for (p = src; *p; p++) { switch (*p) { case '\b': av_bprintf(dst, "%s", "\\b"); break; case '\f': av_bprintf(dst, "%s", "\\f"); break; case '\n': av_bprintf(dst, "%s", "\\n"); break; case '\r': av_bprintf(dst, "%s", "\\r"); break; case '\\': av_bprintf(dst, "%s", "\\\\"); break; default: if (*p == sep) av_bprint_chars(dst, '\\', 1); av_bprint_chars(dst, *p, 1); } } return dst->str; } /** * Quote fields containing special characters, check RFC4180. 
*/ static const char *csv_escape_str(AVBPrint *dst, const char *src, const char sep, void *log_ctx) { char meta_chars[] = { sep, '"', '\n', '\r', '\0' }; int needs_quoting = !!src[strcspn(src, meta_chars)]; if (needs_quoting) av_bprint_chars(dst, '"', 1); for (; *src; src++) { if (*src == '"') av_bprint_chars(dst, '"', 1); av_bprint_chars(dst, *src, 1); } if (needs_quoting) av_bprint_chars(dst, '"', 1); return dst->str; } static const char *none_escape_str(AVBPrint *dst, const char *src, const char sep, void *log_ctx) { return src; } typedef struct CompactContext { const AVClass *class; char *item_sep_str; char item_sep; int nokey; int print_section; char *escape_mode_str; const char * (*escape_str)(AVBPrint *dst, const char *src, const char sep, void *log_ctx); int nested_section[SECTION_MAX_NB_LEVELS]; int has_nested_elems[SECTION_MAX_NB_LEVELS]; int terminate_line[SECTION_MAX_NB_LEVELS]; } CompactContext; #undef OFFSET #define OFFSET(x) offsetof(CompactContext, x) static const AVOption compact_options[]= { {"item_sep", "set item separator", OFFSET(item_sep_str), AV_OPT_TYPE_STRING, {.str="|"}, CHAR_MIN, CHAR_MAX }, {"s", "set item separator", OFFSET(item_sep_str), AV_OPT_TYPE_STRING, {.str="|"}, CHAR_MIN, CHAR_MAX }, {"nokey", "force no key printing", OFFSET(nokey), AV_OPT_TYPE_INT, {.i64=0}, 0, 1 }, {"nk", "force no key printing", OFFSET(nokey), AV_OPT_TYPE_INT, {.i64=0}, 0, 1 }, {"escape", "set escape mode", OFFSET(escape_mode_str), AV_OPT_TYPE_STRING, {.str="c"}, CHAR_MIN, CHAR_MAX }, {"e", "set escape mode", OFFSET(escape_mode_str), AV_OPT_TYPE_STRING, {.str="c"}, CHAR_MIN, CHAR_MAX }, {"print_section", "print section name", OFFSET(print_section), AV_OPT_TYPE_INT, {.i64=1}, 0, 1 }, {"p", "print section name", OFFSET(print_section), AV_OPT_TYPE_INT, {.i64=1}, 0, 1 }, {NULL}, }; DEFINE_WRITER_CLASS(compact); static av_cold int compact_init(WriterContext *wctx) { CompactContext *compact = wctx->priv; if (strlen(compact->item_sep_str) != 1) { av_log(wctx, AV_LOG_ERROR, "Item separator '%s' specified, but must contain a single character\n", compact->item_sep_str); return AVERROR(EINVAL); } compact->item_sep = compact->item_sep_str[0]; if (!strcmp(compact->escape_mode_str, "none")) compact->escape_str = none_escape_str; else if (!strcmp(compact->escape_mode_str, "c" )) compact->escape_str = c_escape_str; else if (!strcmp(compact->escape_mode_str, "csv" )) compact->escape_str = csv_escape_str; else { av_log(wctx, AV_LOG_ERROR, "Unknown escape mode '%s'\n", compact->escape_mode_str); return AVERROR(EINVAL); } return 0; } static void compact_print_section_header(WriterContext *wctx) { CompactContext *compact = wctx->priv; const struct section *section = wctx->section[wctx->level]; const struct section *parent_section = wctx->level ? 
wctx->section[wctx->level-1] : NULL; compact->terminate_line[wctx->level] = 1; compact->has_nested_elems[wctx->level] = 0; av_bprint_clear(&wctx->section_pbuf[wctx->level]); if (!(section->flags & SECTION_FLAG_IS_ARRAY) && parent_section && !(parent_section->flags & (SECTION_FLAG_IS_WRAPPER|SECTION_FLAG_IS_ARRAY))) { compact->nested_section[wctx->level] = 1; compact->has_nested_elems[wctx->level-1] = 1; av_bprintf(&wctx->section_pbuf[wctx->level], "%s%s:", wctx->section_pbuf[wctx->level-1].str, (char *)av_x_if_null(section->element_name, section->name)); wctx->nb_item[wctx->level] = wctx->nb_item[wctx->level-1]; } else { if (parent_section && compact->has_nested_elems[wctx->level-1] && (section->flags & SECTION_FLAG_IS_ARRAY)) { compact->terminate_line[wctx->level-1] = 0; printf("\n"); } if (compact->print_section && !(section->flags & (SECTION_FLAG_IS_WRAPPER|SECTION_FLAG_IS_ARRAY))) printf("%s%c", section->name, compact->item_sep); } } static void compact_print_section_footer(WriterContext *wctx) { CompactContext *compact = wctx->priv; if (!compact->nested_section[wctx->level] && compact->terminate_line[wctx->level] && !(wctx->section[wctx->level]->flags & (SECTION_FLAG_IS_WRAPPER|SECTION_FLAG_IS_ARRAY))) printf("\n"); } static void compact_print_str(WriterContext *wctx, const char *key, const char *value) { CompactContext *compact = wctx->priv; AVBPrint buf; if (wctx->nb_item[wctx->level]) printf("%c", compact->item_sep); if (!compact->nokey) printf("%s%s=", wctx->section_pbuf[wctx->level].str, key); av_bprint_init(&buf, 1, AV_BPRINT_SIZE_UNLIMITED); printf("%s", compact->escape_str(&buf, value, compact->item_sep, wctx)); av_bprint_finalize(&buf, NULL); } static void compact_print_int(WriterContext *wctx, const char *key, long long int value) { CompactContext *compact = wctx->priv; if (wctx->nb_item[wctx->level]) printf("%c", compact->item_sep); if (!compact->nokey) printf("%s%s=", wctx->section_pbuf[wctx->level].str, key); printf("%lld", value); } static const Writer compact_writer = { .name = "compact", .priv_size = sizeof(CompactContext), .init = compact_init, .print_section_header = compact_print_section_header, .print_section_footer = compact_print_section_footer, .print_integer = compact_print_int, .print_string = compact_print_str, .flags = WRITER_FLAG_DISPLAY_OPTIONAL_FIELDS, .priv_class = &compact_class, }; /* CSV output */ #undef OFFSET #define OFFSET(x) offsetof(CompactContext, x) static const AVOption csv_options[] = { {"item_sep", "set item separator", OFFSET(item_sep_str), AV_OPT_TYPE_STRING, {.str=","}, CHAR_MIN, CHAR_MAX }, {"s", "set item separator", OFFSET(item_sep_str), AV_OPT_TYPE_STRING, {.str=","}, CHAR_MIN, CHAR_MAX }, {"nokey", "force no key printing", OFFSET(nokey), AV_OPT_TYPE_INT, {.i64=1}, 0, 1 }, {"nk", "force no key printing", OFFSET(nokey), AV_OPT_TYPE_INT, {.i64=1}, 0, 1 }, {"escape", "set escape mode", OFFSET(escape_mode_str), AV_OPT_TYPE_STRING, {.str="csv"}, CHAR_MIN, CHAR_MAX }, {"e", "set escape mode", OFFSET(escape_mode_str), AV_OPT_TYPE_STRING, {.str="csv"}, CHAR_MIN, CHAR_MAX }, {"print_section", "print section name", OFFSET(print_section), AV_OPT_TYPE_INT, {.i64=1}, 0, 1 }, {"p", "print section name", OFFSET(print_section), AV_OPT_TYPE_INT, {.i64=1}, 0, 1 }, {NULL}, }; DEFINE_WRITER_CLASS(csv); static const Writer csv_writer = { .name = "csv", .priv_size = sizeof(CompactContext), .init = compact_init, .print_section_header = compact_print_section_header, .print_section_footer = compact_print_section_footer, .print_integer = compact_print_int, 
.print_string = compact_print_str, .flags = WRITER_FLAG_DISPLAY_OPTIONAL_FIELDS, .priv_class = &csv_class, }; /* Flat output */ typedef struct FlatContext { const AVClass *class; const char *sep_str; char sep; int hierarchical; } FlatContext; #undef OFFSET #define OFFSET(x) offsetof(FlatContext, x) static const AVOption flat_options[]= { {"sep_char", "set separator", OFFSET(sep_str), AV_OPT_TYPE_STRING, {.str="."}, CHAR_MIN, CHAR_MAX }, {"s", "set separator", OFFSET(sep_str), AV_OPT_TYPE_STRING, {.str="."}, CHAR_MIN, CHAR_MAX }, {"hierarchical", "specify if the section specification should be hierarchical", OFFSET(hierarchical), AV_OPT_TYPE_INT, {.i64=1}, 0, 1 }, {"h", "specify if the section specification should be hierarchical", OFFSET(hierarchical), AV_OPT_TYPE_INT, {.i64=1}, 0, 1 }, {NULL}, }; DEFINE_WRITER_CLASS(flat); static av_cold int flat_init(WriterContext *wctx) { FlatContext *flat = wctx->priv; if (strlen(flat->sep_str) != 1) { av_log(wctx, AV_LOG_ERROR, "Item separator '%s' specified, but must contain a single character\n", flat->sep_str); return AVERROR(EINVAL); } flat->sep = flat->sep_str[0]; return 0; } static const char *flat_escape_key_str(AVBPrint *dst, const char *src, const char sep) { const char *p; for (p = src; *p; p++) { if (!((*p >= '0' && *p <= '9') || (*p >= 'a' && *p <= 'z') || (*p >= 'A' && *p <= 'Z'))) av_bprint_chars(dst, '_', 1); else av_bprint_chars(dst, *p, 1); } return dst->str; } static const char *flat_escape_value_str(AVBPrint *dst, const char *src) { const char *p; for (p = src; *p; p++) { switch (*p) { case '\n': av_bprintf(dst, "%s", "\\n"); break; case '\r': av_bprintf(dst, "%s", "\\r"); break; case '\\': av_bprintf(dst, "%s", "\\\\"); break; case '"': av_bprintf(dst, "%s", "\\\""); break; case '`': av_bprintf(dst, "%s", "\\`"); break; case '$': av_bprintf(dst, "%s", "\\$"); break; default: av_bprint_chars(dst, *p, 1); break; } } return dst->str; } static void flat_print_section_header(WriterContext *wctx) { FlatContext *flat = wctx->priv; AVBPrint *buf = &wctx->section_pbuf[wctx->level]; const struct section *section = wctx->section[wctx->level]; const struct section *parent_section = wctx->level ? wctx->section[wctx->level-1] : NULL; /* build section header */ av_bprint_clear(buf); if (!parent_section) return; av_bprintf(buf, "%s", wctx->section_pbuf[wctx->level-1].str); if (flat->hierarchical || !(section->flags & (SECTION_FLAG_IS_ARRAY|SECTION_FLAG_IS_WRAPPER))) { av_bprintf(buf, "%s%s", wctx->section[wctx->level]->name, flat->sep_str); if (parent_section->flags & SECTION_FLAG_IS_ARRAY) { int n = parent_section->id == SECTION_ID_PACKETS_AND_FRAMES ? 
wctx->nb_section_packet_frame : wctx->nb_item[wctx->level-1]; av_bprintf(buf, "%d%s", n, flat->sep_str); } } } static void flat_print_int(WriterContext *wctx, const char *key, long long int value) { printf("%s%s=%lld\n", wctx->section_pbuf[wctx->level].str, key, value); } static void flat_print_str(WriterContext *wctx, const char *key, const char *value) { FlatContext *flat = wctx->priv; AVBPrint buf; printf("%s", wctx->section_pbuf[wctx->level].str); av_bprint_init(&buf, 1, AV_BPRINT_SIZE_UNLIMITED); printf("%s=", flat_escape_key_str(&buf, key, flat->sep)); av_bprint_clear(&buf); printf("\"%s\"\n", flat_escape_value_str(&buf, value)); av_bprint_finalize(&buf, NULL); } static const Writer flat_writer = { .name = "flat", .priv_size = sizeof(FlatContext), .init = flat_init, .print_section_header = flat_print_section_header, .print_integer = flat_print_int, .print_string = flat_print_str, .flags = WRITER_FLAG_DISPLAY_OPTIONAL_FIELDS|WRITER_FLAG_PUT_PACKETS_AND_FRAMES_IN_SAME_CHAPTER, .priv_class = &flat_class, }; /* INI format output */ typedef struct INIContext { const AVClass *class; int hierarchical; } INIContext; #undef OFFSET #define OFFSET(x) offsetof(INIContext, x) static const AVOption ini_options[] = { {"hierarchical", "specify if the section specification should be hierarchical", OFFSET(hierarchical), AV_OPT_TYPE_INT, {.i64=1}, 0, 1 }, {"h", "specify if the section specification should be hierarchical", OFFSET(hierarchical), AV_OPT_TYPE_INT, {.i64=1}, 0, 1 }, {NULL}, }; DEFINE_WRITER_CLASS(ini); static char *ini_escape_str(AVBPrint *dst, const char *src) { int i = 0; char c = 0; while (c = src[i++]) { switch (c) { case '\b': av_bprintf(dst, "%s", "\\b"); break; case '\f': av_bprintf(dst, "%s", "\\f"); break; case '\n': av_bprintf(dst, "%s", "\\n"); break; case '\r': av_bprintf(dst, "%s", "\\r"); break; case '\t': av_bprintf(dst, "%s", "\\t"); break; case '\\': case '#' : case '=' : case ':' : av_bprint_chars(dst, '\\', 1); default: if ((unsigned char)c < 32) av_bprintf(dst, "\\x00%02x", c & 0xff); else av_bprint_chars(dst, c, 1); break; } } return dst->str; } static void ini_print_section_header(WriterContext *wctx) { INIContext *ini = wctx->priv; AVBPrint *buf = &wctx->section_pbuf[wctx->level]; const struct section *section = wctx->section[wctx->level]; const struct section *parent_section = wctx->level ? wctx->section[wctx->level-1] : NULL; av_bprint_clear(buf); if (!parent_section) { printf("# ffprobe output\n\n"); return; } if (wctx->nb_item[wctx->level-1]) printf("\n"); av_bprintf(buf, "%s", wctx->section_pbuf[wctx->level-1].str); if (ini->hierarchical || !(section->flags & (SECTION_FLAG_IS_ARRAY|SECTION_FLAG_IS_WRAPPER))) { av_bprintf(buf, "%s%s", buf->str[0] ? "." : "", wctx->section[wctx->level]->name); if (parent_section->flags & SECTION_FLAG_IS_ARRAY) { int n = parent_section->id == SECTION_ID_PACKETS_AND_FRAMES ? 
wctx->nb_section_packet_frame : wctx->nb_item[wctx->level-1]; av_bprintf(buf, ".%d", n); } } if (!(section->flags & (SECTION_FLAG_IS_ARRAY|SECTION_FLAG_IS_WRAPPER))) printf("[%s]\n", buf->str); } static void ini_print_str(WriterContext *wctx, const char *key, const char *value) { AVBPrint buf; av_bprint_init(&buf, 1, AV_BPRINT_SIZE_UNLIMITED); printf("%s=", ini_escape_str(&buf, key)); av_bprint_clear(&buf); printf("%s\n", ini_escape_str(&buf, value)); av_bprint_finalize(&buf, NULL); } static void ini_print_int(WriterContext *wctx, const char *key, long long int value) { printf("%s=%lld\n", key, value); } static const Writer ini_writer = { .name = "ini", .priv_size = sizeof(INIContext), .print_section_header = ini_print_section_header, .print_integer = ini_print_int, .print_string = ini_print_str, .flags = WRITER_FLAG_DISPLAY_OPTIONAL_FIELDS|WRITER_FLAG_PUT_PACKETS_AND_FRAMES_IN_SAME_CHAPTER, .priv_class = &ini_class, }; /* JSON output */ typedef struct JSONContext { const AVClass *class; int indent_level; int compact; const char *item_sep, *item_start_end; } JSONContext; #undef OFFSET #define OFFSET(x) offsetof(JSONContext, x) static const AVOption json_options[]= { { "compact", "enable compact output", OFFSET(compact), AV_OPT_TYPE_INT, {.i64=0}, 0, 1 }, { "c", "enable compact output", OFFSET(compact), AV_OPT_TYPE_INT, {.i64=0}, 0, 1 }, { NULL } }; DEFINE_WRITER_CLASS(json); static av_cold int json_init(WriterContext *wctx) { JSONContext *json = wctx->priv; json->item_sep = json->compact ? ", " : ",\n"; json->item_start_end = json->compact ? " " : "\n"; return 0; } static const char *json_escape_str(AVBPrint *dst, const char *src, void *log_ctx) { static const char json_escape[] = {'"', '\\', '\b', '\f', '\n', '\r', '\t', 0}; static const char json_subst[] = {'"', '\\', 'b', 'f', 'n', 'r', 't', 0}; const char *p; for (p = src; *p; p++) { char *s = strchr(json_escape, *p); if (s) { av_bprint_chars(dst, '\\', 1); av_bprint_chars(dst, json_subst[s - json_escape], 1); } else if ((unsigned char)*p < 32) { av_bprintf(dst, "\\u00%02x", *p & 0xff); } else { av_bprint_chars(dst, *p, 1); } } return dst->str; } #define JSON_INDENT() printf("%*c", json->indent_level * 4, ' ') static void json_print_section_header(WriterContext *wctx) { JSONContext *json = wctx->priv; AVBPrint buf; const struct section *section = wctx->section[wctx->level]; const struct section *parent_section = wctx->level ? 
wctx->section[wctx->level-1] : NULL; if (wctx->level && wctx->nb_item[wctx->level-1]) printf(",\n"); if (section->flags & SECTION_FLAG_IS_WRAPPER) { printf("{\n"); json->indent_level++; } else { av_bprint_init(&buf, 1, AV_BPRINT_SIZE_UNLIMITED); json_escape_str(&buf, section->name, wctx); JSON_INDENT(); json->indent_level++; if (section->flags & SECTION_FLAG_IS_ARRAY) { printf("\"%s\": [\n", buf.str); } else if (parent_section && !(parent_section->flags & SECTION_FLAG_IS_ARRAY)) { printf("\"%s\": {%s", buf.str, json->item_start_end); } else { printf("{%s", json->item_start_end); /* this is required so the parser can distinguish between packets and frames */ if (parent_section && parent_section->id == SECTION_ID_PACKETS_AND_FRAMES) { if (!json->compact) JSON_INDENT(); printf("\"type\": \"%s\"%s", section->name, json->item_sep); } } av_bprint_finalize(&buf, NULL); } } static void json_print_section_footer(WriterContext *wctx) { JSONContext *json = wctx->priv; const struct section *section = wctx->section[wctx->level]; if (wctx->level == 0) { json->indent_level--; printf("\n}\n"); } else if (section->flags & SECTION_FLAG_IS_ARRAY) { printf("\n"); json->indent_level--; JSON_INDENT(); printf("]"); } else { printf("%s", json->item_start_end); json->indent_level--; if (!json->compact) JSON_INDENT(); printf("}"); } } static inline void json_print_item_str(WriterContext *wctx, const char *key, const char *value) { AVBPrint buf; av_bprint_init(&buf, 1, AV_BPRINT_SIZE_UNLIMITED); printf("\"%s\":", json_escape_str(&buf, key, wctx)); av_bprint_clear(&buf); printf(" \"%s\"", json_escape_str(&buf, value, wctx)); av_bprint_finalize(&buf, NULL); } static void json_print_str(WriterContext *wctx, const char *key, const char *value) { JSONContext *json = wctx->priv; if (wctx->nb_item[wctx->level]) printf("%s", json->item_sep); if (!json->compact) JSON_INDENT(); json_print_item_str(wctx, key, value); } static void json_print_int(WriterContext *wctx, const char *key, long long int value) { JSONContext *json = wctx->priv; AVBPrint buf; if (wctx->nb_item[wctx->level]) printf("%s", json->item_sep); if (!json->compact) JSON_INDENT(); av_bprint_init(&buf, 1, AV_BPRINT_SIZE_UNLIMITED); printf("\"%s\": %lld", json_escape_str(&buf, key, wctx), value); av_bprint_finalize(&buf, NULL); } static const Writer json_writer = { .name = "json", .priv_size = sizeof(JSONContext), .init = json_init, .print_section_header = json_print_section_header, .print_section_footer = json_print_section_footer, .print_integer = json_print_int, .print_string = json_print_str, .flags = WRITER_FLAG_PUT_PACKETS_AND_FRAMES_IN_SAME_CHAPTER, .priv_class = &json_class, }; /* XML output */ typedef struct XMLContext { const AVClass *class; int within_tag; int indent_level; int fully_qualified; int xsd_strict; } XMLContext; #undef OFFSET #define OFFSET(x) offsetof(XMLContext, x) static const AVOption xml_options[] = { {"fully_qualified", "specify if the output should be fully qualified", OFFSET(fully_qualified), AV_OPT_TYPE_INT, {.i64=0}, 0, 1 }, {"q", "specify if the output should be fully qualified", OFFSET(fully_qualified), AV_OPT_TYPE_INT, {.i64=0}, 0, 1 }, {"xsd_strict", "ensure that the output is XSD compliant", OFFSET(xsd_strict), AV_OPT_TYPE_INT, {.i64=0}, 0, 1 }, {"x", "ensure that the output is XSD compliant", OFFSET(xsd_strict), AV_OPT_TYPE_INT, {.i64=0}, 0, 1 }, {NULL}, }; DEFINE_WRITER_CLASS(xml); static av_cold int xml_init(WriterContext *wctx) { XMLContext *xml = wctx->priv; if (xml->xsd_strict) { xml->fully_qualified = 1; #define 
CHECK_COMPLIANCE(opt, opt_name) \ if (opt) { \ av_log(wctx, AV_LOG_ERROR, \ "XSD-compliant output selected but option '%s' was selected, XML output may be non-compliant.\n" \ "You need to disable such option with '-no%s'\n", opt_name, opt_name); \ return AVERROR(EINVAL); \ } CHECK_COMPLIANCE(show_private_data, "private"); CHECK_COMPLIANCE(show_value_unit, "unit"); CHECK_COMPLIANCE(use_value_prefix, "prefix"); if (do_show_frames && do_show_packets) { av_log(wctx, AV_LOG_ERROR, "Interleaved frames and packets are not allowed in XSD. " "Select only one between the -show_frames and the -show_packets options.\n"); return AVERROR(EINVAL); } } return 0; } static const char *xml_escape_str(AVBPrint *dst, const char *src, void *log_ctx) { const char *p; for (p = src; *p; p++) { switch (*p) { case '&' : av_bprintf(dst, "%s", "&amp;"); break; case '<' : av_bprintf(dst, "%s", "&lt;"); break; case '>' : av_bprintf(dst, "%s", "&gt;"); break; case '"' : av_bprintf(dst, "%s", "&quot;"); break; case '\'': av_bprintf(dst, "%s", "&apos;"); break; default: av_bprint_chars(dst, *p, 1); } } return dst->str; } #define XML_INDENT() printf("%*c", xml->indent_level * 4, ' ') static void xml_print_section_header(WriterContext *wctx) { XMLContext *xml = wctx->priv; const struct section *section = wctx->section[wctx->level]; const struct section *parent_section = wctx->level ? wctx->section[wctx->level-1] : NULL; if (wctx->level == 0) { const char *qual = " xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance' " "xmlns:ffprobe='http://www.ffmpeg.org/schema/ffprobe' " "xsi:schemaLocation='http://www.ffmpeg.org/schema/ffprobe ffprobe.xsd'"; printf("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"); printf("<%sffprobe%s>\n", xml->fully_qualified ? "ffprobe:" : "", xml->fully_qualified ? qual : ""); return; } if (xml->within_tag) { xml->within_tag = 0; printf(">\n"); } if (section->flags & SECTION_FLAG_HAS_VARIABLE_FIELDS) { xml->indent_level++; } else { if (parent_section && (parent_section->flags & SECTION_FLAG_IS_WRAPPER) && wctx->level && wctx->nb_item[wctx->level-1]) printf("\n"); xml->indent_level++; if (section->flags & SECTION_FLAG_IS_ARRAY) { XML_INDENT(); printf("<%s>\n", section->name); } else { XML_INDENT(); printf("<%s ", section->name); xml->within_tag = 1; } } } static void xml_print_section_footer(WriterContext *wctx) { XMLContext *xml = wctx->priv; const struct section *section = wctx->section[wctx->level]; if (wctx->level == 0) { printf("</%sffprobe>\n", xml->fully_qualified ? 
"ffprobe:" : ""); } else if (xml->within_tag) { xml->within_tag = 0; printf("/>\n"); xml->indent_level--; } else if (section->flags & SECTION_FLAG_HAS_VARIABLE_FIELDS) { xml->indent_level--; } else { XML_INDENT(); printf("</%s>\n", section->name); xml->indent_level--; } } static void xml_print_str(WriterContext *wctx, const char *key, const char *value) { AVBPrint buf; XMLContext *xml = wctx->priv; const struct section *section = wctx->section[wctx->level]; av_bprint_init(&buf, 1, AV_BPRINT_SIZE_UNLIMITED); if (section->flags & SECTION_FLAG_HAS_VARIABLE_FIELDS) { XML_INDENT(); printf("<%s key=\"%s\"", section->element_name, xml_escape_str(&buf, key, wctx)); av_bprint_clear(&buf); printf(" value=\"%s\"/>\n", xml_escape_str(&buf, value, wctx)); } else { if (wctx->nb_item[wctx->level]) printf(" "); printf("%s=\"%s\"", key, xml_escape_str(&buf, value, wctx)); } av_bprint_finalize(&buf, NULL); } static void xml_print_int(WriterContext *wctx, const char *key, long long int value) { if (wctx->nb_item[wctx->level]) printf(" "); printf("%s=\"%lld\"", key, value); } static Writer xml_writer = { .name = "xml", .priv_size = sizeof(XMLContext), .init = xml_init, .print_section_header = xml_print_section_header, .print_section_footer = xml_print_section_footer, .print_integer = xml_print_int, .print_string = xml_print_str, .flags = WRITER_FLAG_PUT_PACKETS_AND_FRAMES_IN_SAME_CHAPTER, .priv_class = &xml_class, }; static void writer_register_all(void) { static int initialized; if (initialized) return; initialized = 1; writer_register(&default_writer); writer_register(&compact_writer); writer_register(&csv_writer); writer_register(&flat_writer); writer_register(&ini_writer); writer_register(&json_writer); writer_register(&xml_writer); } #define print_fmt(k, f, ...) do { \ av_bprint_clear(&pbuf); \ av_bprintf(&pbuf, f, __VA_ARGS__); \ writer_print_string(w, k, pbuf.str, 0); \ } while (0) #define print_int(k, v) writer_print_integer(w, k, v) #define print_q(k, v, s) writer_print_rational(w, k, v, s) #define print_str(k, v) writer_print_string(w, k, v, 0) #define print_str_opt(k, v) writer_print_string(w, k, v, PRINT_STRING_OPT) #define print_str_validate(k, v) writer_print_string(w, k, v, PRINT_STRING_VALIDATE) #define print_time(k, v, tb) writer_print_time(w, k, v, tb, 0) #define print_ts(k, v) writer_print_ts(w, k, v, 0) #define print_duration_time(k, v, tb) writer_print_time(w, k, v, tb, 1) #define print_duration_ts(k, v) writer_print_ts(w, k, v, 1) #define print_val(k, v, u) do { \ struct unit_value uv; \ uv.val.i = v; \ uv.unit = u; \ writer_print_string(w, k, value_string(val_str, sizeof(val_str), uv), 0); \ } while (0) #define print_section_header(s) writer_print_section_header(w, s) #define print_section_footer(s) writer_print_section_footer(w, s) #define REALLOCZ_ARRAY_STREAM(ptr, cur_n, new_n) \ { \ ret = av_reallocp_array(&(ptr), (new_n), sizeof(*(ptr))); \ if (ret < 0) \ goto end; \ memset( (ptr) + (cur_n), 0, ((new_n) - (cur_n)) * sizeof(*(ptr)) ); \ } static inline int show_tags(WriterContext *w, AVDictionary *tags, int section_id) { AVDictionaryEntry *tag = NULL; int ret = 0; if (!tags) return 0; writer_print_section_header(w, section_id); while ((tag = av_dict_get(tags, "", tag, AV_DICT_IGNORE_SUFFIX))) { if ((ret = print_str_validate(tag->key, tag->value)) < 0) break; } writer_print_section_footer(w); return ret; } static void show_packet(WriterContext *w, AVFormatContext *fmt_ctx, AVPacket *pkt, int packet_idx) { char val_str[128]; AVStream *st = fmt_ctx->streams[pkt->stream_index]; AVBPrint 
pbuf; const char *s; av_bprint_init(&pbuf, 1, AV_BPRINT_SIZE_UNLIMITED); writer_print_section_header(w, SECTION_ID_PACKET); s = av_get_media_type_string(st->codec->codec_type); if (s) print_str ("codec_type", s); else print_str_opt("codec_type", "unknown"); print_int("stream_index", pkt->stream_index); print_ts ("pts", pkt->pts); print_time("pts_time", pkt->pts, &st->time_base); print_ts ("dts", pkt->dts); print_time("dts_time", pkt->dts, &st->time_base); print_duration_ts("duration", pkt->duration); print_duration_time("duration_time", pkt->duration, &st->time_base); print_duration_ts("convergence_duration", pkt->convergence_duration); print_duration_time("convergence_duration_time", pkt->convergence_duration, &st->time_base); print_val("size", pkt->size, unit_byte_str); if (pkt->pos != -1) print_fmt ("pos", "%"PRId64, pkt->pos); else print_str_opt("pos", "N/A"); print_fmt("flags", "%c", pkt->flags & AV_PKT_FLAG_KEY ? 'K' : '_'); if (do_show_data) writer_print_data(w, "data", pkt->data, pkt->size); writer_print_data_hash(w, "data_hash", pkt->data, pkt->size); writer_print_section_footer(w); av_bprint_finalize(&pbuf, NULL); fflush(stdout); } static void show_subtitle(WriterContext *w, AVSubtitle *sub, AVStream *stream, AVFormatContext *fmt_ctx) { AVBPrint pbuf; av_bprint_init(&pbuf, 1, AV_BPRINT_SIZE_UNLIMITED); writer_print_section_header(w, SECTION_ID_SUBTITLE); print_str ("media_type", "subtitle"); print_ts ("pts", sub->pts); print_time("pts_time", sub->pts, &AV_TIME_BASE_Q); print_int ("format", sub->format); print_int ("start_display_time", sub->start_display_time); print_int ("end_display_time", sub->end_display_time); print_int ("num_rects", sub->num_rects); writer_print_section_footer(w); av_bprint_finalize(&pbuf, NULL); fflush(stdout); } static void show_frame(WriterContext *w, AVFrame *frame, AVStream *stream, AVFormatContext *fmt_ctx) { AVBPrint pbuf; const char *s; int i; av_bprint_init(&pbuf, 1, AV_BPRINT_SIZE_UNLIMITED); writer_print_section_header(w, SECTION_ID_FRAME); s = av_get_media_type_string(stream->codec->codec_type); if (s) print_str ("media_type", s); else print_str_opt("media_type", "unknown"); print_int("key_frame", frame->key_frame); print_ts ("pkt_pts", frame->pkt_pts); print_time("pkt_pts_time", frame->pkt_pts, &stream->time_base); print_ts ("pkt_dts", frame->pkt_dts); print_time("pkt_dts_time", frame->pkt_dts, &stream->time_base); print_ts ("best_effort_timestamp", av_frame_get_best_effort_timestamp(frame)); print_time("best_effort_timestamp_time", av_frame_get_best_effort_timestamp(frame), &stream->time_base); print_duration_ts ("pkt_duration", av_frame_get_pkt_duration(frame)); print_duration_time("pkt_duration_time", av_frame_get_pkt_duration(frame), &stream->time_base); if (av_frame_get_pkt_pos (frame) != -1) print_fmt ("pkt_pos", "%"PRId64, av_frame_get_pkt_pos(frame)); else print_str_opt("pkt_pos", "N/A"); if (av_frame_get_pkt_size(frame) != -1) print_fmt ("pkt_size", "%d", av_frame_get_pkt_size(frame)); else print_str_opt("pkt_size", "N/A"); switch (stream->codec->codec_type) { AVRational sar; case AVMEDIA_TYPE_VIDEO: print_int("width", frame->width); print_int("height", frame->height); s = av_get_pix_fmt_name(frame->format); if (s) print_str ("pix_fmt", s); else print_str_opt("pix_fmt", "unknown"); sar = av_guess_sample_aspect_ratio(fmt_ctx, stream, frame); if (sar.num) { print_q("sample_aspect_ratio", sar, ':'); } else { print_str_opt("sample_aspect_ratio", "N/A"); } print_fmt("pict_type", "%c", av_get_picture_type_char(frame->pict_type)); 
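        /* The remaining video-only fields below (picture numbers,
         * interlacing flags, repeat_pict) are read straight from the
         * decoded AVFrame; the AVMEDIA_TYPE_AUDIO case that follows
         * reports sample format, sample count and channel layout instead. */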
print_int("coded_picture_number", frame->coded_picture_number); print_int("display_picture_number", frame->display_picture_number); print_int("interlaced_frame", frame->interlaced_frame); print_int("top_field_first", frame->top_field_first); print_int("repeat_pict", frame->repeat_pict); break; case AVMEDIA_TYPE_AUDIO: s = av_get_sample_fmt_name(frame->format); if (s) print_str ("sample_fmt", s); else print_str_opt("sample_fmt", "unknown"); print_int("nb_samples", frame->nb_samples); print_int("channels", av_frame_get_channels(frame)); if (av_frame_get_channel_layout(frame)) { av_bprint_clear(&pbuf); av_bprint_channel_layout(&pbuf, av_frame_get_channels(frame), av_frame_get_channel_layout(frame)); print_str ("channel_layout", pbuf.str); } else print_str_opt("channel_layout", "unknown"); break; } if (do_show_frame_tags) show_tags(w, av_frame_get_metadata(frame), SECTION_ID_FRAME_TAGS); if (frame->nb_side_data) { writer_print_section_header(w, SECTION_ID_FRAME_SIDE_DATA_LIST); for (i = 0; i < frame->nb_side_data; i++) { AVFrameSideData *sd = frame->side_data[i]; const char *name; writer_print_section_header(w, SECTION_ID_FRAME_SIDE_DATA); name = av_frame_side_data_name(sd->type); print_str("side_data_type", name ? name : "unknown"); print_int("side_data_size", sd->size); writer_print_section_footer(w); } writer_print_section_footer(w); } writer_print_section_footer(w); av_bprint_finalize(&pbuf, NULL); fflush(stdout); } static av_always_inline int process_frame(WriterContext *w, AVFormatContext *fmt_ctx, AVFrame *frame, AVPacket *pkt) { AVCodecContext *dec_ctx = fmt_ctx->streams[pkt->stream_index]->codec; AVSubtitle sub; int ret = 0, got_frame = 0; if (dec_ctx->codec) { switch (dec_ctx->codec_type) { case AVMEDIA_TYPE_VIDEO: ret = avcodec_decode_video2(dec_ctx, frame, &got_frame, pkt); break; case AVMEDIA_TYPE_AUDIO: ret = avcodec_decode_audio4(dec_ctx, frame, &got_frame, pkt); break; case AVMEDIA_TYPE_SUBTITLE: ret = avcodec_decode_subtitle2(dec_ctx, &sub, &got_frame, pkt); break; } } if (ret < 0) return ret; ret = FFMIN(ret, pkt->size); /* guard against bogus return values */ pkt->data += ret; pkt->size -= ret; if (got_frame) { int is_sub = (dec_ctx->codec_type == AVMEDIA_TYPE_SUBTITLE); nb_streams_frames[pkt->stream_index]++; if (do_show_frames) if (is_sub) show_subtitle(w, &sub, fmt_ctx->streams[pkt->stream_index], fmt_ctx); else show_frame(w, frame, fmt_ctx->streams[pkt->stream_index], fmt_ctx); if (is_sub) avsubtitle_free(&sub); } return got_frame; } static void log_read_interval(const ReadInterval *interval, void *log_ctx, int log_level) { av_log(log_ctx, log_level, "id:%d", interval->id); if (interval->has_start) { av_log(log_ctx, log_level, " start:%s%s", interval->start_is_offset ? "+" : "", av_ts2timestr(interval->start, &AV_TIME_BASE_Q)); } else { av_log(log_ctx, log_level, " start:N/A"); } if (interval->has_end) { av_log(log_ctx, log_level, " end:%s", interval->end_is_offset ? 
"+" : ""); if (interval->duration_frames) av_log(log_ctx, log_level, "#%"PRId64, interval->end); else av_log(log_ctx, log_level, "%s", av_ts2timestr(interval->end, &AV_TIME_BASE_Q)); } else { av_log(log_ctx, log_level, " end:N/A"); } av_log(log_ctx, log_level, "\n"); } static int read_interval_packets(WriterContext *w, AVFormatContext *fmt_ctx, const ReadInterval *interval, int64_t *cur_ts) { AVPacket pkt, pkt1; AVFrame *frame = NULL; int ret = 0, i = 0, frame_count = 0; int64_t start = -INT64_MAX, end = interval->end; int has_start = 0, has_end = interval->has_end && !interval->end_is_offset; av_init_packet(&pkt); av_log(NULL, AV_LOG_VERBOSE, "Processing read interval "); log_read_interval(interval, NULL, AV_LOG_VERBOSE); if (interval->has_start) { int64_t target; if (interval->start_is_offset) { if (*cur_ts == AV_NOPTS_VALUE) { av_log(NULL, AV_LOG_ERROR, "Could not seek to relative position since current " "timestamp is not defined\n"); ret = AVERROR(EINVAL); goto end; } target = *cur_ts + interval->start; } else { target = interval->start; } av_log(NULL, AV_LOG_VERBOSE, "Seeking to read interval start point %s\n", av_ts2timestr(target, &AV_TIME_BASE_Q)); if ((ret = avformat_seek_file(fmt_ctx, -1, -INT64_MAX, target, INT64_MAX, 0)) < 0) { av_log(NULL, AV_LOG_ERROR, "Could not seek to position %"PRId64": %s\n", interval->start, av_err2str(ret)); goto end; } } frame = av_frame_alloc(); if (!frame) { ret = AVERROR(ENOMEM); goto end; } while (!av_read_frame(fmt_ctx, &pkt)) { if (fmt_ctx->nb_streams > nb_streams) { REALLOCZ_ARRAY_STREAM(nb_streams_frames, nb_streams, fmt_ctx->nb_streams); REALLOCZ_ARRAY_STREAM(nb_streams_packets, nb_streams, fmt_ctx->nb_streams); REALLOCZ_ARRAY_STREAM(selected_streams, nb_streams, fmt_ctx->nb_streams); nb_streams = fmt_ctx->nb_streams; } if (selected_streams[pkt.stream_index]) { AVRational tb = fmt_ctx->streams[pkt.stream_index]->time_base; if (pkt.pts != AV_NOPTS_VALUE) *cur_ts = av_rescale_q(pkt.pts, tb, AV_TIME_BASE_Q); if (!has_start && *cur_ts != AV_NOPTS_VALUE) { start = *cur_ts; has_start = 1; } if (has_start && !has_end && interval->end_is_offset) { end = start + interval->end; has_end = 1; } if (interval->end_is_offset && interval->duration_frames) { if (frame_count >= interval->end) break; } else if (has_end && *cur_ts != AV_NOPTS_VALUE && *cur_ts >= end) { break; } frame_count++; if (do_read_packets) { if (do_show_packets) show_packet(w, fmt_ctx, &pkt, i++); nb_streams_packets[pkt.stream_index]++; } if (do_read_frames) { pkt1 = pkt; while (pkt1.size && process_frame(w, fmt_ctx, frame, &pkt1) > 0); } } av_free_packet(&pkt); } av_init_packet(&pkt); pkt.data = NULL; pkt.size = 0; //Flush remaining frames that are cached in the decoder for (i = 0; i < fmt_ctx->nb_streams; i++) { pkt.stream_index = i; if (do_read_frames) while (process_frame(w, fmt_ctx, frame, &pkt) > 0); } end: av_frame_free(&frame); if (ret < 0) { av_log(NULL, AV_LOG_ERROR, "Could not read packets in interval "); log_read_interval(interval, NULL, AV_LOG_ERROR); } return ret; } static int read_packets(WriterContext *w, AVFormatContext *fmt_ctx) { int i, ret = 0; int64_t cur_ts = fmt_ctx->start_time; if (read_intervals_nb == 0) { ReadInterval interval = (ReadInterval) { .has_start = 0, .has_end = 0 }; ret = read_interval_packets(w, fmt_ctx, &interval, &cur_ts); } else { for (i = 0; i < read_intervals_nb; i++) { ret = read_interval_packets(w, fmt_ctx, &read_intervals[i], &cur_ts); if (ret < 0) break; } } return ret; } static int show_stream(WriterContext *w, AVFormatContext *fmt_ctx, int 
stream_idx, int in_program) { AVStream *stream = fmt_ctx->streams[stream_idx]; AVCodecContext *dec_ctx; const AVCodec *dec; char val_str[128]; const char *s; AVRational sar, dar; AVBPrint pbuf; const AVCodecDescriptor *cd; int ret = 0; av_bprint_init(&pbuf, 1, AV_BPRINT_SIZE_UNLIMITED); writer_print_section_header(w, in_program ? SECTION_ID_PROGRAM_STREAM : SECTION_ID_STREAM); print_int("index", stream->index); if ((dec_ctx = stream->codec)) { const char *profile = NULL; dec = dec_ctx->codec; if (dec) { print_str("codec_name", dec->name); if (!do_bitexact) { if (dec->long_name) print_str ("codec_long_name", dec->long_name); else print_str_opt("codec_long_name", "unknown"); } } else if ((cd = avcodec_descriptor_get(stream->codec->codec_id))) { print_str_opt("codec_name", cd->name); if (!do_bitexact) { print_str_opt("codec_long_name", cd->long_name ? cd->long_name : "unknown"); } } else { print_str_opt("codec_name", "unknown"); if (!do_bitexact) { print_str_opt("codec_long_name", "unknown"); } } if (dec && (profile = av_get_profile_name(dec, dec_ctx->profile))) print_str("profile", profile); else print_str_opt("profile", "unknown"); s = av_get_media_type_string(dec_ctx->codec_type); if (s) print_str ("codec_type", s); else print_str_opt("codec_type", "unknown"); print_q("codec_time_base", dec_ctx->time_base, '/'); /* print AVI/FourCC tag */ av_get_codec_tag_string(val_str, sizeof(val_str), dec_ctx->codec_tag); print_str("codec_tag_string", val_str); print_fmt("codec_tag", "0x%04x", dec_ctx->codec_tag); switch (dec_ctx->codec_type) { case AVMEDIA_TYPE_VIDEO: print_int("width", dec_ctx->width); print_int("height", dec_ctx->height); print_int("has_b_frames", dec_ctx->has_b_frames); sar = av_guess_sample_aspect_ratio(fmt_ctx, stream, NULL); if (sar.den) { print_q("sample_aspect_ratio", sar, ':'); av_reduce(&dar.num, &dar.den, dec_ctx->width * sar.num, dec_ctx->height * sar.den, 1024*1024); print_q("display_aspect_ratio", dar, ':'); } else { print_str_opt("sample_aspect_ratio", "N/A"); print_str_opt("display_aspect_ratio", "N/A"); } s = av_get_pix_fmt_name(dec_ctx->pix_fmt); if (s) print_str ("pix_fmt", s); else print_str_opt("pix_fmt", "unknown"); print_int("level", dec_ctx->level); if (dec_ctx->color_range != AVCOL_RANGE_UNSPECIFIED) print_str ("color_range", av_color_range_name(dec_ctx->color_range)); else print_str_opt("color_range", "N/A"); s = av_get_colorspace_name(dec_ctx->colorspace); if (s) print_str ("color_space", s); else print_str_opt("color_space", "unknown"); if (dec_ctx->color_trc != AVCOL_TRC_UNSPECIFIED) print_str("color_transfer", av_color_transfer_name(dec_ctx->color_trc)); else print_str_opt("color_transfer", av_color_transfer_name(dec_ctx->color_trc)); if (dec_ctx->color_primaries != AVCOL_PRI_UNSPECIFIED) print_str("color_primaries", av_color_primaries_name(dec_ctx->color_primaries)); else print_str_opt("color_primaries", av_color_primaries_name(dec_ctx->color_primaries)); if (dec_ctx->chroma_sample_location != AVCHROMA_LOC_UNSPECIFIED) print_str("chroma_location", av_chroma_location_name(dec_ctx->chroma_sample_location)); else print_str_opt("chroma_location", av_chroma_location_name(dec_ctx->chroma_sample_location)); if (dec_ctx->timecode_frame_start >= 0) { char tcbuf[AV_TIMECODE_STR_SIZE]; av_timecode_make_mpeg_tc_string(tcbuf, dec_ctx->timecode_frame_start); print_str("timecode", tcbuf); } else { print_str_opt("timecode", "N/A"); } print_int("refs", dec_ctx->refs); break; case AVMEDIA_TYPE_AUDIO: s = av_get_sample_fmt_name(dec_ctx->sample_fmt); if (s) print_str 
("sample_fmt", s); else print_str_opt("sample_fmt", "unknown"); print_val("sample_rate", dec_ctx->sample_rate, unit_hertz_str); print_int("channels", dec_ctx->channels); if (dec_ctx->channel_layout) { av_bprint_clear(&pbuf); av_bprint_channel_layout(&pbuf, dec_ctx->channels, dec_ctx->channel_layout); print_str ("channel_layout", pbuf.str); } else { print_str_opt("channel_layout", "unknown"); } print_int("bits_per_sample", av_get_bits_per_sample(dec_ctx->codec_id)); break; case AVMEDIA_TYPE_SUBTITLE: if (dec_ctx->width) print_int("width", dec_ctx->width); else print_str_opt("width", "N/A"); if (dec_ctx->height) print_int("height", dec_ctx->height); else print_str_opt("height", "N/A"); break; } } else { print_str_opt("codec_type", "unknown"); } if (dec_ctx->codec && dec_ctx->codec->priv_class && show_private_data) { const AVOption *opt = NULL; while (opt = av_opt_next(dec_ctx->priv_data,opt)) { uint8_t *str; if (opt->flags) continue; if (av_opt_get(dec_ctx->priv_data, opt->name, 0, &str) >= 0) { print_str(opt->name, str); av_free(str); } } } if (fmt_ctx->iformat->flags & AVFMT_SHOW_IDS) print_fmt ("id", "0x%x", stream->id); else print_str_opt("id", "N/A"); print_q("r_frame_rate", stream->r_frame_rate, '/'); print_q("avg_frame_rate", stream->avg_frame_rate, '/'); print_q("time_base", stream->time_base, '/'); print_ts ("start_pts", stream->start_time); print_time("start_time", stream->start_time, &stream->time_base); print_ts ("duration_ts", stream->duration); print_time("duration", stream->duration, &stream->time_base); if (dec_ctx->bit_rate > 0) print_val ("bit_rate", dec_ctx->bit_rate, unit_bit_per_second_str); else print_str_opt("bit_rate", "N/A"); if (dec_ctx->rc_max_rate > 0) print_val ("max_bit_rate", dec_ctx->rc_max_rate, unit_bit_per_second_str); else print_str_opt("max_bit_rate", "N/A"); if (dec_ctx->bits_per_raw_sample > 0) print_fmt("bits_per_raw_sample", "%d", dec_ctx->bits_per_raw_sample); else print_str_opt("bits_per_raw_sample", "N/A"); if (stream->nb_frames) print_fmt ("nb_frames", "%"PRId64, stream->nb_frames); else print_str_opt("nb_frames", "N/A"); if (nb_streams_frames[stream_idx]) print_fmt ("nb_read_frames", "%"PRIu64, nb_streams_frames[stream_idx]); else print_str_opt("nb_read_frames", "N/A"); if (nb_streams_packets[stream_idx]) print_fmt ("nb_read_packets", "%"PRIu64, nb_streams_packets[stream_idx]); else print_str_opt("nb_read_packets", "N/A"); if (do_show_data) writer_print_data(w, "extradata", dec_ctx->extradata, dec_ctx->extradata_size); writer_print_data_hash(w, "extradata_hash", dec_ctx->extradata, dec_ctx->extradata_size); /* Print disposition information */ #define PRINT_DISPOSITION(flagname, name) do { \ print_int(name, !!(stream->disposition & AV_DISPOSITION_##flagname)); \ } while (0) if (do_show_stream_disposition) { writer_print_section_header(w, in_program ? SECTION_ID_PROGRAM_STREAM_DISPOSITION : SECTION_ID_STREAM_DISPOSITION); PRINT_DISPOSITION(DEFAULT, "default"); PRINT_DISPOSITION(DUB, "dub"); PRINT_DISPOSITION(ORIGINAL, "original"); PRINT_DISPOSITION(COMMENT, "comment"); PRINT_DISPOSITION(LYRICS, "lyrics"); PRINT_DISPOSITION(KARAOKE, "karaoke"); PRINT_DISPOSITION(FORCED, "forced"); PRINT_DISPOSITION(HEARING_IMPAIRED, "hearing_impaired"); PRINT_DISPOSITION(VISUAL_IMPAIRED, "visual_impaired"); PRINT_DISPOSITION(CLEAN_EFFECTS, "clean_effects"); PRINT_DISPOSITION(ATTACHED_PIC, "attached_pic"); writer_print_section_footer(w); } if (do_show_stream_tags) ret = show_tags(w, stream->metadata, in_program ? 
SECTION_ID_PROGRAM_STREAM_TAGS : SECTION_ID_STREAM_TAGS); writer_print_section_footer(w); av_bprint_finalize(&pbuf, NULL); fflush(stdout); return ret; } static int show_streams(WriterContext *w, AVFormatContext *fmt_ctx) { int i, ret = 0; writer_print_section_header(w, SECTION_ID_STREAMS); for (i = 0; i < fmt_ctx->nb_streams; i++) if (selected_streams[i]) { ret = show_stream(w, fmt_ctx, i, 0); if (ret < 0) break; } writer_print_section_footer(w); return ret; } static int show_program(WriterContext *w, AVFormatContext *fmt_ctx, AVProgram *program) { int i, ret = 0; writer_print_section_header(w, SECTION_ID_PROGRAM); print_int("program_id", program->id); print_int("program_num", program->program_num); print_int("nb_streams", program->nb_stream_indexes); print_int("pmt_pid", program->pmt_pid); print_int("pcr_pid", program->pcr_pid); print_ts("start_pts", program->start_time); print_time("start_time", program->start_time, &AV_TIME_BASE_Q); print_ts("end_pts", program->end_time); print_time("end_time", program->end_time, &AV_TIME_BASE_Q); if (do_show_program_tags) ret = show_tags(w, program->metadata, SECTION_ID_PROGRAM_TAGS); if (ret < 0) goto end; writer_print_section_header(w, SECTION_ID_PROGRAM_STREAMS); for (i = 0; i < program->nb_stream_indexes; i++) { if (selected_streams[program->stream_index[i]]) { ret = show_stream(w, fmt_ctx, program->stream_index[i], 1); if (ret < 0) break; } } writer_print_section_footer(w); end: writer_print_section_footer(w); return ret; } static int show_programs(WriterContext *w, AVFormatContext *fmt_ctx) { int i, ret = 0; writer_print_section_header(w, SECTION_ID_PROGRAMS); for (i = 0; i < fmt_ctx->nb_programs; i++) { AVProgram *program = fmt_ctx->programs[i]; if (!program) continue; ret = show_program(w, fmt_ctx, program); if (ret < 0) break; } writer_print_section_footer(w); return ret; } static int show_chapters(WriterContext *w, AVFormatContext *fmt_ctx) { int i, ret = 0; writer_print_section_header(w, SECTION_ID_CHAPTERS); for (i = 0; i < fmt_ctx->nb_chapters; i++) { AVChapter *chapter = fmt_ctx->chapters[i]; writer_print_section_header(w, SECTION_ID_CHAPTER); print_int("id", chapter->id); print_q ("time_base", chapter->time_base, '/'); print_int("start", chapter->start); print_time("start_time", chapter->start, &chapter->time_base); print_int("end", chapter->end); print_time("end_time", chapter->end, &chapter->time_base); if (do_show_chapter_tags) ret = show_tags(w, chapter->metadata, SECTION_ID_CHAPTER_TAGS); writer_print_section_footer(w); } writer_print_section_footer(w); return ret; } static int show_format(WriterContext *w, AVFormatContext *fmt_ctx) { char val_str[128]; int64_t size = fmt_ctx->pb ? 
avio_size(fmt_ctx->pb) : -1; int ret = 0; writer_print_section_header(w, SECTION_ID_FORMAT); print_str_validate("filename", fmt_ctx->filename); print_int("nb_streams", fmt_ctx->nb_streams); print_int("nb_programs", fmt_ctx->nb_programs); print_str("format_name", fmt_ctx->iformat->name); if (!do_bitexact) { if (fmt_ctx->iformat->long_name) print_str ("format_long_name", fmt_ctx->iformat->long_name); else print_str_opt("format_long_name", "unknown"); } print_time("start_time", fmt_ctx->start_time, &AV_TIME_BASE_Q); print_time("duration", fmt_ctx->duration, &AV_TIME_BASE_Q); if (size >= 0) print_val ("size", size, unit_byte_str); else print_str_opt("size", "N/A"); if (fmt_ctx->bit_rate > 0) print_val ("bit_rate", fmt_ctx->bit_rate, unit_bit_per_second_str); else print_str_opt("bit_rate", "N/A"); print_int("probe_score", av_format_get_probe_score(fmt_ctx)); if (do_show_format_tags) ret = show_tags(w, fmt_ctx->metadata, SECTION_ID_FORMAT_TAGS); writer_print_section_footer(w); fflush(stdout); return ret; } static void show_error(WriterContext *w, int err) { char errbuf[128]; const char *errbuf_ptr = errbuf; if (av_strerror(err, errbuf, sizeof(errbuf)) < 0) errbuf_ptr = strerror(AVUNERROR(err)); writer_print_section_header(w, SECTION_ID_ERROR); print_int("code", err); print_str("string", errbuf_ptr); writer_print_section_footer(w); } static int open_input_file(AVFormatContext **fmt_ctx_ptr, const char *filename) { int err, i, orig_nb_streams; AVFormatContext *fmt_ctx = NULL; AVDictionaryEntry *t; AVDictionary **opts; int scan_all_pmts_set = 0; if (!av_dict_get(format_opts, "scan_all_pmts", NULL, AV_DICT_MATCH_CASE)) { av_dict_set(&format_opts, "scan_all_pmts", "1", AV_DICT_DONT_OVERWRITE); scan_all_pmts_set = 1; } if ((err = avformat_open_input(&fmt_ctx, filename, iformat, &format_opts)) < 0) { print_error(filename, err); return err; } *fmt_ctx_ptr = fmt_ctx; if (scan_all_pmts_set) av_dict_set(&format_opts, "scan_all_pmts", NULL, AV_DICT_MATCH_CASE); if ((t = av_dict_get(format_opts, "", NULL, AV_DICT_IGNORE_SUFFIX))) { av_log(NULL, AV_LOG_ERROR, "Option %s not found.\n", t->key); return AVERROR_OPTION_NOT_FOUND; } /* fill the streams in the format context */ opts = setup_find_stream_info_opts(fmt_ctx, codec_opts); orig_nb_streams = fmt_ctx->nb_streams; err = avformat_find_stream_info(fmt_ctx, opts); for (i = 0; i < orig_nb_streams; i++) av_dict_free(&opts[i]); av_freep(&opts); if (err < 0) { print_error(filename, err); return err; } av_dump_format(fmt_ctx, 0, filename, 0); /* bind a decoder to each input stream */ for (i = 0; i < fmt_ctx->nb_streams; i++) { AVStream *stream = fmt_ctx->streams[i]; AVCodec *codec; if (stream->codec->codec_id == AV_CODEC_ID_PROBE) { av_log(NULL, AV_LOG_WARNING, "Failed to probe codec for input stream %d\n", stream->index); } else if (!(codec = avcodec_find_decoder(stream->codec->codec_id))) { av_log(NULL, AV_LOG_WARNING, "Unsupported codec with id %d for input stream %d\n", stream->codec->codec_id, stream->index); } else { AVDictionary *opts = filter_codec_opts(codec_opts, stream->codec->codec_id, fmt_ctx, stream, codec); if (avcodec_open2(stream->codec, codec, &opts) < 0) { av_log(NULL, AV_LOG_WARNING, "Could not open codec for input stream %d\n", stream->index); } if ((t = av_dict_get(opts, "", NULL, AV_DICT_IGNORE_SUFFIX))) { av_log(NULL, AV_LOG_ERROR, "Option %s for input stream %d not found\n", t->key, stream->index); return AVERROR_OPTION_NOT_FOUND; } } } *fmt_ctx_ptr = fmt_ctx; return 0; } static void close_input_file(AVFormatContext **ctx_ptr) { int i; 
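    /* Close the decoder context of every stream that has a codec id set,
     * then release the demuxer itself with avformat_close_input(); this
     * mirrors the decoder setup done in open_input_file(). */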
AVFormatContext *fmt_ctx = *ctx_ptr; /* close decoder for each stream */ for (i = 0; i < fmt_ctx->nb_streams; i++) if (fmt_ctx->streams[i]->codec->codec_id != AV_CODEC_ID_NONE) avcodec_close(fmt_ctx->streams[i]->codec); avformat_close_input(ctx_ptr); } static int probe_file(WriterContext *wctx, const char *filename) { AVFormatContext *fmt_ctx = NULL; int ret, i; int section_id; do_read_frames = do_show_frames || do_count_frames; do_read_packets = do_show_packets || do_count_packets; ret = open_input_file(&fmt_ctx, filename); if (ret < 0) goto end; #define CHECK_END if (ret < 0) goto end nb_streams = fmt_ctx->nb_streams; REALLOCZ_ARRAY_STREAM(nb_streams_frames,0,fmt_ctx->nb_streams); REALLOCZ_ARRAY_STREAM(nb_streams_packets,0,fmt_ctx->nb_streams); REALLOCZ_ARRAY_STREAM(selected_streams,0,fmt_ctx->nb_streams); for (i = 0; i < fmt_ctx->nb_streams; i++) { if (stream_specifier) { ret = avformat_match_stream_specifier(fmt_ctx, fmt_ctx->streams[i], stream_specifier); CHECK_END; else selected_streams[i] = ret; ret = 0; } else { selected_streams[i] = 1; } } if (do_read_frames || do_read_packets) { if (do_show_frames && do_show_packets && wctx->writer->flags & WRITER_FLAG_PUT_PACKETS_AND_FRAMES_IN_SAME_CHAPTER) section_id = SECTION_ID_PACKETS_AND_FRAMES; else if (do_show_packets && !do_show_frames) section_id = SECTION_ID_PACKETS; else // (!do_show_packets && do_show_frames) section_id = SECTION_ID_FRAMES; if (do_show_frames || do_show_packets) writer_print_section_header(wctx, section_id); ret = read_packets(wctx, fmt_ctx); if (do_show_frames || do_show_packets) writer_print_section_footer(wctx); CHECK_END; } if (do_show_programs) { ret = show_programs(wctx, fmt_ctx); CHECK_END; } if (do_show_streams) { ret = show_streams(wctx, fmt_ctx); CHECK_END; } if (do_show_chapters) { ret = show_chapters(wctx, fmt_ctx); CHECK_END; } if (do_show_format) { ret = show_format(wctx, fmt_ctx); CHECK_END; } end: if (fmt_ctx) close_input_file(&fmt_ctx); av_freep(&nb_streams_frames); av_freep(&nb_streams_packets); av_freep(&selected_streams); return ret; } static void show_usage(void) { av_log(NULL, AV_LOG_INFO, "Simple multimedia streams analyzer\n"); av_log(NULL, AV_LOG_INFO, "usage: %s [OPTIONS] [INPUT_FILE]\n", program_name); av_log(NULL, AV_LOG_INFO, "\n"); } static void ffprobe_show_program_version(WriterContext *w) { AVBPrint pbuf; av_bprint_init(&pbuf, 1, AV_BPRINT_SIZE_UNLIMITED); writer_print_section_header(w, SECTION_ID_PROGRAM_VERSION); print_str("version", FFMPEG_VERSION); print_fmt("copyright", "Copyright (c) %d-%d the FFmpeg developers", program_birth_year, CONFIG_THIS_YEAR); print_str("compiler_ident", CC_IDENT); print_str("configuration", FFMPEG_CONFIGURATION); writer_print_section_footer(w); av_bprint_finalize(&pbuf, NULL); } #define SHOW_LIB_VERSION(libname, LIBNAME) \ do { \ if (CONFIG_##LIBNAME) { \ unsigned int version = libname##_version(); \ writer_print_section_header(w, SECTION_ID_LIBRARY_VERSION); \ print_str("name", "lib" #libname); \ print_int("major", LIB##LIBNAME##_VERSION_MAJOR); \ print_int("minor", LIB##LIBNAME##_VERSION_MINOR); \ print_int("micro", LIB##LIBNAME##_VERSION_MICRO); \ print_int("version", version); \ print_str("ident", LIB##LIBNAME##_IDENT); \ writer_print_section_footer(w); \ } \ } while (0) static void ffprobe_show_library_versions(WriterContext *w) { writer_print_section_header(w, SECTION_ID_LIBRARY_VERSIONS); SHOW_LIB_VERSION(avutil, AVUTIL); SHOW_LIB_VERSION(avcodec, AVCODEC); SHOW_LIB_VERSION(avformat, AVFORMAT); SHOW_LIB_VERSION(avdevice, AVDEVICE); 
SHOW_LIB_VERSION(avfilter, AVFILTER); SHOW_LIB_VERSION(swscale, SWSCALE); SHOW_LIB_VERSION(swresample, SWRESAMPLE); SHOW_LIB_VERSION(postproc, POSTPROC); writer_print_section_footer(w); } #define PRINT_PIX_FMT_FLAG(flagname, name) \ do { \ print_int(name, !!(pixdesc->flags & AV_PIX_FMT_FLAG_##flagname)); \ } while (0) static void ffprobe_show_pixel_formats(WriterContext *w) { const AVPixFmtDescriptor *pixdesc = NULL; int i, n; writer_print_section_header(w, SECTION_ID_PIXEL_FORMATS); while (pixdesc = av_pix_fmt_desc_next(pixdesc)) { writer_print_section_header(w, SECTION_ID_PIXEL_FORMAT); print_str("name", pixdesc->name); print_int("nb_components", pixdesc->nb_components); if ((pixdesc->nb_components >= 3) && !(pixdesc->flags & AV_PIX_FMT_FLAG_RGB)) { print_int ("log2_chroma_w", pixdesc->log2_chroma_w); print_int ("log2_chroma_h", pixdesc->log2_chroma_h); } else { print_str_opt("log2_chroma_w", "N/A"); print_str_opt("log2_chroma_h", "N/A"); } n = av_get_bits_per_pixel(pixdesc); if (n) print_int ("bits_per_pixel", n); else print_str_opt("bits_per_pixel", "N/A"); if (do_show_pixel_format_flags) { writer_print_section_header(w, SECTION_ID_PIXEL_FORMAT_FLAGS); PRINT_PIX_FMT_FLAG(BE, "big_endian"); PRINT_PIX_FMT_FLAG(PAL, "palette"); PRINT_PIX_FMT_FLAG(BITSTREAM, "bitstream"); PRINT_PIX_FMT_FLAG(HWACCEL, "hwaccel"); PRINT_PIX_FMT_FLAG(PLANAR, "planar"); PRINT_PIX_FMT_FLAG(RGB, "rgb"); PRINT_PIX_FMT_FLAG(PSEUDOPAL, "pseudopal"); PRINT_PIX_FMT_FLAG(ALPHA, "alpha"); writer_print_section_footer(w); } if (do_show_pixel_format_components && (pixdesc->nb_components > 0)) { writer_print_section_header(w, SECTION_ID_PIXEL_FORMAT_COMPONENTS); for (i = 0; i < pixdesc->nb_components; i++) { writer_print_section_header(w, SECTION_ID_PIXEL_FORMAT_COMPONENT); print_int("index", i + 1); print_int("bit_depth", pixdesc->comp[i].depth_minus1 + 1); writer_print_section_footer(w); } writer_print_section_footer(w); } writer_print_section_footer(w); } writer_print_section_footer(w); } static int opt_format(void *optctx, const char *opt, const char *arg) { iformat = av_find_input_format(arg); if (!iformat) { av_log(NULL, AV_LOG_ERROR, "Unknown input format: %s\n", arg); return AVERROR(EINVAL); } return 0; } static inline void mark_section_show_entries(SectionID section_id, int show_all_entries, AVDictionary *entries) { struct section *section = &sections[section_id]; section->show_all_entries = show_all_entries; if (show_all_entries) { SectionID *id; for (id = section->children_ids; *id != -1; id++) mark_section_show_entries(*id, show_all_entries, entries); } else { av_dict_copy(&section->entries_to_show, entries, 0); } } static int match_section(const char *section_name, int show_all_entries, AVDictionary *entries) { int i, ret = 0; for (i = 0; i < FF_ARRAY_ELEMS(sections); i++) { const struct section *section = &sections[i]; if (!strcmp(section_name, section->name) || (section->unique_name && !strcmp(section_name, section->unique_name))) { av_log(NULL, AV_LOG_DEBUG, "'%s' matches section with unique name '%s'\n", section_name, (char *)av_x_if_null(section->unique_name, section->name)); ret++; mark_section_show_entries(section->id, show_all_entries, entries); } } return ret; } static int opt_show_entries(void *optctx, const char *opt, const char *arg) { const char *p = arg; int ret = 0; while (*p) { AVDictionary *entries = NULL; char *section_name = av_get_token(&p, "=:"); int show_all_entries = 0; if (!section_name) { av_log(NULL, AV_LOG_ERROR, "Missing section name for option '%s'\n", opt); return AVERROR(EINVAL); 
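        /* Each pass of the enclosing while loop consumes one element of the
         * -show_entries argument, which has the shape
         * SECTION[=[ENTRY[,ENTRY...]]][:SECTION...]; a hypothetical example
         * would be "-show_entries format=duration:stream=codec_name". */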
} if (*p == '=') { p++; while (*p && *p != ':') { char *entry = av_get_token(&p, ",:"); if (!entry) break; av_log(NULL, AV_LOG_VERBOSE, "Adding '%s' to the entries to show in section '%s'\n", entry, section_name); av_dict_set(&entries, entry, "", AV_DICT_DONT_STRDUP_KEY); if (*p == ',') p++; } } else { show_all_entries = 1; } ret = match_section(section_name, show_all_entries, entries); if (ret == 0) { av_log(NULL, AV_LOG_ERROR, "No match for section '%s'\n", section_name); ret = AVERROR(EINVAL); } av_dict_free(&entries); av_free(section_name); if (ret <= 0) break; if (*p) p++; } return ret; } static int opt_show_format_entry(void *optctx, const char *opt, const char *arg) { char *buf = av_asprintf("format=%s", arg); int ret; av_log(NULL, AV_LOG_WARNING, "Option '%s' is deprecated, use '-show_entries format=%s' instead\n", opt, arg); ret = opt_show_entries(optctx, opt, buf); av_free(buf); return ret; } static void opt_input_file(void *optctx, const char *arg) { if (input_filename) { av_log(NULL, AV_LOG_ERROR, "Argument '%s' provided as input filename, but '%s' was already specified.\n", arg, input_filename); exit_program(1); } if (!strcmp(arg, "-")) arg = "pipe:"; input_filename = arg; } static int opt_input_file_i(void *optctx, const char *opt, const char *arg) { opt_input_file(optctx, arg); return 0; } void show_help_default(const char *opt, const char *arg) { av_log_set_callback(log_callback_help); show_usage(); show_help_options(options, "Main options:", 0, 0, 0); printf("\n"); show_help_children(avformat_get_class(), AV_OPT_FLAG_DECODING_PARAM); } /** * Parse interval specification, according to the format: * INTERVAL ::= [START|+START_OFFSET][%[END|+END_OFFSET]] * INTERVALS ::= INTERVAL[,INTERVALS] */ static int parse_read_interval(const char *interval_spec, ReadInterval *interval) { int ret = 0; char *next, *p, *spec = av_strdup(interval_spec); if (!spec) return AVERROR(ENOMEM); if (!*spec) { av_log(NULL, AV_LOG_ERROR, "Invalid empty interval specification\n"); ret = AVERROR(EINVAL); goto end; } p = spec; next = strchr(spec, '%'); if (next) *next++ = 0; /* parse first part */ if (*p) { interval->has_start = 1; if (*p == '+') { interval->start_is_offset = 1; p++; } else { interval->start_is_offset = 0; } ret = av_parse_time(&interval->start, p, 1); if (ret < 0) { av_log(NULL, AV_LOG_ERROR, "Invalid interval start specification '%s'\n", p); goto end; } } else { interval->has_start = 0; } /* parse second part */ p = next; if (p && *p) { int64_t us; interval->has_end = 1; if (*p == '+') { interval->end_is_offset = 1; p++; } else { interval->end_is_offset = 0; } if (interval->end_is_offset && *p == '#') { long long int lli; char *tail; interval->duration_frames = 1; p++; lli = strtoll(p, &tail, 10); if (*tail || lli < 0) { av_log(NULL, AV_LOG_ERROR, "Invalid or negative value '%s' for duration number of frames\n", p); goto end; } interval->end = lli; } else { ret = av_parse_time(&us, p, 1); if (ret < 0) { av_log(NULL, AV_LOG_ERROR, "Invalid interval end/duration specification '%s'\n", p); goto end; } interval->end = us; } } else { interval->has_end = 0; } end: av_free(spec); return ret; } static int parse_read_intervals(const char *intervals_spec) { int ret, n, i; char *p, *spec = av_strdup(intervals_spec); if (!spec) return AVERROR(ENOMEM); /* preparse specification, get number of intervals */ for (n = 0, p = spec; *p; p++) if (*p == ',') n++; n++; read_intervals = av_malloc_array(n, sizeof(*read_intervals)); if (!read_intervals) { ret = AVERROR(ENOMEM); goto end; } read_intervals_nb = 
n; /* parse intervals */ p = spec; for (i = 0; p; i++) { char *next; av_assert0(i < read_intervals_nb); next = strchr(p, ','); if (next) *next++ = 0; read_intervals[i].id = i; ret = parse_read_interval(p, &read_intervals[i]); if (ret < 0) { av_log(NULL, AV_LOG_ERROR, "Error parsing read interval #%d '%s'\n", i, p); goto end; } av_log(NULL, AV_LOG_VERBOSE, "Parsed log interval "); log_read_interval(&read_intervals[i], NULL, AV_LOG_VERBOSE); p = next; } av_assert0(i == read_intervals_nb); end: av_free(spec); return ret; } static int opt_read_intervals(void *optctx, const char *opt, const char *arg) { return parse_read_intervals(arg); } static int opt_pretty(void *optctx, const char *opt, const char *arg) { show_value_unit = 1; use_value_prefix = 1; use_byte_value_binary_prefix = 1; use_value_sexagesimal_format = 1; return 0; } static void print_section(SectionID id, int level) { const SectionID *pid; const struct section *section = &sections[id]; printf("%c%c%c", section->flags & SECTION_FLAG_IS_WRAPPER ? 'W' : '.', section->flags & SECTION_FLAG_IS_ARRAY ? 'A' : '.', section->flags & SECTION_FLAG_HAS_VARIABLE_FIELDS ? 'V' : '.'); printf("%*c %s", level * 4, ' ', section->name); if (section->unique_name) printf("/%s", section->unique_name); printf("\n"); for (pid = section->children_ids; *pid != -1; pid++) print_section(*pid, level+1); } static int opt_sections(void *optctx, const char *opt, const char *arg) { printf("Sections:\n" "W.. = Section is a wrapper (contains other sections, no local entries)\n" ".A. = Section contains an array of elements of the same type\n" "..V = Section may contain a variable number of fields with variable keys\n" "FLAGS NAME/UNIQUE_NAME\n" "---\n"); print_section(SECTION_ID_ROOT, 0); return 0; } static int opt_show_versions(const char *opt, const char *arg) { mark_section_show_entries(SECTION_ID_PROGRAM_VERSION, 1, NULL); mark_section_show_entries(SECTION_ID_LIBRARY_VERSION, 1, NULL); return 0; } #define DEFINE_OPT_SHOW_SECTION(section, target_section_id) \ static int opt_show_##section(const char *opt, const char *arg) \ { \ mark_section_show_entries(SECTION_ID_##target_section_id, 1, NULL); \ return 0; \ } DEFINE_OPT_SHOW_SECTION(chapters, CHAPTERS); DEFINE_OPT_SHOW_SECTION(error, ERROR); DEFINE_OPT_SHOW_SECTION(format, FORMAT); DEFINE_OPT_SHOW_SECTION(frames, FRAMES); DEFINE_OPT_SHOW_SECTION(library_versions, LIBRARY_VERSIONS); DEFINE_OPT_SHOW_SECTION(packets, PACKETS); DEFINE_OPT_SHOW_SECTION(pixel_formats, PIXEL_FORMATS); DEFINE_OPT_SHOW_SECTION(program_version, PROGRAM_VERSION); DEFINE_OPT_SHOW_SECTION(streams, STREAMS); DEFINE_OPT_SHOW_SECTION(programs, PROGRAMS); static const OptionDef real_options[] = { #include "cmdutils_common_opts.h" { "f", HAS_ARG, {.func_arg = opt_format}, "force format", "format" }, { "unit", OPT_BOOL, {&show_value_unit}, "show unit of the displayed values" }, { "prefix", OPT_BOOL, {&use_value_prefix}, "use SI prefixes for the displayed values" }, { "byte_binary_prefix", OPT_BOOL, {&use_byte_value_binary_prefix}, "use binary prefixes for byte units" }, { "sexagesimal", OPT_BOOL, {&use_value_sexagesimal_format}, "use sexagesimal format HOURS:MM:SS.MICROSECONDS for time units" }, { "pretty", 0, {.func_arg = opt_pretty}, "prettify the format of displayed values, make it more human readable" }, { "print_format", OPT_STRING | HAS_ARG, {(void*)&print_format}, "set the output printing format (available formats are: default, compact, csv, flat, ini, json, xml)", "format" }, { "of", OPT_STRING | HAS_ARG, {(void*)&print_format}, "alias for 
-print_format", "format" }, { "select_streams", OPT_STRING | HAS_ARG, {(void*)&stream_specifier}, "select the specified streams", "stream_specifier" }, { "sections", OPT_EXIT, {.func_arg = opt_sections}, "print sections structure and section information, and exit" }, { "show_data", OPT_BOOL, {(void*)&do_show_data}, "show packets data" }, { "show_data_hash", OPT_STRING | HAS_ARG, {(void*)&show_data_hash}, "show packets data hash" }, { "show_error", 0, {(void*)&opt_show_error}, "show probing error" }, { "show_format", 0, {(void*)&opt_show_format}, "show format/container info" }, { "show_frames", 0, {(void*)&opt_show_frames}, "show frames info" }, { "show_format_entry", HAS_ARG, {.func_arg = opt_show_format_entry}, "show a particular entry from the format/container info", "entry" }, { "show_entries", HAS_ARG, {.func_arg = opt_show_entries}, "show a set of specified entries", "entry_list" }, { "show_packets", 0, {(void*)&opt_show_packets}, "show packets info" }, { "show_programs", 0, {(void*)&opt_show_programs}, "show programs info" }, { "show_streams", 0, {(void*)&opt_show_streams}, "show streams info" }, { "show_chapters", 0, {(void*)&opt_show_chapters}, "show chapters info" }, { "count_frames", OPT_BOOL, {(void*)&do_count_frames}, "count the number of frames per stream" }, { "count_packets", OPT_BOOL, {(void*)&do_count_packets}, "count the number of packets per stream" }, { "show_program_version", 0, {(void*)&opt_show_program_version}, "show ffprobe version" }, { "show_library_versions", 0, {(void*)&opt_show_library_versions}, "show library versions" }, { "show_versions", 0, {(void*)&opt_show_versions}, "show program and library versions" }, { "show_pixel_formats", 0, {(void*)&opt_show_pixel_formats}, "show pixel format descriptions" }, { "show_private_data", OPT_BOOL, {(void*)&show_private_data}, "show private data" }, { "private", OPT_BOOL, {(void*)&show_private_data}, "same as show_private_data" }, { "bitexact", OPT_BOOL, {&do_bitexact}, "force bitexact output" }, { "read_intervals", HAS_ARG, {.func_arg = opt_read_intervals}, "set read intervals", "read_intervals" }, { "default", HAS_ARG | OPT_AUDIO | OPT_VIDEO | OPT_EXPERT, {.func_arg = opt_default}, "generic catch all option", "" }, { "i", HAS_ARG, {.func_arg = opt_input_file_i}, "read specified file", "input_file"}, { NULL, }, }; static inline int check_section_show_entries(int section_id) { int *id; struct section *section = &sections[section_id]; if (sections[section_id].show_all_entries || sections[section_id].entries_to_show) return 1; for (id = section->children_ids; *id != -1; id++) if (check_section_show_entries(*id)) return 1; return 0; } #define SET_DO_SHOW(id, varname) do { \ if (check_section_show_entries(SECTION_ID_##id)) \ do_show_##varname = 1; \ } while (0) int main(int argc, char **argv) { const Writer *w; WriterContext *wctx; char *buf; char *w_name = NULL, *w_args = NULL; int ret, i; av_log_set_flags(AV_LOG_SKIP_REPEATED); register_exit(ffprobe_cleanup); options = real_options; parse_loglevel(argc, argv, options); av_register_all(); avformat_network_init(); init_opts(); #if CONFIG_AVDEVICE avdevice_register_all(); #endif show_banner(argc, argv, options); parse_options(NULL, argc, argv, options, opt_input_file); /* mark things to show, based on -show_entries */ SET_DO_SHOW(CHAPTERS, chapters); SET_DO_SHOW(ERROR, error); SET_DO_SHOW(FORMAT, format); SET_DO_SHOW(FRAMES, frames); SET_DO_SHOW(LIBRARY_VERSIONS, library_versions); SET_DO_SHOW(PACKETS, packets); SET_DO_SHOW(PIXEL_FORMATS, pixel_formats); 
SET_DO_SHOW(PIXEL_FORMAT_FLAGS, pixel_format_flags); SET_DO_SHOW(PIXEL_FORMAT_COMPONENTS, pixel_format_components); SET_DO_SHOW(PROGRAM_VERSION, program_version); SET_DO_SHOW(PROGRAMS, programs); SET_DO_SHOW(STREAMS, streams); SET_DO_SHOW(STREAM_DISPOSITION, stream_disposition); SET_DO_SHOW(PROGRAM_STREAM_DISPOSITION, stream_disposition); SET_DO_SHOW(CHAPTER_TAGS, chapter_tags); SET_DO_SHOW(FORMAT_TAGS, format_tags); SET_DO_SHOW(FRAME_TAGS, frame_tags); SET_DO_SHOW(PROGRAM_TAGS, program_tags); SET_DO_SHOW(STREAM_TAGS, stream_tags); if (do_bitexact && (do_show_program_version || do_show_library_versions)) { av_log(NULL, AV_LOG_ERROR, "-bitexact and -show_program_version or -show_library_versions " "options are incompatible\n"); ret = AVERROR(EINVAL); goto end; } writer_register_all(); if (!print_format) print_format = av_strdup("default"); if (!print_format) { ret = AVERROR(ENOMEM); goto end; } w_name = av_strtok(print_format, "=", &buf); w_args = buf; if (show_data_hash) { if ((ret = av_hash_alloc(&hash, show_data_hash)) < 0) { if (ret == AVERROR(EINVAL)) { const char *n; av_log(NULL, AV_LOG_ERROR, "Unknown hash algorithm '%s'\nKnown algorithms:", show_data_hash); for (i = 0; (n = av_hash_names(i)); i++) av_log(NULL, AV_LOG_ERROR, " %s", n); av_log(NULL, AV_LOG_ERROR, "\n"); } goto end; } } w = writer_get_by_name(w_name); if (!w) { av_log(NULL, AV_LOG_ERROR, "Unknown output format with name '%s'\n", w_name); ret = AVERROR(EINVAL); goto end; } if ((ret = writer_open(&wctx, w, w_args, sections, FF_ARRAY_ELEMS(sections))) >= 0) { if (w == &xml_writer) wctx->string_validation_utf8_flags |= AV_UTF8_FLAG_EXCLUDE_XML_INVALID_CONTROL_CODES; writer_print_section_header(wctx, SECTION_ID_ROOT); if (do_show_program_version) ffprobe_show_program_version(wctx); if (do_show_library_versions) ffprobe_show_library_versions(wctx); if (do_show_pixel_formats) ffprobe_show_pixel_formats(wctx); if (!input_filename && ((do_show_format || do_show_programs || do_show_streams || do_show_chapters || do_show_packets || do_show_error) || (!do_show_program_version && !do_show_library_versions && !do_show_pixel_formats))) { show_usage(); av_log(NULL, AV_LOG_ERROR, "You have to specify one input file.\n"); av_log(NULL, AV_LOG_ERROR, "Use -h to get full help or, even better, run 'man %s'.\n", program_name); ret = AVERROR(EINVAL); } else if (input_filename) { ret = probe_file(wctx, input_filename); if (ret < 0 && do_show_error) show_error(wctx, ret); } writer_print_section_footer(wctx); writer_close(&wctx); } end: av_freep(&print_format); av_freep(&read_intervals); av_hash_freep(&hash); uninit_opts(); for (i = 0; i < FF_ARRAY_ELEMS(sections); i++) av_dict_free(&(sections[i].entries_to_show)); avformat_network_deinit(); return ret < 0; }
{ "content_hash": "0953925b28bd3e4576ea373a0eee70aa", "timestamp": "", "source": "github", "line_count": 3163, "max_line_length": 203, "avg_line_length": 36.836863736958584, "alnum_prop": 0.5623052825816418, "repo_name": "APCVSRepo/hmi_sdk_346_wince", "id": "1227f59c0bfff39635b32ff66339bd9b6a7188f2", "size": "117324", "binary": false, "copies": "7", "ref": "refs/heads/master", "path": "hmi_sdk/Tools/ffmpeg-2.6.2/ffprobe.c", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "Assembly", "bytes": "2448214" }, { "name": "Batchfile", "bytes": "4584" }, { "name": "C", "bytes": "35974832" }, { "name": "C++", "bytes": "1185931" }, { "name": "CSS", "bytes": "2494" }, { "name": "Groff", "bytes": "132" }, { "name": "HTML", "bytes": "1616" }, { "name": "Makefile", "bytes": "456667" }, { "name": "Objective-C", "bytes": "49497" }, { "name": "Perl", "bytes": "37891" }, { "name": "Python", "bytes": "1743" }, { "name": "QMake", "bytes": "5832" }, { "name": "Shell", "bytes": "59733" }, { "name": "Verilog", "bytes": "1335" } ], "symlink_target": "" }
.. _publishing:

Publishing in GLAMkit
=====================

The publishing system used in GLAMkit is a re-implementation of concepts and
code from
```django-model-publisher`` <https://github.com/jp74/django-model-publisher>`__,
though heavily customised for our purposes. It has been customised to work
with the fluent style of projects with polymorphic, translatable models and
other such fun.

There are many considerations when using the publishing system, and a lot of
history, which cannot be covered completely here. This is an attempt to
document the major pieces so that people can understand how to use the
publishing system and the major ideas that need to be known.

Use Publishing in your Project
------------------------------

To use GLAMkit Publishing in your project:

-  make sure you install GLAMkit with the optional 'publishing' extra to get
   any required libraries, e.g. in your *setup.py* include something like:
   'django-icekit[forms,search,publishing]'
-  add ``'icekit.publishing'`` to ``INSTALLED_APPS``
-  add ``'icekit.publishing.middleware.PublishingMiddleware'`` to
   ``MIDDLEWARE_CLASSES``

Once set up in this way, the page and plugin models defined in GLAMkit such
as ``ArticlePage`` and ``SlideShow`` will gain publishing features in your
project.

Make Custom Publishable Models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

While GLAMkit's models will generally have publishing features built-in, you
can add publishing to your own models by subclassing both the model and admin
base classes provided in GLAMkit.

To make a standard Django model publishable:

-  subclass your model from ``icekit.publishing.models.PublishingModel``
-  subclass your model's admin from
   ``icekit.publishing.admin.PublishingAdmin``

To make a ``FluentContentsPage`` model publishable:

-  subclass your model from
   ``icekit.publishing.models.PublishableFluentContentsPage``
-  subclass your model's admin from ``FluentContentsPageAdmin`` and
   ``icekit.publishing.admin.PublishingAdmin``

To make a fluent contents model (see
`ContentsPlugins <../howto/plugins.md>`__) publishable:

-  subclass your model from
   ``icekit.publishing.models.PublishableFluentContents``
-  subclass your model's admin from
   ``icekit.publishing.admin.PublishableFluentContentsAdmin``

Note: Validating slug uniqueness
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

In publishable models, both the draft and published slugs will be identical,
which means declaring your ``SlugField`` with ``unique=True`` will cause
Integrity Errors when you try to publish a model instance. To address this,
add ``unique_together = (("slug", "publishing_is_draft"),)`` to your model's
``Meta`` class.

Once you have modified your project's models, make new DB migrations to apply
the additional database fields required for publishing.

Setting up Admin if you are using Fluent Pages
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

To set up admin for your Fluent Pages:

-  add the setting ``FLUENT_PAGES_PARENT_ADMIN_MIXIN`` with the value
   ``'icekit.publishing.admin.ICEKitFluentPagesParentAdminMixin'`` - this is
   used for the listing page admin, which, in Polymorphic models, is separate
   to the admins for each Page type.
-  ensure your Admin for each page subclasses ``FluentContentsPageAdmin`` and
   ``icekit.publishing.admin.PublishingAdmin`` as above.
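Tying the "Make Custom Publishable Models" steps and the slug-uniqueness note
above together, here is a minimal sketch of a plain (non-fluent) publishable
model and its admin. The model, field and app names are hypothetical; the
import paths are the ones given above. Treat it as a starting point rather
than a definitive implementation:

::

    # models.py (hypothetical app)
    from django.db import models

    from icekit.publishing.models import PublishingModel


    class NewsItem(PublishingModel):
        title = models.CharField(max_length=255)
        # Draft and published copies share the same slug, so uniqueness is
        # scoped per draft/published pair instead of using unique=True.
        slug = models.SlugField()

        class Meta:
            unique_together = (("slug", "publishing_is_draft"),)


    # admin.py (hypothetical app)
    from django.contrib import admin

    from icekit.publishing.admin import PublishingAdmin

    from .models import NewsItem


    class NewsItemAdmin(PublishingAdmin):
        # 'publishing_column' shows the publishing status in the list view,
        # as described under "Filters" below.
        list_display = ("title", "publishing_column")


    admin.site.register(NewsItem, NewsItemAdmin)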
If your admin needs to render a custom ``change_form_template``, this
template should extend ``admin/fluentpage/change_form.html``, *not*
``admin/publishing/publishing_change_form.html``, which is injected, and
inherits from your template using
``{% extends non_publishing_change_form_template %}``.

Filters
^^^^^^^

Consider providing the publishing-related admin filters provided in
``icekit.publishing.admin`` such as ``PublishingStatusFilter``, and the
publishing status column for listing pages by adding 'publishing\_column' to
your admin's ``list_display`` attribute.

Draft Request Context
~~~~~~~~~~~~~~~~~~~~~

The ``icekit.publishing.middleware.PublishingMiddleware`` middleware class
allows privileged users to view draft pages and page content before it has
been published by adding the 'preview' GET parameter to page URLs, for
example: http://site.com/welcome-page/?preview

For the draft request context mechanism to work you must define a text model
setting ``DRAFT_SECRET_KEY`` in the CMS admin at
/admin/model\_settings/setting/ and provide a secret value of some kind --
any long password-like text is fine.

If you need to perform custom logic to show content to privileged users you
can use the ``is_draft_request_context()`` global function defined with the
middleware, which will return true if, and only if, a privileged user has
explicitly requested to view the draft version of a page by providing the
'preview' GET parameter.

Implementation details
----------------------

Usage
~~~~~

As complicated as the publishing implementation is behind the scenes -- and
it is unfortunately very complicated -- the following usage guidelines should
be enough to use it properly in most situations:

When rendering publishable items, be sure to retrieve only the items that
should be visible to the current user -- draft items for privileged users,
published items for everyone else:

-  use the ``visible()`` queryset method on a queryset of publishable items
-  use the ``get_visible()`` object method on an object's FK relationship to
   a publishable item, which may return ``None`` if the target item is only a
   draft
-  use the ``is_visible`` object status flag on a target publishable object
   if you need to process a set of draft and published items in code and
   cannot easily use one of the mechanisms above, or
-  use the ``has_been_published`` object status flag on publishable objects
   when you are processing a set of draft and published copies and need to
   find out whether an object has been published regardless of whether the
   current object happens to be a draft or published copy. This is basically
   equivalent to ``get_visible() is not None``.

Draft Content Protection
^^^^^^^^^^^^^^^^^^^^^^^^

If you forget to explicitly look up the visible version of publishable items,
you will get the draft version instead and could risk displaying draft
content to the public. To avoid this, the publishing implementation includes
a booby trap that should raise a ``PublishingException`` in this situation
with a message like *"Illegal attempt to access 'title' on a DRAFT
publishable item..."*. If you see that, check that you are obtaining the
correct visible or published version of items.

If you are sure you want to access draft attributes within a published
context, you can use ``get_draft_payload()`` on the draft item, or add the
attribute to ``PUBLISHING_PERMITTED_ATTRS`` on the model. ``pk`` is
accessible by default, but most other attributes (particularly reverse
relations) will need to be added to ``PUBLISHING_PERMITTED_ATTRS``
individually.
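As a hedged illustration of that escape hatch (the attribute name comes from
the text above, but its exact semantics, such as whether the list replaces or
extends the defaults, should be checked against the publishing code), a model
that deliberately exposes a reverse relation on its draft copies might look
like:

::

    from django.db import models

    from icekit.publishing.models import PublishingModel


    class Exhibition(PublishingModel):
        # Hypothetical model; an Event model is assumed to declare
        # ForeignKey(Exhibition, related_name="events").
        title = models.CharField(max_length=255)

        # Permit the "events" reverse relation to be read in a published
        # context without tripping the PublishingException booby trap.
        PUBLISHING_PERMITTED_ATTRS = ["events"]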
For some situations you might need to get just the published or draft copies
of items, such as for the search indexes, where we only ever want published
copies to be indexed regardless of the privileges of the user/process that
triggers the indexing. In these situations, you can use the corresponding
queryset methods and model methods/fields:

-  the ``published()`` queryset method and ``get_published()`` model method
   return the published copy of an item in all cases, regardless of the
   privileges of the current user. This is useful for rendering content that
   should always and only be safe for public consumption.
-  the ``draft()`` queryset method and ``get_draft()`` model method return
   the draft copy of an item in all cases, regardless of the privileges of
   the current user. This is useful for filtering items within the Django
   admin, where only draft items should be accessible.

There are many different states an object can be in. This attempts to cover
at least some of them.

Check if an object is the draft object
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

To check if an object is the draft object use the ``is_draft`` property,
which will return ``True`` if the specific publishable item is a draft copy,
``False`` otherwise. This will always return the opposite of
``is_published``.

Check if an object is the published object
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

To check if an object is the published object use the ``is_published``
property, which returns ``True`` if the specific publishable item is a
published copy, ``False`` otherwise. This will always return the opposite of
``is_draft``.

Check if an object has been published
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

To check if a publishable item has been published, regardless of whether the
item you are working with happens to be a draft or published copy, use the
``has_been_published`` property. This returns ``True`` if the item is itself
published, or is a draft that has a published copy.

Relating/retrieving items that are related to draft versions
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Since only draft versions are shown in the admin, and a published version
isn't constantly available, it usually makes sense to define relations to the
draft version of an object. That means that a published version won't have
incoming relations, and accessing reverse relations on the draft version will
set off the booby trap, unless the ``related_name`` is added to
``PUBLISHING_PERMITTED_ATTRS``.

A pattern like this is normally safest (``pk`` is a permitted attribute):

::

    RelatedModel.objects.filter(fk_id=self.get_draft().pk)

Data model
~~~~~~~~~~

The general gist is that every item in Django's CMS admin is created as a
**draft** copy, which may or may not have an associated **published** copy.
When a draft copy is published it is duplicated, along with some processing
of related content, such that the DB will contain two copies of the same
item: one draft, one published.

The Django admin remains largely oblivious to the existence of published
copies. When displaying content to users, the draft or published version of
publishable items is rendered depending on the privileges of the user: admins
might see draft content rendered, whereas the public must only ever see
rendered versions of the corresponding published copy (if there is one).

NOTE: The data model for ICEKit's current publishing approach is a tweaked
version of the one from ``django-model-publisher`` and SFMOMA.
Each publishable model is assigned four main extra columns:

-  ``publishing_linked``: a 1-to-1 relationship to self, or as near as
   possible to self, that on the draft copy of a publishable item will point
   to its **published** copy, if any.
-  ``publishing_is_draft``: boolean field, ``True`` if the current item is a
   draft copy (the default) or ``False`` if it is the published copy.
-  ``publishing_modified_at``: timestamp used mainly to track when
   publishable items are updated so that you can work out whether the
   published copy is up-to-date compared to the draft copy version. That is,
   any up-to-date published copy should have a ``publishing_modified_at``
   timestamp value equal to or later than the corresponding draft item.
-  ``publishing_published_at``: intended to set a future time at which the
   item is to be considered published, for scheduling publication. This does
   not appear to be used or implemented at present.

Handling unique fields
~~~~~~~~~~~~~~~~~~~~~~

Because the publishing approach creates draft and published copies of models,
any fields marked as ``unique=True`` will raise IntegrityErrors unless the
field is made non-unique.

Related fields (``ForeignKey``, ``ManyToMany``, etc)
----------------------------------------------------

When referring to publishable ``ForeignKey`` or ``ManyToMany`` data items --
such as pages -- on an object that is being rendered or displayed to the
public, it is important to specify that you only want the **published**
versions to be displayed.

During administration and saving of objects always reference the **draft**
version, but when accessing ``ForeignKey`` or ``ManyToMany`` relationships in
public contexts such as templates use the ``visible`` method on publishable
query sets (i.e. ``UrlNodeQuerySet.published``) to get the correct draft or
published object versions for the current user.

For example, here is a template directive that will do the right thing when
rendering related content for the public and for site admins:

::

    {% with published_pages=instance.pages.visible %}

    {% endwith %}

An issue has been discovered where ``ManyToMany`` fields that refer both ways
between models have the many-to-many data cloned for both the published and
unpublished objects. This is currently being worked on.
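To round out the related-fields guidance, here is a hedged sketch of the same
rule applied at the view/queryset level. The ``Article`` model and template
path are hypothetical; ``visible()`` and ``published()`` are the queryset
helpers described earlier:

::

    from django.shortcuts import render

    from myapp.models import Article  # hypothetical publishable model


    def article_list(request):
        # Public listing: visible() returns draft copies only for privileged
        # draft-context requests, and published copies for everyone else.
        articles = Article.objects.visible()
        return render(request, "articles/list.html", {"articles": articles})


    def indexable_articles():
        # Search indexing: only ever index published copies, regardless of
        # who or what triggers the indexing run.
        return Article.objects.published()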
{ "content_hash": "7edf80948f1f331d6af6a8d40c9ed5ad", "timestamp": "", "source": "github", "line_count": 296, "max_line_length": 80, "avg_line_length": 43.07432432432432, "alnum_prop": 0.7430588235294118, "repo_name": "ic-labs/django-icekit", "id": "5ac90f6ee9bf196e4116d27deb22591f3cdfae10", "size": "12750", "binary": false, "copies": "1", "ref": "refs/heads/develop", "path": "docs/architecture/content/publishing.rst", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "18019" }, { "name": "HTML", "bytes": "92605" }, { "name": "JavaScript", "bytes": "27803" }, { "name": "Python", "bytes": "1476354" }, { "name": "Shell", "bytes": "37850" } ], "symlink_target": "" }
package org.springframework.boot.configurationprocessor; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.util.Arrays; import java.util.Collections; import org.json.JSONArray; import org.json.JSONObject; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.rules.TemporaryFolder; import org.springframework.boot.configurationprocessor.metadata.ConfigurationMetadata; import org.springframework.boot.configurationprocessor.metadata.ItemDeprecation; import org.springframework.boot.configurationprocessor.metadata.ItemHint; import org.springframework.boot.configurationprocessor.metadata.ItemMetadata; import org.springframework.boot.configurationprocessor.metadata.Metadata; import org.springframework.boot.configurationprocessor.metadata.TestJsonConverter; import org.springframework.boot.configurationsample.endpoint.CustomPropertiesEndpoint; import org.springframework.boot.configurationsample.endpoint.DisabledEndpoint; import org.springframework.boot.configurationsample.endpoint.OnlyJmxEndpoint; import org.springframework.boot.configurationsample.endpoint.OnlyWebEndpoint; import org.springframework.boot.configurationsample.endpoint.SimpleEndpoint; import org.springframework.boot.configurationsample.endpoint.incremental.IncrementalEndpoint; import org.springframework.boot.configurationsample.endpoint.incremental.IncrementalJmxEndpoint; import org.springframework.boot.configurationsample.incremental.BarProperties; import org.springframework.boot.configurationsample.incremental.FooProperties; import org.springframework.boot.configurationsample.incremental.RenamedBarProperties; import org.springframework.boot.configurationsample.lombok.LombokExplicitProperties; import org.springframework.boot.configurationsample.lombok.LombokInnerClassProperties; import org.springframework.boot.configurationsample.lombok.LombokInnerClassWithGetterProperties; import org.springframework.boot.configurationsample.lombok.LombokSimpleDataProperties; import org.springframework.boot.configurationsample.lombok.LombokSimpleProperties; import org.springframework.boot.configurationsample.lombok.SimpleLombokPojo; import org.springframework.boot.configurationsample.method.DeprecatedMethodConfig; import org.springframework.boot.configurationsample.method.EmptyTypeMethodConfig; import org.springframework.boot.configurationsample.method.InvalidMethodConfig; import org.springframework.boot.configurationsample.method.MethodAndClassConfig; import org.springframework.boot.configurationsample.method.SimpleMethodConfig; import org.springframework.boot.configurationsample.simple.ClassWithNestedProperties; import org.springframework.boot.configurationsample.simple.DeprecatedSingleProperty; import org.springframework.boot.configurationsample.simple.HierarchicalProperties; import org.springframework.boot.configurationsample.simple.NotAnnotated; import org.springframework.boot.configurationsample.simple.SimpleCollectionProperties; import org.springframework.boot.configurationsample.simple.SimplePrefixValueProperties; import org.springframework.boot.configurationsample.simple.SimpleProperties; import org.springframework.boot.configurationsample.simple.SimpleTypeProperties; import org.springframework.boot.configurationsample.specific.BoxingPojo; import org.springframework.boot.configurationsample.specific.BuilderPojo; import org.springframework.boot.configurationsample.specific.DeprecatedUnrelatedMethodPojo; import 
org.springframework.boot.configurationsample.specific.DoubleRegistrationProperties; import org.springframework.boot.configurationsample.specific.ExcludedTypesPojo; import org.springframework.boot.configurationsample.specific.GenericConfig; import org.springframework.boot.configurationsample.specific.InnerClassAnnotatedGetterConfig; import org.springframework.boot.configurationsample.specific.InnerClassHierarchicalProperties; import org.springframework.boot.configurationsample.specific.InnerClassProperties; import org.springframework.boot.configurationsample.specific.InnerClassRootConfig; import org.springframework.boot.configurationsample.specific.InvalidAccessorProperties; import org.springframework.boot.configurationsample.specific.InvalidDoubleRegistrationProperties; import org.springframework.boot.configurationsample.specific.SimplePojo; import org.springframework.boot.testsupport.compiler.TestCompiler; import org.springframework.util.FileCopyUtils; import static org.assertj.core.api.Assertions.assertThat; /** * Tests for {@link ConfigurationMetadataAnnotationProcessor}. * * @author Stephane Nicoll * @author Phillip Webb * @author Andy Wilkinson * @author Kris De Volder */ public class ConfigurationMetadataAnnotationProcessorTests { @Rule public TemporaryFolder temporaryFolder = new TemporaryFolder(); @Rule public ExpectedException thrown = ExpectedException.none(); private TestCompiler compiler; @Before public void createCompiler() throws IOException { this.compiler = new TestCompiler(this.temporaryFolder); } @Test public void notAnnotated() throws Exception { ConfigurationMetadata metadata = compile(NotAnnotated.class); assertThat(metadata.getItems()).isEmpty(); } @Test public void simpleProperties() throws Exception { ConfigurationMetadata metadata = compile(SimpleProperties.class); assertThat(metadata) .has(Metadata.withGroup("simple").fromSource(SimpleProperties.class)); assertThat(metadata).has(Metadata.withProperty("simple.the-name", String.class) .fromSource(SimpleProperties.class) .withDescription("The name of this simple properties.") .withDefaultValue("boot").withDeprecation(null, null)); assertThat(metadata).has(Metadata.withProperty("simple.flag", Boolean.class) .fromSource(SimpleProperties.class).withDescription("A simple flag.") .withDeprecation(null, null)); assertThat(metadata).has(Metadata.withProperty("simple.comparator")); assertThat(metadata).doesNotHave(Metadata.withProperty("simple.counter")); assertThat(metadata).doesNotHave(Metadata.withProperty("simple.size")); } @Test public void simplePrefixValueProperties() throws Exception { ConfigurationMetadata metadata = compile(SimplePrefixValueProperties.class); assertThat(metadata).has(Metadata.withGroup("simple") .fromSource(SimplePrefixValueProperties.class)); assertThat(metadata).has(Metadata.withProperty("simple.name", String.class) .fromSource(SimplePrefixValueProperties.class)); } @Test public void simpleTypeProperties() throws Exception { ConfigurationMetadata metadata = compile(SimpleTypeProperties.class); assertThat(metadata).has( Metadata.withGroup("simple.type").fromSource(SimpleTypeProperties.class)); assertThat(metadata) .has(Metadata.withProperty("simple.type.my-string", String.class)); assertThat(metadata) .has(Metadata.withProperty("simple.type.my-byte", Byte.class)); assertThat(metadata) .has(Metadata.withProperty("simple.type.my-primitive-byte", Byte.class)); assertThat(metadata) .has(Metadata.withProperty("simple.type.my-char", Character.class)); assertThat(metadata).has( 
Metadata.withProperty("simple.type.my-primitive-char", Character.class)); assertThat(metadata) .has(Metadata.withProperty("simple.type.my-boolean", Boolean.class)); assertThat(metadata).has( Metadata.withProperty("simple.type.my-primitive-boolean", Boolean.class)); assertThat(metadata) .has(Metadata.withProperty("simple.type.my-short", Short.class)); assertThat(metadata).has( Metadata.withProperty("simple.type.my-primitive-short", Short.class)); assertThat(metadata) .has(Metadata.withProperty("simple.type.my-integer", Integer.class)); assertThat(metadata).has( Metadata.withProperty("simple.type.my-primitive-integer", Integer.class)); assertThat(metadata) .has(Metadata.withProperty("simple.type.my-long", Long.class)); assertThat(metadata) .has(Metadata.withProperty("simple.type.my-primitive-long", Long.class)); assertThat(metadata) .has(Metadata.withProperty("simple.type.my-double", Double.class)); assertThat(metadata).has( Metadata.withProperty("simple.type.my-primitive-double", Double.class)); assertThat(metadata) .has(Metadata.withProperty("simple.type.my-float", Float.class)); assertThat(metadata).has( Metadata.withProperty("simple.type.my-primitive-float", Float.class)); assertThat(metadata.getItems().size()).isEqualTo(18); } @Test public void hierarchicalProperties() throws Exception { ConfigurationMetadata metadata = compile(HierarchicalProperties.class); assertThat(metadata).has(Metadata.withGroup("hierarchical") .fromSource(HierarchicalProperties.class)); assertThat(metadata).has(Metadata.withProperty("hierarchical.first", String.class) .fromSource(HierarchicalProperties.class)); assertThat(metadata) .has(Metadata.withProperty("hierarchical.second", String.class) .fromSource(HierarchicalProperties.class)); assertThat(metadata).has(Metadata.withProperty("hierarchical.third", String.class) .fromSource(HierarchicalProperties.class)); } @Test @SuppressWarnings("deprecation") public void deprecatedProperties() throws Exception { Class<?> type = org.springframework.boot.configurationsample.simple.DeprecatedProperties.class; ConfigurationMetadata metadata = compile(type); assertThat(metadata).has(Metadata.withGroup("deprecated").fromSource(type)); assertThat(metadata).has(Metadata.withProperty("deprecated.name", String.class) .fromSource(type).withDeprecation(null, null)); assertThat(metadata) .has(Metadata.withProperty("deprecated.description", String.class) .fromSource(type).withDeprecation(null, null)); } @Test public void singleDeprecatedProperty() throws Exception { Class<?> type = DeprecatedSingleProperty.class; ConfigurationMetadata metadata = compile(type); assertThat(metadata).has(Metadata.withGroup("singledeprecated").fromSource(type)); assertThat(metadata) .has(Metadata.withProperty("singledeprecated.new-name", String.class) .fromSource(type)); assertThat(metadata).has(Metadata .withProperty("singledeprecated.name", String.class).fromSource(type) .withDeprecation("renamed", "singledeprecated.new-name")); } @Test public void deprecatedOnUnrelatedSetter() throws Exception { Class<?> type = DeprecatedUnrelatedMethodPojo.class; ConfigurationMetadata metadata = compile(type); assertThat(metadata).has(Metadata.withGroup("not.deprecated").fromSource(type)); assertThat(metadata) .has(Metadata.withProperty("not.deprecated.counter", Integer.class) .withNoDeprecation().fromSource(type)); assertThat(metadata) .has(Metadata.withProperty("not.deprecated.flag", Boolean.class) .withNoDeprecation().fromSource(type)); } @Test public void boxingOnSetter() throws IOException { Class<?> type = 
BoxingPojo.class; ConfigurationMetadata metadata = compile(type); assertThat(metadata).has(Metadata.withGroup("boxing").fromSource(type)); assertThat(metadata).has( Metadata.withProperty("boxing.flag", Boolean.class).fromSource(type)); assertThat(metadata).has( Metadata.withProperty("boxing.counter", Integer.class).fromSource(type)); } @Test public void parseCollectionConfig() throws Exception { ConfigurationMetadata metadata = compile(SimpleCollectionProperties.class); // getter and setter assertThat(metadata).has(Metadata.withProperty("collection.integers-to-names", "java.util.Map<java.lang.Integer,java.lang.String>")); assertThat(metadata).has(Metadata.withProperty("collection.longs", "java.util.Collection<java.lang.Long>")); assertThat(metadata).has(Metadata.withProperty("collection.floats", "java.util.List<java.lang.Float>")); // getter only assertThat(metadata).has(Metadata.withProperty("collection.names-to-integers", "java.util.Map<java.lang.String,java.lang.Integer>")); assertThat(metadata).has(Metadata.withProperty("collection.bytes", "java.util.Collection<java.lang.Byte>")); assertThat(metadata).has(Metadata.withProperty("collection.doubles", "java.util.List<java.lang.Double>")); } @Test public void simpleMethodConfig() throws Exception { ConfigurationMetadata metadata = compile(SimpleMethodConfig.class); assertThat(metadata) .has(Metadata.withGroup("foo").fromSource(SimpleMethodConfig.class)); assertThat(metadata).has(Metadata.withProperty("foo.name", String.class) .fromSource(SimpleMethodConfig.Foo.class)); assertThat(metadata).has(Metadata.withProperty("foo.flag", Boolean.class) .fromSource(SimpleMethodConfig.Foo.class)); } @Test public void invalidMethodConfig() throws Exception { ConfigurationMetadata metadata = compile(InvalidMethodConfig.class); assertThat(metadata).has(Metadata.withProperty("something.name", String.class) .fromSource(InvalidMethodConfig.class)); assertThat(metadata).isNotEqualTo(Metadata.withProperty("invalid.name")); } @Test public void methodAndClassConfig() throws Exception { ConfigurationMetadata metadata = compile(MethodAndClassConfig.class); assertThat(metadata).has(Metadata.withProperty("conflict.name", String.class) .fromSource(MethodAndClassConfig.Foo.class)); assertThat(metadata).has(Metadata.withProperty("conflict.flag", Boolean.class) .fromSource(MethodAndClassConfig.Foo.class)); assertThat(metadata).has(Metadata.withProperty("conflict.value", String.class) .fromSource(MethodAndClassConfig.class)); } @Test public void emptyTypeMethodConfig() throws Exception { ConfigurationMetadata metadata = compile(EmptyTypeMethodConfig.class); assertThat(metadata).isNotEqualTo(Metadata.withProperty("something.foo")); } @Test public void deprecatedMethodConfig() throws Exception { Class<DeprecatedMethodConfig> type = DeprecatedMethodConfig.class; ConfigurationMetadata metadata = compile(type); assertThat(metadata).has(Metadata.withGroup("foo").fromSource(type)); assertThat(metadata).has(Metadata.withProperty("foo.name", String.class) .fromSource(DeprecatedMethodConfig.Foo.class) .withDeprecation(null, null)); assertThat(metadata).has(Metadata.withProperty("foo.flag", Boolean.class) .fromSource(DeprecatedMethodConfig.Foo.class) .withDeprecation(null, null)); } @Test @SuppressWarnings("deprecation") public void deprecatedMethodConfigOnClass() throws Exception { Class<?> type = org.springframework.boot.configurationsample.method.DeprecatedClassMethodConfig.class; ConfigurationMetadata metadata = compile(type); 
assertThat(metadata).has(Metadata.withGroup("foo").fromSource(type)); assertThat(metadata).has(Metadata.withProperty("foo.name", String.class) .fromSource( org.springframework.boot.configurationsample.method.DeprecatedClassMethodConfig.Foo.class) .withDeprecation(null, null)); assertThat(metadata).has(Metadata.withProperty("foo.flag", Boolean.class) .fromSource( org.springframework.boot.configurationsample.method.DeprecatedClassMethodConfig.Foo.class) .withDeprecation(null, null)); } @Test public void innerClassRootConfig() throws Exception { ConfigurationMetadata metadata = compile(InnerClassRootConfig.class); assertThat(metadata).has(Metadata.withProperty("config.name")); } @Test public void innerClassProperties() throws Exception { ConfigurationMetadata metadata = compile(InnerClassProperties.class); assertThat(metadata) .has(Metadata.withGroup("config").fromSource(InnerClassProperties.class)); assertThat(metadata).has( Metadata.withGroup("config.first").ofType(InnerClassProperties.Foo.class) .fromSource(InnerClassProperties.class)); assertThat(metadata).has(Metadata.withProperty("config.first.name")); assertThat(metadata).has(Metadata.withProperty("config.first.bar.name")); assertThat(metadata).has( Metadata.withGroup("config.the-second", InnerClassProperties.Foo.class) .fromSource(InnerClassProperties.class)); assertThat(metadata).has(Metadata.withProperty("config.the-second.name")); assertThat(metadata).has(Metadata.withProperty("config.the-second.bar.name")); assertThat(metadata).has(Metadata.withGroup("config.third") .ofType(SimplePojo.class).fromSource(InnerClassProperties.class)); assertThat(metadata).has(Metadata.withProperty("config.third.value")); assertThat(metadata).has(Metadata.withProperty("config.fourth")); assertThat(metadata).isNotEqualTo(Metadata.withGroup("config.fourth")); } @Test public void innerClassPropertiesHierarchical() throws Exception { ConfigurationMetadata metadata = compile(InnerClassHierarchicalProperties.class); assertThat(metadata).has(Metadata.withGroup("config.foo") .ofType(InnerClassHierarchicalProperties.Foo.class)); assertThat(metadata).has(Metadata.withGroup("config.foo.bar") .ofType(InnerClassHierarchicalProperties.Bar.class)); assertThat(metadata).has(Metadata.withGroup("config.foo.bar.baz") .ofType(InnerClassHierarchicalProperties.Foo.Baz.class)); assertThat(metadata).has(Metadata.withProperty("config.foo.bar.baz.blah")); assertThat(metadata).has(Metadata.withProperty("config.foo.bar.bling")); } @Test public void innerClassAnnotatedGetterConfig() throws Exception { ConfigurationMetadata metadata = compile(InnerClassAnnotatedGetterConfig.class); assertThat(metadata).has(Metadata.withProperty("specific.value")); assertThat(metadata).has(Metadata.withProperty("foo.name")); assertThat(metadata).isNotEqualTo(Metadata.withProperty("specific.foo")); } @Test public void nestedClassChildProperties() throws Exception { ConfigurationMetadata metadata = compile(ClassWithNestedProperties.class); assertThat(metadata).has(Metadata.withGroup("nestedChildProps") .fromSource(ClassWithNestedProperties.NestedChildClass.class)); assertThat(metadata) .has(Metadata .withProperty("nestedChildProps.child-class-property", Integer.class) .fromSource(ClassWithNestedProperties.NestedChildClass.class) .withDefaultValue(20)); assertThat(metadata) .has(Metadata .withProperty("nestedChildProps.parent-class-property", Integer.class) .fromSource(ClassWithNestedProperties.NestedChildClass.class) .withDefaultValue(10)); } @Test public void builderPojo() throws IOException { 
ConfigurationMetadata metadata = compile(BuilderPojo.class); assertThat(metadata).has(Metadata.withProperty("builder.name")); } @Test public void excludedTypesPojo() throws IOException { ConfigurationMetadata metadata = compile(ExcludedTypesPojo.class); assertThat(metadata).has(Metadata.withProperty("excluded.name")); assertThat(metadata).isNotEqualTo(Metadata.withProperty("excluded.class-loader")); assertThat(metadata).isNotEqualTo(Metadata.withProperty("excluded.data-source")); assertThat(metadata).isNotEqualTo(Metadata.withProperty("excluded.print-writer")); assertThat(metadata).isNotEqualTo(Metadata.withProperty("excluded.writer")); assertThat(metadata).isNotEqualTo(Metadata.withProperty("excluded.writer-array")); } @Test public void invalidAccessor() throws IOException { ConfigurationMetadata metadata = compile(InvalidAccessorProperties.class); assertThat(metadata).has(Metadata.withGroup("config")); assertThat(metadata.getItems()).hasSize(1); } @Test public void doubleRegistration() throws IOException { ConfigurationMetadata metadata = compile(DoubleRegistrationProperties.class); assertThat(metadata).has(Metadata.withGroup("one")); assertThat(metadata).has(Metadata.withGroup("two")); assertThat(metadata).has(Metadata.withProperty("one.value")); assertThat(metadata).has(Metadata.withProperty("two.value")); assertThat(metadata.getItems()).hasSize(4); } @Test public void invalidDoubleRegistration() throws IOException { this.thrown.expect(IllegalStateException.class); this.thrown.expectMessage("Compilation failed"); compile(InvalidDoubleRegistrationProperties.class); } @Test public void genericTypes() throws IOException { ConfigurationMetadata metadata = compile(GenericConfig.class); assertThat(metadata).has(Metadata.withGroup("generic").ofType( "org.springframework.boot.configurationsample.specific.GenericConfig")); assertThat(metadata).has(Metadata.withGroup("generic.foo").ofType( "org.springframework.boot.configurationsample.specific.GenericConfig$Foo")); assertThat(metadata).has(Metadata.withGroup("generic.foo.bar").ofType( "org.springframework.boot.configurationsample.specific.GenericConfig$Bar")); assertThat(metadata).has(Metadata.withGroup("generic.foo.bar.biz").ofType( "org.springframework.boot.configurationsample.specific.GenericConfig$Bar$Biz")); assertThat(metadata).has(Metadata.withProperty("generic.foo.name") .ofType(String.class).fromSource(GenericConfig.Foo.class)); assertThat(metadata).has(Metadata.withProperty("generic.foo.string-to-bar") .ofType("java.util.Map<java.lang.String,org.springframework.boot.configurationsample.specific.GenericConfig.Bar<java.lang.Integer>>") .fromSource(GenericConfig.Foo.class)); assertThat(metadata).has(Metadata.withProperty("generic.foo.string-to-integer") .ofType("java.util.Map<java.lang.String,java.lang.Integer>") .fromSource(GenericConfig.Foo.class)); assertThat(metadata).has(Metadata.withProperty("generic.foo.bar.name") .ofType("java.lang.String").fromSource(GenericConfig.Bar.class)); assertThat(metadata).has(Metadata.withProperty("generic.foo.bar.biz.name") .ofType("java.lang.String").fromSource(GenericConfig.Bar.Biz.class)); assertThat(metadata.getItems()).hasSize(9); } @Test public void lombokDataProperties() throws Exception { ConfigurationMetadata metadata = compile(LombokSimpleDataProperties.class); assertSimpleLombokProperties(metadata, LombokSimpleDataProperties.class, "data"); } @Test public void lombokSimpleProperties() throws Exception { ConfigurationMetadata metadata = compile(LombokSimpleProperties.class); 
assertSimpleLombokProperties(metadata, LombokSimpleProperties.class, "simple"); } @Test public void lombokExplicitProperties() throws Exception { ConfigurationMetadata metadata = compile(LombokExplicitProperties.class); assertSimpleLombokProperties(metadata, LombokExplicitProperties.class, "explicit"); } @Test public void lombokInnerClassProperties() throws Exception { ConfigurationMetadata metadata = compile(LombokInnerClassProperties.class); assertThat(metadata).has(Metadata.withGroup("config") .fromSource(LombokInnerClassProperties.class)); assertThat(metadata).has(Metadata.withGroup("config.first") .ofType(LombokInnerClassProperties.Foo.class) .fromSource(LombokInnerClassProperties.class)); assertThat(metadata).has(Metadata.withProperty("config.first.name")); assertThat(metadata).has(Metadata.withProperty("config.first.bar.name")); assertThat(metadata).has( Metadata.withGroup("config.second", LombokInnerClassProperties.Foo.class) .fromSource(LombokInnerClassProperties.class)); assertThat(metadata).has(Metadata.withProperty("config.second.name")); assertThat(metadata).has(Metadata.withProperty("config.second.bar.name")); assertThat(metadata) .has(Metadata.withGroup("config.third").ofType(SimpleLombokPojo.class) .fromSource(LombokInnerClassProperties.class)); // For some reason the annotation processor resolves a type for SimpleLombokPojo // that is resolved (compiled) and the source annotations are gone. Because we // don't see the @Data annotation anymore, no field is harvested. What is crazy is // that a sample project works fine so this seem to be related to the unit test // environment for some reason. assertThat(metadata, // containsProperty("config.third.value")); assertThat(metadata).has(Metadata.withProperty("config.fourth")); assertThat(metadata).isNotEqualTo(Metadata.withGroup("config.fourth")); } @Test public void lombokInnerClassWithGetterProperties() throws IOException { ConfigurationMetadata metadata = compile( LombokInnerClassWithGetterProperties.class); assertThat(metadata).has(Metadata.withGroup("config") .fromSource(LombokInnerClassWithGetterProperties.class)); assertThat(metadata).has(Metadata.withGroup("config.first") .ofType(LombokInnerClassWithGetterProperties.Foo.class) .fromSourceMethod("getFirst()") .fromSource(LombokInnerClassWithGetterProperties.class)); assertThat(metadata).has(Metadata.withProperty("config.first.name")); assertThat(metadata.getItems()).hasSize(3); } @Test public void simpleEndpoint() throws IOException { ConfigurationMetadata metadata = compile(SimpleEndpoint.class); assertThat(metadata).has(Metadata.withGroup("endpoints.simple") .fromSource(SimpleEndpoint.class)); assertThat(metadata).has(enabledFlag("simple", true)); assertThat(metadata).has(jmxEnabledFlag("simple", true)); assertThat(metadata).has(webEnabledFlag("simple", true)); assertThat(metadata).has(cacheTtl("simple")); assertThat(metadata.getItems()).hasSize(5); } @Test public void disableEndpoint() throws IOException { ConfigurationMetadata metadata = compile(DisabledEndpoint.class); assertThat(metadata).has(Metadata.withGroup("endpoints.disabled") .fromSource(DisabledEndpoint.class)); assertThat(metadata).has(enabledFlag("disabled", false)); assertThat(metadata).has(jmxEnabledFlag("disabled", false)); assertThat(metadata).has(webEnabledFlag("disabled", false)); assertThat(metadata).has(cacheTtl("disabled")); assertThat(metadata.getItems()).hasSize(5); } @Test public void customPropertiesEndpoint() throws IOException { ConfigurationMetadata metadata = 
compile(CustomPropertiesEndpoint.class); assertThat(metadata).has(Metadata.withGroup("endpoints.customprops") .fromSource(CustomPropertiesEndpoint.class)); assertThat(metadata).has(Metadata.withProperty("endpoints.customprops.name"). ofType(String.class).withDefaultValue("test")); assertThat(metadata).has(enabledFlag("customprops", true)); assertThat(metadata).has(jmxEnabledFlag("customprops", true)); assertThat(metadata).has(webEnabledFlag("customprops", true)); assertThat(metadata).has(cacheTtl("customprops")); assertThat(metadata.getItems()).hasSize(6); } @Test public void jmxOnlyEndpoint() throws IOException { ConfigurationMetadata metadata = compile(OnlyJmxEndpoint.class); assertThat(metadata).has(Metadata.withGroup("endpoints.jmx") .fromSource(OnlyJmxEndpoint.class)); assertThat(metadata).has(enabledFlag("jmx", true)); assertThat(metadata).has(jmxEnabledFlag("jmx", true)); assertThat(metadata).has(cacheTtl("jmx")); assertThat(metadata.getItems()).hasSize(4); } @Test public void webOnlyEndpoint() throws IOException { ConfigurationMetadata metadata = compile(OnlyWebEndpoint.class); assertThat(metadata).has(Metadata.withGroup("endpoints.web") .fromSource(OnlyWebEndpoint.class)); assertThat(metadata).has(enabledFlag("web", true)); assertThat(metadata).has(webEnabledFlag("web", true)); assertThat(metadata).has(cacheTtl("web")); assertThat(metadata.getItems()).hasSize(4); } @Test public void incrementalEndpointBuildChangeGeneralEnabledFlag() throws Exception { TestProject project = new TestProject(this.temporaryFolder, IncrementalEndpoint.class); ConfigurationMetadata metadata = project.fullBuild(); assertThat(metadata).has(Metadata.withGroup("endpoints.incremental") .fromSource(IncrementalEndpoint.class)); assertThat(metadata).has(enabledFlag("incremental", true)); assertThat(metadata).has(jmxEnabledFlag("incremental", true)); assertThat(metadata).has(webEnabledFlag("incremental", true)); assertThat(metadata).has(cacheTtl("incremental")); assertThat(metadata.getItems()).hasSize(5); project.replaceText(IncrementalEndpoint.class, "id = \"incremental\"", "id = \"incremental\", enabledByDefault = false"); metadata = project.incrementalBuild(IncrementalEndpoint.class); assertThat(metadata).has(Metadata.withGroup("endpoints.incremental") .fromSource(IncrementalEndpoint.class)); assertThat(metadata).has(enabledFlag("incremental", false)); assertThat(metadata).has(jmxEnabledFlag("incremental", false)); assertThat(metadata).has(webEnabledFlag("incremental", false)); assertThat(metadata).has(cacheTtl("incremental")); assertThat(metadata.getItems()).hasSize(5); } @Test public void incrementalEndpointBuildDisableJmxEndpoint() throws Exception { TestProject project = new TestProject(this.temporaryFolder, IncrementalEndpoint.class); ConfigurationMetadata metadata = project.fullBuild(); assertThat(metadata).has(Metadata.withGroup("endpoints.incremental") .fromSource(IncrementalEndpoint.class)); assertThat(metadata).has(enabledFlag("incremental", true)); assertThat(metadata).has(jmxEnabledFlag("incremental", true)); assertThat(metadata).has(webEnabledFlag("incremental", true)); assertThat(metadata).has(cacheTtl("incremental")); assertThat(metadata.getItems()).hasSize(5); project.replaceText(IncrementalEndpoint.class, "id = \"incremental\"", "id = \"incremental\", types = Endpoint.Type.WEB"); metadata = project.incrementalBuild(IncrementalEndpoint.class); assertThat(metadata).has(Metadata.withGroup("endpoints.incremental") .fromSource(IncrementalEndpoint.class)); 
assertThat(metadata).has(enabledFlag("incremental", true)); assertThat(metadata).has(webEnabledFlag("incremental", true)); assertThat(metadata).has(cacheTtl("incremental")); assertThat(metadata.getItems()).hasSize(4); } @Test public void incrementalEndpointBuildEnableJmxEndpoint() throws Exception { TestProject project = new TestProject(this.temporaryFolder, IncrementalJmxEndpoint.class); ConfigurationMetadata metadata = project.fullBuild(); assertThat(metadata).has(Metadata.withGroup("endpoints.incremental") .fromSource(IncrementalJmxEndpoint.class)); assertThat(metadata).has(enabledFlag("incremental", true)); assertThat(metadata).has(jmxEnabledFlag("incremental", true)); assertThat(metadata).has(cacheTtl("incremental")); assertThat(metadata.getItems()).hasSize(4); project.replaceText(IncrementalJmxEndpoint.class, ", types = Endpoint.Type.JMX", ""); metadata = project.incrementalBuild(IncrementalJmxEndpoint.class); assertThat(metadata).has(Metadata.withGroup("endpoints.incremental") .fromSource(IncrementalJmxEndpoint.class)); assertThat(metadata).has(enabledFlag("incremental", true)); assertThat(metadata).has(jmxEnabledFlag("incremental", true)); assertThat(metadata).has(webEnabledFlag("incremental", true)); assertThat(metadata).has(cacheTtl("incremental")); assertThat(metadata.getItems()).hasSize(5); } private Metadata.MetadataItemCondition enabledFlag(String endpointId, boolean defaultValue) { return Metadata.withEnabledFlag("endpoints." + endpointId + ".enabled") .withDefaultValue(defaultValue).withDescription( String.format("Enable the %s endpoint.", endpointId)); } private Metadata.MetadataItemCondition jmxEnabledFlag(String endpointId, boolean defaultValue) { return Metadata.withEnabledFlag("endpoints." + endpointId + ".jmx.enabled") .withDefaultValue(defaultValue).withDescription(String.format( "Expose the %s endpoint as a JMX MBean.", endpointId)); } private Metadata.MetadataItemCondition webEnabledFlag(String endpointId, boolean defaultValue) { return Metadata.withEnabledFlag("endpoints." + endpointId + ".web.enabled") .withDefaultValue(defaultValue).withDescription(String.format( "Expose the %s endpoint as a Web endpoint.", endpointId)); } private Metadata.MetadataItemCondition cacheTtl(String endpointId) { return Metadata.withProperty("endpoints." 
+ endpointId + ".cache.time-to-live") .ofType(Long.class).withDefaultValue(0).withDescription( "Maximum time in milliseconds that a response can be cached."); } @Test public void mergingOfAdditionalProperty() throws Exception { ItemMetadata property = ItemMetadata.newProperty(null, "foo", "java.lang.String", AdditionalMetadata.class.getName(), null, null, null, null); writeAdditionalMetadata(property); ConfigurationMetadata metadata = compile(SimpleProperties.class); assertThat(metadata).has(Metadata.withProperty("simple.comparator")); assertThat(metadata).has(Metadata.withProperty("foo", String.class) .fromSource(AdditionalMetadata.class)); } @Test public void mergeExistingPropertyDefaultValue() throws Exception { ItemMetadata property = ItemMetadata.newProperty("simple", "flag", null, null, null, null, true, null); writeAdditionalMetadata(property); ConfigurationMetadata metadata = compile(SimpleProperties.class); assertThat(metadata).has(Metadata.withProperty("simple.flag", Boolean.class) .fromSource(SimpleProperties.class).withDescription("A simple flag.") .withDeprecation(null, null).withDefaultValue(true)); assertThat(metadata.getItems()).hasSize(4); } @Test public void mergeExistingPropertyDescription() throws Exception { ItemMetadata property = ItemMetadata.newProperty("simple", "comparator", null, null, null, "A nice comparator.", null, null); writeAdditionalMetadata(property); ConfigurationMetadata metadata = compile(SimpleProperties.class); assertThat(metadata) .has(Metadata.withProperty("simple.comparator", "java.util.Comparator<?>") .fromSource(SimpleProperties.class) .withDescription("A nice comparator.")); assertThat(metadata.getItems()).hasSize(4); } @Test public void mergeExistingPropertyDeprecation() throws Exception { ItemMetadata property = ItemMetadata.newProperty("simple", "comparator", null, null, null, null, null, new ItemDeprecation("Don't use this.", "simple.complex-comparator", "error")); writeAdditionalMetadata(property); ConfigurationMetadata metadata = compile(SimpleProperties.class); assertThat(metadata) .has(Metadata.withProperty("simple.comparator", "java.util.Comparator<?>") .fromSource(SimpleProperties.class).withDeprecation( "Don't use this.", "simple.complex-comparator", "error")); assertThat(metadata.getItems()).hasSize(4); } @Test public void mergeExistingPropertyDeprecationOverride() throws Exception { ItemMetadata property = ItemMetadata.newProperty("singledeprecated", "name", null, null, null, null, null, new ItemDeprecation("Don't use this.", "single.name")); writeAdditionalMetadata(property); ConfigurationMetadata metadata = compile(DeprecatedSingleProperty.class); assertThat(metadata).has( Metadata.withProperty("singledeprecated.name", String.class.getName()) .fromSource(DeprecatedSingleProperty.class) .withDeprecation("Don't use this.", "single.name")); assertThat(metadata.getItems()).hasSize(3); } @Test public void mergeExistingPropertyDeprecationOverrideLevel() throws Exception { ItemMetadata property = ItemMetadata.newProperty("singledeprecated", "name", null, null, null, null, null, new ItemDeprecation(null, null, "error")); writeAdditionalMetadata(property); ConfigurationMetadata metadata = compile(DeprecatedSingleProperty.class); assertThat(metadata).has( Metadata.withProperty("singledeprecated.name", String.class.getName()) .fromSource(DeprecatedSingleProperty.class).withDeprecation( "renamed", "singledeprecated.new-name", "error")); assertThat(metadata.getItems()).hasSize(3); } @Test public void mergeOfInvalidAdditionalMetadata() throws 
IOException { File additionalMetadataFile = createAdditionalMetadataFile(); FileCopyUtils.copy("Hello World", new FileWriter(additionalMetadataFile)); this.thrown.expect(IllegalStateException.class); this.thrown.expectMessage("Compilation failed"); compile(SimpleProperties.class); } @Test public void mergingOfSimpleHint() throws Exception { writeAdditionalHints(ItemHint.newHint("simple.the-name", new ItemHint.ValueHint("boot", "Bla bla"), new ItemHint.ValueHint("spring", null))); ConfigurationMetadata metadata = compile(SimpleProperties.class); assertThat(metadata).has(Metadata.withProperty("simple.the-name", String.class) .fromSource(SimpleProperties.class) .withDescription("The name of this simple properties.") .withDefaultValue("boot").withDeprecation(null, null)); assertThat(metadata).has(Metadata.withHint("simple.the-name") .withValue(0, "boot", "Bla bla").withValue(1, "spring", null)); } @Test public void mergingOfHintWithNonCanonicalName() throws Exception { writeAdditionalHints(ItemHint.newHint("simple.theName", new ItemHint.ValueHint("boot", "Bla bla"))); ConfigurationMetadata metadata = compile(SimpleProperties.class); assertThat(metadata).has(Metadata.withProperty("simple.the-name", String.class) .fromSource(SimpleProperties.class) .withDescription("The name of this simple properties.") .withDefaultValue("boot").withDeprecation(null, null)); assertThat(metadata).has( Metadata.withHint("simple.the-name").withValue(0, "boot", "Bla bla")); } @Test public void mergingOfHintWithProvider() throws Exception { writeAdditionalHints(new ItemHint("simple.theName", Collections.<ItemHint.ValueHint>emptyList(), Arrays.asList( new ItemHint.ValueProvider("first", Collections.<String, Object>singletonMap("target", "org.foo")), new ItemHint.ValueProvider("second", null)))); ConfigurationMetadata metadata = compile(SimpleProperties.class); assertThat(metadata).has(Metadata.withProperty("simple.the-name", String.class) .fromSource(SimpleProperties.class) .withDescription("The name of this simple properties.") .withDefaultValue("boot").withDeprecation(null, null)); assertThat(metadata).has(Metadata.withHint("simple.the-name") .withProvider("first", "target", "org.foo").withProvider("second")); } @Test public void mergingOfAdditionalDeprecation() throws Exception { writePropertyDeprecation(ItemMetadata.newProperty("simple", "wrongName", "java.lang.String", null, null, null, null, new ItemDeprecation("Lame name.", "simple.the-name"))); ConfigurationMetadata metadata = compile(SimpleProperties.class); assertThat(metadata).has(Metadata.withProperty("simple.wrong-name", String.class) .withDeprecation("Lame name.", "simple.the-name")); } @Test public void mergingOfAdditionalMetadata() throws Exception { File metaInfFolder = new File(this.compiler.getOutputLocation(), "META-INF"); metaInfFolder.mkdirs(); File additionalMetadataFile = new File(metaInfFolder, "additional-spring-configuration-metadata.json"); additionalMetadataFile.createNewFile(); JSONObject property = new JSONObject(); property.put("name", "foo"); property.put("type", "java.lang.String"); property.put("sourceType", AdditionalMetadata.class.getName()); JSONArray properties = new JSONArray(); properties.put(property); JSONObject additionalMetadata = new JSONObject(); additionalMetadata.put("properties", properties); FileWriter writer = new FileWriter(additionalMetadataFile); writer.append(additionalMetadata.toString(2)); writer.flush(); writer.close(); ConfigurationMetadata metadata = compile(SimpleProperties.class); 
assertThat(metadata).has(Metadata.withProperty("simple.comparator")); assertThat(metadata).has(Metadata.withProperty("foo", String.class) .fromSource(AdditionalMetadata.class)); } @Test public void incrementalBuild() throws Exception { TestProject project = new TestProject(this.temporaryFolder, FooProperties.class, BarProperties.class); assertThat(project.getOutputFile(MetadataStore.METADATA_PATH).exists()).isFalse(); ConfigurationMetadata metadata = project.fullBuild(); assertThat(project.getOutputFile(MetadataStore.METADATA_PATH).exists()).isTrue(); assertThat(metadata).has( Metadata.withProperty("foo.counter").fromSource(FooProperties.class)); assertThat(metadata).has( Metadata.withProperty("bar.counter").fromSource(BarProperties.class)); metadata = project.incrementalBuild(BarProperties.class); assertThat(metadata).has( Metadata.withProperty("foo.counter").fromSource(FooProperties.class)); assertThat(metadata).has( Metadata.withProperty("bar.counter").fromSource(BarProperties.class)); project.addSourceCode(BarProperties.class, BarProperties.class.getResourceAsStream("BarProperties.snippet")); metadata = project.incrementalBuild(BarProperties.class); assertThat(metadata).has(Metadata.withProperty("bar.extra")); assertThat(metadata).has(Metadata.withProperty("foo.counter")); assertThat(metadata).has(Metadata.withProperty("bar.counter")); project.revert(BarProperties.class); metadata = project.incrementalBuild(BarProperties.class); assertThat(metadata).isNotEqualTo(Metadata.withProperty("bar.extra")); assertThat(metadata).has(Metadata.withProperty("foo.counter")); assertThat(metadata).has(Metadata.withProperty("bar.counter")); } @Test public void incrementalBuildAnnotationRemoved() throws Exception { TestProject project = new TestProject(this.temporaryFolder, FooProperties.class, BarProperties.class); ConfigurationMetadata metadata = project.fullBuild(); assertThat(metadata).has(Metadata.withProperty("foo.counter")); assertThat(metadata).has(Metadata.withProperty("bar.counter")); project.replaceText(BarProperties.class, "@ConfigurationProperties", "//@ConfigurationProperties"); metadata = project.incrementalBuild(BarProperties.class); assertThat(metadata).has(Metadata.withProperty("foo.counter")); assertThat(metadata).isNotEqualTo(Metadata.withProperty("bar.counter")); } @Test public void incrementalBuildTypeRenamed() throws Exception { TestProject project = new TestProject(this.temporaryFolder, FooProperties.class, BarProperties.class); ConfigurationMetadata metadata = project.fullBuild(); assertThat(metadata).has( Metadata.withProperty("foo.counter").fromSource(FooProperties.class)); assertThat(metadata).has( Metadata.withProperty("bar.counter").fromSource(BarProperties.class)); assertThat(metadata).doesNotHave(Metadata.withProperty("bar.counter") .fromSource(RenamedBarProperties.class)); project.delete(BarProperties.class); project.add(RenamedBarProperties.class); metadata = project.incrementalBuild(RenamedBarProperties.class); assertThat(metadata).has( Metadata.withProperty("foo.counter").fromSource(FooProperties.class)); assertThat(metadata).doesNotHave( Metadata.withProperty("bar.counter").fromSource(BarProperties.class)); assertThat(metadata).has(Metadata.withProperty("bar.counter") .fromSource(RenamedBarProperties.class)); } private void assertSimpleLombokProperties(ConfigurationMetadata metadata, Class<?> source, String prefix) { assertThat(metadata).has(Metadata.withGroup(prefix).fromSource(source)); assertThat(metadata).doesNotHave(Metadata.withProperty(prefix + ".id")); 
assertThat(metadata).has(Metadata.withProperty(prefix + ".name", String.class) .fromSource(source).withDescription("Name description.")); assertThat(metadata).has(Metadata.withProperty(prefix + ".description")); assertThat(metadata).has(Metadata.withProperty(prefix + ".counter")); assertThat(metadata).has(Metadata.withProperty(prefix + ".number") .fromSource(source).withDefaultValue(0).withDeprecation(null, null)); assertThat(metadata).has(Metadata.withProperty(prefix + ".items")); assertThat(metadata).doesNotHave(Metadata.withProperty(prefix + ".ignored")); } private ConfigurationMetadata compile(Class<?>... types) throws IOException { TestConfigurationMetadataAnnotationProcessor processor = new TestConfigurationMetadataAnnotationProcessor( this.compiler.getOutputLocation()); this.compiler.getTask(types).call(processor); return processor.getMetadata(); } private void writeAdditionalMetadata(ItemMetadata... metadata) throws Exception { TestJsonConverter converter = new TestJsonConverter(); File additionalMetadataFile = createAdditionalMetadataFile(); JSONObject additionalMetadata = new JSONObject(); JSONArray properties = new JSONArray(); for (ItemMetadata itemMetadata : metadata) { properties.put(converter.toJsonObject(itemMetadata)); } additionalMetadata.put("properties", properties); writeMetadata(additionalMetadataFile, additionalMetadata); } private void writeAdditionalHints(ItemHint... hints) throws Exception { TestJsonConverter converter = new TestJsonConverter(); File additionalMetadataFile = createAdditionalMetadataFile(); JSONObject additionalMetadata = new JSONObject(); additionalMetadata.put("hints", converter.toJsonArray(Arrays.asList(hints))); writeMetadata(additionalMetadataFile, additionalMetadata); } private void writePropertyDeprecation(ItemMetadata... items) throws Exception { File additionalMetadataFile = createAdditionalMetadataFile(); JSONArray propertiesArray = new JSONArray(); for (ItemMetadata item : items) { JSONObject jsonObject = new JSONObject(); jsonObject.put("name", item.getName()); if (item.getType() != null) { jsonObject.put("type", item.getType()); } ItemDeprecation deprecation = item.getDeprecation(); if (deprecation != null) { JSONObject deprecationJson = new JSONObject(); if (deprecation.getReason() != null) { deprecationJson.put("reason", deprecation.getReason()); } if (deprecation.getReplacement() != null) { deprecationJson.put("replacement", deprecation.getReplacement()); } jsonObject.put("deprecation", deprecationJson); } propertiesArray.put(jsonObject); } JSONObject additionalMetadata = new JSONObject(); additionalMetadata.put("properties", propertiesArray); writeMetadata(additionalMetadataFile, additionalMetadata); } private File createAdditionalMetadataFile() throws IOException { File metaInfFolder = new File(this.compiler.getOutputLocation(), "META-INF"); metaInfFolder.mkdirs(); File additionalMetadataFile = new File(metaInfFolder, "additional-spring-configuration-metadata.json"); additionalMetadataFile.createNewFile(); return additionalMetadataFile; } private void writeMetadata(File metadataFile, JSONObject metadata) throws Exception { try (FileWriter writer = new FileWriter(metadataFile)) { writer.append(metadata.toString(2)); } } private static class AdditionalMetadata { } }
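The tests above repeatedly compile a `SimpleProperties` fixture and assert that the generated metadata contains a deprecated `simple.the-name` property with the description "The name of this simple properties." and the default value `boot`. The actual fixture class ships with Spring Boot's test sources and is not reproduced in this file; the sketch below is only a plausible reconstruction of its shape (the package name and accessor layout are assumptions), meant to show how the annotation processor derives such entries from field Javadoc, field initializers and `@Deprecated` accessors.

```java
package org.example.configurationsample;

import org.springframework.boot.context.properties.ConfigurationProperties;

// Hypothetical stand-in for the SimpleProperties test fixture referenced above.
@ConfigurationProperties(prefix = "simple")
public class SimpleProperties {

	/**
	 * The name of this simple properties.
	 */
	private String theName = "boot"; // field initializer becomes the metadata default value

	@Deprecated // surfaces as withDeprecation(null, null): no reason, no replacement
	public String getTheName() {
		return this.theName;
	}

	@Deprecated
	public void setTheName(String theName) {
		this.theName = theName;
	}
}
```

The `mergingOf…` tests then layer hand-written entries from `META-INF/additional-spring-configuration-metadata.json` on top of whatever the processor discovers from classes shaped like this one.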
{ "content_hash": "bd1cd922dc70014859624dddbd42f2df", "timestamp": "", "source": "github", "line_count": 991, "max_line_length": 137, "avg_line_length": 46.962663975782036, "alnum_prop": 0.782917920068758, "repo_name": "mosoft521/spring-boot", "id": "f97243d12ae109c259a642c35b045f841665055f", "size": "47160", "binary": false, "copies": "3", "ref": "refs/heads/master", "path": "spring-boot-tools/spring-boot-configuration-processor/src/test/java/org/springframework/boot/configurationprocessor/ConfigurationMetadataAnnotationProcessorTests.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Batchfile", "bytes": "6942" }, { "name": "CSS", "bytes": "5769" }, { "name": "FreeMarker", "bytes": "2134" }, { "name": "Groovy", "bytes": "49512" }, { "name": "HTML", "bytes": "69585" }, { "name": "Java", "bytes": "11596341" }, { "name": "JavaScript", "bytes": "37789" }, { "name": "Ruby", "bytes": "1307" }, { "name": "Shell", "bytes": "27326" }, { "name": "Smarty", "bytes": "2885" }, { "name": "XSLT", "bytes": "34105" } ], "symlink_target": "" }
import { moduleForComponent, test } from 'ember-qunit'; import hbs from 'htmlbars-inline-precompile'; moduleForComponent('video-gallery', 'Integration | Component | video gallery', { integration: true }); test('it renders', function(assert) { // Set any properties with this.set('myProperty', 'value'); // Handle any actions with this.on('myAction', function(val) { ... }); this.render(hbs`{{video-gallery}}`); assert.equal(this.$().text().trim(), ''); // Template block usage: this.render(hbs` {{#video-gallery}} template block text {{/video-gallery}} `); assert.equal(this.$().text().trim(), 'template block text'); });
{ "content_hash": "a913bd52f12f5f1d617efb7d227b1764", "timestamp": "", "source": "github", "line_count": 24, "max_line_length": 80, "avg_line_length": 27.5, "alnum_prop": 0.6515151515151515, "repo_name": "gossi/trixionary-client", "id": "31e994bbb130676d465a3ce1d1dbfeb5173866a0", "size": "660", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "ember/tests/integration/components/video-gallery-test.js", "mode": "33188", "license": "mit", "language": [ { "name": "CSS", "bytes": "7362" }, { "name": "HTML", "bytes": "80435" }, { "name": "JavaScript", "bytes": "136988" }, { "name": "PHP", "bytes": "49758" } ], "symlink_target": "" }
export * from './auth.guard'; export * from './idea-owner-auth.guard';
{ "content_hash": "ddc527b55a03f62bc2770fe4ce18cf24", "timestamp": "", "source": "github", "line_count": 2, "max_line_length": 40, "avg_line_length": 35.5, "alnum_prop": 0.6619718309859155, "repo_name": "mayermi/Platypus", "id": "5c6e1cfbc5e32087532be24fce3c62b80f630992", "size": "73", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "app/_guards/index.ts", "mode": "33261", "license": "mit", "language": [ { "name": "CSS", "bytes": "25223" }, { "name": "HTML", "bytes": "28885" }, { "name": "JavaScript", "bytes": "1413" }, { "name": "TypeScript", "bytes": "54244" } ], "symlink_target": "" }
<?php /** * Created by PhpStorm. * User: root * Date: 8/30/16 * Time: 4:38 PM */ namespace Masca\PersonnelBundle\Type; use Symfony\Component\Form\AbstractType; use Symfony\Component\Form\Extension\Core\Type\ChoiceType; use Symfony\Component\Form\Extension\Core\Type\NumberType; use Symfony\Component\Form\FormBuilderInterface; use Symfony\Component\OptionsResolver\OptionsResolver; class SalaireType extends AbstractType { private $mois; private $years = []; /** * SalaireType constructor. * @param $mois */ public function __construct($mois) { $this->mois = $mois; $tempYears = range(date('Y')-4,date('Y')); foreach( $tempYears as $year) { $this->years[$year] = $year; } } public function buildForm(FormBuilderInterface $builder, array $options) { $builder ->add('annee', ChoiceType::class, [ 'label' => 'Année', 'choices_as_values'=> true, 'choices'=>$this->years, 'placeholder' => 'Choisissez' ]) ->add('mois', ChoiceType::class, [ 'label' => 'Mois', 'choices_as_values' => true, 'choices' => $this->mois, 'placeholder' => 'Choisissez...' ]) ->add('prime', NumberType::class, [ 'label'=>'Prime', 'required'=>false ]) ; } public function configureOptions(OptionsResolver $resolver) { $resolver->setDefaults([ 'data_class' => 'Masca\PersonnelBundle\Entity\Salaire' ]); } }
{ "content_hash": "f0c09974f661116281d335b708ec69a1", "timestamp": "", "source": "github", "line_count": 66, "max_line_length": 76, "avg_line_length": 25.303030303030305, "alnum_prop": 0.5401197604790419, "repo_name": "alpatrick9/MascaGroupe", "id": "0049b7c18bb559d89ac6b772a7a3d5d14d129056", "size": "1671", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Masca/PersonnelBundle/Type/SalaireType.php", "mode": "33188", "license": "mit", "language": [ { "name": "Batchfile", "bytes": "839" }, { "name": "CSS", "bytes": "7777" }, { "name": "HTML", "bytes": "216385" }, { "name": "JavaScript", "bytes": "3475" }, { "name": "PHP", "bytes": "648378" }, { "name": "Shell", "bytes": "3777" } ], "symlink_target": "" }
#include "AppUtils.h" #include <CoordinatorServer.h> int main(int argc, char** argv) { using namespace Wuild; ConfiguredApplication app(argc, argv, "WuildCoordinator", "coordinator"); CoordinatorServer::Config coordinatorConfig; if (!app.GetCoordinatorServerConfig(coordinatorConfig)) return 1; CoordinatorServer coordServer; if (!coordServer.SetConfig(coordinatorConfig)) return 1; coordServer.Start(); return ExecAppLoop(); }
{ "content_hash": "c1382aedd8cf3b4690ba08b67bbf1156", "timestamp": "", "source": "github", "line_count": 23, "max_line_length": 77, "avg_line_length": 21.043478260869566, "alnum_prop": 0.7086776859504132, "repo_name": "mapron/Wuild", "id": "2d5329f36643568bb8ec21bc6fc6a35fc63c9911", "size": "1144", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/Apps/Coordinator/WuildCoordinator.cpp", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "C", "bytes": "2399" }, { "name": "C++", "bytes": "514903" }, { "name": "CMake", "bytes": "6915" } ], "symlink_target": "" }
<div ng-controller="FabricRequirements.SshConfigController"> <div class="row-fluid"> <div class="span12"> <h3>Defaults</h3> <div hawtio-form name="sshConfig" data="formConfig" entity="requirements.sshConfiguration" onSubmit="noop()"></div> </div> </div> <script type="text/ng-template" id="noDataTemplate"> <div class="hero-unit"> <h5>No SSH Hosts Configured</h5> <p><a href="" ng-click="addThing()">Add a host</a> to configure the SSH hosts that the fabric8 auto-scaler can use to deploy containers on.</p> </div> </script> <script type="text/ng-template" id="tableTemplate"> <div hawtio-form-grid="gridConfig"></div> </script> <div compile="tableTemplate"></div> </div>
{ "content_hash": "9962e773ac6c4dc79fb3cb31f3271cd6", "timestamp": "", "source": "github", "line_count": 26, "max_line_length": 149, "avg_line_length": 29.923076923076923, "alnum_prop": 0.6233933161953727, "repo_name": "oscerd/hawtio", "id": "af32d25d78b3c7cbf37fbb32832036585c658fd3", "size": "778", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "hawtio-web/src/main/webapp/app/fabric-requirements/html/sshConfig.html", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
/**
 * @fileoverview Tests for options.
 * @author George Zahariev
 */

"use strict";

//------------------------------------------------------------------------------
// Requirements
//------------------------------------------------------------------------------

const assert = require("chai").assert,
    options = require("../../lib/options");

//------------------------------------------------------------------------------
// Tests
//------------------------------------------------------------------------------

/*
 * This is testing the interface of the options object.
 */
describe("options", () => {
    describe("--help", () => {
        it("should return true for .help when passed", () => {
            const currentOptions = options.parse("--help");

            assert.isTrue(currentOptions.help);
        });
    });

    describe("-h", () => {
        it("should return true for .help when passed", () => {
            const currentOptions = options.parse("-h");

            assert.isTrue(currentOptions.help);
        });
    });

    describe("--config", () => {
        it("should return a string for .config when passed a string", () => {
            const currentOptions = options.parse("--config file");

            assert.isString(currentOptions.config);
            assert.strictEqual(currentOptions.config, "file");
        });
    });

    describe("-c", () => {
        it("should return a string for .config when passed a string", () => {
            const currentOptions = options.parse("-c file");

            assert.isString(currentOptions.config);
            assert.strictEqual(currentOptions.config, "file");
        });
    });

    describe("--ext", () => {
        it("should return an array with one item when passed .jsx", () => {
            const currentOptions = options.parse("--ext .jsx");

            assert.isArray(currentOptions.ext);
            assert.strictEqual(currentOptions.ext[0], ".jsx");
        });

        it("should return an array with two items when passed .js and .jsx", () => {
            const currentOptions = options.parse("--ext .jsx --ext .js");

            assert.isArray(currentOptions.ext);
            assert.strictEqual(currentOptions.ext[0], ".jsx");
            assert.strictEqual(currentOptions.ext[1], ".js");
        });

        it("should return an array with two items when passed .jsx,.js", () => {
            const currentOptions = options.parse("--ext .jsx,.js");

            assert.isArray(currentOptions.ext);
            assert.strictEqual(currentOptions.ext[0], ".jsx");
            assert.strictEqual(currentOptions.ext[1], ".js");
        });

        it("should return an array with one item when not passed", () => {
            const currentOptions = options.parse("");

            assert.isArray(currentOptions.ext);
            assert.strictEqual(currentOptions.ext[0], ".js");
        });
    });

    describe("--rulesdir", () => {
        it("should return an array for .rulesdir when passed a string", () => {
            const currentOptions = options.parse("--rulesdir /morerules");

            assert.isArray(currentOptions.rulesdir);
            assert.deepStrictEqual(currentOptions.rulesdir, ["/morerules"]);
        });
    });

    describe("--format", () => {
        it("should return a string for .format when passed a string", () => {
            const currentOptions = options.parse("--format compact");

            assert.isString(currentOptions.format);
            assert.strictEqual(currentOptions.format, "compact");
        });

        it("should return stylish for .format when not passed", () => {
            const currentOptions = options.parse("");

            assert.isString(currentOptions.format);
            assert.strictEqual(currentOptions.format, "stylish");
        });
    });

    describe("-f", () => {
        it("should return a string for .format when passed a string", () => {
            const currentOptions = options.parse("-f compact");

            assert.isString(currentOptions.format);
            assert.strictEqual(currentOptions.format, "compact");
        });
    });

    describe("--version", () => {
        it("should return true for .version when passed", () => {
            const currentOptions = options.parse("--version");

assert.isTrue(currentOptions.version); }); }); describe("-v", () => { it("should return true for .version when passed", () => { const currentOptions = options.parse("-v"); assert.isTrue(currentOptions.version); }); }); describe("when asking for help", () => { it("should return string of help text when called", () => { const helpText = options.generateHelp(); assert.isString(helpText); }); }); describe("--no-ignore", () => { it("should return false for .ignore when passed", () => { const currentOptions = options.parse("--no-ignore"); assert.isFalse(currentOptions.ignore); }); }); describe("--ignore-path", () => { it("should return a string for .ignorePath when passed", () => { const currentOptions = options.parse("--ignore-path .gitignore"); assert.strictEqual(currentOptions.ignorePath, ".gitignore"); }); }); describe("--ignore-pattern", () => { it("should return a string array for .ignorePattern when passed", () => { const currentOptions = options.parse("--ignore-pattern *.js"); assert.ok(currentOptions.ignorePattern); assert.strictEqual(currentOptions.ignorePattern.length, 1); assert.strictEqual(currentOptions.ignorePattern[0], "*.js"); }); it("should return a string array for multiple values", () => { const currentOptions = options.parse("--ignore-pattern *.js --ignore-pattern *.ts"); assert.ok(currentOptions.ignorePattern); assert.strictEqual(currentOptions.ignorePattern.length, 2); assert.strictEqual(currentOptions.ignorePattern[0], "*.js"); assert.strictEqual(currentOptions.ignorePattern[1], "*.ts"); }); it("should return a string array of properly parsed values, when those values include commas", () => { const currentOptions = options.parse("--ignore-pattern *.js --ignore-pattern foo-{bar,baz}.js"); assert.ok(currentOptions.ignorePattern); assert.strictEqual(currentOptions.ignorePattern.length, 2); assert.strictEqual(currentOptions.ignorePattern[0], "*.js"); assert.strictEqual(currentOptions.ignorePattern[1], "foo-{bar,baz}.js"); }); }); describe("--color", () => { it("should return true for .color when passed --color", () => { const currentOptions = options.parse("--color"); assert.isTrue(currentOptions.color); }); it("should return false for .color when passed --no-color", () => { const currentOptions = options.parse("--no-color"); assert.isFalse(currentOptions.color); }); }); describe("--stdin", () => { it("should return true for .stdin when passed", () => { const currentOptions = options.parse("--stdin"); assert.isTrue(currentOptions.stdin); }); }); describe("--stdin-filename", () => { it("should return a string for .stdinFilename when passed", () => { const currentOptions = options.parse("--stdin-filename test.js"); assert.strictEqual(currentOptions.stdinFilename, "test.js"); }); }); describe("--global", () => { it("should return an array for a single occurrence", () => { const currentOptions = options.parse("--global foo"); assert.isArray(currentOptions.global); assert.strictEqual(currentOptions.global.length, 1); assert.strictEqual(currentOptions.global[0], "foo"); }); it("should split variable names using commas", () => { const currentOptions = options.parse("--global foo,bar"); assert.isArray(currentOptions.global); assert.strictEqual(currentOptions.global.length, 2); assert.strictEqual(currentOptions.global[0], "foo"); assert.strictEqual(currentOptions.global[1], "bar"); }); it("should not split on colons", () => { const currentOptions = options.parse("--global foo:false,bar:true"); assert.isArray(currentOptions.global); assert.strictEqual(currentOptions.global.length, 2); 
assert.strictEqual(currentOptions.global[0], "foo:false");
            assert.strictEqual(currentOptions.global[1], "bar:true");
        });

        it("should concatenate successive occurrences", () => {
            const currentOptions = options.parse("--global foo:true --global bar:false");

            assert.isArray(currentOptions.global);
            assert.strictEqual(currentOptions.global.length, 2);
            assert.strictEqual(currentOptions.global[0], "foo:true");
            assert.strictEqual(currentOptions.global[1], "bar:false");
        });
    });

    describe("--plugin", () => {
        it("should return an array when passed a single occurrence", () => {
            const currentOptions = options.parse("--plugin single");

            assert.isArray(currentOptions.plugin);
            assert.strictEqual(currentOptions.plugin.length, 1);
            assert.strictEqual(currentOptions.plugin[0], "single");
        });

        it("should return an array when passed a comma-delimited string", () => {
            const currentOptions = options.parse("--plugin foo,bar");

            assert.isArray(currentOptions.plugin);
            assert.strictEqual(currentOptions.plugin.length, 2);
            assert.strictEqual(currentOptions.plugin[0], "foo");
            assert.strictEqual(currentOptions.plugin[1], "bar");
        });

        it("should return an array when passed multiple times", () => {
            const currentOptions = options.parse("--plugin foo --plugin bar");

            assert.isArray(currentOptions.plugin);
            assert.strictEqual(currentOptions.plugin.length, 2);
            assert.strictEqual(currentOptions.plugin[0], "foo");
            assert.strictEqual(currentOptions.plugin[1], "bar");
        });
    });

    describe("--quiet", () => {
        it("should return true for .quiet when passed", () => {
            const currentOptions = options.parse("--quiet");

            assert.isTrue(currentOptions.quiet);
        });
    });

    describe("--max-warnings", () => {
        it("should return correct value for .maxWarnings when passed", () => {
            const currentOptions = options.parse("--max-warnings 10");

            assert.strictEqual(currentOptions.maxWarnings, 10);
        });

        it("should return -1 for .maxWarnings when not passed", () => {
            const currentOptions = options.parse("");

            assert.strictEqual(currentOptions.maxWarnings, -1);
        });

        it("should throw an error when supplied with a non-integer", () => {
            assert.throws(() => {
                options.parse("--max-warnings 10.2");
            }, /Invalid value for option 'max-warnings' - expected type Int/);
        });
    });

    describe("--init", () => {
        it("should return true for --init when passed", () => {
            const currentOptions = options.parse("--init");

            assert.isTrue(currentOptions.init);
        });
    });

    describe("--fix", () => {
        it("should return true for --fix when passed", () => {
            const currentOptions = options.parse("--fix");

            assert.isTrue(currentOptions.fix);
        });
    });

    describe("--debug", () => {
        it("should return true for --debug when passed", () => {
            const currentOptions = options.parse("--debug");

            assert.isTrue(currentOptions.debug);
        });
    });

    describe("--inline-config", () => {
        it("should return false when passed --no-inline-config", () => {
            const currentOptions = options.parse("--no-inline-config");

            assert.isFalse(currentOptions.inlineConfig);
        });

        it("should return true for --inline-config when empty", () => {
            const currentOptions = options.parse("");

            assert.isTrue(currentOptions.inlineConfig);
        });
    });

    describe("--parser", () => {
        it("should return a string for --parser when passed", () => {
            const currentOptions = options.parse("--parser test");

            assert.strictEqual(currentOptions.parser, "test");
        });
    });

    describe("--print-config", () => {
        it("should return file path when passed --print-config", () => {
            const currentOptions = options.parse("--print-config file.js");

            assert.strictEqual(currentOptions.printConfig, "file.js");
        });
    });
});
{ "content_hash": "a991a91cdd7d3f259d111976013b83e3", "timestamp": "", "source": "github", "line_count": 368, "max_line_length": 110, "avg_line_length": 35.671195652173914, "alnum_prop": 0.5545821589091187, "repo_name": "ljharb/eslint", "id": "26a84b4a090d33279c9329815f1ded4739c60e68", "size": "13127", "binary": false, "copies": "5", "ref": "refs/heads/master", "path": "tests/lib/options.js", "mode": "33188", "license": "mit", "language": [ { "name": "HTML", "bytes": "8120" }, { "name": "JavaScript", "bytes": "7941918" } ], "symlink_target": "" }
if [ $# -lt 3 ] || [ $1 == --help ] || [ $1 == -h ]; then echo "Usage: $0 rpmdir login ip [outdir] [identity]" exit 1 fi rpmdir=$1 remote_login=$2 remote_ip=$3 if [ $# -gt 3 ]; then outdir=$4 mkdir -p $outdir else outdir=. fi if [ $# -gt 4 ]; then identity=$5 else identity=$HOME/.ssh/id_rsa fi alias ssh='ssh -i $identity' alias scp='scp -i $identity' basedir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" rm -f all_tests.xml pushd $rpmdir nkrpm=`ls -t memkind-devel*.rpm | head -n1` jerpm=`ls -t jemalloc-devel*.rpm | head -n1` scp $nkrpm $jerpm $remote_login@$remote_ip: popd scp $basedir/.libs/all_tests $remote_login@$remote_ip: scp $basedir/.libs/environerr_test $remote_login@$remote_ip: scp $basedir/.libs/mallctlerr_test $remote_login@$remote_ip: scp $basedir/.libs/mallocerr_test $remote_login@$remote_ip: scp $basedir/.libs/pmtterr_test $remote_login@$remote_ip: scp $basedir/.libs/schedcpu_test $remote_login@$remote_ip: scp $basedir/.libs/tieddisterr_test $remote_login@$remote_ip: scp $basedir/.libs/pmtterr_test $remote_login@$remote_ip: scp $basedir/mock-pmtt.txt $remote_login@$remote_ip:/tmp/ scp $basedir/libfopen.so $remote_login@$remote_ip: scp $basedir/libmallctl.so $remote_login@$remote_ip: scp $basedir/libmalloc.so $remote_login@$remote_ip: scp $basedir/libnumadist.so $remote_login@$remote_ip: scp $basedir/libsched.so $remote_login@$remote_ip: scp $basedir/.libs/hello_memkind $remote_login@$remote_ip: scp $basedir/.libs/hello_hbw $remote_login@$remote_ip: scp $basedir/.libs/filter_memkind $remote_login@$remote_ip: scp $basedir/.libs/stream $remote_login@$remote_ip: scp $basedir/.libs/stream_memkind $remote_login@$remote_ip: scp $basedir/.libs/new_kind $remote_login@$remote_ip: scp $basedir/.libs/gb_realloc $remote_login@$remote_ip: scp $basedir/test.sh $remote_login@$remote_ip: ssh root@$remote_ip "rpm -e memkind-devel >& /dev/null" ssh root@$remote_ip "rpm -e jemalloc-devel >& /dev/null" ssh root@$remote_ip "rpm -i ~$remote_login/$nkrpm ~$remote_login/$jerpm" ssh root@$remote_ip "echo 4000 > /proc/sys/vm/nr_hugepages" ssh root@$remote_ip "echo 4000 > /proc/sys/vm/nr_overcommit_hugepages" ssh $remote_login@$remote_ip "if [ -e /opt/mpss/coverage/memkind.cov ]; then mkdir -p gtest_output; cp /opt/mpss/coverage/memkind.cov gtest_output; fi" ssh $remote_login@$remote_ip "COVFILE=gtest_output/memkind.cov ./test.sh --gtest_output=xml:gtest_output/" 2>&1| tee $outdir/test.out err=${PIPESTATUS[0]} scp $remote_login@$remote_ip:gtest_output/\* $outdir exit $err
{ "content_hash": "56f3912d992c55fac27e3a571ace797f", "timestamp": "", "source": "github", "line_count": 68, "max_line_length": 151, "avg_line_length": 37.4264705882353, "alnum_prop": 0.7056974459724951, "repo_name": "cmcantalupo/memkind", "id": "3ab4167c5474d99a9d0358d6468b04581187339e", "size": "3911", "binary": false, "copies": "1", "ref": "refs/heads/dev", "path": "test/test_remote.sh", "mode": "33261", "license": "bsd-2-clause", "language": [ { "name": "C", "bytes": "332112" }, { "name": "C++", "bytes": "103513" }, { "name": "Makefile", "bytes": "16065" }, { "name": "Shell", "bytes": "17237" } ], "symlink_target": "" }
SYNONYM #### According to The Catalogue of Life, 3rd January 2011 #### Published in null #### Original name null ### Remarks null
{ "content_hash": "cc0d6fa5f1d6f97804f2f0b73d290647", "timestamp": "", "source": "github", "line_count": 13, "max_line_length": 39, "avg_line_length": 10.23076923076923, "alnum_prop": 0.6917293233082706, "repo_name": "mdoering/backbone", "id": "7f42255c313745593de76b36161a610f681874a1", "size": "175", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "life/Plantae/Magnoliophyta/Liliopsida/Asparagales/Iridaceae/Cipura/Cipura paludosa/ Syn. Cipura major/README.md", "mode": "33188", "license": "apache-2.0", "language": [], "symlink_target": "" }
package nl.tudelft.jpacman.util;

import java.util.TimerTask;

/**
 * A {@link TimerTask} that can be cloned.
 * @author Corentin Ducruet
 */
public abstract class TimerTaskCloneable extends TimerTask implements Cloneable {

    public TimerTaskCloneable() {
        super();
    }

    /**
     * @return a shallow copy of this task, created via {@link Object#clone()}
     * @throws CloneNotSupportedException if cloning fails
     */
    @Override
    public TimerTaskCloneable clone() throws CloneNotSupportedException {
        return (TimerTaskCloneable) super.clone();
    }
}
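Since the class carries no usage notes, here is a minimal, self-contained sketch of how such a cloneable timer task might be used; the `MoveTask` subclass, the demo class and the delays are hypothetical and not taken from the JPacman sources. The reason cloning is useful at all is that a `java.util.TimerTask` instance can only be handed to a `Timer` once, so keeping an unscheduled prototype and scheduling fresh clones lets the same task definition be reused.

```java
import java.util.Timer;

import nl.tudelft.jpacman.util.TimerTaskCloneable;

public class CloneableTaskDemo {

    // Hypothetical task; JPacman's real timer tasks live elsewhere in the code base.
    static class MoveTask extends TimerTaskCloneable {
        @Override
        public void run() {
            System.out.println("move one step");
        }
    }

    public static void main(String[] args) throws Exception {
        // Keep the prototype unscheduled: Object.clone() copies TimerTask's internal
        // scheduling state, so only clones of a never-scheduled task stay schedulable.
        TimerTaskCloneable prototype = new MoveTask();

        Timer timer = new Timer();
        timer.schedule(prototype.clone(), 0L);   // runs immediately
        timer.schedule(prototype.clone(), 500L); // runs again half a second later

        Thread.sleep(1000L);
        timer.cancel();
    }
}
```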
{ "content_hash": "c55f10abc7c363d9774c0b3e0129d344", "timestamp": "", "source": "github", "line_count": 19, "max_line_length": 80, "avg_line_length": 21.157894736842106, "alnum_prop": 0.7189054726368159, "repo_name": "RueRivoli/jpacman-framework", "id": "6df6a8bc2bd631ddb647579322e5d707845fe2cd", "size": "402", "binary": false, "copies": "1", "ref": "refs/heads/master", "path": "src/main/java/nl/tudelft/jpacman/util/TimerTaskCloneable.java", "mode": "33188", "license": "apache-2.0", "language": [ { "name": "Java", "bytes": "184425" }, { "name": "TeX", "bytes": "22564" } ], "symlink_target": "" }
id: a77dbc43c33f39daa4429b4f
title: Identify true or false
challengeType: 5
forumTopicId: 16000
dashedName: boo-who
---

# --description--

Check whether a value is classified as a boolean primitive. Return `true` or `false`.

Boolean primitives are `true` and `false`.

# --hints--

`booWho(true)` should return `true`.

```js
assert.strictEqual(booWho(true), true);
```

`booWho(false)` should return `true`.

```js
assert.strictEqual(booWho(false), true);
```

`booWho([1,2,3])` should return `false`.

```js
assert.strictEqual(booWho([1, 2, 3]), false);
```

`booWho([].slice)` should return `false`.

```js
assert.strictEqual(booWho([].slice), false);
```

`booWho({"a": 1})` should return `false`.

```js
assert.strictEqual(booWho({ a: 1 }), false);
```

`booWho(1)` should return `false`.

```js
assert.strictEqual(booWho(1), false);
```

`booWho(NaN)` should return `false`.

```js
assert.strictEqual(booWho(NaN), false);
```

`booWho("a")` should return `false`.

```js
assert.strictEqual(booWho('a'), false);
```

`booWho("true")` should return `false`.

```js
assert.strictEqual(booWho('true'), false);
```

`booWho("false")` should return `false`.

```js
assert.strictEqual(booWho('false'), false);
```

# --seed--

## --seed-contents--

```js
function booWho(bool) {
  return bool;
}

booWho(null);
```

# --solutions--

```js
function booWho(bool) {
  return typeof bool === "boolean";
}

booWho(null);
```
{ "content_hash": "02748bd9a20b2af31b758d15ae96d366", "timestamp": "", "source": "github", "line_count": 96, "max_line_length": 88, "avg_line_length": 14.958333333333334, "alnum_prop": 0.6518105849582173, "repo_name": "FreeCodeCamp/FreeCodeCamp", "id": "449aab09fd8d60083a0b162597079ac26070a71d", "size": "1442", "binary": false, "copies": "2", "ref": "refs/heads/i18n-sync-client", "path": "curriculum/challenges/portuguese/02-javascript-algorithms-and-data-structures/basic-algorithm-scripting/boo-who.md", "mode": "33188", "license": "bsd-3-clause", "language": [ { "name": "CSS", "bytes": "190263" }, { "name": "HTML", "bytes": "160430" }, { "name": "JavaScript", "bytes": "546299" } ], "symlink_target": "" }
using System;
using System.Collections.Generic;
using System.Linq;
using UnityEditor;
using UnityEngine;

namespace UnityTest
{
    class IntegrationTestGroupLine : IntegrationTestRendererBase
    {
        public static List<GameObject> FoldMarkers;

        private IntegrationTestRendererBase[] m_Children;

        public IntegrationTestGroupLine(GameObject gameObject) : base(gameObject)
        {
        }

        protected internal override void DrawLine(Rect rect, GUIContent label, bool isSelected, RenderingOptions options)
        {
            EditorGUILayout.BeginHorizontal();
            EditorGUI.BeginChangeCheck();
            var isClassFolded = !EditorGUI.Foldout(rect, !Folded, label, isSelected ? Styles.selectedFoldout : Styles.foldout);
            if (EditorGUI.EndChangeCheck()) Folded = isClassFolded;
            EditorGUILayout.EndHorizontal();
        }

        private bool Folded
        {
            get
            {
                return FoldMarkers.Contains(m_GameObject);
            }

            set
            {
                if (value) FoldMarkers.Add(m_GameObject);
                else FoldMarkers.RemoveAll(s => s == m_GameObject);
            }
        }

        protected internal override void Render(int indent, RenderingOptions options)
        {
            base.Render(indent, options);
            if (!Folded)
                foreach (var child in m_Children)
                    child.Render(indent + 1, options);
        }

        protected internal override TestResult.ResultType GetResult()
        {
            bool ignored = false;
            bool success = false;

            foreach (var child in m_Children)
            {
                var result = child.GetResult();

                if (result == TestResult.ResultType.Failed || result == TestResult.ResultType.FailedException || result == TestResult.ResultType.Timeout)
                    return TestResult.ResultType.Failed;
                if (result == TestResult.ResultType.Success)
                    success = true;
                else if (result == TestResult.ResultType.Ignored)
                    ignored = true;
                else
                    ignored = false;
            }

            if (success) return TestResult.ResultType.Success;
            if (ignored) return TestResult.ResultType.Ignored;
            return TestResult.ResultType.NotRun;
        }

        protected internal override bool IsVisible(RenderingOptions options)
        {
            return m_Children.Any(c => c.IsVisible(options));
        }

        public override bool SetCurrentTest(TestComponent tc)
        {
            m_IsRunning = false;
            foreach (var child in m_Children)
                m_IsRunning |= child.SetCurrentTest(tc);
            return m_IsRunning;
        }

        public void AddChildren(IntegrationTestRendererBase[] parseTestList)
        {
            m_Children = parseTestList;
        }
    }
}
{ "content_hash": "a3ed0bccd562b8f307f3f78bc431d740", "timestamp": "", "source": "github", "line_count": 88, "max_line_length": 153, "avg_line_length": 33.30681818181818, "alnum_prop": 0.5851245308768338, "repo_name": "SavantCat/UNODE", "id": "5301958ad4ee931680b691be9524329913267d59", "size": "2931", "binary": false, "copies": "6", "ref": "refs/heads/v1.1", "path": "unodeV1.1/Assets/UnityTestTools/IntegrationTestsFramework/TestRunner/Editor/Renderer/IntegrationTestGroupLine.cs", "mode": "33188", "license": "mit", "language": [ { "name": "C", "bytes": "23547" }, { "name": "C#", "bytes": "436893" }, { "name": "JavaScript", "bytes": "109621" } ], "symlink_target": "" }