prompt
large_stringlengths
70
991k
completion
large_stringlengths
0
1.02k
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use std::iter::repeat; use std::io::Write; pub mod export; pub mod import; pub fn print_progress(noun: &str, start_time: ::time::Timespec, done: usize, total: usize) { let remaining_jobs = total - done; let progress: f64 = 100f64 * done as f64 / total as f64; let current_time = ::time::get_time().sec; let time_per_job = (current_time - start_time.sec) as f64 / done as f64; let remaining_time = time_per_job * remaining_jobs as f64; print!("\r{} {}/{} complete\t{:.2}% [{}]", noun, done, total, progress, ::util::make_progress_bar(progress / 100.0, 20)<|fim▁hole|> if remaining_jobs == 0 { println!(" (took {:.2} min) ", (current_time - start_time.sec) as f64 / 60.0); } else { print!(" ETA {:.2} min ", remaining_time / 60.0); ::std::io::stdout().flush().ok().expect("failed to flush io"); } } fn make_progress_bar(ratio: f64, length: usize) -> String { let filled = (ratio * length as f64).round() as usize; let mut bar: String = repeat('|').take(filled).collect(); for _ in 0..(length - filled) { bar.push('-'); } bar }<|fim▁end|>
);
<|file_name|>InputFragment.java<|end_file_name|><|fim▁begin|>package com.authpro.imageauthentication; import android.app.Fragment; import android.content.res.Resources; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.os.Bundle; import android.support.annotation.Nullable; import android.util.Base64; import android.util.Pair; import android.view.DragEvent; import android.view.LayoutInflater; import android.view.MotionEvent; import android.view.SoundEffectConstants; import android.view.View; import android.view.ViewGroup; import android.widget.EditText; import android.widget.ImageButton; import android.widget.LinearLayout; import android.widget.TextView; import java.util.ArrayList; import java.util.Collections; import static junit.framework.Assert.*; public class InputFragment extends Fragment implements ICallbackable<HttpResult> { private final int imageCount = 30; private int rowCount, columnCount; private ArrayList<Pair<Integer, Integer>> input = new ArrayList<>(); private TextView textView; private View initialButton = null; private ImageButton[][] imageButtons; private String[] imageHashes; private Bitmap[] images; private ArrayList<Integer> permutation; @Nullable @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View view = inflater.inflate(R.layout.fragment_input, container, true); this.textView = (EditText)view.findViewById(R.id.textView); getDimensions(); setupButtons(view); fetchImages(); return view; } private void getDimensions() { Resources resources = getResources(); int columnCountResID = R.integer.gridColumnCount, rowCountResID = R.integer.gridRowCount; this.columnCount = resources.getInteger(columnCountResID); this.rowCount = resources.getInteger(rowCountResID); assertEquals(rowCount * columnCount, imageCount); } private void setupButtons(View view) {<|fim▁hole|> final int realRowCount = gridLayout.getChildCount(); assertEquals(realRowCount, rowCount); 
this.imageButtons = new ImageButton[rowCount][columnCount]; for (int i = 0; i < rowCount; i++) { final View child = gridLayout.getChildAt(i); assertTrue(child instanceof LinearLayout); LinearLayout row = (LinearLayout) child; final int realColumnCount = row.getChildCount(); assertEquals(realColumnCount, columnCount); for (int j = 0; j < columnCount; j++) { final View cell = ((ViewGroup)row.getChildAt(j)).getChildAt(0); assertTrue(cell instanceof ImageButton); final ImageButton imageButton = (ImageButton)cell; final int index = i * columnCount + j; imageButton.setTag(index); imageButtons[i][j] = imageButton; setupForDragEvent(cell); } } } private void setupForDragEvent(View view) { view.setOnTouchListener(new View.OnTouchListener() { @Override public boolean onTouch(View v, MotionEvent event) { assertTrue(v instanceof ImageButton); int action = event.getAction() & MotionEvent.ACTION_MASK; switch (action) { case MotionEvent.ACTION_DOWN: assertNull(initialButton); // TODO: Fix the bug when this is not null sometimes. 
initialButton = v; View.DragShadowBuilder shadow = new View.DragShadowBuilder(); v.startDrag(null, shadow, null, 0); v.setPressed(true); break; case MotionEvent.ACTION_MOVE: case MotionEvent.ACTION_UP: case MotionEvent.ACTION_CANCEL: break; } return true; } }); view.setOnDragListener(new View.OnDragListener() { @Override public boolean onDrag(View v, DragEvent event) { int action = event.getAction(); switch (action) { case DragEvent.ACTION_DRAG_ENTERED: if (v != initialButton) v.setPressed(true); break; case DragEvent.ACTION_DRAG_EXITED: if (v != initialButton) v.setPressed(false); break; case DragEvent.ACTION_DROP: assertNotNull(initialButton); int firstIndex = (int)initialButton.getTag(), secondIndex = (int)v.getTag(); addInput(firstIndex, secondIndex); v.setPressed(false); v.playSoundEffect(SoundEffectConstants.CLICK); break; case DragEvent.ACTION_DRAG_ENDED: if (v == initialButton) { initialButton.setPressed(false); initialButton = null; } break; } return true; } }); } private void addInput(int firstIndex, int secondIndex) { input.add(new Pair<>(permutation.get(firstIndex), permutation.get(secondIndex))); textView.append("*"); assertEquals(textView.length(), input.size()); } private void fetchImages() { HttpMethod method = HttpMethod.GET; String url = Config.API_URL + "api/images"; HttpTask task = new HttpTask(this, method, url); task.execute(); } public void callback(HttpResult result) { HttpStatus status = result.getStatus(); switch (status) { case OK: String data = result.getContent(); setImages(data); break; default: // Silently fail. 
} } private void setImages(String data) { images = new Bitmap[imageCount]; imageHashes = new String[imageCount]; permutation = new ArrayList<>(imageCount); String[] base64Strings = data.split("\n"); for (int i = 0; i < rowCount; i++) for (int j = 0; j < columnCount; j++) { int index = i * columnCount + j; permutation.add(index); String base64 = base64Strings[index]; images[index] = fromBase64(base64); imageHashes[index] = Utils.computeHash(base64); } shuffle(); } public void shuffle() { Collections.shuffle(permutation); for (int i = 0; i < rowCount; i++) for (int j = 0; j < columnCount; j++) { ImageButton imageButton = imageButtons[i][j]; int index = i * columnCount + j; Bitmap image = images[permutation.get(index)]; imageButton.setImageBitmap(image); } } private Bitmap fromBase64(String base64) { byte[] bytes = Base64.decode(base64, Base64.DEFAULT); return BitmapFactory.decodeByteArray(bytes, 0, bytes.length); } public void clear() { input = new ArrayList<>(); textView.setText(""); } public String getInputString() // Should use char[] instead, for security reasons. { StringBuilder output = new StringBuilder(); for (Pair<Integer, Integer> pair : this.input) { if (pair.first.equals(pair.second)) output.append(imageHashes[pair.first]).append("_"); else output.append(imageHashes[pair.first]).append("+").append(imageHashes[pair.second]).append("_"); } return output.toString(); } }<|fim▁end|>
final ViewGroup gridLayout = (ViewGroup)view.findViewById(R.id.rows);
<|file_name|>fetchDealsData.js<|end_file_name|><|fim▁begin|>import { dealsService } from '../services'; const fetchDealsData = () => { return dealsService().getDeals() .then(res => { return res.data<|fim▁hole|> }) // Returning [] as a placeholder now so it does not error out when this service // fails. We should be handling this in our DISPATCH_REQUEST_FAILURE .catch(() => []); }; export default fetchDealsData;<|fim▁end|>
<|file_name|>bitcoin_ca_ES.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="ca_ES" version="2.1"> <context> <name>AboutDialog</name> <message> <location filename="../forms/aboutdialog.ui" line="+14"/> <source>About StarCoin</source> <translation>Sobre StarCoin</translation> </message> <message> <location line="+39"/> <source>&lt;b&gt;StarCoin&lt;/b&gt; version</source> <translation>versió &lt;b&gt;StarCoin&lt;/b&gt;</translation> </message> <message> <location line="+41"/> <source>Copyright © 2009-2014 The Bitcoin developers Copyright © 2012-2014 The NovaCoin developers Copyright © 2014 The StarCoin developers</source> <translation>Copyright © 2009-2014 The Bitcoin developers Copyright © 2012-2014 The NovaCoin developers Copyright © 2014 The StarCoin developers</translation> </message> <message> <location line="+15"/> <source> This is experimental software. Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php. 
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source> <translation>\n Aquest és software experimental.\n\n Distribuït sota llicència de software MIT/11, veure l&apos;arxiu COPYING o http://www.opensource.org/licenses/mit-license.php.\n\nAquest producte inclou software desarrollat pel projecte OpenSSL per a l&apos;ús de OppenSSL Toolkit (http://www.openssl.org/) i de software criptogràfic escrit per l&apos;Eric Young ([email protected]) i software UPnP escrit per en Thomas Bernard.</translation> </message> </context> <context> <name>AddressBookPage</name> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>Address Book</source> <translation>Llibreta d&apos;adreces</translation> </message> <message> <location line="+22"/> <source>Double-click to edit address or label</source> <translation>Feu doble clic per editar l&apos;adreça o l&apos;etiqueta</translation> </message> <message> <location line="+27"/> <source>Create a new address</source> <translation>Crear una nova adreça</translation> </message> <message> <location line="+14"/> <source>Copy the currently selected address to the system clipboard</source> <translation>Copiar l&apos;adreça seleccionada al porta-retalls del sistema</translation> </message> <message> <location line="-11"/> <source>&amp;New Address</source> <translation>&amp;Nova adreça</translation> </message> <message> <location line="-46"/> <source>These are your StarCoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source> <translation>Aquestes són les teves adreces de StarCoin per rebre els pagaments. 
És possible que vulgueu donar una diferent a cada remitent per a poder realitzar un seguiment de qui li está pagant.</translation> </message> <message> <location line="+60"/> <source>&amp;Copy Address</source> <translation>&amp;Copiar adreça</translation> </message> <message> <location line="+11"/> <source>Show &amp;QR Code</source> <translation>Mostra el códi &amp;QR</translation> </message> <message> <location line="+11"/> <source>Sign a message to prove you own a StarCoin address</source> <translation>Signar un missatge per demostrar que és propietari d&apos;una adreça StarCoin</translation> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation>Signar &amp;Message</translation> </message> <message> <location line="+25"/> <source>Delete the currently selected address from the list</source> <translation>Esborrar l&apos;adreça sel·leccionada</translation> </message> <message> <location line="-14"/> <source>Verify a message to ensure it was signed with a specified StarCoin address</source> <translation>Comproveu el missatge per assegurar-se que es va signar amb una adreça StarCoin especificada.</translation> </message> <message> <location line="+3"/> <source>&amp;Verify Message</source> <translation>&amp;Verificar el missatge</translation> </message> <message> <location line="+14"/> <source>&amp;Delete</source> <translation>&amp;Esborrar</translation> </message> <message> <location filename="../addressbookpage.cpp" line="+65"/> <source>Copy &amp;Label</source> <translation>Copiar &amp;Etiqueta</translation> </message> <message> <location line="+2"/> <source>&amp;Edit</source> <translation>&amp;Editar</translation> </message> <message> <location line="+250"/> <source>Export Address Book Data</source> <translation>Exportar dades de la llibreta d&apos;adreces </translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Arxiu de separació per comes (*.csv)</translation> 
</message> <message> <location line="+13"/> <source>Error exporting</source> <translation>Error a l&apos;exportar</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>No s&apos;ha pogut escriure al fitxer %1.</translation> </message> </context> <context> <name>AddressTableModel</name> <message> <location filename="../addresstablemodel.cpp" line="+144"/> <source>Label</source> <translation>Etiqueta</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Adreça</translation> </message> <message> <location line="+36"/> <source>(no label)</source> <translation>(sense etiqueta)</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <location filename="../forms/askpassphrasedialog.ui" line="+26"/> <source>Passphrase Dialog</source> <translation>Dialeg de contrasenya</translation> </message> <message> <location line="+21"/> <source>Enter passphrase</source> <translation>Introdueix contrasenya</translation> </message> <message> <location line="+14"/> <source>New passphrase</source> <translation>Nova contrasenya</translation> </message> <message> <location line="+14"/> <source>Repeat new passphrase</source> <translation>Repeteix la nova contrasenya</translation> </message> <message> <location line="+33"/> <source>Serves to disable the trivial sendmoney when OS account compromised. Provides no real security.</source> <translation>Serveix per desactivar l&apos;enviament trivial de diners quan el compte del sistema operatiu ha estat compromès. 
No ofereix seguretat real.</translation> </message> <message> <location line="+3"/> <source>For staking only</source> <translation>Només per a fer &quot;stake&quot;</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="+35"/> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;10 or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation>Introdueixi la nova contrasenya al moneder&lt;br/&gt;Si us plau useu una contrasenya de &lt;b&gt;10 o més caracters aleatoris&lt;/b&gt;, o &lt;b&gt;vuit o més paraules&lt;/b&gt;.</translation> </message> <message> <location line="+1"/> <source>Encrypt wallet</source> <translation>Xifrar la cartera</translation> </message> <message> <location line="+7"/> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation>Aquesta operació requereix la seva contrasenya del moneder per a desbloquejar-lo.</translation> </message> <message> <location line="+5"/> <source>Unlock wallet</source> <translation>Desbloqueja el moneder</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation>Aquesta operació requereix la seva contrasenya del moneder per a desencriptar-lo.</translation> </message> <message> <location line="+5"/> <source>Decrypt wallet</source> <translation>Desencripta el moneder</translation> </message> <message> <location line="+3"/> <source>Change passphrase</source> <translation>Canviar la contrasenya</translation> </message> <message> <location line="+1"/> <source>Enter the old and new passphrase to the wallet.</source> <translation>Introdueixi tant l&apos;antiga com la nova contrasenya de moneder.</translation> </message> <message> <location line="+46"/> <source>Confirm wallet encryption</source> <translation>Confirmar l&apos;encriptació del moneder</translation> </message> <message> 
<location line="+1"/> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR COINS&lt;/b&gt;!</source> <translation>Avís: Si xifra la seva cartera i perd la contrasenya, podrà &lt;b&gt; PERDRE TOTES LES SEVES MONEDES &lt;/ b&gt;!</translation> </message> <message> <location line="+0"/> <source>Are you sure you wish to encrypt your wallet?</source> <translation>Esteu segur que voleu encriptar el vostre moneder?</translation> </message> <message> <location line="+15"/> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation>IMPORTANT: Tota copia de seguretat que hagis realitzat hauria de ser reemplaçada pel, recentment generat, arxiu encriptat del moneder.</translation> </message> <message> <location line="+103"/> <location line="+24"/> <source>Warning: The Caps Lock key is on!</source> <translation>Advertència: Les lletres majúscules estàn activades!</translation> </message> <message> <location line="-133"/> <location line="+60"/> <source>Wallet encrypted</source> <translation>Moneder encriptat</translation> </message> <message> <location line="-58"/> <source>StarCoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your coins from being stolen by malware infecting your computer.</source> <translation>StarCoin tancarà ara per acabar el procés de xifrat. 
Recordeu que l&apos;encriptació de la seva cartera no pot protegir completament les seves monedes de ser robades pel malware que pugui infectar al seu equip.</translation> </message> <message> <location line="+13"/> <location line="+7"/> <location line="+44"/> <location line="+6"/> <source>Wallet encryption failed</source> <translation>L&apos;encriptació del moneder ha fallat</translation> </message> <message> <location line="-56"/> <source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source> <translation>L&apos;encriptació del moneder ha fallat per un error intern. El seu moneder no ha estat encriptat.</translation> </message> <message> <location line="+7"/> <location line="+50"/> <source>The supplied passphrases do not match.</source> <translation>La contrasenya introduïda no coincideix.</translation> </message> <message> <location line="-38"/> <source>Wallet unlock failed</source> <translation>El desbloqueig del moneder ha fallat</translation> </message> <message> <location line="+1"/> <location line="+12"/> <location line="+19"/> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation>La contrasenya introduïda per a desencriptar el moneder és incorrecte.</translation> </message> <message> <location line="-20"/> <source>Wallet decryption failed</source> <translation>La desencriptació del moneder ha fallat</translation> </message> <message> <location line="+14"/> <source>Wallet passphrase was successfully changed.</source> <translation>La contrasenya del moneder ha estat modificada correctament.</translation> </message> </context> <context> <name>BitcoinGUI</name> <message> <location filename="../bitcoingui.cpp" line="+282"/> <source>Sign &amp;message...</source> <translation>Signar &amp;missatge...</translation> </message> <message> <location line="+251"/> <source>Synchronizing with network...</source> <translation>Sincronitzant amb la xarxa ...</translation> </message> <message> 
<location line="-319"/> <source>&amp;Overview</source> <translation>&amp;Panorama general</translation> </message> <message> <location line="+1"/> <source>Show general overview of wallet</source> <translation>Mostra panorama general del moneder</translation> </message> <message> <location line="+17"/> <source>&amp;Transactions</source> <translation>&amp;Transaccions</translation> </message> <message> <location line="+1"/> <source>Browse transaction history</source> <translation>Cerca a l&apos;historial de transaccions</translation> </message> <message> <location line="+5"/> <source>&amp;Address Book</source> <translation>&amp;Llibreta d&apos;adreces</translation> </message> <message> <location line="+1"/> <source>Edit the list of stored addresses and labels</source> <translation>Edició de la llista d&apos;adreces i etiquetes emmagatzemades</translation> </message> <message> <location line="-13"/> <source>&amp;Receive coins</source> <translation>&amp;Rebre monedes</translation> </message> <message> <location line="+1"/> <source>Show the list of addresses for receiving payments</source> <translation>Mostra la llista d&apos;adreces per rebre pagaments</translation> </message> <message> <location line="-7"/> <source>&amp;Send coins</source> <translation>&amp;Enviar monedes</translation> </message> <message> <location line="+35"/> <source>E&amp;xit</source> <translation>S&amp;ortir</translation> </message> <message> <location line="+1"/> <source>Quit application</source> <translation>Sortir de l&apos;aplicació</translation> </message> <message> <location line="+6"/> <source>Show information about StarCoin</source> <translation>Mostra informació sobre StarCoin</translation> </message> <message> <location line="+2"/> <source>About &amp;Qt</source> <translation>Sobre &amp;Qt</translation> </message> <message> <location line="+1"/> <source>Show information about Qt</source> <translation>Mostra informació sobre Qt</translation> </message> <message> <location line="+2"/> 
<source>&amp;Options...</source> <translation>&amp;Opcions...</translation> </message> <message> <location line="+4"/> <source>&amp;Encrypt Wallet...</source> <translation>&amp;Xifrar moneder</translation> </message> <message> <location line="+3"/> <source>&amp;Backup Wallet...</source> <translation>&amp;Realitzant copia de seguretat del moneder...</translation> </message> <message> <location line="+2"/> <source>&amp;Change Passphrase...</source> <translation>&amp;Canviar contrasenya...</translation> </message> <message numerus="yes"> <location line="+259"/> <source>~%n block(s) remaining</source> <translation><numerusform>~%n bloc restant</numerusform><numerusform>~%n blocs restants</numerusform></translation> </message> <message> <location line="+6"/> <source>Downloaded %1 of %2 blocks of transaction history (%3% done).</source> <translation>Descarregats %1 de %2 blocs d&apos;historial de transaccions (%3% completat).</translation> </message> <message> <location line="-256"/> <source>&amp;Export...</source> <translation>&amp;Exportar...</translation> </message> <message> <location line="-64"/> <source>Send coins to a StarCoin address</source> <translation>Enviar monedes a una adreça StarCoin</translation> </message> <message> <location line="+47"/> <source>Modify configuration options for StarCoin</source> <translation>Modificar les opcions de configuració per a StarCoin</translation> </message> <message> <location line="+18"/> <source>Export the data in the current tab to a file</source> <translation>Exportar les dades de la pestanya actual a un arxiu</translation> </message> <message> <location line="-14"/> <source>Encrypt or decrypt wallet</source> <translation>Xifrar o desxifrar cartera</translation> </message> <message> <location line="+3"/> <source>Backup wallet to another location</source> <translation>Realitzar còpia de seguretat del moneder a un altre directori</translation> </message> <message> <location line="+2"/> <source>Change the passphrase used 
for wallet encryption</source> <translation>Canviar la constrasenya d&apos;encriptació del moneder</translation> </message> <message> <location line="+10"/> <source>&amp;Debug window</source> <translation>&amp;Finestra de debug</translation> </message> <message> <location line="+1"/> <source>Open debugging and diagnostic console</source> <translation>Obrir la consola de diagnòstic i debugging</translation> </message> <message> <location line="-5"/> <source>&amp;Verify message...</source> <translation>&amp;Verifica el missatge..</translation> </message> <message> <location line="-202"/> <source>StarCoin</source> <translation>StarCoin</translation> </message> <message> <location line="+0"/> <source>Wallet</source> <translation>Moneder</translation> </message> <message> <location line="+180"/> <source>&amp;About StarCoin</source> <translation>&amp;Sobre StarCoin</translation> </message> <message> <location line="+9"/> <source>&amp;Show / Hide</source> <translation>&amp;Mostrar / Amagar</translation> </message> <message> <location line="+9"/> <source>Unlock wallet</source> <translation>Desbloquejar la cartera</translation> </message> <message> <location line="+1"/> <source>&amp;Lock Wallet</source> <translation>&amp;Bloquejar cartera</translation> </message> <message> <location line="+1"/> <source>Lock wallet</source> <translation>Bloquejar cartera</translation> </message> <message> <location line="+35"/> <source>&amp;File</source> <translation>&amp;Arxiu</translation> </message> <message> <location line="+8"/> <source>&amp;Settings</source> <translation>&amp;Configuració</translation> </message> <message> <location line="+8"/> <source>&amp;Help</source> <translation>&amp;Ajuda</translation> </message> <message> <location line="+12"/> <source>Tabs toolbar</source> <translation>Barra d&apos;eines de seccions</translation> </message> <message> <location line="+8"/> <source>Actions toolbar</source> <translation>Barra d&apos;eines d&apos;accions</translation> </message> 
<message> <location line="+13"/> <location line="+9"/> <source>[testnet]</source> <translation>[testnet]</translation> </message> <message> <location line="+0"/> <location line="+60"/> <source>StarCoin client</source> <translation>Client StarCoin</translation> </message> <message numerus="yes"> <location line="+75"/> <source>%n active connection(s) to StarCoin network</source> <translation><numerusform>%n conexió activa a la xarxa StarCoin</numerusform><numerusform>%n conexions actives a la xarxa StarCoin</numerusform></translation> </message> <message> <location line="+40"/> <source>Downloaded %1 blocks of transaction history.</source> <translation>Descarregats %1 blocs d&apos;historial de transaccions</translation> </message> <message> <location line="+413"/> <source>Staking.&lt;br&gt;Your weight is %1&lt;br&gt;Network weight is %2&lt;br&gt;Expected time to earn reward is %3</source> <translation>Fent &quot;stake&quot;.&lt;br&gt;El teu pes és %1&lt;br&gt;El pes de la xarxa és %2&lt;br&gt;El temps estimat per a guanyar una recompensa és 3%</translation> </message> <message> <location line="+6"/> <source>Not staking because wallet is locked</source> <translation>No s&apos;està fent &quot;stake&quot; perquè la cartera esa bloquejada</translation> </message> <message> <location line="+2"/> <source>Not staking because wallet is offline</source> <translation>No s&apos;està fent &quot;stake&quot; perquè la cartera està fora de línia</translation> </message> <message> <location line="+2"/> <source>Not staking because wallet is syncing</source> <translation>No s&apos;està fent &quot;stake&quot; perquè la cartera està sincronitzant</translation> </message> <message> <location line="+2"/> <source>Not staking because you don&apos;t have mature coins</source> <translation>No s&apos;està fent &quot;stake&quot; perquè no tens monedes madures</translation> </message> <message numerus="yes"> <location line="-403"/> <source>%n second(s) ago</source> <translation><numerusform>fa %n 
segon</numerusform><numerusform>fa %n segons</numerusform></translation> </message> <message> <location line="-312"/> <source>About StarCoin card</source> <translation>Sobre la tarjeta StarCoin</translation> </message> <message> <location line="+1"/> <source>Show information about StarCoin card</source> <translation>Mostra informació sobre la tarjeta StarCoin</translation> </message> <message> <location line="+18"/> <source>&amp;Unlock Wallet...</source> <translation>&amp;Desbloquejar cartera</translation> </message> <message numerus="yes"> <location line="+297"/> <source>%n minute(s) ago</source> <translation><numerusform>fa %n minut</numerusform><numerusform>fa %n minuts</numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n hour(s) ago</source> <translation><numerusform>fa %n hora</numerusform><numerusform>fa %n hores</numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n day(s) ago</source> <translation><numerusform>fa %n dia</numerusform><numerusform>fa %n dies</numerusform></translation> </message> <message> <location line="+6"/> <source>Up to date</source> <translation>Al dia</translation> </message> <message> <location line="+7"/> <source>Catching up...</source> <translation>Posar-se al dia ...</translation> </message> <message> <location line="+10"/> <source>Last received block was generated %1.</source> <translation>El darrer bloc rebut s&apos;ha generat %1.</translation> </message> <message> <location line="+59"/> <source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source> <translation>Aquesta transacció es troba sobre el límit de mida. Encara pot enviar-la amb una comisió de 1%, aquesta va als nodes que processen la seva transacció i ajuda a mantenir la xarxa. 
Vol pagar la quota?</translation> </message> <message> <location line="+5"/> <source>Confirm transaction fee</source> <translation>Confirmeu comisió</translation> </message> <message> <location line="+27"/> <source>Sent transaction</source> <translation>Transacció enviada</translation> </message> <message> <location line="+1"/> <source>Incoming transaction</source> <translation>Transacció entrant</translation> </message> <message> <location line="+1"/> <source>Date: %1 Amount: %2 Type: %3 Address: %4 </source> <translation>Data: %1\nQuantitat %2\n Tipus: %3\n Adreça: %4\n</translation> </message> <message> <location line="+100"/> <location line="+15"/> <source>URI handling</source> <translation>Manejant URI</translation> </message> <message> <location line="-15"/> <location line="+15"/> <source>URI can not be parsed! This can be caused by an invalid StarCoin address or malformed URI parameters.</source> <translation>l&apos;URI no es pot analitzar! Això pot ser causat per una adreça StarCoin no vàlida o paràmetres URI malformats.</translation> </message> <message> <location line="+18"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation>El moneder està &lt;b&gt;encriptat&lt;/b&gt; i actualment &lt;b&gt;desbloquejat&lt;/b&gt;</translation> </message> <message> <location line="+10"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation>El moneder està &lt;b&gt;encriptat&lt;/b&gt; i actualment &lt;b&gt;bloquejat&lt;/b&gt;</translation> </message> <message> <location line="+25"/> <source>Backup Wallet</source> <translation>Realitzar còpia de seguretat del moneder</translation> </message> <message> <location line="+0"/> <source>Wallet Data (*.dat)</source> <translation>Dades del moneder (*.dat)</translation> </message> <message> <location line="+3"/> <source>Backup Failed</source> <translation>Còpia de seguretat fallida</translation> </message> <message> 
<location line="+0"/> <source>There was an error trying to save the wallet data to the new location.</source> <translation>Hi ha un error al tractar de salvar les dades de la seva cartera a la nova ubicació.</translation> </message> <message numerus="yes"> <location line="+76"/> <source>%n second(s)</source> <translation><numerusform>%n segon</numerusform><numerusform>%n segons</numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n minute(s)</source> <translation><numerusform>%n minut</numerusform><numerusform>%n minuts</numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n hour(s)</source> <translation><numerusform>%n hora</numerusform><numerusform>%n hores</numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n day(s)</source> <translation><numerusform>%n dia</numerusform><numerusform>%n dies</numerusform></translation> </message> <message> <location line="+18"/> <source>Not staking</source> <translation>No s&apos;està fent &quot;stake&quot; </translation> </message> <message> <location filename="../bitcoin.cpp" line="+109"/> <source>A fatal error occurred. StarCoin can no longer continue safely and will quit.</source> <translation>S&apos;ha produït un error fatal. 
StarCoin ja no pot continuar de forma segura i es tancarà.</translation> </message> </context> <context> <name>ClientModel</name> <message> <location filename="../clientmodel.cpp" line="+90"/> <source>Network Alert</source> <translation>Alerta de xarxa</translation> </message> </context> <context> <name>CoinControlDialog</name> <message> <location filename="../forms/coincontroldialog.ui" line="+14"/> <source>Coin Control</source> <translation>Opcions del control de monedes</translation> </message> <message> <location line="+31"/> <source>Quantity:</source> <translation>Quantitat:</translation> </message> <message> <location line="+32"/> <source>Bytes:</source> <translation>Bytes:</translation> </message> <message> <location line="+48"/> <source>Amount:</source> <translation>Quantitat:</translation> </message> <message> <location line="+32"/> <source>Priority:</source> <translation>Prioritat:</translation> </message> <message> <location line="+48"/> <source>Fee:</source> <translation>Quota:</translation> </message> <message> <location line="+35"/> <source>Low Output:</source> <translation>Sortida baixa:</translation> </message> <message> <location filename="../coincontroldialog.cpp" line="+551"/> <source>no</source> <translation>no</translation> </message> <message> <location filename="../forms/coincontroldialog.ui" line="+51"/> <source>After Fee:</source> <translation>Quota posterior:</translation> </message> <message> <location line="+35"/> <source>Change:</source> <translation>Canvi:</translation> </message> <message> <location line="+69"/> <source>(un)select all</source> <translation>(de)seleccionar tot</translation> </message> <message> <location line="+13"/> <source>Tree mode</source> <translation>Mode arbre</translation> </message> <message> <location line="+16"/> <source>List mode</source> <translation>Mode llista</translation> </message> <message> <location line="+45"/> <source>Amount</source> <translation>Quantitat</translation> </message> <message> 
<location line="+5"/> <source>Label</source> <translation>Etiqueta</translation> </message> <message> <location line="+5"/> <source>Address</source> <translation>Adreça</translation> </message> <message> <location line="+5"/> <source>Date</source> <translation>Data</translation> </message> <message> <location line="+5"/> <source>Confirmations</source> <translation>Confirmacions</translation> </message> <message> <location line="+3"/> <source>Confirmed</source> <translation>Confirmat</translation> </message> <message> <location line="+5"/> <source>Priority</source> <translation>Prioritat</translation> </message> <message> <location filename="../coincontroldialog.cpp" line="-515"/> <source>Copy address</source> <translation>Copiar adreça </translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation>Copiar etiqueta</translation> </message> <message> <location line="+1"/> <location line="+26"/> <source>Copy amount</source> <translation>Copiar quantitat</translation> </message> <message> <location line="-25"/> <source>Copy transaction ID</source> <translation>Copiar ID de transacció</translation> </message> <message> <location line="+24"/> <source>Copy quantity</source> <translation>Copiar quantitat</translation> </message> <message> <location line="+2"/> <source>Copy fee</source> <translation>Copiar comisió</translation> </message> <message> <location line="+1"/> <source>Copy after fee</source> <translation>Copiar després de comisió</translation> </message> <message> <location line="+1"/> <source>Copy bytes</source> <translation>Copiar bytes</translation> </message> <message> <location line="+1"/> <source>Copy priority</source> <translation>Copiar prioritat</translation> </message> <message> <location line="+1"/> <source>Copy low output</source> <translation>Copiar sortida baixa</translation> </message> <message> <location line="+1"/> <source>Copy change</source> <translation>Copiar canvi</translation> </message> <message> <location 
line="+317"/> <source>highest</source> <translation>El més alt</translation> </message> <message> <location line="+1"/> <source>high</source> <translation>Alt</translation> </message> <message> <location line="+1"/> <source>medium-high</source> <translation>mig-alt</translation> </message> <message> <location line="+1"/> <source>medium</source> <translation>mig</translation> </message> <message> <location line="+4"/> <source>low-medium</source> <translation>baix-mig</translation> </message> <message> <location line="+1"/> <source>low</source> <translation>baix</translation> </message> <message> <location line="+1"/> <source>lowest</source> <translation>el més baix</translation> </message> <message> <location line="+155"/> <source>DUST</source> <translation>POLS</translation> </message> <message> <location line="+0"/> <source>yes</source> <translation>si</translation> </message> <message> <location line="+10"/> <source>This label turns red, if the transaction size is bigger than 10000 bytes. This means a fee of at least %1 per kb is required. Can vary +/- 1 Byte per input.</source> <translation>Aquesta etiqueta es tornarà vermell, si la mida de la transacció és més gran que 10000 bytes. En aquest cas es requereix una comisió d&apos;almenys el 1% per kb. Pot variar + / - 1 Byte per entrada.</translation> </message> <message> <location line="+1"/> <source>Transactions with higher priority get more likely into a block. This label turns red, if the priority is smaller than &quot;medium&quot;. This means a fee of at least %1 per kb is required.</source> <translation>Les operacions amb més prioritat entren mes facilment a un bloc. Aquesta etiqueta es torna vermella, si la prioritat és menor que &quot;mitja&quot;. En aquest cas es requereix una comisió d&apos;almenys el 1% per kb.</translation> </message> <message> <location line="+1"/> <source>This label turns red, if any recipient receives an amount smaller than %1. This means a fee of at least %2 is required. 
Amounts below 0.546 times the minimum relay fee are shown as DUST.</source> <translation>Aquesta etiqueta es torna vermella, si qualsevol beneficiari rep una quantitat inferior a 1%. En aquest cas es requereix una comisió d&apos;almenys 2%. Les quantitats inferiors a 0.546 vegades la quota mínima del relé es mostren com a POLS.</translation> </message> <message> <location line="+1"/> <source>This label turns red, if the change is smaller than %1. This means a fee of at least %2 is required.</source> <translation>Aquesta etiqueta es torna vermella, si el canvi és menor que 1%. En aquest cas es requereix una comisió d&apos;almenys 2%.</translation> </message> <message> <location line="+37"/> <location line="+66"/> <source>(no label)</source> <translation>(sense etiqueta)</translation> </message> <message> <location line="-9"/> <source>change from %1 (%2)</source> <translation>canvi desde %1 (%2)</translation> </message> <message> <location line="+1"/> <source>(change)</source> <translation>(canviar)</translation> </message> </context> <context> <name>EditAddressDialog</name> <message> <location filename="../forms/editaddressdialog.ui" line="+14"/> <source>Edit Address</source> <translation>Editar Adreça</translation> </message> <message> <location line="+11"/> <source>&amp;Label</source> <translation>&amp;Etiqueta</translation> </message> <message> <location line="+10"/> <source>The label associated with this address book entry</source> <translation>L&apos;etiqueta associada amb aquesta entrada de la llibreta d&apos;adreces</translation> </message> <message> <location line="+7"/> <source>&amp;Address</source> <translation>&amp;Direcció</translation> </message> <message> <location line="+10"/> <source>The address associated with this address book entry. This can only be modified for sending addresses.</source> <translation>La direcció associada amb aquesta entrada de la llibreta d&apos;adreces. 
Només pot ser modificada per a l&apos;enviament d&apos;adreces.</translation> </message> <message> <location filename="../editaddressdialog.cpp" line="+20"/> <source>New receiving address</source> <translation>Nova adreça de recepció.</translation> </message> <message> <location line="+4"/> <source>New sending address</source> <translation>Nova adreça d&apos;enviament</translation> </message> <message> <location line="+3"/> <source>Edit receiving address</source> <translation>Editar adreces de recepció</translation> </message> <message> <location line="+4"/> <source>Edit sending address</source> <translation>Editar adreces d&apos;enviament</translation> </message> <message> <location line="+76"/> <source>The entered address &quot;%1&quot; is already in the address book.</source> <translation>L&apos;adreça introduïda &quot;%1&quot; ja és present a la llibreta d&apos;adreces.</translation> </message> <message> <location line="-5"/> <source>The entered address &quot;%1&quot; is not a valid StarCoin address.</source> <translation>La direcció introduïda &quot;%1&quot; no és una adreça StarCoin vàlida.</translation> </message> <message> <location line="+10"/> <source>Could not unlock wallet.</source> <translation>No s&apos;ha pogut desbloquejar el moneder.</translation> </message> <message> <location line="+5"/> <source>New key generation failed.</source> <translation>Ha fallat la generació d&apos;una nova clau.</translation> </message> </context> <context> <name>GUIUtil::HelpMessageBox</name> <message> <location filename="../guiutil.cpp" line="+420"/> <location line="+12"/> <source>StarCoin-Qt</source> <translation>StarCoin-Qt</translation> </message> <message> <location line="-12"/> <source>version</source> <translation>versió</translation> </message> <message> <location line="+2"/> <source>Usage:</source> <translation>Ús:</translation> </message> <message> <location line="+1"/> <source>command-line options</source> <translation>Opcions de la línia 
d&apos;ordres</translation> </message> <message> <location line="+4"/> <source>UI options</source> <translation>Opcions de IU</translation> </message> <message> <location line="+1"/> <source>Set language, for example &quot;de_DE&quot; (default: system locale)</source> <translation>Definir llenguatge, per exemple &quot;de_DE&quot; (per defecte: Preferències locals de sistema)</translation> </message> <message> <location line="+1"/> <source>Start minimized</source> <translation>Iniciar minimitzat</translation> </message> <message> <location line="+1"/> <source>Show splash screen on startup (default: 1)</source> <translation>Mostrar finestra de benvinguda a l&apos;inici (per defecte: 1)</translation> </message> </context> <context> <name>OptionsDialog</name> <message> <location filename="../forms/optionsdialog.ui" line="+14"/> <source>Options</source> <translation>Opcions</translation> </message> <message> <location line="+16"/> <source>&amp;Main</source> <translation>&amp;Principal</translation> </message> <message> <location line="+6"/> <source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB. Fee 0.01 recommended.</source> <translation>Comisió opcional per kB que ajuda a assegurar-se que les seves transaccions es processen ràpidament. La majoria de les transaccions són 1 kB. 
Comisió d&apos;0.01 recomenada.</translation> </message> <message> <location line="+15"/> <source>Pay transaction &amp;fee</source> <translation>Pagar &amp;comisió de transacció</translation> </message> <message> <location line="+31"/> <source>Reserved amount does not participate in staking and is therefore spendable at any time.</source> <translation>La quantitat reservada no participa en fer &quot;stake&quot; i per tant es pot gastar en qualsevol moment.</translation> </message> <message> <location line="+15"/> <source>Reserve</source> <translation>Reserva</translation> </message> <message> <location line="+31"/> <source>Automatically start StarCoin after logging in to the system.</source> <translation>Inicia automàticament StarCoin després d&apos;entrar en el sistema.</translation> </message> <message> <location line="+3"/> <source>&amp;Start StarCoin on system login</source> <translation>&amp;Iniciar StarCoin amb l&apos;inici de sessió</translation> </message> <message> <location line="+7"/> <source>Detach block and address databases at shutdown. This means they can be moved to another data directory, but it slows down shutdown. The wallet is always detached.</source> <translation>Separeu el bloc i les bases de dades d&apos;adreces en apagar l&apos;equip. En aquest cas es pot moure a un altre directori de dades, però alenteix l&apos;apagada. La cartera està sempre separada.</translation> </message> <message> <location line="+3"/> <source>&amp;Detach databases at shutdown</source> <translation>&amp;Separar bases de dades a l&apos;apagar l&apos;equip</translation> </message> <message> <location line="+21"/> <source>&amp;Network</source> <translation>&amp;Xarxa</translation> </message> <message> <location line="+6"/> <source>Automatically open the StarCoin client port on the router. This only works when your router supports UPnP and it is enabled.</source> <translation>Obrir automàticament el port de client StarCoin en el router. 
Això només funciona quan el router és compatible amb UPnP i està habilitat.</translation> </message> <message> <location line="+3"/> <source>Map port using &amp;UPnP</source> <translation>Port obert amb &amp;UPnP</translation> </message> <message> <location line="+7"/> <source>Connect to the StarCoin network through a SOCKS proxy (e.g. when connecting through Tor).</source> <translation>Connecteu-vos a la xarxa StarCoin través d&apos;un proxy SOCKS (per exemple, quan es connecta a través de Tor).</translation> </message> <message> <location line="+3"/> <source>&amp;Connect through SOCKS proxy:</source> <translation>&amp;Conectar a través d&apos;un proxy SOCKS:</translation> </message> <message> <location line="+9"/> <source>Proxy &amp;IP:</source> <translation>&amp;IP del proxy:</translation> </message> <message> <location line="+19"/> <source>IP address of the proxy (e.g. 127.0.0.1)</source> <translation>Adreça IP del servidor proxy (per exemple, 127.0.0.1)</translation> </message> <message> <location line="+7"/> <source>&amp;Port:</source> <translation>&amp;Port:</translation> </message> <message> <location line="+19"/> <source>Port of the proxy (e.g. 9050)</source> <translation>Port del proxy (per exemple 9050)</translation> </message> <message> <location line="+7"/> <source>SOCKS &amp;Version:</source> <translation>&amp;Versió de SOCKS:</translation> </message> <message> <location line="+13"/> <source>SOCKS version of the proxy (e.g. 
5)</source> <translation>Versió SOCKS del proxy (per exemple 5)</translation> </message> <message> <location line="+36"/> <source>&amp;Window</source> <translation>&amp;Finestra</translation> </message> <message> <location line="+6"/> <source>Show only a tray icon after minimizing the window.</source> <translation>Mostrar només l&apos;icona de la barra al minimitzar l&apos;aplicació.</translation> </message> <message> <location line="+3"/> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation>&amp;Minimitzar a la barra d&apos;aplicacions</translation> </message> <message> <location line="+7"/> <source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source> <translation>Minimitza en comptes de sortir de la aplicació al tancar la finestra. Quan aquesta opció està activa, la aplicació només es tancarà al seleccionar Sortir al menú.</translation> </message> <message> <location line="+3"/> <source>M&amp;inimize on close</source> <translation>M&amp;inimitzar al tancar</translation> </message> <message> <location line="+21"/> <source>&amp;Display</source> <translation>&amp;Pantalla</translation> </message> <message> <location line="+8"/> <source>User Interface &amp;language:</source> <translation>Llenguatge de la Interfície d&apos;Usuari:</translation> </message> <message> <location line="+13"/> <source>The user interface language can be set here. This setting will take effect after restarting StarCoin.</source> <translation>L&apos;idioma de la interfície d&apos;usuari es pot configurar aquí. 
Aquesta configuració s&apos;aplicarà després de reiniciar StarCoin.</translation> </message> <message> <location line="+11"/> <source>&amp;Unit to show amounts in:</source> <translation>&amp;Unitats per mostrar les quantitats en:</translation> </message> <message> <location line="+13"/> <source>Choose the default subdivision unit to show in the interface and when sending coins.</source> <translation>Sel·lecciona la unitat de subdivisió per defecte per mostrar en la interficie quan s&apos;envien monedes.</translation> </message> <message> <location line="+9"/> <source>Whether to show StarCoin addresses in the transaction list or not.</source> <translation>Per mostrar StarCoin adreces a la llista de transaccions o no.</translation> </message> <message> <location line="+3"/> <source>&amp;Display addresses in transaction list</source> <translation>&amp;Mostrar adreces al llistat de transaccions</translation> </message> <message> <location line="+7"/> <source>Whether to show coin control features or not.</source> <translation>Per mostrar les característiques de control de la moneda o no.</translation> </message> <message> <location line="+3"/> <source>Display coin &amp;control features (experts only!)</source> <translation>Mostrar controls i característiques de la moneda (només per a experts!)</translation> </message> <message> <location line="+71"/> <source>&amp;OK</source> <translation>&amp;OK</translation> </message> <message> <location line="+7"/> <source>&amp;Cancel</source> <translation>&amp;Cancel·la</translation> </message> <message> <location line="+10"/> <source>&amp;Apply</source> <translation>&amp;Aplicar</translation> </message> <message> <location filename="../optionsdialog.cpp" line="+55"/> <source>default</source> <translation>Per defecte</translation> </message> <message> <location line="+149"/> <location line="+9"/> <source>Warning</source> <translation>Avís</translation> </message> <message> <location line="-9"/> <location line="+9"/> <source>This 
setting will take effect after restarting StarCoin.</source> <translation>Aquesta configuració s&apos;aplicarà després de reiniciar StarCoin.</translation> </message> <message> <location line="+29"/> <source>The supplied proxy address is invalid.</source> <translation>L&apos;adreça proxy introduïda és invalida.</translation> </message> </context> <context> <name>OverviewPage</name> <message> <location filename="../forms/overviewpage.ui" line="+14"/> <source>Form</source> <translation>Formulari</translation> </message> <message> <location line="+33"/> <location line="+231"/> <source>The displayed information may be out of date. Your wallet automatically synchronizes with the StarCoin network after a connection is established, but this process has not completed yet.</source> <translation>La informació mostrada pot estar fora de data. La seva cartera es sincronitza automàticament amb la xarxa StarCoin després d&apos;establir una connexió, però aquest procés no s&apos;ha completat encara.</translation> </message> <message> <location line="-160"/> <source>Stake:</source> <translation>En &quot;stake&quot;:</translation> </message> <message> <location line="+29"/> <source>Unconfirmed:</source> <translation>Sense confirmar:</translation> </message> <message> <location line="-107"/> <source>Wallet</source> <translation>Moneder</translation> </message> <message> <location line="+49"/> <source>Spendable:</source> <translation>Pot gastar-se:</translation> </message> <message> <location line="+16"/> <source>Your current spendable balance</source> <translation>El balanç de saldo actual que pot gastar-se</translation> </message> <message> <location line="+71"/> <source>Immature:</source> <translation>Immatur:</translation> </message> <message> <location line="+13"/> <source>Mined balance that has not yet matured</source> <translation>Balanç minat que encara no ha madurat</translation> </message> <message> <location line="+20"/> <source>Total:</source> 
<translation>Total:</translation> </message> <message> <location line="+16"/> <source>Your current total balance</source> <translation>El seu balanç total</translation> </message> <message> <location line="+46"/> <source>&lt;b&gt;Recent transactions&lt;/b&gt;</source> <translation>&lt;b&gt;Transaccions recents&lt;/b&gt;</translation> </message> <message> <location line="-108"/> <source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source> <translation>Total de transaccions que encara no s&apos;han confirmat, i encara no compten per al balanç actual</translation> </message> <message> <location line="-29"/> <source>Total of coins that was staked, and do not yet count toward the current balance</source> <translation>Total de les monedes que s&apos;han posat a fer &quot;stake&quot; (en joc, aposta), i encara no compten per al balanç actual</translation> </message> <message> <location filename="../overviewpage.cpp" line="+113"/> <location line="+1"/> <source>out of sync</source> <translation>Fora de sincronia</translation> </message> </context> <context> <name>QRCodeDialog</name> <message> <location filename="../forms/qrcodedialog.ui" line="+14"/> <source>QR Code Dialog</source> <translation>Diàleg de codi QR</translation> </message> <message> <location line="+59"/> <source>Request Payment</source> <translation>Sol·licitud de pagament</translation> </message> <message> <location line="+56"/> <source>Amount:</source> <translation>Quantitat:</translation> </message> <message> <location line="-44"/> <source>Label:</source> <translation>Etiqueta:</translation> </message> <message> <location line="+19"/> <source>Message:</source> <translation>Missatge:</translation> </message> <message> <location line="+71"/> <source>&amp;Save As...</source> <translation>&amp;Desa com ...</translation> </message> <message> <location filename="../qrcodedialog.cpp" line="+62"/> <source>Error encoding URI into QR Code.</source> 
<translation>Error codificant la URI en un codi QR.</translation> </message> <message> <location line="+40"/> <source>The entered amount is invalid, please check.</source> <translation>La quantitat introduïda no és vàlida, comproveu-ho si us plau.</translation> </message> <message> <location line="+23"/> <source>Resulting URI too long, try to reduce the text for label / message.</source> <translation>URI resultant massa llarga, intenta reduir el text per a la etiqueta / missatge</translation> </message> <message> <location line="+25"/> <source>Save QR Code</source> <translation>Desar codi QR</translation> </message> <message> <location line="+0"/> <source>PNG Images (*.png)</source> <translation>Imatges PNG (*.png)</translation> </message> </context> <context> <name>RPCConsole</name> <message> <location filename="../forms/rpcconsole.ui" line="+46"/> <source>Client name</source> <translation>Nom del client</translation> </message> <message> <location line="+10"/> <location line="+23"/> <location line="+26"/> <location line="+23"/> <location line="+23"/> <location line="+36"/> <location line="+53"/> <location line="+23"/> <location line="+23"/> <location filename="../rpcconsole.cpp" line="+348"/> <source>N/A</source> <translation>N/A</translation> </message> <message> <location line="-217"/> <source>Client version</source> <translation>Versió del client</translation> </message> <message> <location line="-45"/> <source>&amp;Information</source> <translation>&amp;Informació</translation> </message> <message> <location line="+68"/> <source>Using OpenSSL version</source> <translation>Utilitzant OpenSSL versió</translation> </message> <message> <location line="+49"/> <source>Startup time</source> <translation>&amp;Temps d&apos;inici</translation> </message> <message> <location line="+29"/> <source>Network</source> <translation>Xarxa</translation> </message> <message> <location line="+7"/> <source>Number of connections</source> <translation>Nombre de 
connexions</translation> </message> <message> <location line="+23"/> <source>On testnet</source> <translation>A testnet</translation> </message> <message> <location line="+23"/> <source>Block chain</source> <translation>Bloquejar cadena</translation> </message> <message> <location line="+7"/> <source>Current number of blocks</source> <translation>Nombre de blocs actuals</translation> </message> <message> <location line="+23"/> <source>Estimated total blocks</source> <translation>Total estimat de blocs</translation> </message> <message> <location line="+23"/> <source>Last block time</source> <translation>Últim temps de bloc</translation> </message> <message> <location line="+52"/> <source>&amp;Open</source> <translation>&amp;Obrir</translation> </message> <message> <location line="+16"/> <source>Command-line options</source> <translation>Opcions de la línia d&apos;ordres</translation> </message> <message> <location line="+7"/> <source>Show the StarCoin-Qt help message to get a list with possible StarCoin command-line options.</source> <translation>Mostra el missatge d&apos;ajuda de StarCoin-Qt per obtenir una llista amb les possibles opcions de línia d&apos;ordres StarCoin.</translation> </message> <message> <location line="+3"/> <source>&amp;Show</source> <translation>&amp;Mostra</translation> </message> <message> <location line="+24"/> <source>&amp;Console</source> <translation>&amp;Consola</translation> </message> <message> <location line="-260"/> <source>Build date</source> <translation>Data de compilació</translation> </message> <message> <location line="-104"/> <source>StarCoin - Debug window</source> <translation>StarCoin - Finestra Depuració</translation> </message> <message> <location line="+25"/> <source>StarCoin Core</source> <translation>Nucli StarCoin</translation> </message> <message> <location line="+279"/> <source>Debug log file</source> <translation>Dietàri de debug</translation> </message> <message> <location line="+7"/> <source>Open the StarCoin 
debug log file from the current data directory. This can take a few seconds for large log files.</source> <translation>Obriu el fitxer de registre de depuració StarCoin des del directori de dades actual. Això pot trigar uns segons en els arxius de registre de grans dimensions.</translation> </message> <message> <location line="+102"/> <source>Clear console</source> <translation>Netejar consola</translation> </message> <message> <location filename="../rpcconsole.cpp" line="-33"/> <source>Welcome to the StarCoin RPC console.</source> <translation>Benvingut a la consola RPC de StarCoin.</translation> </message> <message> <location line="+1"/> <source>Use up and down arrows to navigate history, and &lt;b&gt;Ctrl-L&lt;/b&gt; to clear screen.</source> <translation>Utilitza les fletxes d&apos;amunt i avall per navegar per l&apos;històric, i &lt;b&gt;Ctrl-L&lt;\b&gt; per netejar la pantalla.</translation> </message> <message> <location line="+1"/> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation>Escriu &lt;b&gt;help&lt;\b&gt; per a obtenir una llistat de les ordres disponibles.</translation> </message> </context> <context> <name>SendCoinsDialog</name> <message> <location filename="../forms/sendcoinsdialog.ui" line="+14"/> <location filename="../sendcoinsdialog.cpp" line="+182"/> <location line="+5"/> <location line="+5"/> <location line="+5"/> <location line="+6"/> <location line="+5"/> <location line="+5"/> <source>Send Coins</source> <translation>Enviar monedes</translation> </message> <message> <location line="+76"/> <source>Coin Control Features</source> <translation>(Opcions del control del Coin)</translation> </message> <message> <location line="+20"/> <source>Inputs...</source> <translation>Entrades</translation> </message> <message> <location line="+7"/> <source>automatically selected</source> <translation>Seleccionat automàticament</translation> </message> <message> <location line="+19"/> <source>Insufficient 
funds!</source> <translation>Fons insuficient</translation> </message> <message> <location line="+77"/> <source>Quantity:</source> <translation>Quantitat:</translation> </message> <message> <location line="+22"/> <location line="+35"/> <source>0</source> <translation>0</translation> </message> <message> <location line="-19"/> <source>Bytes:</source> <translation>Bytes:</translation> </message> <message> <location line="+51"/> <source>Amount:</source> <translation>Quantitat:</translation> </message> <message> <location line="+22"/> <location line="+86"/> <location line="+86"/> <location line="+32"/> <source>0.00 hack</source> <translation>123.456 hack {0.00 ?}</translation> </message> <message> <location line="-191"/> <source>Priority:</source> <translation>Prioritat:</translation> </message> <message> <location line="+19"/> <source>medium</source> <translation>mig</translation> </message> <message> <location line="+32"/> <source>Fee:</source> <translation>Quota:</translation> </message> <message> <location line="+35"/> <source>Low Output:</source> <translation>Sortida baixa:</translation> </message> <message> <location line="+19"/> <source>no</source> <translation>no</translation> </message> <message> <location line="+32"/> <source>After Fee:</source> <translation>Quota posterior:</translation> </message> <message> <location line="+35"/> <source>Change</source> <translation>Canvi</translation> </message> <message> <location line="+50"/> <source>custom change address</source> <translation>Adreça de canvi pròpia</translation> </message> <message> <location line="+106"/> <source>Send to multiple recipients at once</source> <translation>Enviar a multiples destinataris al mateix temps</translation> </message> <message> <location line="+3"/> <source>Add &amp;Recipient</source> <translation>Affegir &amp;Destinatari</translation> </message> <message> <location line="+20"/> <source>Remove all transaction fields</source><|fim▁hole|> <translation>Traieu tots els camps de 
transacció</translation> </message> <message> <location line="+3"/> <source>Clear &amp;All</source> <translation>Esborrar &amp;Tot</translation> </message> <message> <location line="+28"/> <source>Balance:</source> <translation>Balanç:</translation> </message> <message> <location line="+16"/> <source>123.456 hack</source> <translation>123.456 hack</translation> </message> <message> <location line="+31"/> <source>Confirm the send action</source> <translation>Confirmi l&apos;acció d&apos;enviament</translation> </message> <message> <location line="+3"/> <source>S&amp;end</source> <translation>E&amp;nviar</translation> </message> <message> <location filename="../sendcoinsdialog.cpp" line="-173"/> <source>Enter a StarCoin address (e.g. StarCoinfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Introdueix una adreça StarCoin (p.ex. StarCoinfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="+15"/> <source>Copy quantity</source> <translation>Copiar quantitat</translation> </message> <message> <location line="+1"/> <source>Copy amount</source> <translation>Copiar quantitat</translation> </message> <message> <location line="+1"/> <source>Copy fee</source> <translation>Copiar comisió</translation> </message> <message> <location line="+1"/> <source>Copy after fee</source> <translation>Copiar després de comisió</translation> </message> <message> <location line="+1"/> <source>Copy bytes</source> <translation>Copiar bytes</translation> </message> <message> <location line="+1"/> <source>Copy priority</source> <translation>Copiar prioritat</translation> </message> <message> <location line="+1"/> <source>Copy low output</source> <translation>Copiar sortida baixa</translation> </message> <message> <location line="+1"/> <source>Copy change</source> <translation>Copiar canvi</translation> </message> <message> <location line="+86"/> <source>&lt;b&gt;%1&lt;/b&gt; to %2 (%3)</source> <translation>&lt;b&gt;%1&lt;/b&gt; a %2 (%3)</translation> </message> 
<message> <location line="+5"/> <source>Confirm send coins</source> <translation>Confirmar l&apos;enviament de monedes</translation> </message> <message> <location line="+1"/> <source>Are you sure you want to send %1?</source> <translation>Esteu segur que voleu enviar %1?</translation> </message> <message> <location line="+0"/> <source> and </source> <translation> i </translation> </message> <message> <location line="+29"/> <source>The recipient address is not valid, please recheck.</source> <translation>L&apos;adreça del destinatari no és vàlida, si us plau comprovi-la.</translation> </message> <message> <location line="+5"/> <source>The amount to pay must be larger than 0.</source> <translation>La quantitat a pagar ha de ser major que 0.</translation> </message> <message> <location line="+5"/> <source>The amount exceeds your balance.</source> <translation>L&apos;import excedeix el saldo del seu compte.</translation> </message> <message> <location line="+5"/> <source>The total exceeds your balance when the %1 transaction fee is included.</source> <translation>El total excedeix el teu balanç quan s&apos;afegeix la comissió a la transacció %1.</translation> </message> <message> <location line="+6"/> <source>Duplicate address found, can only send to each address once per send operation.</source> <translation>S&apos;ha trobat una adreça duplicada, tan sols es pot enviar a cada adreça un cop per ordre d&apos;enviament.</translation> </message> <message> <location line="+5"/> <source>Error: Transaction creation failed.</source> <translation>Error: La creació de transacció ha fallat.</translation> </message> <message> <location line="+5"/> <source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Error: La transacció ha sigut rebutjada. 
Això pot passar si algunes de les monedes a la cartera ja s&apos;han gastat, per exemple, si vostè utilitza una còpia del wallet.dat i les monedes han estat gastades a la cópia pero no s&apos;han marcat com a gastades aqui.</translation> </message> <message> <location line="+251"/> <source>WARNING: Invalid StarCoin address</source> <translation>ADVERTÈNCIA: Direcció StarCoin invàlida</translation> </message> <message> <location line="+13"/> <source>(no label)</source> <translation>(sense etiqueta)</translation> </message> <message> <location line="+4"/> <source>WARNING: unknown change address</source> <translation>ADVERTÈNCIA: direcció de canvi desconeguda</translation> </message> </context> <context> <name>SendCoinsEntry</name> <message> <location filename="../forms/sendcoinsentry.ui" line="+14"/> <source>Form</source> <translation>Formulari</translation> </message> <message> <location line="+15"/> <source>A&amp;mount:</source> <translation>Q&amp;uantitat:</translation> </message> <message> <location line="+13"/> <source>Pay &amp;To:</source> <translation>Pagar &amp;A:</translation> </message> <message> <location line="+24"/> <location filename="../sendcoinsentry.cpp" line="+25"/> <source>Enter a label for this address to add it to your address book</source> <translation>Introdueixi una etiquera per a aquesta adreça per afegir-la a la llibreta d&apos;adreces</translation> </message> <message> <location line="+9"/> <source>&amp;Label:</source> <translation>&amp;Etiqueta:</translation> </message> <message> <location line="+18"/> <source>The address to send the payment to (e.g. 
StarCoinfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>L&apos;adreça per a enviar el pagament (per exemple: StarCoinfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="+10"/> <source>Choose address from address book</source> <translation>Trieu la direcció de la llibreta d&apos;adreces</translation> </message> <message> <location line="+10"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="+7"/> <source>Paste address from clipboard</source> <translation>Enganxar adreça del porta-retalls</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+7"/> <source>Remove this recipient</source> <translation>Eliminar aquest destinatari</translation> </message> <message> <location filename="../sendcoinsentry.cpp" line="+1"/> <source>Enter a StarCoin address (e.g. StarCoinfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Introdueix una adreça StarCoin (p.ex. StarCoinfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> </context> <context> <name>SignVerifyMessageDialog</name> <message> <location filename="../forms/signverifymessagedialog.ui" line="+14"/> <source>Signatures - Sign / Verify a Message</source> <translation>Signatures - Signar / Verificar un missatge</translation> </message> <message> <location line="+13"/> <location line="+124"/> <source>&amp;Sign Message</source> <translation>&amp;Signar Missatge</translation> </message> <message> <location line="-118"/> <source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source> <translation>Pots signar missatges amb la teva adreça per provar que són teus. 
Sigues cautelós al signar qualsevol cosa, ja que els atacs phishing poden intentar confondre&apos;t per a que els hi signis amb la teva identitat. Tan sols signa als documents completament detallats amb els que hi estàs d&apos;acord.</translation> </message> <message> <location line="+18"/> <source>The address to sign the message with (e.g. StarCoinfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>L&apos;adreça per a signar el missatge (per exemple StarCoinfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="+10"/> <location line="+203"/> <source>Choose an address from the address book</source> <translation>Trieu una adreça de la llibreta d&apos;adreces</translation> </message> <message> <location line="-193"/> <location line="+203"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="-193"/> <source>Paste address from clipboard</source> <translation>Enganxar adreça del porta-retalls</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+12"/> <source>Enter the message you want to sign here</source> <translation>Introdueix aquí el missatge que vols signar</translation> </message> <message> <location line="+24"/> <source>Copy the current signature to the system clipboard</source> <translation>Copiar la signatura actual al porta-retalls del sistema</translation> </message> <message> <location line="+21"/> <source>Sign the message to prove you own this StarCoin address</source> <translation>Signar un missatge per demostrar que és propietari d&apos;aquesta adreça StarCoin</translation> </message> <message> <location line="+17"/> <source>Reset all sign message fields</source> <translation>Neteja tots els camps de signatura de missatge</translation> </message> <message> <location line="+3"/> <location line="+146"/> <source>Clear &amp;All</source> <translation>Esborrar &amp;Tot</translation> </message> <message> <location 
line="-87"/> <location line="+70"/> <source>&amp;Verify Message</source> <translation>&amp;Verificar el missatge</translation> </message> <message> <location line="-64"/> <source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source> <translation>Introdueixi l&apos;adreça signant, missatge (assegura&apos;t que copies salts de línia, espais, tabuladors, etc excactament tot el text) i la signatura a sota per verificar el missatge. Per evitar ser enganyat per un atac home-entre-mig, vés amb compte de no llegir més en la signatura del que hi ha al missatge signat mateix.</translation> </message> <message> <location line="+21"/> <source>The address the message was signed with (e.g. StarCoinfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>La direcció que va ser signada amb un missatge (per exemple StarCoinfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="+40"/> <source>Verify the message to ensure it was signed with the specified StarCoin address</source> <translation>Comproveu el missatge per assegurar-se que es va signar amb l&apos;adreça StarCoin especificada.</translation> </message> <message> <location line="+17"/> <source>Reset all verify message fields</source> <translation>Neteja tots els camps de verificació de missatge</translation> </message> <message> <location filename="../signverifymessagedialog.cpp" line="+27"/> <location line="+3"/> <source>Enter a StarCoin address (e.g. StarCoinfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Introdueix una adreça StarCoin (p.ex. 
StarCoinfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="-2"/> <source>Click &quot;Sign Message&quot; to generate signature</source> <translation>Clica &quot;Signar Missatge&quot; per a generar una signatura</translation> </message> <message> <location line="+3"/> <source>Enter StarCoin signature</source> <translation>Introduïu la signatura StarCoin</translation> </message> <message> <location line="+82"/> <location line="+81"/> <source>The entered address is invalid.</source> <translation>L&apos;adreça introduïda és invàlida.</translation> </message> <message> <location line="-81"/> <location line="+8"/> <location line="+73"/> <location line="+8"/> <source>Please check the address and try again.</source> <translation>Si us plau, comprovi l&apos;adreça i provi de nou.</translation> </message> <message> <location line="-81"/> <location line="+81"/> <source>The entered address does not refer to a key.</source> <translation>L&apos;adreça introduïda no referencia a cap clau.</translation> </message> <message> <location line="-73"/> <source>Wallet unlock was cancelled.</source> <translation>El desbloqueig del moneder ha estat cancel·lat.</translation> </message> <message> <location line="+8"/> <source>Private key for the entered address is not available.</source> <translation>La clau privada per a l&apos;adreça introduïda no està disponible.</translation> </message> <message> <location line="+12"/> <source>Message signing failed.</source> <translation>El signat del missatge ha fallat.</translation> </message> <message> <location line="+5"/> <source>Message signed.</source> <translation>Missatge signat.</translation> </message> <message> <location line="+59"/> <source>The signature could not be decoded.</source> <translation>La signatura no s&apos;ha pogut decodificar.</translation> </message> <message> <location line="+0"/> <location line="+13"/> <source>Please check the signature and try again.</source> <translation>Si us plau, comprovi la 
signatura i provi de nou.</translation> </message> <message> <location line="+0"/> <source>The signature did not match the message digest.</source> <translation>La signatura no coincideix amb el resum del missatge.</translation> </message> <message> <location line="+7"/> <source>Message verification failed.</source> <translation>Ha fallat la verificació del missatge.</translation> </message> <message> <location line="+5"/> <source>Message verified.</source> <translation>Missatge verificat.</translation> </message> </context> <context> <name>TransactionDesc</name> <message> <location filename="../transactiondesc.cpp" line="+19"/> <source>Open until %1</source> <translation>Obert fins %1</translation> </message> <message numerus="yes"> <location line="-2"/> <source>Open for %n block(s)</source> <translation><numerusform>Obert per a %n bloc</numerusform><numerusform>Obert per a %n blocs</numerusform></translation> </message> <message> <location line="+8"/> <source>conflicted</source> <translation>conflicte</translation> </message> <message> <location line="+2"/> <source>%1/offline</source> <translation>%1/offline</translation> </message> <message> <location line="+2"/> <source>%1/unconfirmed</source> <translation>%1/sense confirmar</translation> </message> <message> <location line="+2"/> <source>%1 confirmations</source> <translation>%1 confirmacions</translation> </message> <message> <location line="+18"/> <source>Status</source> <translation>Estat</translation> </message> <message numerus="yes"> <location line="+7"/> <source>, broadcast through %n node(s)</source> <translation><numerusform>, transmès a través de %n node</numerusform><numerusform>, transmès a través de %n nodes</numerusform></translation> </message> <message> <location line="+4"/> <source>Date</source> <translation>Data</translation> </message> <message> <location line="+7"/> <source>Source</source> <translation>Font</translation> </message> <message> <location line="+0"/> <source>Generated</source> 
<translation>Generat</translation> </message> <message> <location line="+5"/> <location line="+17"/> <source>From</source> <translation>Des de</translation> </message> <message> <location line="+1"/> <location line="+22"/> <location line="+58"/> <source>To</source> <translation>A</translation> </message> <message> <location line="-77"/> <location line="+2"/> <source>own address</source> <translation>Adreça pròpia</translation> </message> <message> <location line="-2"/> <source>label</source> <translation>etiqueta</translation> </message> <message> <location line="+37"/> <location line="+12"/> <location line="+45"/> <location line="+17"/> <location line="+30"/> <source>Credit</source> <translation>Crèdit</translation> </message> <message numerus="yes"> <location line="-102"/> <source>matures in %n more block(s)</source> <translation><numerusform>madura en %n bloc més</numerusform><numerusform>madura en %n blocs més</numerusform></translation> </message> <message> <location line="+2"/> <source>not accepted</source> <translation>no acceptat</translation> </message> <message> <location line="+44"/> <location line="+8"/> <location line="+15"/> <location line="+30"/> <source>Debit</source> <translation>Dèbit</translation> </message> <message> <location line="-39"/> <source>Transaction fee</source> <translation>Comissió de transacció</translation> </message> <message> <location line="+16"/> <source>Net amount</source> <translation>Quantitat neta</translation> </message> <message> <location line="+6"/> <source>Message</source> <translation>Missatge</translation> </message> <message> <location line="+2"/> <source>Comment</source> <translation>Comentar</translation> </message> <message> <location line="+2"/> <source>Transaction ID</source> <translation>ID de transacció</translation> </message> <message> <location line="+3"/> <source>Generated coins must mature 510 blocks before they can be spent. 
When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to &quot;not accepted&quot; and it won&apos;t be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source> <translation>Les monedes generades han de madurar 510 blocs abans de poder-se gastar. En generar aquest bloc, que va ser transmès a la xarxa per ser afegit a la cadena de bloc. Si no aconsegueix entrar a la cadena, el seu estat canviarà a &quot;no acceptat&quot; i no es podrà gastar. Això pot succeir de tant en tant si un altre node genera un bloc a pocs segons del seu.</translation> </message> <message> <location line="+7"/> <source>Debug information</source> <translation>Informació de debug</translation> </message> <message> <location line="+8"/> <source>Transaction</source> <translation>Transacció</translation> </message> <message> <location line="+5"/> <source>Inputs</source> <translation>Entrades</translation> </message> <message> <location line="+23"/> <source>Amount</source> <translation>Quantitat</translation> </message> <message> <location line="+1"/> <source>true</source> <translation>cert</translation> </message> <message> <location line="+0"/> <source>false</source> <translation>fals</translation> </message> <message> <location line="-211"/> <source>, has not been successfully broadcast yet</source> <translation>, encara no ha estat emès correctement</translation> </message> <message> <location line="+35"/> <source>unknown</source> <translation>desconegut</translation> </message> </context> <context> <name>TransactionDescDialog</name> <message> <location filename="../forms/transactiondescdialog.ui" line="+14"/> <source>Transaction details</source> <translation>Detall de la transacció</translation> </message> <message> <location line="+6"/> <source>This pane shows a detailed description of the transaction</source> <translation>Aquest panell 
mostra una descripció detallada de la transacció</translation> </message> </context> <context> <name>TransactionTableModel</name> <message> <location filename="../transactiontablemodel.cpp" line="+226"/> <source>Date</source> <translation>Data</translation> </message> <message> <location line="+0"/> <source>Type</source> <translation>Tipus</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Direcció</translation> </message> <message> <location line="+0"/> <source>Amount</source> <translation>Quantitat</translation> </message> <message> <location line="+60"/> <source>Open until %1</source> <translation>Obert fins %1</translation> </message> <message> <location line="+12"/> <source>Confirmed (%1 confirmations)</source> <translation>Confirmat (%1 confirmacions)</translation> </message> <message numerus="yes"> <location line="-15"/> <source>Open for %n more block(s)</source> <translation><numerusform>Obert per a %n bloc més</numerusform><numerusform>Obert per a %n blocs més</numerusform></translation> </message> <message> <location line="+6"/> <source>Offline</source> <translation>Desconnectat</translation> </message> <message> <location line="+3"/> <source>Unconfirmed</source> <translation>Sense confirmar</translation> </message> <message> <location line="+3"/> <source>Confirming (%1 of %2 recommended confirmations)</source> <translation>Confirmant (%1 de %2 confirmacions recomanat)</translation> </message> <message> <location line="+6"/> <source>Conflicted</source> <translation>Conflicte</translation> </message> <message> <location line="+3"/> <source>Immature (%1 confirmations, will be available after %2)</source> <translation>Immadurs (%1 confirmacions, estaran disponibles després de %2)</translation> </message> <message> <location line="+3"/> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation>Aquest bloc no ha estat rebut per cap altre node i probablement no 
serà acceptat!</translation> </message> <message> <location line="+3"/> <source>Generated but not accepted</source> <translation>Generat però no acceptat</translation> </message> <message> <location line="+42"/> <source>Received with</source> <translation>Rebut amb</translation> </message> <message> <location line="+2"/> <source>Received from</source> <translation>Rebut de</translation> </message> <message> <location line="+3"/> <source>Sent to</source> <translation>Enviat a</translation> </message> <message> <location line="+2"/> <source>Payment to yourself</source> <translation>Pagament a un mateix</translation> </message> <message> <location line="+2"/> <source>Mined</source> <translation>Minat</translation> </message> <message> <location line="+38"/> <source>(n/a)</source> <translation>(n/a)</translation> </message> <message> <location line="+190"/> <source>Transaction status. Hover over this field to show number of confirmations.</source> <translation>Estat de la transacció. Desplaça&apos;t per aquí sobre per mostrar el nombre de confirmacions.</translation> </message> <message> <location line="+2"/> <source>Date and time that the transaction was received.</source> <translation>Data i hora en que la transacció va ser rebuda.</translation> </message> <message> <location line="+2"/> <source>Type of transaction.</source> <translation>Tipus de transacció.</translation> </message> <message> <location line="+2"/> <source>Destination address of transaction.</source> <translation>Adreça del destinatari de la transacció.</translation> </message> <message> <location line="+2"/> <source>Amount removed from or added to balance.</source> <translation>Quantitat extreta o afegida del balanç.</translation> </message> </context> <context> <name>TransactionView</name> <message> <location filename="../transactionview.cpp" line="+55"/> <location line="+16"/> <source>All</source> <translation>Tot</translation> </message> <message> <location line="-15"/> <source>Today</source> 
<translation>Avui</translation> </message> <message> <location line="+1"/> <source>This week</source> <translation>Aquesta setmana</translation> </message> <message> <location line="+1"/> <source>This month</source> <translation>Aquest mes</translation> </message> <message> <location line="+1"/> <source>Last month</source> <translation>El mes passat</translation> </message> <message> <location line="+1"/> <source>This year</source> <translation>Enguany</translation> </message> <message> <location line="+1"/> <source>Range...</source> <translation>Rang...</translation> </message> <message> <location line="+11"/> <source>Received with</source> <translation>Rebut amb</translation> </message> <message> <location line="+2"/> <source>Sent to</source> <translation>Enviat a</translation> </message> <message> <location line="+2"/> <source>To yourself</source> <translation>A tu mateix</translation> </message> <message> <location line="+1"/> <source>Mined</source> <translation>Minat</translation> </message> <message> <location line="+1"/> <source>Other</source> <translation>Altres</translation> </message> <message> <location line="+7"/> <source>Enter address or label to search</source> <translation>Introdueix una adreça o una etiqueta per cercar</translation> </message> <message> <location line="+7"/> <source>Min amount</source> <translation>Quantitat mínima</translation> </message> <message> <location line="+34"/> <source>Copy address</source> <translation>Copiar adreça </translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation>Copiar etiqueta</translation> </message> <message> <location line="+1"/> <source>Copy amount</source> <translation>Copiar quantitat</translation> </message> <message> <location line="+1"/> <source>Copy transaction ID</source> <translation>Copiar ID de transacció</translation> </message> <message> <location line="+1"/> <source>Edit label</source> <translation>Editar etiqueta</translation> </message> <message> 
<location line="+1"/> <source>Show transaction details</source> <translation>Mostra detalls de la transacció</translation> </message> <message> <location line="+144"/> <source>Export Transaction Data</source> <translation>Exportació de dades de transaccions</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Arxiu de separació per comes (*.csv)</translation> </message> <message> <location line="+8"/> <source>Confirmed</source> <translation>Confirmat</translation> </message> <message> <location line="+1"/> <source>Date</source> <translation>Data</translation> </message> <message> <location line="+1"/> <source>Type</source> <translation>Tipus</translation> </message> <message> <location line="+1"/> <source>Label</source> <translation>Etiqueta</translation> </message> <message> <location line="+1"/> <source>Address</source> <translation>Direcció</translation> </message> <message> <location line="+1"/> <source>Amount</source> <translation>Quantitat</translation> </message> <message> <location line="+1"/> <source>ID</source> <translation>ID</translation> </message> <message> <location line="+4"/> <source>Error exporting</source> <translation>Error a l&apos;exportar</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>No s&apos;ha pogut escriure al fitxer %1.</translation> </message> <message> <location line="+100"/> <source>Range:</source> <translation>Rang:</translation> </message> <message> <location line="+8"/> <source>to</source> <translation>a</translation> </message> </context> <context> <name>WalletModel</name> <message> <location filename="../walletmodel.cpp" line="+206"/> <source>Sending...</source> <translation>Enviant...</translation> </message> </context> <context> <name>bitcoin-core</name> <message> <location filename="../bitcoinstrings.cpp" line="+33"/> <source>StarCoin version</source> <translation>versió StarCoin</translation> 
</message> <message> <location line="+1"/> <source>Usage:</source> <translation>Ús:</translation> </message> <message> <location line="+1"/> <source>Send command to -server or StarCoind</source> <translation>Enviar comandes a -server o StarCoind</translation> </message> <message> <location line="+1"/> <source>List commands</source> <translation>Llista d&apos;ordres</translation> </message> <message> <location line="+1"/> <source>Get help for a command</source> <translation>Obtenir ajuda per a un ordre.</translation> </message> <message> <location line="+2"/> <source>Options:</source> <translation>Opcions:</translation> </message> <message> <location line="+2"/> <source>Specify configuration file (default: StarCoin.conf)</source> <translation>Especifiqueu el fitxer de configuració (per defecte: StarCoin.conf)</translation> </message> <message> <location line="+1"/> <source>Specify pid file (default: StarCoind.pid)</source> <translation>Especificar arxiu pid (per defecte: StarCoind.pid)</translation> </message> <message> <location line="+2"/> <source>Specify wallet file (within data directory)</source> <translation>Especifica un arxiu de moneder (dintre del directori de les dades)</translation> </message> <message> <location line="-1"/> <source>Specify data directory</source> <translation>Especificar directori de dades</translation> </message> <message> <location line="+2"/> <source>Set database cache size in megabytes (default: 25)</source> <translation>Establir tamany de la memoria cau en megabytes (per defecte: 25)</translation> </message> <message> <location line="+1"/> <source>Set database disk log size in megabytes (default: 100)</source> <translation>Configurar la mida del registre en disc de la base de dades en megabytes (per defecte: 100)</translation> </message> <message> <location line="+6"/> <source>Listen for connections on &lt;port&gt; (default: 15714 or testnet: 25714)</source> <translation>Escoltar connexions en &lt;port&gt; (per defecte: 15714 o 
testnet: 25714)</translation> </message> <message> <location line="+1"/> <source>Maintain at most &lt;n&gt; connections to peers (default: 125)</source> <translation>Mantenir com a molt &lt;n&gt; connexions a peers (per defecte: 125)</translation> </message> <message> <location line="+3"/> <source>Connect to a node to retrieve peer addresses, and disconnect</source> <translation>Connectar al node per obtenir les adreces de les connexions, i desconectar</translation> </message> <message> <location line="+1"/> <source>Specify your own public address</source> <translation>Especificar la teva adreça pública</translation> </message> <message> <location line="+5"/> <source>Bind to given address. Use [host]:port notation for IPv6</source> <translation>Enllaçar a l&apos;adreça donada. Utilitzeu la notació [host]:port per a IPv6</translation> </message> <message> <location line="+2"/> <source>Stake your coins to support network and gain reward (default: 1)</source> <translation>Posa les teves monedes a fer &quot;stake&quot; per donar suport a la xarxa i obtenir una recompensa (per defecte: 1)</translation> </message> <message> <location line="+5"/> <source>Threshold for disconnecting misbehaving peers (default: 100)</source> <translation>Límit per a desconectar connexions errònies (per defecte: 100)</translation> </message> <message> <location line="+1"/> <source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source> <translation>Nombre de segons abans de reconectar amb connexions errònies (per defecte: 86400)</translation> </message> <message> <location line="-44"/> <source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source> <translation>Ha sorgit un error al configurar el port RPC %u escoltant a IPv4: %s</translation> </message> <message> <location line="+51"/> <source>Detach block and address databases. 
Increases shutdown time (default: 0)</source> <translation>Separeu el bloc i les bases de dades d&apos;adreces. Augmenta el temps d&apos;apagada (per defecte: 0)</translation> </message> <message> <location line="+109"/> <source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Error: La transacció ha sigut rebutjada. Això pot passar si algunes de les monedes a la cartera ja s&apos;han gastat, per exemple, si vostè utilitza una còpia del wallet.dat i les monedes han estat gastades a la cópia pero no s&apos;han marcat com a gastades aqui.</translation> </message> <message> <location line="-5"/> <source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds </source> <translation>Error: Aquesta transacció requereix una comisió d&apos;almenys %s degut a la seva quantitat, complexitat, o l&apos;ús dels fons rebuts recentment</translation> </message> <message> <location line="-87"/> <source>Listen for JSON-RPC connections on &lt;port&gt; (default: 15715 or testnet: 25715)</source> <translation>Escoltar connexions JSON-RPC al port &lt;port&gt; (per defecte: 15715 o testnet: 25715)</translation> </message> <message> <location line="-11"/> <source>Accept command line and JSON-RPC commands</source> <translation>Acceptar línia d&apos;ordres i ordres JSON-RPC </translation> </message> <message> <location line="+101"/> <source>Error: Transaction creation failed </source> <translation>Error: La creació de transacció ha fallat.</translation> </message> <message> <location line="-5"/> <source>Error: Wallet locked, unable to create transaction </source> <translation>Error: Cartera bloquejada, no es pot de crear la transacció</translation> </message> <message> <location line="-8"/> <source>Importing blockchain data 
file.</source> <translation>Important fitxer de dades de la cadena de blocs</translation> </message> <message> <location line="+1"/> <source>Importing bootstrap blockchain data file.</source> <translation>Important fitxer de dades d&apos;arrencada de la cadena de blocs</translation> </message> <message> <location line="-88"/> <source>Run in the background as a daemon and accept commands</source> <translation>Executar en segon pla com a programa dimoni i acceptar ordres</translation> </message> <message> <location line="+1"/> <source>Use the test network</source> <translation>Usar la xarxa de prova</translation> </message> <message> <location line="-24"/> <source>Accept connections from outside (default: 1 if no -proxy or -connect)</source> <translation>Aceptar connexions d&apos;afora (per defecte: 1 si no -proxy o -connect)</translation> </message> <message> <location line="-38"/> <source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source> <translation>Ha sorgit un error al configurar el port RPC %u escoltant a IPv6, retrocedint a IPv4: %s</translation> </message> <message> <location line="+117"/> <source>Error initializing database environment %s! To recover, BACKUP THAT DIRECTORY, then remove everything from it except for wallet.dat.</source> <translation>Error en inicialitzar l&apos;entorn de base de dades %s! Per recuperar, FACI UNA COPIA DE SEGURETAT D&apos;AQUEST DIRECTORI, a continuació, retiri tot d&apos;ella excepte l&apos;arxiu wallet.dat.</translation> </message> <message> <location line="-20"/> <source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source> <translation>Establir la grandària màxima de les transaccions alta-prioritat/baixa-comisió en bytes (per defecte: 27000)</translation> </message> <message> <location line="+11"/> <source>Warning: -paytxfee is set very high! 
This is the transaction fee you will pay if you send a transaction.</source> <translation>Advertència: el -paytxfee és molt elevat! Aquesta és la comissió de transacció que pagaràs quan enviis una transacció.</translation> </message> <message> <location line="+61"/> <source>Warning: Please check that your computer&apos;s date and time are correct! If your clock is wrong StarCoin will not work properly.</source> <translation>Avís: Comproveu que la data i hora de l&apos;equip siguin correctes! Si el seu rellotge és erroni StarCoin no funcionarà correctament.</translation> </message> <message> <location line="-31"/> <source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source> <translation>Advertència: Error llegint l&apos;arxiu wallet.dat!! Totes les claus es llegeixen correctament, però hi ha dades de transaccions o entrades del llibre d&apos;adreces absents o bé son incorrectes.</translation> </message> <message> <location line="-18"/> <source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source> <translation>Advertència: L&apos;arxiu wallet.dat és corrupte, dades rescatades! 
L&apos;arxiu wallet.dat original ha estat desat com wallet.{estampa_temporal}.bak al directori %s; si el teu balanç o transaccions son incorrectes hauries de restaurar-lo de un backup.</translation> </message> <message> <location line="-30"/> <source>Attempt to recover private keys from a corrupt wallet.dat</source> <translation>Intentar recuperar les claus privades d&apos;un arxiu wallet.dat corrupte</translation> </message> <message> <location line="+4"/> <source>Block creation options:</source> <translation>Opcions de la creació de blocs:</translation> </message> <message> <location line="-62"/> <source>Connect only to the specified node(s)</source> <translation>Connectar només al(s) node(s) especificats</translation> </message> <message> <location line="+4"/> <source>Discover own IP address (default: 1 when listening and no -externalip)</source> <translation>Descobrir la pròpia adreça IP (per defecte: 1 quan escoltant i no -externalip)</translation> </message> <message> <location line="+94"/> <source>Failed to listen on any port. Use -listen=0 if you want this.</source> <translation>Error al escoltar a qualsevol port. 
Utilitza -listen=0 si vols això.</translation> </message> <message> <location line="-90"/> <source>Find peers using DNS lookup (default: 1)</source> <translation>Trobar companys utilitzant la recerca de DNS (per defecte: 1)</translation> </message> <message> <location line="+5"/> <source>Sync checkpoints policy (default: strict)</source> <translation>Política dels punts de control de sincronització (per defecte: estricta)</translation> </message> <message> <location line="+83"/> <source>Invalid -tor address: &apos;%s&apos;</source> <translation>Adreça -tor invalida: &apos;%s&apos;</translation> </message> <message> <location line="+4"/> <source>Invalid amount for -reservebalance=&lt;amount&gt;</source> <translation>Quantitat invalida per a -reservebalance=&lt;amount&gt;</translation> </message> <message> <location line="-82"/> <source>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: 5000)</source> <translation>Mida màxima del buffer de recepció per a cada connexió, &lt;n&gt;*1000 bytes (default: 5000)</translation> </message> <message> <location line="+1"/> <source>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: 1000)</source> <translation>Mida màxima del buffer d&apos;enviament per a cada connexió, &lt;n&gt;*1000 bytes (default: 5000)</translation> </message> <message> <location line="-16"/> <source>Only connect to nodes in network &lt;net&gt; (IPv4, IPv6 or Tor)</source> <translation>Només connectar als nodes de la xarxa &lt;net&gt; (IPv4, IPv6 o Tor)</translation> </message> <message> <location line="+28"/> <source>Output extra debugging information. Implies all other -debug* options</source> <translation>Sortida d&apos;informació de depuració extra. 
Implica totes les opcions de depuracó -debug*</translation> </message> <message> <location line="+1"/> <source>Output extra network debugging information</source> <translation>Sortida d&apos;informació de depuració de xarxa addicional</translation> </message> <message> <location line="+1"/> <source>Prepend debug output with timestamp</source> <translation>Anteposar marca de temps a la sortida de depuració</translation> </message> <message> <location line="+35"/> <source>SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source> <translation>Opcions SSL: (veure la Wiki de Bitcoin per a instruccions de configuració SSL)</translation> </message> <message> <location line="-74"/> <source>Select the version of socks proxy to use (4-5, default: 5)</source> <translation>Seleccioneu la versió de proxy socks per utilitzar (4-5, per defecte: 5)</translation> </message> <message> <location line="+41"/> <source>Send trace/debug info to console instead of debug.log file</source> <translation>Enviar informació de traça/debug a la consola en comptes del arxiu debug.log</translation> </message> <message> <location line="+1"/> <source>Send trace/debug info to debugger</source> <translation>Enviar informació de traça/depuració al depurador</translation> </message> <message> <location line="+28"/> <source>Set maximum block size in bytes (default: 250000)</source> <translation>Establir una mida máxima de bloc en bytes (per defecte: 250000)</translation> </message> <message> <location line="-1"/> <source>Set minimum block size in bytes (default: 0)</source> <translation>Establir una mida mínima de bloc en bytes (per defecte: 0)</translation> </message> <message> <location line="-29"/> <source>Shrink debug.log file on client startup (default: 1 when no -debug)</source> <translation>Reduir l&apos;arxiu debug.log al iniciar el client (per defecte 1 quan no -debug)</translation> </message> <message> <location line="-42"/> <source>Specify connection timeout in milliseconds 
(default: 5000)</source> <translation>Especificar el temps límit per a un intent de connexió en milisegons (per defecte: 5000)</translation> </message> <message> <location line="+109"/> <source>Unable to sign checkpoint, wrong checkpointkey? </source> <translation>No es pot signar el punt de control, la clau del punt de control està malament? </translation> </message> <message> <location line="-80"/> <source>Use UPnP to map the listening port (default: 0)</source> <translation>Utilitza UPnP per a mapejar els ports d&apos;escolta (per defecte: 0)</translation> </message> <message> <location line="-1"/> <source>Use UPnP to map the listening port (default: 1 when listening)</source> <translation>Utilitza UPnP per a mapejar els ports d&apos;escolta (per defecte: 1 quan s&apos;escolta)</translation> </message> <message> <location line="-25"/> <source>Use proxy to reach tor hidden services (default: same as -proxy)</source> <translation>Utilitza proxy per arribar als serveis ocults de Tor (per defecte: la mateixa que -proxy)</translation> </message> <message> <location line="+42"/> <source>Username for JSON-RPC connections</source> <translation>Nom d&apos;usuari per a connexions JSON-RPC</translation> </message> <message> <location line="+47"/> <source>Verifying database integrity...</source> <translation>Comprovant la integritat de la base de dades ...</translation> </message> <message> <location line="+57"/> <source>WARNING: syncronized checkpoint violation detected, but skipped!</source> <translation>ADVERTÈNCIA: violació de punt de control sincronitzat detectada, es saltarà!</translation> </message> <message> <location line="+1"/> <source>Warning: Disk space is low!</source> <translation>Avís: L&apos;espai en disc és baix!</translation> </message> <message> <location line="-2"/> <source>Warning: This version is obsolete, upgrade required!</source> <translation>Advertència: Aquesta versió està obsoleta, és necessari actualitzar!</translation> </message> <message> 
<location line="-48"/> <source>wallet.dat corrupt, salvage failed</source> <translation>L&apos;arxiu wallet.data és corrupte, el rescat de les dades ha fallat</translation> </message> <message> <location line="-54"/> <source>Password for JSON-RPC connections</source> <translation>Contrasenya per a connexions JSON-RPC</translation> </message> <message> <location line="-84"/> <source>%s, you must set a rpcpassword in the configuration file: %s It is recommended you use the following random password: rpcuser=StarCoinrpc rpcpassword=%s (you do not need to remember this password) The username and password MUST NOT be the same. If the file does not exist, create it with owner-readable-only file permissions. It is also recommended to set alertnotify so you are notified of problems; for example: alertnotify=echo %%s | mail -s &quot;StarCoin Alert&quot; [email protected] </source> <translation>%s, ha d&apos;establir un rpcpassword al fitxer de configuració: %s Es recomana utilitzar la següent contrasenya aleatòria: rpcuser=StarCoinrpc rpcpassword=%s (No cal recordar aquesta contrasenya) El nom d&apos;usuari i contrasenya NO HA DE SER el mateix. Si no hi ha l&apos;arxiu, s&apos;ha de crear amb els permisos de només lectura per al propietari. També es recomana establir alertnotify per a que se li notifiquin els problemes; per exemple: alertnotify=echo %%s | mail -s &quot;StarCoin Alert&quot; [email protected] </translation> </message> <message> <location line="+51"/> <source>Find peers using internet relay chat (default: 0)</source> <translation>Trobar companys utilitzant l&apos;IRC (per defecte: 1) {0)?}</translation> </message> <message> <location line="+5"/> <source>Sync time with other nodes. Disable if time on your system is precise e.g. syncing with NTP (default: 1)</source> <translation>Sincronitzar el temps amb altres nodes. 
Desactivar si el temps al seu sistema és precís, per exemple, si fa ús de sincronització amb NTP (per defecte: 1)</translation> </message> <message> <location line="+15"/> <source>When creating transactions, ignore inputs with value less than this (default: 0.01)</source> <translation>En crear transaccions, ignorar les entrades amb valor inferior a aquesta (per defecte: 0.01)</translation> </message> <message> <location line="+16"/> <source>Allow JSON-RPC connections from specified IP address</source> <translation>Permetre connexions JSON-RPC d&apos;adreces IP específiques</translation> </message> <message> <location line="+1"/> <source>Send commands to node running on &lt;ip&gt; (default: 127.0.0.1)</source> <translation>Enviar ordre al node en execució a &lt;ip&gt; (per defecte: 127.0.0.1)</translation> </message> <message> <location line="+1"/> <source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source> <translation>Executar orde quan el millor bloc canviï (%s al cmd es reemplaça per un bloc de hash)</translation> </message> <message> <location line="+3"/> <source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source> <translation>Executar una ordre quan una transacció del moneder canviï (%s in cmd es canvia per TxID)</translation> </message> <message> <location line="+3"/> <source>Require a confirmations for change (default: 0)</source> <translation>Requerir les confirmacions de canvi (per defecte: 0)</translation> </message> <message> <location line="+1"/> <source>Enforce transaction scripts to use canonical PUSH operators (default: 1)</source> <translation>Fer complir als scripts de transaccions d&apos;utilitzar operadors PUSH canòniques (per defecte: 1)</translation> </message> <message> <location line="+2"/> <source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source> <translation> Executar una ordre quan es rep un avís rellevant (%s en cmd es 
substitueix per missatge)</translation> </message> <message> <location line="+3"/> <source>Upgrade wallet to latest format</source> <translation>Actualitzar moneder a l&apos;últim format</translation> </message> <message> <location line="+1"/> <source>Set key pool size to &lt;n&gt; (default: 100)</source> <translation>Establir límit de nombre de claus a &lt;n&gt; (per defecte: 100)</translation> </message> <message> <location line="+1"/> <source>Rescan the block chain for missing wallet transactions</source> <translation>Re-escanejar cadena de blocs en cerca de transaccions de moneder perdudes</translation> </message> <message> <location line="+2"/> <source>How many blocks to check at startup (default: 2500, 0 = all)</source> <translation>Quants blocs s&apos;han de confirmar a l&apos;inici (per defecte: 2500, 0 = tots)</translation> </message> <message> <location line="+1"/> <source>How thorough the block verification is (0-6, default: 1)</source> <translation>Com és de minuciosa la verificació del bloc (0-6, per defecte: 1)</translation> </message> <message> <location line="+1"/> <source>Imports blocks from external blk000?.dat file</source> <translation>Importar blocs desde l&apos;arxiu extern blk000?.dat</translation> </message> <message> <location line="+8"/> <source>Use OpenSSL (https) for JSON-RPC connections</source> <translation>Utilitzar OpenSSL (https) per a connexions JSON-RPC</translation> </message> <message> <location line="+1"/> <source>Server certificate file (default: server.cert)</source> <translation>Arxiu del certificat de servidor (per defecte: server.cert)</translation> </message> <message> <location line="+1"/> <source>Server private key (default: server.pem)</source> <translation>Clau privada del servidor (per defecte: server.pem)</translation> </message> <message> <location line="+1"/> <source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source> <translation>Xifres acceptables (per defecte: 
TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</translation> </message> <message> <location line="+53"/> <source>Error: Wallet unlocked for staking only, unable to create transaction.</source> <translation>Error: Cartera desbloquejada només per a fer &quot;stake&quot;, no es pot crear la transacció</translation> </message> <message> <location line="+18"/> <source>WARNING: Invalid checkpoint found! Displayed transactions may not be correct! You may need to upgrade, or notify developers.</source> <translation>ADVERTÈNCIA: Punt de control invàlid! Les transaccions mostrades podrien no ser correctes! Podria ser necessari actualitzar o notificar-ho als desenvolupadors.</translation> </message> <message> <location line="-158"/> <source>This help message</source> <translation>Aquest missatge d&apos;ajuda</translation> </message> <message> <location line="+95"/> <source>Wallet %s resides outside data directory %s.</source> <translation>La cartera %s resideix fora del directori de dades %s.</translation> </message> <message> <location line="+1"/> <source>Cannot obtain a lock on data directory %s. StarCoin is probably already running.</source> <translation>No es pot obtenir un bloqueig en el directori de dades %s. 
StarCoin probablement ja estigui en funcionament.</translation> </message> <message> <location line="-98"/> <source>StarCoin</source> <translation>StarCoin</translation> </message> <message> <location line="+140"/> <source>Unable to bind to %s on this computer (bind returned error %d, %s)</source> <translation>Impossible d&apos;unir %s a aquest ordinador (s&apos;ha retornat l&apos;error %d, %s)</translation> </message> <message> <location line="-130"/> <source>Connect through socks proxy</source> <translation>Conectar a través d&apos;un proxy SOCKS</translation> </message> <message> <location line="+3"/> <source>Allow DNS lookups for -addnode, -seednode and -connect</source> <translation>Permetre consultes DNS per a -addnode, -seednode i -connect</translation> </message> <message> <location line="+122"/> <source>Loading addresses...</source> <translation>Carregant adreces...</translation> </message> <message> <location line="-15"/> <source>Error loading blkindex.dat</source> <translation>Error carregant blkindex.dat</translation> </message> <message> <location line="+2"/> <source>Error loading wallet.dat: Wallet corrupted</source> <translation>Error carregant wallet.dat: Moneder corrupte</translation> </message> <message> <location line="+4"/> <source>Error loading wallet.dat: Wallet requires newer version of StarCoin</source> <translation>Error en carregar wallet.dat: La cartera requereix la versió més recent de StarCoin</translation> </message> <message> <location line="+1"/> <source>Wallet needed to be rewritten: restart StarCoin to complete</source> <translation>La cartera necessita ser reescrita: reiniciar StarCoin per completar</translation> </message> <message> <location line="+1"/> <source>Error loading wallet.dat</source> <translation>Error carregant wallet.dat</translation> </message> <message> <location line="-16"/> <source>Invalid -proxy address: &apos;%s&apos;</source> <translation>Adreça -proxy invalida: &apos;%s&apos;</translation> </message> 
<message> <location line="-1"/> <source>Unknown network specified in -onlynet: &apos;%s&apos;</source> <translation>Xarxa desconeguda especificada a -onlynet: &apos;%s&apos;</translation> </message> <message> <location line="-1"/> <source>Unknown -socks proxy version requested: %i</source> <translation>S&apos;ha demanat una versió desconeguda de -socks proxy: %i</translation> </message> <message> <location line="+4"/> <source>Cannot resolve -bind address: &apos;%s&apos;</source> <translation>No es pot resoldre l&apos;adreça -bind: &apos;%s&apos;</translation> </message> <message> <location line="+2"/> <source>Cannot resolve -externalip address: &apos;%s&apos;</source> <translation>No es pot resoldre l&apos;adreça -externalip: &apos;%s&apos;</translation> </message> <message> <location line="-24"/> <source>Invalid amount for -paytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Quantitat invàlida per a -paytxfee=&lt;amount&gt;: &apos;%s&apos;</translation> </message> <message> <location line="+44"/> <source>Error: could not start node</source> <translation>Error: no s&apos;ha pogut iniciar el node</translation> </message> <message> <location line="+11"/> <source>Sending...</source> <translation>Enviant...</translation> </message> <message> <location line="+5"/> <source>Invalid amount</source> <translation>Quantitat invàlida</translation> </message> <message> <location line="+1"/> <source>Insufficient funds</source> <translation>Balanç insuficient</translation> </message> <message> <location line="-34"/> <source>Loading block index...</source> <translation>Carregant índex de blocs...</translation> </message> <message> <location line="-103"/> <source>Add a node to connect to and attempt to keep the connection open</source> <translation>Afegir un node per a connectar-s&apos;hi i intentar mantenir la connexió oberta</translation> </message> <message> <location line="+122"/> <source>Unable to bind to %s on this computer. 
StarCoin is probably already running.</source> <translation>No es pot enllaçar a %s en aquest equip. StarCoin probablement ja estigui en funcionament.</translation> </message> <message> <location line="-97"/> <source>Fee per KB to add to transactions you send</source> <translation>Comisió per KB per a afegir a les transaccions que enviï</translation> </message> <message> <location line="+55"/> <source>Invalid amount for -mininput=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Quantitat invalida per a -mininput=&lt;amount&gt;: &apos;%s&apos;</translation> </message> <message> <location line="+25"/> <source>Loading wallet...</source> <translation>Carregant moneder...</translation> </message> <message> <location line="+8"/> <source>Cannot downgrade wallet</source> <translation>No es pot reduir la versió del moneder</translation> </message> <message> <location line="+1"/> <source>Cannot initialize keypool</source> <translation>No es pot inicialitzar el keypool</translation> </message> <message> <location line="+1"/> <source>Cannot write default address</source> <translation>No es pot escriure l&apos;adreça per defecte</translation> </message> <message> <location line="+1"/> <source>Rescanning...</source> <translation>Re-escanejant...</translation> </message> <message> <location line="+5"/> <source>Done loading</source> <translation>Càrrega acabada</translation> </message> <message> <location line="-167"/> <source>To use the %s option</source> <translation>Utilitza la opció %s</translation> </message> <message> <location line="+14"/> <source>Error</source> <translation>Error</translation> </message> <message> <location line="+6"/> <source>You must set rpcpassword=&lt;password&gt; in the configuration file: %s If the file does not exist, create it with owner-readable-only file permissions.</source> <translation>Has de configurar el rpcpassword=&lt;password&gt; a l&apos;arxiu de configuració:\n %s\n Si l&apos;arxiu no existeix, crea&apos;l amb els permís 
owner-readable-only.</translation> </message> </context> </TS><|fim▁end|>
<|file_name|>maintenance.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from datetime import date, datetime, timedelta from odoo import api, fields, models, SUPERUSER_ID, _ from odoo.exceptions import UserError from odoo.tools import DEFAULT_SERVER_DATE_FORMAT, DEFAULT_SERVER_DATETIME_FORMAT class MaintenanceStage(models.Model): """ Model for case stages. This models the main stages of a Maintenance Request management flow. """ _name = 'maintenance.stage' _description = 'Maintenance Stage' _order = 'sequence, id' name = fields.Char('Name', required=True, translate=True) sequence = fields.Integer('Sequence', default=20) fold = fields.Boolean('Folded in Maintenance Pipe') done = fields.Boolean('Request Done') class MaintenanceEquipmentCategory(models.Model): _name = 'maintenance.equipment.category' _inherit = ['mail.alias.mixin', 'mail.thread'] _description = 'Asset Category' @api.one @api.depends('equipment_ids') def _compute_fold(self): self.fold = False if self.equipment_count else True name = fields.Char('Category Name', required=True, translate=True) technician_user_id = fields.Many2one('res.users', 'Responsible', track_visibility='onchange', default=lambda self: self.env.uid, oldname='user_id') color = fields.Integer('Color Index') note = fields.Text('Comments', translate=True) equipment_ids = fields.One2many('maintenance.equipment', 'category_id', string='Equipments', copy=False) equipment_count = fields.Integer(string="Equipment", compute='_compute_equipment_count') maintenance_ids = fields.One2many('maintenance.request', 'category_id', copy=False) maintenance_count = fields.Integer(string="Maintenance", compute='_compute_maintenance_count') alias_id = fields.Many2one( 'mail.alias', 'Alias', ondelete='restrict', required=True, help="Email alias for this equipment category. 
New emails will automatically " "create new maintenance request for this equipment category.") fold = fields.Boolean(string='Folded in Maintenance Pipe', compute='_compute_fold', store=True) @api.multi def _compute_equipment_count(self): equipment_data = self.env['maintenance.equipment'].read_group([('category_id', 'in', self.ids)], ['category_id'], ['category_id']) mapped_data = dict([(m['category_id'][0], m['category_id_count']) for m in equipment_data]) for category in self: category.equipment_count = mapped_data.get(category.id, 0) @api.multi def _compute_maintenance_count(self): maintenance_data = self.env['maintenance.request'].read_group([('category_id', 'in', self.ids)], ['category_id'], ['category_id']) mapped_data = dict([(m['category_id'][0], m['category_id_count']) for m in maintenance_data]) for category in self: category.maintenance_count = mapped_data.get(category.id, 0) @api.model def create(self, vals): self = self.with_context(alias_model_name='maintenance.request', alias_parent_model_name=self._name) if not vals.get('alias_name'): vals['alias_name'] = vals.get('name') category_id = super(MaintenanceEquipmentCategory, self).create(vals) category_id.alias_id.write({'alias_parent_thread_id': category_id.id, 'alias_defaults': {'category_id': category_id.id}}) return category_id @api.multi def unlink(self): MailAlias = self.env['mail.alias'] for category in self: if category.equipment_ids or category.maintenance_ids: raise UserError(_("You cannot delete an equipment category containing equipments or maintenance requests.")) MailAlias += category.alias_id res = super(MaintenanceEquipmentCategory, self).unlink() MailAlias.unlink() return res def get_alias_model_name(self, vals): return vals.get('alias_model', 'maintenance.equipment') def get_alias_values(self): values = super(MaintenanceEquipmentCategory, self).get_alias_values() values['alias_defaults'] = {'category_id': self.id} return values class MaintenanceEquipment(models.Model): _name = 
'maintenance.equipment' _inherit = ['mail.thread', 'mail.activity.mixin'] _description = 'Equipment' @api.multi def _track_subtype(self, init_values): self.ensure_one() if 'owner_user_id' in init_values and self.owner_user_id: return 'maintenance.mt_mat_assign' return super(MaintenanceEquipment, self)._track_subtype(init_values) @api.multi def name_get(self): result = [] for record in self: if record.name and record.serial_no: result.append((record.id, record.name + '/' + record.serial_no)) if record.name and not record.serial_no: result.append((record.id, record.name)) return result @api.model def name_search(self, name, args=None, operator='ilike', limit=100): args = args or [] recs = self.browse() if name: recs = self.search([('name', '=', name)] + args, limit=limit) if not recs: recs = self.search([('name', operator, name)] + args, limit=limit) return recs.name_get() name = fields.Char('Equipment Name', required=True, translate=True) active = fields.Boolean(default=True) technician_user_id = fields.Many2one('res.users', string='Technician', track_visibility='onchange', oldname='user_id') owner_user_id = fields.Many2one('res.users', string='Owner', track_visibility='onchange') category_id = fields.Many2one('maintenance.equipment.category', string='Equipment Category', track_visibility='onchange', group_expand='_read_group_category_ids') partner_id = fields.Many2one('res.partner', string='Vendor', domain="[('supplier', '=', 1)]") partner_ref = fields.Char('Vendor Reference') location = fields.Char('Location') model = fields.Char('Model') serial_no = fields.Char('Serial Number', copy=False) assign_date = fields.Date('Assigned Date', track_visibility='onchange') cost = fields.Float('Cost') note = fields.Text('Note') warranty = fields.Date('Warranty') color = fields.Integer('Color Index') scrap_date = fields.Date('Scrap Date') maintenance_ids = fields.One2many('maintenance.request', 'equipment_id') maintenance_count = 
fields.Integer(compute='_compute_maintenance_count', string="Maintenance", store=True) maintenance_open_count = fields.Integer(compute='_compute_maintenance_count', string="Current Maintenance", store=True) period = fields.Integer('Days between each preventive maintenance') next_action_date = fields.Date(compute='_compute_next_maintenance', string='Date of the next preventive maintenance', store=True) maintenance_team_id = fields.Many2one('maintenance.team', string='Maintenance Team') maintenance_duration = fields.Float(help="Maintenance Duration in hours.") @api.depends('period', 'maintenance_ids.request_date', 'maintenance_ids.close_date') def _compute_next_maintenance(self): date_now = fields.Date.context_today(self) for equipment in self.filtered(lambda x: x.period > 0): next_maintenance_todo = self.env['maintenance.request'].search([ ('equipment_id', '=', equipment.id), ('maintenance_type', '=', 'preventive'), ('stage_id.done', '!=', True), ('close_date', '=', False)], order="request_date asc", limit=1) last_maintenance_done = self.env['maintenance.request'].search([ ('equipment_id', '=', equipment.id), ('maintenance_type', '=', 'preventive'), ('stage_id.done', '=', True), ('close_date', '!=', False)], order="close_date desc", limit=1) if next_maintenance_todo and last_maintenance_done: next_date = next_maintenance_todo.request_date date_gap = fields.Date.from_string(next_maintenance_todo.request_date) - fields.Date.from_string(last_maintenance_done.close_date) # If the gap between the last_maintenance_done and the next_maintenance_todo one is bigger than 2 times the period and next request is in the future # We use 2 times the period to avoid creation too closed request from a manually one created if date_gap > timedelta(0) and date_gap > timedelta(days=equipment.period) * 2 and fields.Date.from_string(next_maintenance_todo.request_date) > fields.Date.from_string(date_now): # If the new date still in the past, we set it for today if 
fields.Date.from_string(last_maintenance_done.close_date) + timedelta(days=equipment.period) < fields.Date.from_string(date_now): next_date = date_now else: next_date = fields.Date.to_string(fields.Date.from_string(last_maintenance_done.close_date) + timedelta(days=equipment.period)) elif next_maintenance_todo: next_date = next_maintenance_todo.request_date date_gap = fields.Date.from_string(next_maintenance_todo.request_date) - fields.Date.from_string(date_now) # If next maintenance to do is in the future, and in more than 2 times the period, we insert an new request # We use 2 times the period to avoid creation too closed request from a manually one created if date_gap > timedelta(0) and date_gap > timedelta(days=equipment.period) * 2: next_date = fields.Date.to_string(fields.Date.from_string(date_now)+timedelta(days=equipment.period)) elif last_maintenance_done: next_date = fields.Date.from_string(last_maintenance_done.close_date)+timedelta(days=equipment.period) # If when we add the period to the last maintenance done and we still in past, we plan it for today if next_date < fields.Date.from_string(date_now): next_date = date_now else: next_date = fields.Date.to_string(fields.Date.from_string(date_now) + timedelta(days=equipment.period)) equipment.next_action_date = next_date @api.one @api.depends('maintenance_ids.stage_id.done') def _compute_maintenance_count(self): self.maintenance_count = len(self.maintenance_ids) self.maintenance_open_count = len(self.maintenance_ids.filtered(lambda x: not x.stage_id.done)) @api.onchange('category_id') def _onchange_category_id(self): self.technician_user_id = self.category_id.technician_user_id _sql_constraints = [ ('serial_no', 'unique(serial_no)', "Another asset already exists with this serial number!"), ] @api.model def create(self, vals): equipment = super(MaintenanceEquipment, self).create(vals) if equipment.owner_user_id: equipment.message_subscribe_users(user_ids=[equipment.owner_user_id.id]) return equipment 
@api.multi
def write(self, vals):
    """Write override: subscribe a newly assigned owner as a follower."""
    if vals.get('owner_user_id'):
        self.message_subscribe_users(user_ids=[vals['owner_user_id']])
    return super(MaintenanceEquipment, self).write(vals)

@api.model
def _read_group_category_ids(self, categories, domain, order):
    """ Read group customization in order to display all the categories in the
        kanban view, even if they are empty. """
    # Search as superuser so empty categories are not filtered out by ACLs.
    category_ids = categories._search([], order=order, access_rights_uid=SUPERUSER_ID)
    return categories.browse(category_ids)

def _create_new_request(self, date):
    """Create a preventive maintenance.request for this equipment on `date`."""
    self.ensure_one()
    self.env['maintenance.request'].create({
        'name': _('Preventive Maintenance - %s') % self.name,
        'request_date': date,
        'schedule_date': date,
        'category_id': self.category_id.id,
        'equipment_id': self.id,
        'maintenance_type': 'preventive',
        'owner_user_id': self.owner_user_id.id,
        'technician_user_id': self.technician_user_id.id,
        'maintenance_team_id': self.maintenance_team_id.id,
        # NOTE(review): maintenance_duration is documented in hours, but the
        # request's `duration` field help says "minutes and seconds" — the
        # intended unit should be confirmed (see `duration` below).
        'duration': self.maintenance_duration,
    })

@api.model
def _cron_generate_requests(self):
    """
        Generates maintenance request on the next_action_date or today if none exists
    """
    for equipment in self.search([('period', '>', 0)]):
        # Only create a request if no open preventive request already
        # targets the computed next_action_date.
        next_requests = self.env['maintenance.request'].search([
            ('stage_id.done', '=', False),
            ('equipment_id', '=', equipment.id),
            ('maintenance_type', '=', 'preventive'),
            ('request_date', '=', equipment.next_action_date)])
        if not next_requests:
            equipment._create_new_request(equipment.next_action_date)


class MaintenanceRequest(models.Model):
    _name = 'maintenance.request'
    _inherit = ['mail.thread', 'mail.activity.mixin']
    _description = 'Maintenance Requests'
    _order = "id desc"

    @api.returns('self')
    def _default_stage(self):
        # First stage by default search order (lowest sequence).
        return self.env['maintenance.stage'].search([], limit=1)

    @api.multi
    def _track_subtype(self, init_values):
        """Pick the mail subtype used when tracking stage changes."""
        self.ensure_one()
        if 'stage_id' in init_values and self.stage_id.sequence <= 1:
            return 'maintenance.mt_req_created'
        elif 'stage_id' in init_values and self.stage_id.sequence > 1:
            return 'maintenance.mt_req_status'
        return super(MaintenanceRequest, self)._track_subtype(init_values)

    def _get_default_team_id(self):
        # May return an empty recordset if the XML-ID data is missing.
        return self.env.ref('maintenance.equipment_team_maintenance', raise_if_not_found=False)

    name = fields.Char('Subjects', required=True)
    description = fields.Text('Description')
    request_date = fields.Date('Request Date', track_visibility='onchange', default=fields.Date.context_today,
                               help="Date requested for the maintenance to happen")
    owner_user_id = fields.Many2one('res.users', string='Created by', default=lambda s: s.env.uid)
    category_id = fields.Many2one('maintenance.equipment.category', related='equipment_id.category_id', string='Category', store=True, readonly=True)
    equipment_id = fields.Many2one('maintenance.equipment', string='Equipment', index=True)
    technician_user_id = fields.Many2one('res.users', string='Owner', track_visibility='onchange', oldname='user_id')
    stage_id = fields.Many2one('maintenance.stage', string='Stage', track_visibility='onchange',
                               group_expand='_read_group_stage_ids', default=_default_stage)
    priority = fields.Selection([('0', 'Very Low'), ('1', 'Low'), ('2', 'Normal'), ('3', 'High')], string='Priority')
    color = fields.Integer('Color Index')
    close_date = fields.Date('Close Date', help="Date the maintenance was finished. ")
    kanban_state = fields.Selection([('normal', 'In Progress'), ('blocked', 'Blocked'), ('done', 'Ready for next stage')],
                                    string='Kanban State', required=True, default='normal', track_visibility='onchange')
    # active = fields.Boolean(default=True, help="Set active to false to hide the maintenance request without deleting it.")
    archive = fields.Boolean(default=False, help="Set archive to true to hide the maintenance request without deleting it.")
    maintenance_type = fields.Selection([('corrective', 'Corrective'), ('preventive', 'Preventive')], string='Maintenance Type', default="corrective")
    schedule_date = fields.Datetime('Scheduled Date', help="Date the maintenance team plans the maintenance. It should not differ much from the Request Date. ")
    maintenance_team_id = fields.Many2one('maintenance.team', string='Team', required=True, default=_get_default_team_id)
    # NOTE(review): help text says minutes/seconds but _create_new_request
    # fills it from maintenance_duration, documented in hours — likely
    # inconsistent; confirm intended unit.
    duration = fields.Float(help="Duration in minutes and seconds.")

    @api.multi
    def archive_equipment_request(self):
        """Hide the request without deleting it (see `archive` field)."""
        self.write({'archive': True})

    @api.multi
    def reset_equipment_request(self):
        """ Reinsert the maintenance request into the maintenance pipe in the first stage"""
        first_stage_obj = self.env['maintenance.stage'].search([], order="sequence asc", limit=1)
        # self.write({'active': True, 'stage_id': first_stage_obj.id})
        self.write({'archive': False, 'stage_id': first_stage_obj.id})

    @api.onchange('equipment_id')
    def onchange_equipment_id(self):
        # Propagate technician, category and team from the chosen equipment.
        if self.equipment_id:
            self.technician_user_id = self.equipment_id.technician_user_id if self.equipment_id.technician_user_id else self.equipment_id.category_id.technician_user_id
            self.category_id = self.equipment_id.category_id
            if self.equipment_id.maintenance_team_id:
                self.maintenance_team_id = self.equipment_id.maintenance_team_id.id

    @api.onchange('category_id')
    def onchange_category_id(self):
        # Only overwrite the technician when none is set or the equipment
        # does not impose its own technician.
        if not self.technician_user_id or not self.equipment_id or (self.technician_user_id and not self.equipment_id.technician_user_id):
            self.technician_user_id = self.category_id.technician_user_id

    @api.model
    def create(self, vals):
        """Create a request, add followers and default the team from the equipment."""
        # context: no_log, because subtype already handle this
        self = self.with_context(mail_create_nolog=True)
        request = super(MaintenanceRequest, self).create(vals)
        if request.owner_user_id or request.technician_user_id:
            request._add_followers()
        if request.equipment_id and not request.maintenance_team_id:
            request.maintenance_team_id = request.equipment_id.maintenance_team_id
        return request

    @api.multi
    def write(self, vals):
        # Overridden to reset the kanban_state to normal whenever
        # the stage (stage_id) of the Maintenance Request changes.
        if vals and 'kanban_state' not in vals and 'stage_id' in vals:
            vals['kanban_state'] = 'normal'
        res = super(MaintenanceRequest, self).write(vals)
        if vals.get('owner_user_id') or vals.get('technician_user_id'):
            self._add_followers()
        if self.stage_id.done and 'stage_id' in vals:
            # NOTE(review): recursive self.write to stamp the close date when
            # the request reaches a done stage; the recursion terminates
            # because the inner vals contains only 'close_date'.
            self.write({'close_date': fields.Date.today()})
        return res

    def _add_followers(self):
        """Subscribe owner and technician of each request as followers."""
        for request in self:
            user_ids = (request.owner_user_id + request.technician_user_id).ids
            request.message_subscribe_users(user_ids=user_ids)

    @api.model
    def _read_group_stage_ids(self, stages, domain, order):
        """ Read group customization in order to display all the stages in the
            kanban view, even if they are empty """
        stage_ids = stages._search([], order=order, access_rights_uid=SUPERUSER_ID)
        return stages.browse(stage_ids)


class MaintenanceTeam(models.Model):
    _name = 'maintenance.team'
    _description = 'Maintenance Teams'

    name = fields.Char(required=True)
    member_ids = fields.Many2many('res.users', 'maintenance_team_users_rel', string="Team Members")
    color = fields.Integer("Color Index", default=1)
    request_ids = fields.One2many('maintenance.request', 'maintenance_team_id', copy=False)
    equipment_ids = fields.One2many('maintenance.equipment', 'maintenance_team_id', copy=False)

    # For the dashboard only
    todo_request_ids = fields.One2many('maintenance.request', copy=False, compute='_compute_todo_requests')
    todo_request_count = fields.Integer(compute='_compute_todo_requests')
    todo_request_count_date = fields.Integer(compute='_compute_todo_requests')
    todo_request_count_high_priority = fields.Integer(compute='_compute_todo_requests')
    todo_request_count_block = fields.Integer(compute='_compute_todo_requests')
    todo_request_count_unscheduled = fields.Integer(compute='_compute_todo_requests')

    @api.one
    @api.depends('request_ids.stage_id.done')
    def _compute_todo_requests(self):
        """Compute the dashboard counters over the team's open requests."""
        self.todo_request_ids = self.request_ids.filtered(lambda e: e.stage_id.done==False)
        self.todo_request_count = len(self.todo_request_ids)
        self.todo_request_count_date = len(self.todo_request_ids.filtered(lambda e: e.schedule_date != False))
        self.todo_request_count_high_priority = len(self.todo_request_ids.filtered(lambda e: e.priority == '3'))
        self.todo_request_count_block = len(self.todo_request_ids.filtered(lambda e: e.kanban_state == 'blocked'))
        self.todo_request_count_unscheduled = len(self.todo_request_ids.filtered(lambda e: not e.schedule_date))

    @api.one
    @api.depends('equipment_ids')
    def _compute_equipment(self):
        # Count of equipments assigned to the team.
        self.equipment_count = len(self.equipment_ids)
# -*- coding: utf-8 -*-
# pylint: disable=protected-access
"""Test for Video Xmodule functional logic.
These test data read from xml, not from mongo.

We have a ModuleStoreTestCase class defined in
common/lib/xmodule/xmodule/modulestore/tests/django_utils.py. You can
search for usages of this in the cms and lms tests for examples. You use
this so that it will do things like point the modulestore setting to mongo,
flush the contentstore before and after, load the templates, etc.
You can then use the CourseFactory and XModuleItemFactory as defined
in common/lib/xmodule/xmodule/modulestore/tests/factories.py to create
the course, section, subsection, unit, etc.
"""
import unittest
import datetime
from uuid import uuid4

from lxml import etree
from mock import ANY, Mock, patch
import ddt

from django.conf import settings

from opaque_keys.edx.locations import SlashSeparatedCourseKey
from opaque_keys.edx.keys import CourseKey
from xblock.field_data import DictFieldData
from xblock.fields import ScopeIds

from xmodule.tests import get_test_descriptor_system
from xmodule.video_module import VideoDescriptor, create_youtube_string
from xmodule.video_module.transcripts_utils import download_youtube_subs, save_to_store
from . import LogicTest
from .test_import import DummySystem

# Sample German SRT transcript used by transcript-related tests.
SRT_FILEDATA = '''
0
00:00:00,270 --> 00:00:02,720
sprechen sie deutsch?

1
00:00:02,720 --> 00:00:05,430
Ja, ich spreche Deutsch
'''

# Sample Croatian SRT transcript used by transcript-related tests.
CRO_SRT_FILEDATA = '''
0
00:00:00,270 --> 00:00:02,720
Dobar dan!

1
00:00:02,720 --> 00:00:05,430
Kako ste danas?
'''

# Full plain-text transcript of the demo YouTube video.
YOUTUBE_SUBTITLES = (
    "LILA FISHER: Hi, welcome to Edx. I'm Lila Fisher, an Edx fellow helping to put together these"
    " courses. As you know, our courses are entirely online. So before we start learning about the"
    " subjects that brought you here, let's learn about the tools that you will use to navigate through"
    " the course material. Let's start with what is on your screen right now. You are watching a video"
    " of me talking. You have several tools associated with these videos. Some of them are standard"
    " video buttons, like the play Pause Button on the bottom left. Like most video players, you can see"
    " how far you are into this particular video segment and how long the entire video segment is."
    " Something that you might not be used to is the speed option. While you are going through the"
    " videos, you can speed up or slow down the video player with these buttons. Go ahead and try that"
    " now. Make me talk faster and slower. If you ever get frustrated by the pace of speech, you can"
    " adjust it this way. Another great feature is the transcript on the side. This will follow along"
    " with everything that I am saying as I am saying it, so you can read along if you like. You can"
    " also click on any of the words, and you will notice that the video jumps to that word. The video"
    " slider at the bottom of the video will let you navigate through the video quickly. If you ever"
    " find the transcript distracting, you can toggle the captioning button in order to make it go away"
    " or reappear. Now that you know about the video player, I want to point out the sequence navigator."
    " Right now you're in a lecture sequence, which interweaves many videos and practice exercises. You"
    " can see how far you are in a particular sequence by observing which tab you're on. You can"
    " navigate directly to any video or exercise by clicking on the appropriate tab. You can also"
    " progress to the next element by pressing the Arrow button, or by clicking on the next tab. Try"
    " that now. The tutorial will continue in the next video."
)


def instantiate_descriptor(**field_data):
    """
    Instantiate descriptor with most properties.
    """
    system = get_test_descriptor_system()
    course_key = SlashSeparatedCourseKey('org', 'course', 'run')
    usage_key = course_key.make_usage_key('video', 'SampleProblem')
    return system.construct_xblock_from_class(
        VideoDescriptor,
        scope_ids=ScopeIds(None, None, usage_key, usage_key),
        field_data=DictFieldData(field_data),
    )


# Because of the way xmodule.video_module.video_module imports edxval.api, we
# must mock the entire module, which requires making mock exception classes.

class _MockValVideoNotFoundError(Exception):
    """Mock ValVideoNotFoundError exception"""
    pass


class _MockValCannotCreateError(Exception):
    """Mock ValCannotCreateError exception"""
    pass


class VideoModuleTest(LogicTest):
    """Logic tests for Video Xmodule."""
    descriptor_class = VideoDescriptor

    raw_field_data = {
        'data': '<video />'
    }

    def test_parse_youtube(self):
        """Test parsing old-style Youtube ID strings into a dict."""
        youtube_str = '0.75:jNCf2gIqpeE,1.00:ZwkTiUPN0mg,1.25:rsq9auxASqI,1.50:kMyNdzVHHgg'
        output = VideoDescriptor._parse_youtube(youtube_str)
        self.assertEqual(output, {'0.75': 'jNCf2gIqpeE',
                                  '1.00': 'ZwkTiUPN0mg',
                                  '1.25': 'rsq9auxASqI',
                                  '1.50': 'kMyNdzVHHgg'})

    def test_parse_youtube_one_video(self):
        """
        Ensure that all keys are present and missing speeds map to the
        empty string.
        """
        youtube_str = '0.75:jNCf2gIqpeE'
        output = VideoDescriptor._parse_youtube(youtube_str)
        self.assertEqual(output, {'0.75': 'jNCf2gIqpeE',
                                  '1.00': '',
                                  '1.25': '',
                                  '1.50': ''})

    def test_parse_youtube_invalid(self):
        """Ensure that ids that are invalid return an empty dict"""
        # invalid id
        youtube_str = 'thisisaninvalidid'
        output = VideoDescriptor._parse_youtube(youtube_str)
        self.assertEqual(output, {'0.75': '',
                                  '1.00': '',
                                  '1.25': '',
                                  '1.50': ''})
        # another invalid id
        youtube_str = ',::,:,,'
        output = VideoDescriptor._parse_youtube(youtube_str)
        self.assertEqual(output, {'0.75': '',
                                  '1.00': '',
                                  '1.25': '',
                                  '1.50': ''})
        # and another one, partially invalid
        youtube_str = '0.75_BAD!!!,1.0:AXdE34_U,1.25:KLHF9K_Y,1.5:VO3SxfeD,'
        output = VideoDescriptor._parse_youtube(youtube_str)
        self.assertEqual(output, {'0.75': '',
                                  '1.00': 'AXdE34_U',
                                  '1.25': 'KLHF9K_Y',
                                  '1.50': 'VO3SxfeD'})

    def test_parse_youtube_key_format(self):
        """
        Make sure that inconsistent speed keys are parsed correctly.
        """
        youtube_str = '1.00:p2Q6BrNhdh8'
        youtube_str_hack = '1.0:p2Q6BrNhdh8'
        self.assertEqual(
            VideoDescriptor._parse_youtube(youtube_str),
            VideoDescriptor._parse_youtube(youtube_str_hack)
        )

    def test_parse_youtube_empty(self):
        """
        Some courses have empty youtube attributes, so we should handle
        that well.
        """
        self.assertEqual(
            VideoDescriptor._parse_youtube(''),
            {'0.75': '',
             '1.00': '',
             '1.25': '',
             '1.50': ''}
        )


class VideoDescriptorTestBase(unittest.TestCase):
    """
    Base class for tests for VideoDescriptor
    """

    def setUp(self):
        """Create a fresh VideoDescriptor for each test."""
        super(VideoDescriptorTestBase, self).setUp()
        self.descriptor = instantiate_descriptor()

    def assertXmlEqual(self, expected, xml):
        """
        Assert that the given XML fragments have the same attributes, text, and
        (recursively) children
        """
        def get_child_tags(elem):
            """Extract the list of tag names for children of elem"""
            return [child.tag for child in elem]

        for attr in ['tag', 'attrib', 'text', 'tail']:
            self.assertEqual(getattr(expected, attr), getattr(xml, attr))
        self.assertEqual(get_child_tags(expected), get_child_tags(xml))
        for left, right in zip(expected, xml):
            self.assertXmlEqual(left, right)


class TestCreateYoutubeString(VideoDescriptorTestBase):
    """
    Checks that create_youtube_string correcty extracts information from Video descriptor.
    """

    def test_create_youtube_string(self):
        """
        Test that Youtube ID strings are correctly created when writing back out to XML.
        """
        self.descriptor.youtube_id_0_75 = 'izygArpw-Qo'
        self.descriptor.youtube_id_1_0 = 'p2Q6BrNhdh8'
        self.descriptor.youtube_id_1_25 = '1EeWXzPdhSA'
        self.descriptor.youtube_id_1_5 = 'rABDYkeK0x8'
        expected = "0.75:izygArpw-Qo,1.00:p2Q6BrNhdh8,1.25:1EeWXzPdhSA,1.50:rABDYkeK0x8"
        self.assertEqual(create_youtube_string(self.descriptor), expected)

    def test_create_youtube_string_missing(self):
        """
        Test that Youtube IDs which aren't explicitly set aren't included in the output string.
        """
        self.descriptor.youtube_id_0_75 = 'izygArpw-Qo'
        self.descriptor.youtube_id_1_0 = 'p2Q6BrNhdh8'
        self.descriptor.youtube_id_1_25 = '1EeWXzPdhSA'
        expected = "0.75:izygArpw-Qo,1.00:p2Q6BrNhdh8,1.25:1EeWXzPdhSA"
        self.assertEqual(create_youtube_string(self.descriptor), expected)


@ddt.ddt
class VideoDescriptorImportTestCase(unittest.TestCase):
    """
    Make sure that VideoDescriptor can import an old XML-based video correctly.
    """

    def assert_attributes_equal(self, video, attrs):
        """
        Assert that `video` has the correct attributes. `attrs` is a map of {metadata_field: value}.
        """
        for key, value in attrs.items():
            self.assertEquals(getattr(video, key), value)

    def test_constructor(self):
        """Field values are parsed correctly when built via the constructor."""
        sample_xml = '''
            <video display_name="Test Video"
                   youtube="1.0:p2Q6BrNhdh8,0.75:izygArpw-Qo,1.25:1EeWXzPdhSA,1.5:rABDYkeK0x8"
                   show_captions="false"
                   download_track="true"
                   download_video="true"
                   start_time="00:00:01"
                   end_time="00:01:00">
              <source src="http://www.example.com/source.mp4"/>
              <source src="http://www.example.com/source.ogg"/>
              <track src="http://www.example.com/track"/>
              <handout src="http://www.example.com/handout"/>
              <transcript language="ua" src="ukrainian_translation.srt" />
              <transcript language="ge" src="german_translation.srt" />
            </video>
        '''
        descriptor = instantiate_descriptor(data=sample_xml)
        self.assert_attributes_equal(descriptor, {
            'youtube_id_0_75': 'izygArpw-Qo',
            'youtube_id_1_0': 'p2Q6BrNhdh8',
            'youtube_id_1_25': '1EeWXzPdhSA',
            'youtube_id_1_5': 'rABDYkeK0x8',
            'download_video': True,
            'show_captions': False,
            'start_time': datetime.timedelta(seconds=1),
            'end_time': datetime.timedelta(seconds=60),
            'track': 'http://www.example.com/track',
            'handout': 'http://www.example.com/handout',
            'download_track': True,
            'html5_sources': ['http://www.example.com/source.mp4', 'http://www.example.com/source.ogg'],
            'data': '',
            'transcripts': {'ua': 'ukrainian_translation.srt', 'ge': 'german_translation.srt'}
        })

    def test_from_xml(self):
        """All attributes round-trip through VideoDescriptor.from_xml."""
        module_system = DummySystem(load_error_modules=True)
        xml_data = '''
            <video display_name="Test Video"
                   youtube="1.0:p2Q6BrNhdh8,0.75:izygArpw-Qo,1.25:1EeWXzPdhSA,1.5:rABDYkeK0x8"
                   show_captions="false"
                   download_track="false"
                   start_time="00:00:01"
                   download_video="false"
                   end_time="00:01:00">
              <source src="http://www.example.com/source.mp4"/>
              <track src="http://www.example.com/track"/>
              <handout src="http://www.example.com/handout"/>
              <transcript language="uk" src="ukrainian_translation.srt" />
              <transcript language="de" src="german_translation.srt" />
            </video>
        '''
        output = VideoDescriptor.from_xml(xml_data, module_system, Mock())
        self.assert_attributes_equal(output, {
            'youtube_id_0_75': 'izygArpw-Qo',
            'youtube_id_1_0': 'p2Q6BrNhdh8',
            'youtube_id_1_25': '1EeWXzPdhSA',
            'youtube_id_1_5': 'rABDYkeK0x8',
            'show_captions': False,
            'start_time': datetime.timedelta(seconds=1),
            'end_time': datetime.timedelta(seconds=60),
            'track': 'http://www.example.com/track',
            'handout': 'http://www.example.com/handout',
            'download_track': False,
            'download_video': False,
            'html5_sources': ['http://www.example.com/source.mp4'],
            'data': '',
            'transcripts': {'uk': 'ukrainian_translation.srt', 'de': 'german_translation.srt'},
        })

    @ddt.data(
        ('course-v1:test_org+test_course+test_run',
         '/asset-v1:test_org+test_course+test_run+type@asset+block@test.png'),
        ('test_org/test_course/test_run', '/c4x/test_org/test_course/asset/test.png')
    )
    @ddt.unpack
    def test_from_xml_when_handout_is_course_asset(self, course_id_string, expected_handout_link):
        """
        Test that if handout link is course_asset then it will contain targeted course_id in handout link.
        """
        module_system = DummySystem(load_error_modules=True)
        course_id = CourseKey.from_string(course_id_string)
        xml_data = '''
            <video display_name="Test Video"
                   youtube="1.0:p2Q6BrNhdh8,0.75:izygArpw-Qo,1.25:1EeWXzPdhSA,1.5:rABDYkeK0x8"
                   show_captions="false"
                   download_track="false"
                   start_time="00:00:01"
                   download_video="false"
                   end_time="00:01:00">
              <source src="http://www.example.com/source.mp4"/>
              <track src="http://www.example.com/track"/>
              <handout src="/asset-v1:test_org_1+test_course_1+test_run_1+type@asset+block@test.png"/>
              <transcript language="uk" src="ukrainian_translation.srt" />
              <transcript language="de" src="german_translation.srt" />
            </video>
        '''
        id_generator = Mock()
        id_generator.target_course_id = course_id
        output = VideoDescriptor.from_xml(xml_data, module_system, id_generator)
        self.assert_attributes_equal(output, {
            'youtube_id_0_75': 'izygArpw-Qo',
            'youtube_id_1_0': 'p2Q6BrNhdh8',
            'youtube_id_1_25': '1EeWXzPdhSA',
            'youtube_id_1_5': 'rABDYkeK0x8',
            'show_captions': False,
            'start_time': datetime.timedelta(seconds=1),
            'end_time': datetime.timedelta(seconds=60),
            'track': 'http://www.example.com/track',
            'handout': expected_handout_link,
            'download_track': False,
            'download_video': False,
            'html5_sources': ['http://www.example.com/source.mp4'],
            'data': '',
            'transcripts': {'uk': 'ukrainian_translation.srt', 'de': 'german_translation.srt'},
        })

    def test_from_xml_missing_attributes(self):
        """
        Ensure that attributes have the right values if they aren't
        explicitly set in XML.
        """
        module_system = DummySystem(load_error_modules=True)
        xml_data = '''
            <video display_name="Test Video"
                   youtube="1.0:p2Q6BrNhdh8,1.25:1EeWXzPdhSA"
                   show_captions="true">
              <source src="http://www.example.com/source.mp4"/>
            </video>
        '''
        output = VideoDescriptor.from_xml(xml_data, module_system, Mock())
        self.assert_attributes_equal(output, {
            'youtube_id_0_75': '',
            'youtube_id_1_0': 'p2Q6BrNhdh8',
            'youtube_id_1_25': '1EeWXzPdhSA',
            'youtube_id_1_5': '',
            'show_captions': True,
            'start_time': datetime.timedelta(seconds=0.0),
            'end_time': datetime.timedelta(seconds=0.0),
            'track': '',
            'handout': None,
            'download_track': False,
            'download_video': True,
            'html5_sources': ['http://www.example.com/source.mp4'],
            'data': ''
        })

    def test_from_xml_missing_download_track(self):
        """
        Ensure that attributes have the right values if they aren't
        explicitly set in XML.
        """
        module_system = DummySystem(load_error_modules=True)
        xml_data = '''
            <video display_name="Test Video"
                   youtube="1.0:p2Q6BrNhdh8,1.25:1EeWXzPdhSA"
                   show_captions="true">
              <source src="http://www.example.com/source.mp4"/>
              <track src="http://www.example.com/track"/>
            </video>
        '''
        output = VideoDescriptor.from_xml(xml_data, module_system, Mock())
        self.assert_attributes_equal(output, {
            'youtube_id_0_75': '',
            'youtube_id_1_0': 'p2Q6BrNhdh8',
            'youtube_id_1_25': '1EeWXzPdhSA',
            'youtube_id_1_5': '',
            'show_captions': True,
            'start_time': datetime.timedelta(seconds=0.0),
            'end_time': datetime.timedelta(seconds=0.0),
            'track': 'http://www.example.com/track',
            'download_track': True,
            'download_video': True,
            'html5_sources': ['http://www.example.com/source.mp4'],
            'data': '',
            'transcripts': {},
        })

    def test_from_xml_no_attributes(self):
        """
        Make sure settings are correct if none are explicitly set in XML.
        """
        module_system = DummySystem(load_error_modules=True)
        xml_data = '<video></video>'
        output = VideoDescriptor.from_xml(xml_data, module_system, Mock())
        self.assert_attributes_equal(output, {
            'youtube_id_0_75': '',
            'youtube_id_1_0': '3_yD_cEKoCk',
            'youtube_id_1_25': '',
            'youtube_id_1_5': '',
            'show_captions': True,
            'start_time': datetime.timedelta(seconds=0.0),
            'end_time': datetime.timedelta(seconds=0.0),
            'track': '',
            'handout': None,
            'download_track': False,
            'download_video': False,
            'html5_sources': [],
            'data': '',
            'transcripts': {},
        })

    def test_from_xml_double_quotes(self):
        """
        Make sure we can handle the double-quoted string format (which was used for exporting for
        a few weeks).
        """
        module_system = DummySystem(load_error_modules=True)
        xml_data = '''
            <video display_name="&quot;display_name&quot;"
                html5_sources="[&quot;source_1&quot;, &quot;source_2&quot;]"
                show_captions="false"
                download_video="true"
                sub="&quot;html5_subtitles&quot;"
                track="&quot;http://www.example.com/track&quot;"
                handout="&quot;http://www.example.com/handout&quot;"
                download_track="true"
                youtube_id_0_75="&quot;OEoXaMPEzf65&quot;"
                youtube_id_1_25="&quot;OEoXaMPEzf125&quot;"
                youtube_id_1_5="&quot;OEoXaMPEzf15&quot;"
                youtube_id_1_0="&quot;OEoXaMPEzf10&quot;"
                />
        '''
        output = VideoDescriptor.from_xml(xml_data, module_system, Mock())
        self.assert_attributes_equal(output, {
            'youtube_id_0_75': 'OEoXaMPEzf65',
            'youtube_id_1_0': 'OEoXaMPEzf10',
            'youtube_id_1_25': 'OEoXaMPEzf125',
            'youtube_id_1_5': 'OEoXaMPEzf15',
            'show_captions': False,
            'start_time': datetime.timedelta(seconds=0.0),
            'end_time': datetime.timedelta(seconds=0.0),
            'track': 'http://www.example.com/track',
            'handout': 'http://www.example.com/handout',
            'download_track': True,
            'download_video': True,
            'html5_sources': ["source_1", "source_2"],
            'data': ''
        })

    def test_from_xml_double_quote_concatenated_youtube(self):
        """Double-quoted IDs inside a concatenated `youtube` attribute are unwrapped."""
        module_system = DummySystem(load_error_modules=True)
        xml_data = '''
            <video display_name="Test Video"
                   youtube="1.0:&quot;p2Q6BrNhdh8&quot;,1.25:&quot;1EeWXzPdhSA&quot;">
            </video>
        '''
        output = VideoDescriptor.from_xml(xml_data, module_system, Mock())
        self.assert_attributes_equal(output, {
            'youtube_id_0_75': '',
            'youtube_id_1_0': 'p2Q6BrNhdh8',
            'youtube_id_1_25': '1EeWXzPdhSA',
            'youtube_id_1_5': '',
            'show_captions': True,
            'start_time': datetime.timedelta(seconds=0.0),
            'end_time': datetime.timedelta(seconds=0.0),
            'track': '',
            'handout': None,
            'download_track': False,
            'download_video': False,
            'html5_sources': [],
            'data': ''
        })

    def test_old_video_format(self):
        """
        Test backwards compatibility with VideoModule's XML format.
        """
        module_system = DummySystem(load_error_modules=True)
        xml_data = """
            <video display_name="Test Video"
                   youtube="1.0:p2Q6BrNhdh8,0.75:izygArpw-Qo,1.25:1EeWXzPdhSA,1.5:rABDYkeK0x8"
                   show_captions="false"
                   source="http://www.example.com/source.mp4"
                   from="00:00:01"
                   to="00:01:00">
              <source src="http://www.example.com/source.mp4"/>
              <track src="http://www.example.com/track"/>
            </video>
        """
        output = VideoDescriptor.from_xml(xml_data, module_system, Mock())
        self.assert_attributes_equal(output, {
            'youtube_id_0_75': 'izygArpw-Qo',
            'youtube_id_1_0': 'p2Q6BrNhdh8',
            'youtube_id_1_25': '1EeWXzPdhSA',
            'youtube_id_1_5': 'rABDYkeK0x8',
            'show_captions': False,
            'start_time': datetime.timedelta(seconds=1),
            'end_time': datetime.timedelta(seconds=60),
            'track': 'http://www.example.com/track',
            # 'download_track': True,
            'html5_sources': ['http://www.example.com/source.mp4'],
            'data': '',
        })

    def test_old_video_data(self):
        """
        Ensure that Video is able to read VideoModule's model data.
        """
        module_system = DummySystem(load_error_modules=True)
        xml_data = """
            <video display_name="Test Video"
                   youtube="1.0:p2Q6BrNhdh8,0.75:izygArpw-Qo,1.25:1EeWXzPdhSA,1.5:rABDYkeK0x8"
                   show_captions="false"
                   from="00:00:01"
                   to="00:01:00">
              <source src="http://www.example.com/source.mp4"/>
              <track src="http://www.example.com/track"/>
            </video>
        """
        video = VideoDescriptor.from_xml(xml_data, module_system, Mock())
        self.assert_attributes_equal(video, {
            'youtube_id_0_75': 'izygArpw-Qo',
            'youtube_id_1_0': 'p2Q6BrNhdh8',
            'youtube_id_1_25': '1EeWXzPdhSA',
            'youtube_id_1_5': 'rABDYkeK0x8',
            'show_captions': False,
            'start_time': datetime.timedelta(seconds=1),
            'end_time': datetime.timedelta(seconds=60),
            'track': 'http://www.example.com/track',
            # 'download_track': True,
            'html5_sources': ['http://www.example.com/source.mp4'],
            'data': ''
        })

    def test_import_with_float_times(self):
        """
        Ensure that Video is able to read VideoModule's model data.
        """
        module_system = DummySystem(load_error_modules=True)
        xml_data = """
            <video display_name="Test Video"
                   youtube="1.0:p2Q6BrNhdh8,0.75:izygArpw-Qo,1.25:1EeWXzPdhSA,1.5:rABDYkeK0x8"
                   show_captions="false"
                   from="1.0"
                   to="60.0">
              <source src="http://www.example.com/source.mp4"/>
              <track src="http://www.example.com/track"/>
            </video>
        """
        video = VideoDescriptor.from_xml(xml_data, module_system, Mock())
        self.assert_attributes_equal(video, {
            'youtube_id_0_75': 'izygArpw-Qo',
            'youtube_id_1_0': 'p2Q6BrNhdh8',
            'youtube_id_1_25': '1EeWXzPdhSA',
            'youtube_id_1_5': 'rABDYkeK0x8',
            'show_captions': False,
            'start_time': datetime.timedelta(seconds=1),
            'end_time': datetime.timedelta(seconds=60),
            'track': 'http://www.example.com/track',
            # 'download_track': True,
            'html5_sources': ['http://www.example.com/source.mp4'],
            'data': ''
        })

    @patch('xmodule.video_module.video_module.edxval_api')
    def test_import_val_data(self, mock_val_api):
        """Importing a video with an edx_video_id forwards its VAL XML to edxval."""
        def mock_val_import(xml, edx_video_id, course_id):
            """Mock edxval.api.import_from_xml"""
            self.assertEqual(xml.tag, 'video_asset')
            self.assertEqual(dict(xml.items()), {'mock_attr': ''})
            self.assertEqual(edx_video_id, 'test_edx_video_id')
            self.assertEqual(course_id, 'test_course_id')

        mock_val_api.import_from_xml = Mock(wraps=mock_val_import)
        module_system = DummySystem(load_error_modules=True)

        # import new edx_video_id
        xml_data = """
            <video edx_video_id="test_edx_video_id">
                <video_asset mock_attr=""/>
            </video>
        """
        id_generator = Mock()
        id_generator.target_course_id = 'test_course_id'
        video = VideoDescriptor.from_xml(xml_data, module_system, id_generator)

        self.assert_attributes_equal(video, {'edx_video_id': 'test_edx_video_id'})
        mock_val_api.import_from_xml.assert_called_once_with(ANY, 'test_edx_video_id', course_id='test_course_id')

    @patch('xmodule.video_module.video_module.edxval_api')
    def test_import_val_data_invalid(self, mock_val_api):
        """Invalid VAL data (negative duration) propagates ValCannotCreateError."""
        mock_val_api.ValCannotCreateError = _MockValCannotCreateError
        mock_val_api.import_from_xml = Mock(side_effect=mock_val_api.ValCannotCreateError)
        module_system = DummySystem(load_error_modules=True)

        # Negative duration is invalid
        xml_data = """
            <video edx_video_id="test_edx_video_id">
                <video_asset client_video_id="test_client_video_id" duration="-1"/>
            </video>
        """
        with self.assertRaises(mock_val_api.ValCannotCreateError):
            VideoDescriptor.from_xml(xml_data, module_system, id_generator=Mock())


class VideoExportTestCase(VideoDescriptorTestBase):
    """
    Make sure that VideoDescriptor can export itself to XML correctly.
    """

    @patch('xmodule.video_module.video_module.edxval_api')
    def test_export_to_xml(self, mock_val_api):
        """
        Test that we write the correct XML on export.
        """
        def mock_val_export(edx_video_id):
            """Mock edxval.api.export_to_xml"""
            return etree.Element(
                'video_asset',
                attrib={'export_edx_video_id': edx_video_id}
            )

        mock_val_api.export_to_xml = mock_val_export
        self.descriptor.youtube_id_0_75 = 'izygArpw-Qo'
        self.descriptor.youtube_id_1_0 = 'p2Q6BrNhdh8'
        self.descriptor.youtube_id_1_25 = '1EeWXzPdhSA'
        self.descriptor.youtube_id_1_5 = 'rABDYkeK0x8'
        self.descriptor.show_captions = False
        self.descriptor.start_time = datetime.timedelta(seconds=1.0)
        self.descriptor.end_time = datetime.timedelta(seconds=60)
        self.descriptor.track = 'http://www.example.com/track'
        self.descriptor.handout = 'http://www.example.com/handout'
        self.descriptor.download_track = True
        self.descriptor.html5_sources = ['http://www.example.com/source.mp4', 'http://www.example.com/source.ogg']
        self.descriptor.download_video = True
        self.descriptor.transcripts = {'ua': 'ukrainian_translation.srt', 'ge': 'german_translation.srt'}
        self.descriptor.edx_video_id = 'test_edx_video_id'

        xml = self.descriptor.definition_to_xml(None)  # We don't use the `resource_fs` parameter
        parser = etree.XMLParser(remove_blank_text=True)
        xml_string = '''\
         <video url_name="SampleProblem" start_time="0:00:01" youtube="0.75:izygArpw-Qo,1.00:p2Q6BrNhdh8,1.25:1EeWXzPdhSA,1.50:rABDYkeK0x8" show_captions="false" end_time="0:01:00" download_video="true" download_track="true">
           <source src="http://www.example.com/source.mp4"/>
           <source src="http://www.example.com/source.ogg"/>
           <track src="http://www.example.com/track"/>
           <handout src="http://www.example.com/handout"/>
           <transcript language="ge" src="german_translation.srt" />
           <transcript language="ua" src="ukrainian_translation.srt" />
           <video_asset export_edx_video_id="test_edx_video_id"/>
         </video>
        '''
        expected = etree.XML(xml_string, parser=parser)
        self.assertXmlEqual(expected, xml)

    @patch('xmodule.video_module.video_module.edxval_api')
    def test_export_to_xml_val_error(self, mock_val_api):
        """Export should succeed without VAL data if video does not exist."""
        # Export should succeed without VAL data if video does not exist
        mock_val_api.ValVideoNotFoundError = _MockValVideoNotFoundError
        mock_val_api.export_to_xml = Mock(side_effect=mock_val_api.ValVideoNotFoundError)
        self.descriptor.edx_video_id = 'test_edx_video_id'

        xml = self.descriptor.definition_to_xml(None)
        parser = etree.XMLParser(remove_blank_text=True)
        xml_string = '<video url_name="SampleProblem" download_video="false"/>'
        expected = etree.XML(xml_string, parser=parser)
        self.assertXmlEqual(expected, xml)

    def test_export_to_xml_empty_end_time(self):
        """
        Test that we write the correct XML on export.
        """
        self.descriptor.youtube_id_0_75 = 'izygArpw-Qo'
        self.descriptor.youtube_id_1_0 = 'p2Q6BrNhdh8'
        self.descriptor.youtube_id_1_25 = '1EeWXzPdhSA'
        self.descriptor.youtube_id_1_5 = 'rABDYkeK0x8'
        self.descriptor.show_captions = False
        self.descriptor.start_time = datetime.timedelta(seconds=5.0)
        self.descriptor.end_time = datetime.timedelta(seconds=0.0)
        self.descriptor.track = 'http://www.example.com/track'
        self.descriptor.download_track = True
        self.descriptor.html5_sources = ['http://www.example.com/source.mp4', 'http://www.example.com/source.ogg']
        self.descriptor.download_video = True

        xml = self.descriptor.definition_to_xml(None)  # We don't use the `resource_fs` parameter
        parser = etree.XMLParser(remove_blank_text=True)
        xml_string = '''\
         <video url_name="SampleProblem" start_time="0:00:05" youtube="0.75:izygArpw-Qo,1.00:p2Q6BrNhdh8,1.25:1EeWXzPdhSA,1.50:rABDYkeK0x8" show_captions="false" download_video="true" download_track="true">
           <source src="http://www.example.com/source.mp4"/>
           <source src="http://www.example.com/source.ogg"/>
           <track src="http://www.example.com/track"/>
         </video>
        '''
        expected = etree.XML(xml_string, parser=parser)
        self.assertXmlEqual(expected, xml)

    def test_export_to_xml_empty_parameters(self):
        """
        Test XML export with defaults.
        """
        xml = self.descriptor.definition_to_xml(None)
        # Check that download_video field is also set to default (False) in xml for backward compatibility
        expected = '<video url_name="SampleProblem" download_video="false"/>\n'
        self.assertEquals(expected, etree.tostring(xml, pretty_print=True))

    def test_export_to_xml_with_transcripts_as_none(self):
        """
        Test XML export with transcripts being overridden to None.
        """
        self.descriptor.transcripts = None
        xml = self.descriptor.definition_to_xml(None)
        expected = '<video url_name="SampleProblem" download_video="false"/>\n'
        self.assertEquals(expected, etree.tostring(xml, pretty_print=True))

    def test_export_to_xml_invalid_characters_in_attributes(self):
        """
        Test XML export will raise ValueError by lxml library if contains illegal characters.
        """
        # Docstring corrected from "TypeError": the assertion below expects
        # ValueError, which is what the test actually verifies.
        self.descriptor.display_name = '\x1e'
        with self.assertRaises(ValueError):
            self.descriptor.definition_to_xml(None)

    def test_export_to_xml_unicode_characters(self):
        """
        Test XML export handles the unicode characters.
        """
        self.descriptor.display_name = '这是文'
        xml = self.descriptor.definition_to_xml(None)
        self.assertEqual(xml.get('display_name'), u'\u8fd9\u662f\u6587')


class VideoDescriptorIndexingTestCase(unittest.TestCase):
    """
    Make sure that VideoDescriptor can format data for indexing as expected.
    """

    def setUp(self):
        """
        Overrides YOUTUBE and CONTENTSTORE settings
        """
        super(VideoDescriptorIndexingTestCase, self).setUp()
        self.youtube_setting = getattr(settings, "YOUTUBE", None)
        self.contentstore_setting = getattr(settings, "CONTENTSTORE", None)
        settings.YOUTUBE = {
            # YouTube JavaScript API
            'API': 'www.youtube.com/iframe_api',

            # URL to get YouTube metadata
            'METADATA_URL': 'www.googleapis.com/youtube/v3/videos/',

            # Current youtube api for requesting transcripts.
            # For example: http://video.google.com/timedtext?lang=en&v=j_jEn79vS3g.
'TEXT_API': { 'url': 'video.google.com/timedtext', 'params': { 'lang': 'en', 'v': 'set_youtube_id_of_11_symbols_here', }, }, } settings.CONTENTSTORE = { 'ENGINE': 'xmodule.contentstore.mongo.MongoContentStore', 'DOC_STORE_CONFIG': { 'host': 'localhost', 'db': 'test_xcontent_%s' % uuid4().hex, }, # allow for additional options that can be keyed on a name, e.g. 'trashcan' 'ADDITIONAL_OPTIONS': { 'trashcan': { 'bucket': 'trash_fs' } } } self.addCleanup(self.cleanup) def cleanup(self): """ Returns YOUTUBE and CONTENTSTORE settings to a default value """ if self.youtube_setting: settings.YOUTUBE = self.youtube_setting self.youtube_setting = None else: del settings.YOUTUBE if self.contentstore_setting: settings.CONTENTSTORE = self.contentstore_setting self.contentstore_setting = None else: del settings.CONTENTSTORE def test_video_with_no_subs_index_dictionary(self): """ Test index dictionary of a video module without subtitles. """ xml_data = ''' <video display_name="Test Video" youtube="1.0:p2Q6BrNhdh8,0.75:izygArpw-Qo,1.25:1EeWXzPdhSA,1.5:rABDYkeK0x8" show_captions="false" download_track="false" start_time="00:00:01" download_video="false" end_time="00:01:00"> <source src="http://www.example.com/source.mp4"/> <track src="http://www.example.com/track"/> <handout src="http://www.example.com/handout"/> </video> ''' descriptor = instantiate_descriptor(data=xml_data) self.assertEqual(descriptor.index_dictionary(), { "content": {"display_name": "Test Video"}, "content_type": "Video" }) def test_video_with_youtube_subs_index_dictionary(self): """ Test index dictionary of a video module with YouTube subtitles. 
""" xml_data_sub = ''' <video display_name="Test Video" youtube="1.0:p2Q6BrNhdh8,0.75:izygArpw-Qo,1.25:1EeWXzPdhSA,1.5:rABDYkeK0x8" show_captions="false" download_track="false" sub="OEoXaMPEzfM" start_time="00:00:01" download_video="false" end_time="00:01:00"> <source src="http://www.example.com/source.mp4"/> <track src="http://www.example.com/track"/> <handout src="http://www.example.com/handout"/> </video> ''' descriptor = instantiate_descriptor(data=xml_data_sub) download_youtube_subs('OEoXaMPEzfM', descriptor, settings) self.assertEqual(descriptor.index_dictionary(), { "content": { "display_name": "Test Video", "transcript_en": YOUTUBE_SUBTITLES }, "content_type": "Video" }) def test_video_with_subs_and_transcript_index_dictionary(self): """ Test index dictionary of a video module with YouTube subtitles and German transcript uploaded by a user. """ xml_data_sub_transcript = ''' <video display_name="Test Video" youtube="1.0:p2Q6BrNhdh8,0.75:izygArpw-Qo,1.25:1EeWXzPdhSA,1.5:rABDYkeK0x8" show_captions="false" download_track="false" sub="OEoXaMPEzfM" start_time="00:00:01" download_video="false" end_time="00:01:00"> <source src="http://www.example.com/source.mp4"/> <track src="http://www.example.com/track"/> <handout src="http://www.example.com/handout"/> <transcript language="ge" src="subs_grmtran1.srt" /> </video> ''' descriptor = instantiate_descriptor(data=xml_data_sub_transcript) download_youtube_subs('OEoXaMPEzfM', descriptor, settings) save_to_store(SRT_FILEDATA, "subs_grmtran1.srt", 'text/srt', descriptor.location) self.assertEqual(descriptor.index_dictionary(), { "content": { "display_name": "Test Video", "transcript_en": YOUTUBE_SUBTITLES, "transcript_ge": "sprechen sie deutsch? Ja, ich spreche Deutsch", }, "content_type": "Video" }) def test_video_with_multiple_transcripts_index_dictionary(self): """ Test index dictionary of a video module with two transcripts uploaded by a user. 
""" xml_data_transcripts = ''' <video display_name="Test Video" youtube="1.0:p2Q6BrNhdh8,0.75:izygArpw-Qo,1.25:1EeWXzPdhSA,1.5:rABDYkeK0x8" show_captions="false" download_track="false" start_time="00:00:01" download_video="false" end_time="00:01:00"> <source src="http://www.example.com/source.mp4"/> <track src="http://www.example.com/track"/> <handout src="http://www.example.com/handout"/> <transcript language="ge" src="subs_grmtran1.srt" /> <transcript language="hr" src="subs_croatian1.srt" /> </video> '''<|fim▁hole|> descriptor = instantiate_descriptor(data=xml_data_transcripts) save_to_store(SRT_FILEDATA, "subs_grmtran1.srt", 'text/srt', descriptor.location) save_to_store(CRO_SRT_FILEDATA, "subs_croatian1.srt", 'text/srt', descriptor.location) self.assertEqual(descriptor.index_dictionary(), { "content": { "display_name": "Test Video", "transcript_ge": "sprechen sie deutsch? Ja, ich spreche Deutsch", "transcript_hr": "Dobar dan! Kako ste danas?" }, "content_type": "Video" }) def test_video_with_multiple_transcripts_translation_retrieval(self): """ Test translation retrieval of a video module with multiple transcripts uploaded by a user. 
""" xml_data_transcripts = ''' <video display_name="Test Video" youtube="1.0:p2Q6BrNhdh8,0.75:izygArpw-Qo,1.25:1EeWXzPdhSA,1.5:rABDYkeK0x8" show_captions="false" download_track="false" start_time="00:00:01" download_video="false" end_time="00:01:00"> <source src="http://www.example.com/source.mp4"/> <track src="http://www.example.com/track"/> <handout src="http://www.example.com/handout"/> <transcript language="ge" src="subs_grmtran1.srt" /> <transcript language="hr" src="subs_croatian1.srt" /> </video> ''' descriptor = instantiate_descriptor(data=xml_data_transcripts) translations = descriptor.available_translations(descriptor.get_transcripts_info(), verify_assets=False) self.assertEqual(translations, ['hr', 'ge']) def test_video_with_no_transcripts_translation_retrieval(self): """ Test translation retrieval of a video module with no transcripts uploaded by a user- ie, that retrieval does not throw an exception. """ descriptor = instantiate_descriptor(data=None) translations = descriptor.available_translations(descriptor.get_transcripts_info(), verify_assets=False) self.assertEqual(translations, ['en'])<|fim▁end|>
<|file_name|>mydict2.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 # -*- coding: utf-8 -*- # 文档测试 class Dict(dict): ''' Simple dict but also support access as x.y style >>> d1 = Dict() >>> d1['x'] = 100 >>> d1.x 100 >>> d1.y = 200 >>> d1['y'] 200 >>> d2 = Dict(a=1, b=2, c='3') >>> d2.c '3' >>> d2['empty'] Traceback (most recent call last): ... KeyError: 'empty' >>> d2.empty Traceback (most recent call last): ... AttributeError: 'Dict' object has no attribute 'empty' ''' def __init__(self, **kw): super().__init__(**kw) def __getattr__(self, key): try: return self[key] except KeyError: raise AttributeError(r"'Dict' object has no attribute '%s'" % key) def __setattr__(self, key, value): self[key] = value <|fim▁hole|>if __name__ == '__main__': import doctest doctest.testmod()<|fim▁end|>
<|file_name|>webinterface_handler_wsgi.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- ## This file is part of Invenio. ## Copyright (C) 2009, 2010, 2011, 2012 CERN. ## ## Invenio is free software; you can redistribute it and/or ## modify it under the terms of the GNU General Public License as ## published by the Free Software Foundation; either version 2 of the ## License, or (at your option) any later version. ## ## Invenio is distributed in the hope that it will be useful, but ## WITHOUT ANY WARRANTY; without even the implied warranty of ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ## General Public License for more details. ## ## You should have received a copy of the GNU General Public License ## along with Invenio; if not, write to the Free Software Foundation, Inc., ## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. """mod_python->WSGI Framework""" import sys import os import re import cgi import inspect from fnmatch import fnmatch from urlparse import urlparse, urlunparse from wsgiref.validate import validator from wsgiref.util import FileWrapper, guess_scheme if __name__ != "__main__": # Chances are that we are inside mod_wsgi. ## You can't write to stdout in mod_wsgi, but some of our ## dependecies do this! (e.g. 
4Suite) sys.stdout = sys.stderr from invenio.session import get_session from invenio.webinterface_handler import CFG_HAS_HTTPS_SUPPORT, CFG_FULL_HTTPS from invenio.webinterface_layout import invenio_handler from invenio.webinterface_handler_wsgi_utils import table, FieldStorage from invenio.webinterface_handler_config import \ HTTP_STATUS_MAP, SERVER_RETURN, OK, DONE, \ HTTP_NOT_FOUND, HTTP_INTERNAL_SERVER_ERROR from invenio.config import CFG_WEBDIR, CFG_SITE_LANG, \ CFG_WEBSTYLE_HTTP_STATUS_ALERT_LIST, CFG_DEVEL_SITE, CFG_SITE_URL, \ CFG_SITE_SECURE_URL, CFG_WEBSTYLE_REVERSE_PROXY_IPS from invenio.errorlib import register_exception, get_pretty_traceback ## Static files are usually handled directly by the webserver (e.g. Apache) ## However in case WSGI is required to handle static files too (such ## as when running wsgiref simple server), then this flag can be ## turned on (it is done automatically by wsgi_handler_test). CFG_WSGI_SERVE_STATIC_FILES = False ## Magic regexp to search for usage of CFG_SITE_URL within src/href or ## any src usage of an external website _RE_HTTPS_REPLACES = re.compile(r"\b((?:src\s*=|url\s*\()\s*[\"']?)http\://", re.I) ## Regexp to verify that the IP starts with a number (filter cases where 'unknown') ## It is faster to verify only the start (585 ns) compared with verifying ## the whole ip address - re.compile('^\d+\.\d+\.\d+\.\d+$') (1.01 µs) _RE_IPADDRESS_START = re.compile("^\d+\.") def _http_replace_func(match): ## src external_site -> CFG_SITE_SECURE_URL/sslredirect/external_site return match.group(1) + CFG_SITE_SECURE_URL + '/sslredirect/' _ESCAPED_CFG_SITE_URL = cgi.escape(CFG_SITE_URL, True) _ESCAPED_CFG_SITE_SECURE_URL = cgi.escape(CFG_SITE_SECURE_URL, True) def https_replace(html): html = html.replace(_ESCAPED_CFG_SITE_URL, _ESCAPED_CFG_SITE_SECURE_URL) return _RE_HTTPS_REPLACES.sub(_http_replace_func, html) class InputProcessed(object): """ Auxiliary class used when reading input. 
@see: <http://www.wsgi.org/wsgi/Specifications/handling_post_forms>. """ def read(self, *args): raise EOFError('The wsgi.input stream has already been consumed') readline = readlines = __iter__ = read class SimulatedModPythonRequest(object): """ mod_python like request object. Minimum and cleaned implementation to make moving out of mod_python easy. @see: <http://www.modpython.org/live/current/doc-html/pyapi-mprequest.html> """ def __init__(self, environ, start_response): self.__environ = environ self.__start_response = start_response self.__response_sent_p = False self.__buffer = '' self.__low_level_headers = [] self.__headers = table(self.__low_level_headers) self.__headers.add = self.__headers.add_header self.__status = "200 OK" self.__filename = None self.__disposition_type = None self.__bytes_sent = 0 self.__allowed_methods = [] self.__cleanups = [] self.headers_out = self.__headers ## See: <http://www.python.org/dev/peps/pep-0333/#the-write-callable> self.__write = None self.__write_error = False self.__errors = environ['wsgi.errors'] self.__headers_in = table([]) self.__tainted = False self.__is_https = int(guess_scheme(self.__environ) == 'https') self.__replace_https = False self.track_writings = False self.__what_was_written = "" for key, value in environ.iteritems(): if key.startswith('HTTP_'): self.__headers_in[key[len('HTTP_'):].replace('_', '-')] = value if environ.get('CONTENT_LENGTH'): self.__headers_in['content-length'] = environ['CONTENT_LENGTH'] if environ.get('CONTENT_TYPE'): self.__headers_in['content-type'] = environ['CONTENT_TYPE'] def get_wsgi_environ(self): return self.__environ def get_post_form(self): self.__tainted = True post_form = self.__environ.get('wsgi.post_form') input = self.__environ['wsgi.input'] if (post_form is not None and post_form[0] is input): return post_form[2] # This must be done to avoid a bug in cgi.FieldStorage self.__environ.setdefault('QUERY_STRING', '') ## Video handler hack: uri = self.__environ['PATH_INFO'] if 
uri.endswith("upload_video"): tmp_shared = True else: tmp_shared = False fs = FieldStorage(self, keep_blank_values=1, to_tmp_shared=tmp_shared) if fs.wsgi_input_consumed: new_input = InputProcessed() post_form = (new_input, input, fs) self.__environ['wsgi.post_form'] = post_form self.__environ['wsgi.input'] = new_input else: post_form = (input, None, fs) self.__environ['wsgi.post_form'] = post_form return fs def get_response_sent_p(self): return self.__response_sent_p def get_low_level_headers(self): return self.__low_level_headers def get_buffer(self): return self.__buffer def write(self, string, flush=1): if isinstance(string, unicode): self.__buffer += string.encode('utf8') else: self.__buffer += string if flush: self.flush() def flush(self): self.send_http_header() if self.__buffer: self.__bytes_sent += len(self.__buffer) try: if not self.__write_error: if self.__replace_https: self.__write(https_replace(self.__buffer)) else: self.__write(self.__buffer) if self.track_writings: if self.__replace_https: self.__what_was_written += https_replace(self.__buffer) else: self.__what_was_written += self.__buffer except IOError, err: if "failed to write data" in str(err) or "client connection closed" in str(err): ## Let's just log this exception without alerting the admin: register_exception(req=self) self.__write_error = True ## This flag is there just ## to not report later other errors to the admin. 
else: raise self.__buffer = '' def set_content_type(self, content_type): self.__headers['content-type'] = content_type if self.__is_https: if content_type.startswith("text/html") or content_type.startswith("application/rss+xml"): self.__replace_https = True def get_content_type(self): return self.__headers['content-type'] def send_http_header(self): if not self.__response_sent_p: self.__tainted = True if self.__allowed_methods and self.__status.startswith('405 ') or self.__status.startswith('501 '): self.__headers['Allow'] = ', '.join(self.__allowed_methods) ## See: <http://www.python.org/dev/peps/pep-0333/#the-write-callable> #print self.__low_level_headers self.__write = self.__start_response(self.__status, self.__low_level_headers) self.__response_sent_p = True #print "Response sent: %s" % self.__headers def get_unparsed_uri(self): return '?'.join([self.__environ['PATH_INFO'], self.__environ['QUERY_STRING']]) def get_uri(self): return self.__environ['PATH_INFO'] def get_headers_in(self): return self.__headers_in def get_subprocess_env(self): return self.__environ def add_common_vars(self): pass def get_args(self): return self.__environ['QUERY_STRING'] def get_remote_ip(self): if 'X-FORWARDED-FOR' in self.__headers_in and \ self.__headers_in.get('X-FORWARDED-SERVER', '') == \ self.__headers_in.get('X-FORWARDED-HOST', '') == \ urlparse(CFG_SITE_URL)[1]: # we are using proxy setup if self.__environ.get('REMOTE_ADDR') in CFG_WEBSTYLE_REVERSE_PROXY_IPS: # we trust this proxy ip_list = self.__headers_in['X-FORWARDED-FOR'].split(',') for ip in ip_list: if _RE_IPADDRESS_START.match(ip): return ip # no IP has the correct format, return a default IP return '10.0.0.10' else: # we don't trust this proxy register_exception(prefix="You are running in a proxy configuration, but the " + \ "CFG_WEBSTYLE_REVERSE_PROXY_IPS variable does not contain " + \ "the IP of your proxy, thus the remote IP addresses of your " + \ "clients are not trusted. 
Please configure this variable.", alert_admin=True) return '10.0.0.11' return self.__environ.get('REMOTE_ADDR') def get_remote_host(self): return self.__environ.get('REMOTE_HOST') def get_header_only(self): return self.__environ['REQUEST_METHOD'] == 'HEAD' def set_status(self, status): self.__status = '%s %s' % (status, HTTP_STATUS_MAP.get(int(status), 'Explanation not available')) def get_status(self): return int(self.__status.split(' ')[0]) def get_wsgi_status(self): return self.__status def sendfile(self, path, offset=0, the_len=-1): try: self.send_http_header() file_to_send = open(path) file_to_send.seek(offset) file_wrapper = FileWrapper(file_to_send) count = 0 if the_len < 0: for chunk in file_wrapper: count += len(chunk) self.__bytes_sent += len(chunk) self.__write(chunk) else: for chunk in file_wrapper: if the_len >= len(chunk): the_len -= len(chunk) count += len(chunk) self.__bytes_sent += len(chunk) self.__write(chunk) else: count += the_len self.__bytes_sent += the_len self.__write(chunk[:the_len]) break except IOError, err: if "failed to write data" in str(err) or "client connection closed" in str(err): ## Let's just log this exception without alerting the admin: register_exception(req=self) else: raise return self.__bytes_sent def set_content_length(self, content_length): if content_length is not None: self.__headers['content-length'] = str(content_length) else: del self.__headers['content-length'] def is_https(self): return self.__is_https def get_method(self): return self.__environ['REQUEST_METHOD'] def get_hostname(self): return self.__environ.get('HTTP_HOST', '') def set_filename(self, filename): self.__filename = filename if self.__disposition_type is None: self.__disposition_type = 'inline' self.__headers['content-disposition'] = '%s; filename=%s' % (self.__disposition_type, self.__filename) def set_encoding(self, encoding): if encoding: self.__headers['content-encoding'] = str(encoding) else: del self.__headers['content-encoding'] def 
get_bytes_sent(self): return self.__bytes_sent def log_error(self, message): self.__errors.write(message.strip() + '\n') def get_content_type_set_p(self): return bool(self.__headers['content-type']) def allow_methods(self, methods, reset=0): if reset: self.__allowed_methods = [] self.__allowed_methods += [method.upper().strip() for method in methods] def get_allowed_methods(self): return self.__allowed_methods def readline(self, hint=None): try: return self.__environ['wsgi.input'].readline(hint) except TypeError: ## the hint param is not part of wsgi pep, although ## it's great to exploit it in when reading FORM ## with large files, in order to avoid filling up the memory ## Too bad it's not there :-( return self.__environ['wsgi.input'].readline() def readlines(self, hint=None): return self.__environ['wsgi.input'].readlines(hint) def read(self, hint=None): return self.__environ['wsgi.input'].read(hint) def register_cleanup(self, callback, data=None): self.__cleanups.append((callback, data)) def get_cleanups(self): return self.__cleanups def get_referer(self): return self.headers_in.get('referer') def get_what_was_written(self): return self.__what_was_written def __str__(self): from pprint import pformat out = "" for key in dir(self): try: if not callable(getattr(self, key)) and not key.startswith("_SimulatedModPythonRequest") and not key.startswith('__'): out += 'req.%s: %s\n' % (key, pformat(getattr(self, key))) except: pass return out def get_original_wsgi_environment(self): """ Return the original WSGI environment used to initialize this request object. @return: environ, start_response @raise AssertionError: in case the environment has been altered, i.e. either the input has been consumed or something has already been written to the output. """ assert not self.__tainted, "The original WSGI environment is tainted since at least req.write or req.form has been used." 
return self.__environ, self.__start_response content_type = property(get_content_type, set_content_type) unparsed_uri = property(get_unparsed_uri) uri = property(get_uri) headers_in = property(get_headers_in) subprocess_env = property(get_subprocess_env) args = property(get_args) header_only = property(get_header_only) status = property(get_status, set_status) method = property(get_method) hostname = property(get_hostname) filename = property(fset=set_filename) encoding = property(fset=set_encoding) bytes_sent = property(get_bytes_sent) content_type_set_p = property(get_content_type_set_p) allowed_methods = property(get_allowed_methods) response_sent_p = property(get_response_sent_p) form = property(get_post_form) remote_ip = property(get_remote_ip) remote_host = property(get_remote_host) referer = property(get_referer) what_was_written = property(get_what_was_written) def alert_admin_for_server_status_p(status, referer): """ Check the configuration variable CFG_WEBSTYLE_HTTP_STATUS_ALERT_LIST to see if the exception should be registered and the admin should be alerted. """ status = str(status) for pattern in CFG_WEBSTYLE_HTTP_STATUS_ALERT_LIST: pattern = pattern.lower() must_have_referer = False if pattern.endswith('r'): ## e.g. "404 r" must_have_referer = True pattern = pattern[:-1].strip() ## -> "404" if fnmatch(status, pattern) and (not must_have_referer or referer): return True return False def application(environ, start_response): """ Entry point for wsgi. 
""" ## Needed for mod_wsgi, see: <http://code.google.com/p/modwsgi/wiki/ApplicationIssues> req = SimulatedModPythonRequest(environ, start_response) #print 'Starting mod_python simulation' try: try: possible_module, possible_handler = is_mp_legacy_publisher_path(environ['PATH_INFO']) if possible_module is not None: mp_legacy_publisher(req, possible_module, possible_handler) elif CFG_WSGI_SERVE_STATIC_FILES: possible_static_path = is_static_path(environ['PATH_INFO']) if possible_static_path is not None: from invenio.bibdocfile import stream_file stream_file(req, possible_static_path) else: ret = invenio_handler(req) else: ret = invenio_handler(req) req.flush() except SERVER_RETURN, status: status = int(str(status)) if status not in (OK, DONE): req.status = status req.headers_out['content-type'] = 'text/html' admin_to_be_alerted = alert_admin_for_server_status_p(status, req.headers_in.get('referer')) if admin_to_be_alerted: register_exception(req=req, alert_admin=True) if not req.response_sent_p: start_response(req.get_wsgi_status(), req.get_low_level_headers(), sys.exc_info()) return generate_error_page(req, admin_to_be_alerted) else: req.flush() except: register_exception(req=req, alert_admin=True) if not req.response_sent_p: req.status = HTTP_INTERNAL_SERVER_ERROR req.headers_out['content-type'] = 'text/html' start_response(req.get_wsgi_status(), req.get_low_level_headers(), sys.exc_info()) if CFG_DEVEL_SITE: return ["<pre>%s</pre>" % cgi.escape(get_pretty_traceback(req=req, exc_info=sys.exc_info()))] from cgitb import html return [html(sys.exc_info())] return generate_error_page(req) else: return generate_error_page(req, page_already_started=True) finally: for (callback, data) in req.get_cleanups(): callback(data) return [] def generate_error_page(req, admin_was_alerted=True, page_already_started=False): """ Returns an iterable with the error page to be sent to the user browser. 
""" from invenio.webpage import page from invenio import template webstyle_templates = template.load('webstyle') ln = req.form.get('ln', CFG_SITE_LANG) if page_already_started: return [webstyle_templates.tmpl_error_page(status=req.get_wsgi_status(), ln=ln, admin_was_alerted=admin_was_alerted)] else: return [page(title=req.get_wsgi_status(), body=webstyle_templates.tmpl_error_page(status=req.get_wsgi_status(), ln=ln, admin_was_alerted=admin_was_alerted), language=ln, req=req)] def is_static_path(path): """ Returns True if path corresponds to an exsting file under CFG_WEBDIR. @param path: the path. @type path: string @return: True if path corresponds to an exsting file under CFG_WEBDIR. @rtype: bool """ path = os.path.abspath(CFG_WEBDIR + path)<|fim▁hole|> def is_mp_legacy_publisher_path(path): """ Checks path corresponds to an exsting Python file under CFG_WEBDIR. @param path: the path. @type path: string @return: the path of the module to load and the function to call there. @rtype: tuple """ path = path.split('/') for index, component in enumerate(path): if component.endswith('.py'): possible_module = os.path.abspath(CFG_WEBDIR + os.path.sep + os.path.sep.join(path[:index + 1])) possible_handler = '/'.join(path[index + 1:]).strip() if possible_handler.startswith('_'): return None, None if not possible_handler: possible_handler = 'index' if os.path.exists(possible_module) and possible_module.startswith(CFG_WEBDIR): return (possible_module, possible_handler) else: return None, None def mp_legacy_publisher(req, possible_module, possible_handler): """ mod_python legacy publisher minimum implementation. 
""" the_module = open(possible_module).read() module_globals = {} exec(the_module, module_globals) if possible_handler in module_globals and callable(module_globals[possible_handler]): from invenio.webinterface_handler import _check_result ## req is the required first parameter of any handler expected_args = list(inspect.getargspec(module_globals[possible_handler])[0]) if not expected_args or 'req' != expected_args[0]: ## req was not the first argument. Too bad! raise SERVER_RETURN, HTTP_NOT_FOUND ## the req.form must be casted to dict because of Python 2.4 and earlier ## otherwise any object exposing the mapping interface can be ## used with the magic ** form = dict(req.form) for key, value in form.items(): ## FIXME: this is a backward compatibility workaround ## because most of the old administration web handler ## expect parameters to be of type str. ## When legacy publisher will be removed all this ## pain will go away anyway :-) if isinstance(value, str): form[key] = str(value) else: ## NOTE: this is a workaround for e.g. legacy webupload ## that is still using legacy publisher and expect to ## have a file (Field) instance instead of a string. form[key] = value if (CFG_FULL_HTTPS or CFG_HAS_HTTPS_SUPPORT and get_session(req).need_https) and not req.is_https(): from invenio.urlutils import redirect_to_url # We need to isolate the part of the URI that is after # CFG_SITE_URL, and append that to our CFG_SITE_SECURE_URL. 
original_parts = urlparse(req.unparsed_uri) plain_prefix_parts = urlparse(CFG_SITE_URL) secure_prefix_parts = urlparse(CFG_SITE_SECURE_URL) # Compute the new path plain_path = original_parts[2] plain_path = secure_prefix_parts[2] + \ plain_path[len(plain_prefix_parts[2]):] # ...and recompose the complete URL final_parts = list(secure_prefix_parts) final_parts[2] = plain_path final_parts[-3:] = original_parts[-3:] target = urlunparse(final_parts) redirect_to_url(req, target) try: return _check_result(req, module_globals[possible_handler](req, **form)) except TypeError, err: if ("%s() got an unexpected keyword argument" % possible_handler) in str(err) or ('%s() takes at least' % possible_handler) in str(err): inspected_args = inspect.getargspec(module_globals[possible_handler]) expected_args = list(inspected_args[0]) expected_defaults = list(inspected_args[3]) expected_args.reverse() expected_defaults.reverse() register_exception(req=req, prefix="Wrong GET parameter set in calling a legacy publisher handler for %s: expected_args=%s, found_args=%s" % (possible_handler, repr(expected_args), repr(req.form.keys())), alert_admin=CFG_DEVEL_SITE) cleaned_form = {} for index, arg in enumerate(expected_args): if arg == 'req': continue if index < len(expected_defaults): cleaned_form[arg] = form.get(arg, expected_defaults[index]) else: cleaned_form[arg] = form.get(arg, None) return _check_result(req, module_globals[possible_handler](req, **cleaned_form)) else: raise else: raise SERVER_RETURN, HTTP_NOT_FOUND def check_wsgiref_testing_feasability(): """ In order to use wsgiref for running Invenio, CFG_SITE_URL and CFG_SITE_SECURE_URL must not use HTTPS because SSL is not supported. """ if CFG_SITE_URL.lower().startswith('https'): print >> sys.stderr, """ ERROR: SSL is not supported by the wsgiref simple server implementation. Please set CFG_SITE_URL not to start with "https". 
Currently CFG_SITE_URL is set to: "%s".""" % CFG_SITE_URL sys.exit(1) if CFG_SITE_SECURE_URL.lower().startswith('https'): print >> sys.stderr, """ ERROR: SSL is not supported by the wsgiref simple server implementation. Please set CFG_SITE_SECURE_URL not to start with "https". Currently CFG_SITE_SECURE_URL is set to: "%s".""" % CFG_SITE_SECURE_URL sys.exit(1) def wsgi_handler_test(port=80): """ Simple WSGI testing environment based on wsgiref. """ from wsgiref.simple_server import make_server global CFG_WSGI_SERVE_STATIC_FILES CFG_WSGI_SERVE_STATIC_FILES = True check_wsgiref_testing_feasability() validator_app = validator(application) httpd = make_server('', port, validator_app) print "Serving on port %s..." % port httpd.serve_forever() def main(): from optparse import OptionParser parser = OptionParser() parser.add_option('-t', '--test', action='store_true', dest='test', default=False, help="Run a WSGI test server via wsgiref (not using Apache).") parser.add_option('-p', '--port', type='int', dest='port', default='80', help="The port where the WSGI test server will listen. [80]") (options, args) = parser.parse_args() if options.test: wsgi_handler_test(options.port) else: parser.print_help() if __name__ == "__main__": main()<|fim▁end|>
if path.startswith(CFG_WEBDIR) and os.path.isfile(path): return path return None
<|file_name|>ex14_word_query.cpp<|end_file_name|><|fim▁begin|>// // Stroustrup - Programming Principles & Practice // // Chapter 21 Exercise 14 // // Write a word query program that provides information about the text in a // document. Use the text cleaner from ex13 to produce input. // #include <iostream> #include <stdexcept> #include <string> #include <map> #include <vector> int count_word(const std::map<std::string,int>& msi, const std::string& w) // return the number of times word 'w' is used in msi { auto it = msi.find(w); if (it == msi.end()) return 0; return it->second; } std::string most_freq(const std::map<std::string,int>& msi) { auto freq = msi.begin(); for (auto it = msi.begin(); it != msi.end(); ++it) if (it->second > freq->second) freq = it; return freq->first; } std::string longest_word(const std::map<std::string,int>& msi) { auto longest = msi.begin(); for (auto it = msi.begin(); it != msi.end(); ++it) if (it->first.size() > longest->first.size()) longest = it; return longest->first; } std::string shortest_word(const std::map<std::string,int>& msi) { auto shortest = msi.begin(); for (auto it = msi.begin(); it != msi.end(); ++it) if (it->first.size() < shortest->first.size()) shortest = it; return shortest->first; } std::vector<std::string> search_starts(const std::map<std::string,int>& msi, const std::string& sub) { std::vector<std::string> vs; for (auto it = msi.begin(); it != msi.end(); ++it) if (std::equal(sub.begin(), sub.end(), it->first.begin())) vs.push_back(it->first); <|fim▁hole|> return vs; } std::vector<std::string> search_size(const std::map<std::string,int>& msi, int n) { std::vector<std::string> vs; for (auto it = msi.begin(); it != msi.end(); ++it) if (it->first.size() == n) vs.push_back(it->first); return vs; } // forward declaration? see if this works std::map<std::string,int> get_word_data(const std::string&); // it does! ex 13 was compiled with 14! 
int main() try { const std::string fname = "./input_story.txt"; std::map<std::string,int> word_data = get_word_data(fname); std::cout << "'toddler' was used " << count_word(word_data, "toddler") << " times\n"; std::cout << "the most frequently used word was " << most_freq(word_data) << '\n'; std::cout << "the longest word was: " << longest_word(word_data) << '\n'; std::cout << "the shortest word was: " << shortest_word(word_data) << '\n'; std::vector<std::string> starts_f = search_starts(word_data, "f"); std::cout << "All of the 'f' words were:\n"; for (const auto& a : starts_f) std::cout << a << ' '; std::cout << '\n'; std::vector<std::string> fours = search_size(word_data, 4); std::cout << "there were " << fours.size() << " four-letter words\n"; for (const auto& a : fours) std::cout << a << ' '; std::cout << '\n'; } catch(std::exception& e) { std::cerr << "Exception: " << e.what() << '\n'; return 1; } catch(...) { std::cerr << "Unknown exception\n"; return 2; }<|fim▁end|>
<|file_name|>testing_public_spec.ts<|end_file_name|><|fim▁begin|>/** * @license * Copyright Google LLC All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ import {CompilerConfig, ResourceLoader} from '@angular/compiler'; import {Compiler, Component, ComponentFactoryResolver, CUSTOM_ELEMENTS_SCHEMA, Directive, Inject, Injectable, InjectionToken, Injector, Input, NgModule, Optional, Pipe, SkipSelf, ɵstringify as stringify} from '@angular/core'; import {fakeAsync, getTestBed, inject, TestBed, tick, waitForAsync, withModule} from '@angular/core/testing'; import {expect} from '@angular/platform-browser/testing/src/matchers'; import {ivyEnabled, modifiedInIvy, obsoleteInIvy, onlyInIvy} from '@angular/private/testing'; // Services, and components for the tests. @Component({selector: 'child-comp', template: `<span>Original {{childBinding}}</span>`}) @Injectable() class ChildComp { childBinding: string; constructor() { this.childBinding = 'Child'; } } @Component({selector: 'child-comp', template: `<span>Mock</span>`}) @Injectable() class MockChildComp { } @Component({ selector: 'parent-comp', template: `Parent(<child-comp></child-comp>)`, }) @Injectable() class ParentComp { } @Component({selector: 'my-if-comp', template: `MyIf(<span *ngIf="showMore">More</span>)`}) @Injectable() class MyIfComp { showMore: boolean = false; } @Component({selector: 'child-child-comp', template: `<span>ChildChild</span>`}) @Injectable() class ChildChildComp { } @Component({ selector: 'child-comp', template: `<span>Original {{childBinding}}(<child-child-comp></child-child-comp>)</span>`, }) @Injectable() class ChildWithChildComp { childBinding: string; constructor() { this.childBinding = 'Child'; } } class FancyService { value: string = 'real value'; getAsyncValue() { return Promise.resolve('async value'); } getTimeoutValue() { return new Promise<string>((resolve, reject) => setTimeout(() => 
resolve('timeout value'), 10)); } } class MockFancyService extends FancyService { override value: string = 'mocked out value'; } @Component({ selector: 'my-service-comp', providers: [FancyService], template: `injected value: {{fancyService.value}}` }) class TestProvidersComp { constructor(private fancyService: FancyService) {} } @Component({ selector: 'my-service-comp', viewProviders: [FancyService], template: `injected value: {{fancyService.value}}` }) class TestViewProvidersComp { constructor(private fancyService: FancyService) {} } @Directive({selector: '[someDir]', host: {'[title]': 'someDir'}}) class SomeDirective { // TODO(issue/24571): remove '!'. @Input() someDir!: string; } @Pipe({name: 'somePipe'}) class SomePipe { transform(value: string) { return `transformed ${value}`; } } @Component({selector: 'comp', template: `<div [someDir]="'someValue' | somePipe"></div>`}) class CompUsingModuleDirectiveAndPipe { } @NgModule() class SomeLibModule { } @Component({ selector: 'comp', templateUrl: '/base/angular/packages/platform-browser/test/static_assets/test.html' }) class CompWithUrlTemplate { } const aTok = new InjectionToken<string>('a'); const bTok = new InjectionToken<string>('b'); { describe('public testing API', () => { describe('using the async helper with context passing', () => { type TestContext = {actuallyDone: boolean}; beforeEach(function(this: TestContext) { this.actuallyDone = false; }); afterEach(function(this: TestContext) { expect(this.actuallyDone).toEqual(true); }); it('should run normal tests', function(this: TestContext) { this.actuallyDone = true; }); it('should run normal async tests', function(this: TestContext, done) { setTimeout(() => { this.actuallyDone = true; done(); }, 0); }); it('should run async tests with tasks', waitForAsync(function(this: TestContext) { setTimeout(() => this.actuallyDone = true, 0); })); it('should run async tests with promises', waitForAsync(function(this: TestContext) { const p = new Promise((resolve, reject) 
=> setTimeout(resolve, 10)); p.then(() => this.actuallyDone = true); })); }); describe('basic context passing to inject, fakeAsync and withModule helpers', () => { const moduleConfig = { providers: [FancyService], }; type TestContext = {contextModified: boolean}; beforeEach(function(this: TestContext) { this.contextModified = false; }); afterEach(function(this: TestContext) { expect(this.contextModified).toEqual(true); }); it('should pass context to inject helper', inject([], function(this: TestContext) { this.contextModified = true; })); it('should pass context to fakeAsync helper', fakeAsync(function(this: TestContext) { this.contextModified = true; })); it('should pass context to withModule helper - simple', withModule(moduleConfig, function(this: TestContext) { this.contextModified = true; })); it('should pass context to withModule helper - advanced', withModule(moduleConfig) .inject([FancyService], function(this: TestContext, service: FancyService) { expect(service.value).toBe('real value'); this.contextModified = true; })); it('should preserve context when async and inject helpers are combined', waitForAsync(inject([], function(this: TestContext) { setTimeout(() => this.contextModified = true, 0); }))); it('should preserve context when fakeAsync and inject helpers are combined', fakeAsync(inject([], function(this: TestContext) { setTimeout(() => this.contextModified = true, 0); tick(1); }))); }); describe('using the test injector with the inject helper', () => { describe('setting up Providers', () => { beforeEach(() => { TestBed.configureTestingModule( {providers: [{provide: FancyService, useValue: new FancyService()}]}); it('should use set up providers', inject([FancyService], (service: FancyService) => { expect(service.value).toEqual('real value'); })); it('should wait until returned promises', waitForAsync(inject([FancyService], (service: FancyService) => { service.getAsyncValue().then((value) => expect(value).toEqual('async value')); 
service.getTimeoutValue().then((value) => expect(value).toEqual('timeout value')); }))); it('should allow the use of fakeAsync', fakeAsync(inject([FancyService], (service: FancyService) => { let value: string = undefined!; service.getAsyncValue().then((val) => value = val); tick(); expect(value).toEqual('async value'); }))); it('should allow use of "done"', (done) => { inject([FancyService], (service: FancyService) => { let count = 0; const id = setInterval(() => { count++; if (count > 2) { clearInterval(id); done(); } }, 5); })(); // inject needs to be invoked explicitly with (). }); describe('using beforeEach', () => { beforeEach(inject([FancyService], (service: FancyService) => { service.value = 'value modified in beforeEach'; })); it('should use modified providers', inject([FancyService], (service: FancyService) => { expect(service.value).toEqual('value modified in beforeEach'); })); }); describe('using async beforeEach', () => { beforeEach(waitForAsync(inject([FancyService], (service: FancyService) => { service.getAsyncValue().then((value) => service.value = value); }))); it('should use asynchronously modified value', inject([FancyService], (service: FancyService) => { expect(service.value).toEqual('async value'); })); }); }); }); }); describe('using the test injector with modules', () => { const moduleConfig = { providers: [FancyService], imports: [SomeLibModule], declarations: [SomeDirective, SomePipe, CompUsingModuleDirectiveAndPipe], }; describe('setting up a module', () => { beforeEach(() => TestBed.configureTestingModule(moduleConfig)); it('should use set up providers', inject([FancyService], (service: FancyService) => { expect(service.value).toEqual('real value'); })); it('should be able to create any declared components', () => { const compFixture = TestBed.createComponent(CompUsingModuleDirectiveAndPipe); expect(compFixture.componentInstance).toBeAnInstanceOf(CompUsingModuleDirectiveAndPipe); }); it('should use set up directives and pipes', () => { 
const compFixture = TestBed.createComponent(CompUsingModuleDirectiveAndPipe); const el = compFixture.debugElement; compFixture.detectChanges(); expect(el.children[0].properties['title']).toBe('transformed someValue'); }); it('should use set up imported modules', inject([SomeLibModule], (libModule: SomeLibModule) => { expect(libModule).toBeAnInstanceOf(SomeLibModule); })); describe('provided schemas', () => { @Component({template: '<some-element [someUnknownProp]="true"></some-element>'}) class ComponentUsingInvalidProperty { } beforeEach(() => { TestBed.configureTestingModule( {schemas: [CUSTOM_ELEMENTS_SCHEMA], declarations: [ComponentUsingInvalidProperty]}); }); it('should not error on unknown bound properties on custom elements when using the CUSTOM_ELEMENTS_SCHEMA', () => { expect(TestBed.createComponent(ComponentUsingInvalidProperty).componentInstance) .toBeAnInstanceOf(ComponentUsingInvalidProperty); }); }); }); describe('per test modules', () => { it('should use set up providers', withModule(moduleConfig).inject([FancyService], (service: FancyService) => { expect(service.value).toEqual('real value'); })); it('should use set up directives and pipes', withModule(moduleConfig, () => { const compFixture = TestBed.createComponent(CompUsingModuleDirectiveAndPipe); const el = compFixture.debugElement; compFixture.detectChanges(); expect(el.children[0].properties['title']).toBe('transformed someValue'); })); it('should use set up library modules', withModule(moduleConfig).inject([SomeLibModule], (libModule: SomeLibModule) => { expect(libModule).toBeAnInstanceOf(SomeLibModule); })); }); describe('components with template url', () => { beforeEach(waitForAsync(async () => { TestBed.configureTestingModule({declarations: [CompWithUrlTemplate]}); await TestBed.compileComponents(); })); isBrowser && it('should allow to createSync components with templateUrl after explicit async compilation', () => { const fixture = TestBed.createComponent(CompWithUrlTemplate); 
expect(fixture.nativeElement).toHaveText('from external template'); }); }); describe('overwriting metadata', () => { @Pipe({name: 'undefined'}) class SomePipe { transform(value: string): string { return `transformed ${value}`; } } @Directive({selector: '[undefined]'}) class SomeDirective { someProp = 'hello'; } @Component({selector: 'comp', template: 'someText'}) class SomeComponent { } @Component({selector: 'comp', template: 'someOtherText'}) class SomeOtherComponent { } @NgModule({declarations: [SomeComponent, SomeDirective, SomePipe]}) class SomeModule { } beforeEach(() => TestBed.configureTestingModule({imports: [SomeModule]})); describe('module', () => { beforeEach(() => { TestBed.overrideModule(SomeModule, {set: {declarations: [SomeOtherComponent]}}); }); it('should work', () => { expect(TestBed.createComponent(SomeOtherComponent).componentInstance) .toBeAnInstanceOf(SomeOtherComponent); }); }); describe('component', () => { beforeEach(() => { TestBed.overrideComponent( SomeComponent, {set: {selector: 'comp', template: 'newText'}}); }); it('should work', () => { expect(TestBed.createComponent(SomeComponent).nativeElement).toHaveText('newText'); }); }); describe('directive', () => { beforeEach(() => { TestBed .overrideComponent( SomeComponent, {set: {selector: 'comp', template: `<div someDir></div>`}}) .overrideDirective( SomeDirective, {set: {selector: '[someDir]', host: {'[title]': 'someProp'}}}); }); it('should work', () => { const compFixture = TestBed.createComponent(SomeComponent); compFixture.detectChanges(); expect(compFixture.debugElement.children[0].properties['title']).toEqual('hello'); }); }); describe('pipe', () => { beforeEach(() => { TestBed .overrideComponent( SomeComponent, {set: {selector: 'comp', template: `{{'hello' | somePipe}}`}}) .overridePipe(SomePipe, {set: {name: 'somePipe'}}) .overridePipe(SomePipe, {add: {pure: false}}); }); it('should work', () => { const compFixture = TestBed.createComponent(SomeComponent); 
compFixture.detectChanges(); expect(compFixture.nativeElement).toHaveText('transformed hello'); }); }); describe('template', () => { let testBedSpy: any; beforeEach(() => { testBedSpy = spyOn(getTestBed(), 'overrideComponent').and.callThrough(); TestBed.overrideTemplate(SomeComponent, 'newText'); }); it(`should override component's template`, () => { const fixture = TestBed.createComponent(SomeComponent); expect(fixture.nativeElement).toHaveText('newText'); expect(testBedSpy).toHaveBeenCalledWith(SomeComponent, { set: {template: 'newText', templateUrl: null} }); }); }); }); describe('overriding providers', () => { describe('in core', () => { it('ComponentFactoryResolver', () => { const componentFactoryMock = jasmine.createSpyObj('componentFactory', ['resolveComponentFactory']); TestBed.overrideProvider(ComponentFactoryResolver, {useValue: componentFactoryMock}); expect(TestBed.get(ComponentFactoryResolver)).toEqual(componentFactoryMock); }); }); describe('in NgModules', () => { it('should support useValue', () => { TestBed.configureTestingModule({ providers: [ {provide: aTok, useValue: 'aValue'}, ] }); TestBed.overrideProvider(aTok, {useValue: 'mockValue'}); expect(TestBed.inject(aTok)).toBe('mockValue'); }); it('should support useFactory', () => { TestBed.configureTestingModule({ providers: [ {provide: 'dep', useValue: 'depValue'}, {provide: aTok, useValue: 'aValue'}, ] }); TestBed.overrideProvider( aTok, {useFactory: (dep: any) => `mockA: ${dep}`, deps: ['dep']}); expect(TestBed.inject(aTok)).toBe('mockA: depValue'); }); it('should support @Optional without matches', () => { TestBed.configureTestingModule({ providers: [ {provide: aTok, useValue: 'aValue'}, ] }); TestBed.overrideProvider( aTok, {useFactory: (dep: any) => `mockA: ${dep}`, deps: [[new Optional(), 'dep']]}); expect(TestBed.inject(aTok)).toBe('mockA: null'); }); it('should support Optional with matches', () => { TestBed.configureTestingModule({ providers: [ {provide: 'dep', useValue: 'depValue'}, 
{provide: aTok, useValue: 'aValue'}, ] }); TestBed.overrideProvider( aTok, {useFactory: (dep: any) => `mockA: ${dep}`, deps: [[new Optional(), 'dep']]}); expect(TestBed.inject(aTok)).toBe('mockA: depValue'); }); it('should support SkipSelf', () => { @NgModule({ providers: [ {provide: aTok, useValue: 'aValue'}, {provide: 'dep', useValue: 'depValue'}, ]<|fim▁hole|> TestBed.overrideProvider( aTok, {useFactory: (dep: any) => `mockA: ${dep}`, deps: [[new SkipSelf(), 'dep']]}); TestBed.configureTestingModule( {providers: [{provide: 'dep', useValue: 'parentDepValue'}]}); const compiler = TestBed.inject(Compiler); const modFactory = compiler.compileModuleSync(MyModule); expect(modFactory.create(getTestBed()).injector.get(aTok)) .toBe('mockA: parentDepValue'); }); it('should keep imported NgModules eager', () => { let someModule: SomeModule|undefined; @NgModule() class SomeModule { constructor() { someModule = this; } } TestBed.configureTestingModule({ providers: [ {provide: aTok, useValue: 'aValue'}, ], imports: [SomeModule] }); TestBed.overrideProvider(aTok, {useValue: 'mockValue'}); expect(TestBed.inject(aTok)).toBe('mockValue'); expect(someModule).toBeAnInstanceOf(SomeModule); }); describe('injecting eager providers into an eager overwritten provider', () => { @NgModule({ providers: [ {provide: aTok, useFactory: () => 'aValue'}, {provide: bTok, useFactory: () => 'bValue'}, ] }) class MyModule { // NgModule is eager, which makes all of its deps eager constructor(@Inject(aTok) a: any, @Inject(bTok) b: any) {} } it('should inject providers that were declared before', () => { TestBed.configureTestingModule({imports: [MyModule]}); TestBed.overrideProvider( bTok, {useFactory: (a: string) => `mockB: ${a}`, deps: [aTok]}); expect(TestBed.inject(bTok)).toBe('mockB: aValue'); }); it('should inject providers that were declared afterwards', () => { TestBed.configureTestingModule({imports: [MyModule]}); TestBed.overrideProvider( aTok, {useFactory: (b: string) => `mockA: ${b}`, deps: 
[bTok]}); expect(TestBed.inject(aTok)).toBe('mockA: bValue'); }); }); }); describe('in Components', () => { it('should support useValue', () => { @Component({ template: '', providers: [ {provide: aTok, useValue: 'aValue'}, ] }) class MComp { } TestBed.overrideProvider(aTok, {useValue: 'mockValue'}); const ctx = TestBed.configureTestingModule({declarations: [MComp]}).createComponent(MComp); expect(ctx.debugElement.injector.get(aTok)).toBe('mockValue'); }); it('should support useFactory', () => { @Component({ template: '', providers: [ {provide: 'dep', useValue: 'depValue'}, {provide: aTok, useValue: 'aValue'}, ] }) class MyComp { } TestBed.overrideProvider( aTok, {useFactory: (dep: any) => `mockA: ${dep}`, deps: ['dep']}); const ctx = TestBed.configureTestingModule({declarations: [MyComp]}).createComponent(MyComp); expect(ctx.debugElement.injector.get(aTok)).toBe('mockA: depValue'); }); it('should support @Optional without matches', () => { @Component({ template: '', providers: [ {provide: aTok, useValue: 'aValue'}, ] }) class MyComp { } TestBed.overrideProvider( aTok, {useFactory: (dep: any) => `mockA: ${dep}`, deps: [[new Optional(), 'dep']]}); const ctx = TestBed.configureTestingModule({declarations: [MyComp]}).createComponent(MyComp); expect(ctx.debugElement.injector.get(aTok)).toBe('mockA: null'); }); it('should support Optional with matches', () => { @Component({ template: '', providers: [ {provide: 'dep', useValue: 'depValue'}, {provide: aTok, useValue: 'aValue'}, ] }) class MyComp { } TestBed.overrideProvider( aTok, {useFactory: (dep: any) => `mockA: ${dep}`, deps: [[new Optional(), 'dep']]}); const ctx = TestBed.configureTestingModule({declarations: [MyComp]}).createComponent(MyComp); expect(ctx.debugElement.injector.get(aTok)).toBe('mockA: depValue'); }); it('should support SkipSelf', () => { @Directive({ selector: '[myDir]', providers: [ {provide: aTok, useValue: 'aValue'}, {provide: 'dep', useValue: 'depValue'}, ] }) class MyDir { } @Component({ 
template: '<div myDir></div>', providers: [ {provide: 'dep', useValue: 'parentDepValue'}, ] }) class MyComp { } TestBed.overrideProvider( aTok, {useFactory: (dep: any) => `mockA: ${dep}`, deps: [[new SkipSelf(), 'dep']]}); const ctx = TestBed.configureTestingModule({declarations: [MyComp, MyDir]}) .createComponent(MyComp); expect(ctx.debugElement.children[0].injector.get(aTok)).toBe('mockA: parentDepValue'); }); it('should support multiple providers in a template', () => { @Directive({ selector: '[myDir1]', providers: [ {provide: aTok, useValue: 'aValue1'}, ] }) class MyDir1 { } @Directive({ selector: '[myDir2]', providers: [ {provide: aTok, useValue: 'aValue2'}, ] }) class MyDir2 { } @Component({ template: '<div myDir1></div><div myDir2></div>', }) class MyComp { } TestBed.overrideProvider(aTok, {useValue: 'mockA'}); const ctx = TestBed.configureTestingModule({declarations: [MyComp, MyDir1, MyDir2]}) .createComponent(MyComp); expect(ctx.debugElement.children[0].injector.get(aTok)).toBe('mockA'); expect(ctx.debugElement.children[1].injector.get(aTok)).toBe('mockA'); }); describe('injecting eager providers into an eager overwritten provider', () => { @Component({ template: '', providers: [ {provide: aTok, useFactory: () => 'aValue'}, {provide: bTok, useFactory: () => 'bValue'}, ] }) class MyComp { // Component is eager, which makes all of its deps eager constructor(@Inject(aTok) a: any, @Inject(bTok) b: any) {} } it('should inject providers that were declared before it', () => { TestBed.overrideProvider( bTok, {useFactory: (a: string) => `mockB: ${a}`, deps: [aTok]}); const ctx = TestBed.configureTestingModule({declarations: [MyComp]}).createComponent(MyComp); expect(ctx.debugElement.injector.get(bTok)).toBe('mockB: aValue'); }); it('should inject providers that were declared after it', () => { TestBed.overrideProvider( aTok, {useFactory: (b: string) => `mockA: ${b}`, deps: [bTok]}); const ctx = TestBed.configureTestingModule({declarations: 
[MyComp]}).createComponent(MyComp); expect(ctx.debugElement.injector.get(aTok)).toBe('mockA: bValue'); }); }); }); it('should reset overrides when the testing modules is resetted', () => { TestBed.overrideProvider(aTok, {useValue: 'mockValue'}); TestBed.resetTestingModule(); TestBed.configureTestingModule({providers: [{provide: aTok, useValue: 'aValue'}]}); expect(TestBed.inject(aTok)).toBe('aValue'); }); }); describe('overrideTemplateUsingTestingModule', () => { it('should compile the template in the context of the testing module', () => { @Component({selector: 'comp', template: 'a'}) class MyComponent { prop = 'some prop'; } let testDir: TestDir|undefined; @Directive({selector: '[test]'}) class TestDir { constructor() { testDir = this; } // TODO(issue/24571): remove '!'. @Input('test') test!: string; } TestBed.overrideTemplateUsingTestingModule( MyComponent, '<div [test]="prop">Hello world!</div>'); const fixture = TestBed.configureTestingModule({declarations: [MyComponent, TestDir]}) .createComponent(MyComponent); fixture.detectChanges(); expect(fixture.nativeElement).toHaveText('Hello world!'); expect(testDir).toBeAnInstanceOf(TestDir); expect(testDir!.test).toBe('some prop'); }); it('should reset overrides when the testing module is resetted', () => { @Component({selector: 'comp', template: 'a'}) class MyComponent { } TestBed.overrideTemplateUsingTestingModule(MyComponent, 'b'); const fixture = TestBed.resetTestingModule() .configureTestingModule({declarations: [MyComponent]}) .createComponent(MyComponent); expect(fixture.nativeElement).toHaveText('a'); }); }); describe('setting up the compiler', () => { describe('providers', () => { it('should use set up providers', fakeAsync(() => { // Keeping this component inside the test is needed to make sure it's not resolved // prior to this test, thus having ɵcmp and a reference in resource // resolution queue. 
This is done to check external resoution logic in isolation by // configuring TestBed with the necessary ResourceLoader instance. @Component({ selector: 'comp', templateUrl: '/base/angular/packages/platform-browser/test/static_assets/test.html' }) class InternalCompWithUrlTemplate { } const resourceLoaderGet = jasmine.createSpy('resourceLoaderGet') .and.returnValue(Promise.resolve('Hello world!')); TestBed.configureTestingModule({declarations: [InternalCompWithUrlTemplate]}); TestBed.configureCompiler( {providers: [{provide: ResourceLoader, useValue: {get: resourceLoaderGet}}]}); TestBed.compileComponents(); tick(); const compFixture = TestBed.createComponent(InternalCompWithUrlTemplate); expect(compFixture.nativeElement).toHaveText('Hello world!'); })); }); describe('useJit true', () => { beforeEach(() => TestBed.configureCompiler({useJit: true})); obsoleteInIvy('the Render3 compiler JiT mode is not configurable') .it('should set the value into CompilerConfig', inject([CompilerConfig], (config: CompilerConfig) => { expect(config.useJit).toBe(true); })); }); describe('useJit false', () => { beforeEach(() => TestBed.configureCompiler({useJit: false})); obsoleteInIvy('the Render3 compiler JiT mode is not configurable') .it('should set the value into CompilerConfig', inject([CompilerConfig], (config: CompilerConfig) => { expect(config.useJit).toBe(false); })); }); }); }); describe('errors', () => { let originalJasmineIt: (description: string, func: () => void) => jasmine.Spec; const patchJasmineIt = () => { let resolve: (result: any) => void; let reject: (error: any) => void; const promise = new Promise((res, rej) => { resolve = res; reject = rej; }); const jasmineEnv = jasmine.getEnv() as any; originalJasmineIt = jasmineEnv.it; jasmineEnv.it = (description: string, fn: (done: DoneFn) => void): any => { const done = <DoneFn>(() => resolve(null)); done.fail = (err) => reject(err); fn(done); return null; }; return promise; }; const restoreJasmineIt = () => 
((jasmine.getEnv() as any).it = originalJasmineIt); it('should fail when an asynchronous error is thrown', (done) => { const itPromise = patchJasmineIt(); const barError = new Error('bar'); it('throws an async error', waitForAsync(inject([], () => setTimeout(() => { throw barError; }, 0)))); itPromise.then(() => done.fail('Expected test to fail, but it did not'), (err) => { expect(err).toEqual(barError); done(); }); restoreJasmineIt(); }); it('should fail when a returned promise is rejected', (done) => { const itPromise = patchJasmineIt(); it('should fail with an error from a promise', waitForAsync(inject([], () => { let reject: (error: any) => void = undefined!; const promise = new Promise((_, rej) => reject = rej); const p = promise.then(() => expect(1).toEqual(2)); reject('baz'); return p; }))); itPromise.then(() => done.fail('Expected test to fail, but it did not'), (err) => { expect(err.message).toEqual('Uncaught (in promise): baz'); done(); }); restoreJasmineIt(); }); describe('components', () => { let resourceLoaderGet: jasmine.Spy; beforeEach(() => { resourceLoaderGet = jasmine.createSpy('resourceLoaderGet') .and.returnValue(Promise.resolve('Hello world!')); TestBed.configureCompiler( {providers: [{provide: ResourceLoader, useValue: {get: resourceLoaderGet}}]}); }); it('should report an error for declared components with templateUrl which never call TestBed.compileComponents', () => { @Component({ selector: 'comp', templateUrl: '/base/angular/packages/platform-browser/test/static_assets/test.html', }) class InlineCompWithUrlTemplate { } expect(withModule( {declarations: [InlineCompWithUrlTemplate]}, () => TestBed.createComponent(InlineCompWithUrlTemplate))) .toThrowError( ivyEnabled ? 
`Component 'InlineCompWithUrlTemplate' is not resolved: - templateUrl: /base/angular/packages/platform-browser/test/static_assets/test.html Did you run and wait for 'resolveComponentResources()'?` : `This test module uses the component ${ stringify( InlineCompWithUrlTemplate)} which is using a "templateUrl" or "styleUrls", but they were never compiled. ` + `Please call "TestBed.compileComponents" before your test.`); }); }); modifiedInIvy(`Unknown property error thrown instead of logging a message`) .it('should error on unknown bound properties on custom elements by default', () => { @Component({template: '<some-element [someUnknownProp]="true"></some-element>'}) class ComponentUsingInvalidProperty { } expect( () => withModule( {declarations: [ComponentUsingInvalidProperty]}, () => TestBed.createComponent(ComponentUsingInvalidProperty))()) .toThrowError(/Can't bind to 'someUnknownProp'/); }); onlyInIvy(`Unknown property error logged instead of throwing`) .it('should error on unknown bound properties on custom elements by default', () => { @Component({template: '<div [someUnknownProp]="true"></div>'}) class ComponentUsingInvalidProperty { } const spy = spyOn(console, 'error'); withModule({declarations: [ComponentUsingInvalidProperty]}, () => { const fixture = TestBed.createComponent(ComponentUsingInvalidProperty); fixture.detectChanges(); })(); expect(spy.calls.mostRecent().args[0]).toMatch(/Can't bind to 'someUnknownProp'/); }); }); describe('creating components', () => { beforeEach(() => { TestBed.configureTestingModule({ declarations: [ ChildComp, MyIfComp, ChildChildComp, ParentComp, TestProvidersComp, TestViewProvidersComp, ] }); }); it('should instantiate a component with valid DOM', waitForAsync(() => { const fixture = TestBed.createComponent(ChildComp); fixture.detectChanges(); expect(fixture.nativeElement).toHaveText('Original Child'); })); it('should allow changing members of the component', waitForAsync(() => { const componentFixture = 
TestBed.createComponent(MyIfComp); componentFixture.detectChanges(); expect(componentFixture.nativeElement).toHaveText('MyIf()'); componentFixture.componentInstance.showMore = true; componentFixture.detectChanges(); expect(componentFixture.nativeElement).toHaveText('MyIf(More)'); })); it('should override a template', waitForAsync(() => { TestBed.overrideComponent(ChildComp, {set: {template: '<span>Mock</span>'}}); const componentFixture = TestBed.createComponent(ChildComp); componentFixture.detectChanges(); expect(componentFixture.nativeElement).toHaveText('Mock'); })); it('should override a provider', waitForAsync(() => { TestBed.overrideComponent( TestProvidersComp, {set: {providers: [{provide: FancyService, useClass: MockFancyService}]}}); const componentFixture = TestBed.createComponent(TestProvidersComp); componentFixture.detectChanges(); expect(componentFixture.nativeElement).toHaveText('injected value: mocked out value'); })); it('should override a viewProvider', waitForAsync(() => { TestBed.overrideComponent( TestViewProvidersComp, {set: {viewProviders: [{provide: FancyService, useClass: MockFancyService}]}}); const componentFixture = TestBed.createComponent(TestViewProvidersComp); componentFixture.detectChanges(); expect(componentFixture.nativeElement).toHaveText('injected value: mocked out value'); })); }); describe('using alternate components', () => { beforeEach(() => { TestBed.configureTestingModule({ declarations: [ MockChildComp, ParentComp, ] }); }); it('should override component dependencies', waitForAsync(() => { const componentFixture = TestBed.createComponent(ParentComp); componentFixture.detectChanges(); expect(componentFixture.nativeElement).toHaveText('Parent(Mock)'); })); }); describe('calling override methods after TestBed initialization', () => { const getExpectedErrorMessage = (methodName: string, methodDescription: string) => `Cannot ${ methodDescription} when the test module has already been instantiated. 
Make sure you are not using \`inject\` before \`${ methodName}\`.`; it('should throw if TestBed.overrideProvider is called after TestBed initialization', () => { TestBed.inject(Injector); expect(() => TestBed.overrideProvider(aTok, { useValue: 'mockValue' })).toThrowError(getExpectedErrorMessage('overrideProvider', 'override provider')); }); it('should throw if TestBed.overrideModule is called after TestBed initialization', () => { @NgModule() class MyModule { } TestBed.inject(Injector); expect(() => TestBed.overrideModule(MyModule, {})) .toThrowError(getExpectedErrorMessage('overrideModule', 'override module metadata')); }); it('should throw if TestBed.overridePipe is called after TestBed initialization', () => { @Pipe({name: 'myPipe'}) class MyPipe { transform(value: any) { return value; } } TestBed.inject(Injector); expect(() => TestBed.overridePipe(MyPipe, {})) .toThrowError(getExpectedErrorMessage('overridePipe', 'override pipe metadata')); }); it('should throw if TestBed.overrideDirective is called after TestBed initialization', () => { @Directive() class MyDirective { } TestBed.inject(Injector); expect(() => TestBed.overrideDirective(MyDirective, {})) .toThrowError( getExpectedErrorMessage('overrideDirective', 'override directive metadata')); }); it('should throw if TestBed.overrideTemplateUsingTestingModule is called after TestBed initialization', () => { @Component({selector: 'comp', template: 'a'}) class MyComponent { } TestBed.inject(Injector); expect(() => TestBed.overrideTemplateUsingTestingModule(MyComponent, 'b')) .toThrowError( /Cannot override template when the test module has already been instantiated/); }); }); }); }<|fim▁end|>
}) class MyModule { }
<|file_name|>misc.rs<|end_file_name|><|fim▁begin|>use rules::Rule; pub struct OwnLineBrace { } impl OwnLineBrace { pub fn new() -> OwnLineBrace { OwnLineBrace { } } } impl Rule for OwnLineBrace { fn verify(&self, filename: &str, content: &str) -> Vec<String> { let mut errors = Vec::new(); let mut line_number: usize = 1; for line in content.lines() { if line.contains("{") && !(line.trim().len() == 1 || (line.chars().filter(|c| !c.is_whitespace()).count() == 2 && line.trim_right().ends_with("\\"))) { errors.push(format!("[{}:{}]Opening brace must be on their own line.", filename, line_number)); } else if line.contains("}") && !(line.trim().len() == 1 || (line.chars().filter(|c| !c.is_whitespace()).count() == 2 && line.trim_right().ends_with("\\"))) { if !line.ends_with(";") { errors.push(format!("[{}:{}]Closing brace must be on their own line.", filename, line_number)); } } line_number += 1; } return errors; } } pub struct MultiLinesComment { } impl MultiLinesComment { pub fn new() -> MultiLinesComment { MultiLinesComment { } } } impl Rule for MultiLinesComment { fn verify(&self, filename: &str, content: &str) -> Vec<String> { let mut errors = Vec::new(); let mut line_number: usize = 1; let mut in_comment = false; for line in content.lines() { if line.contains("*/") && !line.contains("/*") { //Multi lines comment if !in_comment { errors.push(format!("[{}:{}]Unexpected comment end delimiter.", filename, line_number)); } if line.chars().filter(|c| !c.is_whitespace()).count() != 2 { errors.push(format!("[{}:{}]Comment end delimiter must appear on its own line.", filename, line_number)); } in_comment = false; } if in_comment { if !line.trim_left().starts_with("**") { errors.push(format!("[{}:{}]Comment intermediary line must start with '**'.", filename, line_number)); } } if line.contains("/*") && !line.contains("*/") { //Multi lines comment if in_comment { errors.push(format!("[{}:{}]Comments can't be nested.", filename, line_number)); } let nb_non_white_space = 
line.chars().filter(|c| !c.is_whitespace()).count(); if !(nb_non_white_space == 2 || (nb_non_white_space == 3 && line.contains("/**"))) { errors.push(format!("[{}:{}]Comment start delimiter must appear on its own line.", filename, line_number)); } in_comment = true; } line_number += 1; } if in_comment { errors.push(format!("[{}:{}]Expected comment end delimiter.", filename, line_number)); } return errors; } } pub struct Goto { } impl Goto { pub fn new() -> Goto { Goto { } } } impl Rule for Goto { fn verify(&self, filename: &str, content: &str) -> Vec<String> { let mut errors = Vec::new(); let mut line_number: usize = 1; for line in content.lines() { if line.contains("goto") { errors.push(format!("[{}:{}]Goto statement unauthorized.", filename, line_number)); } line_number += 1; } return errors; } } pub struct Enum { } impl Enum { pub fn new() -> Enum { Enum { } } } impl Rule for Enum { fn verify(&self, filename: &str, content: &str) -> Vec<String> { let mut errors = Vec::new(); let mut line_number: usize = 1; let mut in_enum = false; for line in content.lines() { if in_enum { if line.contains("}") { in_enum = false;<|fim▁hole|> if line.contains(",") && line.trim().len() > 1 &&//To prevent having only a comma on a line. !line.split(",").last().unwrap().trim().is_empty() { errors.push(format!("[{}:{}]Enum values must be on their own line.", filename, line_number)); } } //Start_with '**' -> multilines comments intermediary lines. 
if line.contains("enum") && !line.trim_left().starts_with("//") && !line.trim_left().starts_with("**") { in_enum = true; } line_number += 1; } return errors; } } pub struct StaticVariable { } impl StaticVariable { pub fn new() -> StaticVariable { StaticVariable { } } } impl Rule for StaticVariable { fn verify(&self, filename: &str, content: &str) -> Vec<String> { let mut errors = Vec::new(); let mut line_number: usize = 1; for line in content.lines() { if line.trim_left().starts_with("static ") && (!line.contains("(") || line.contains("=")) && !line.contains("static const") { errors.push(format!("[{}:{}]Static variable must be const.", filename, line_number)); } line_number += 1; } return errors; } } #[cfg(test)] mod tests { use super::*; #[test] fn own_line_brace() { let own_line_brace = OwnLineBrace::new(); assert_eq!(own_line_brace.verify("", "{\nsome text\n}\n").len(), 0); assert_eq!(own_line_brace.verify("", " { ").len(), 0); assert_eq!(own_line_brace.verify("", "};").len(), 0); assert_eq!(own_line_brace.verify("", "} something;").len(), 0);//End of the typedef struct of do while loop. 
assert_ne!(own_line_brace.verify("", "{}\n").len(), 0); assert_ne!(own_line_brace.verify("", "}}").len(), 0); assert_ne!(own_line_brace.verify("", "{{").len(), 0); assert_eq!(own_line_brace.verify("", "{some").len(), 1); assert_eq!(own_line_brace.verify("", ";}").len(), 1); } #[test] fn multi_lines_comment() { let multi_lines_comment = MultiLinesComment::new(); assert_eq!(multi_lines_comment.verify("", "zdnkcndccc").len(), 0); assert_eq!(multi_lines_comment.verify("", "//zdnkcndccc").len(), 0); assert_eq!(multi_lines_comment.verify("", "/*zdnkcndccc*/").len(), 0); assert_eq!(multi_lines_comment.verify("", "/*\n**zdnkcn\n*/").len(), 0); assert_ne!(multi_lines_comment.verify("", "/*zdnkcn\ndccc*/").len(), 0); assert_ne!(multi_lines_comment.verify("", "/*\nzdnkcn\n*/").len(), 0); assert_ne!(multi_lines_comment.verify("", "/*\nav**zdnkcn\n*/").len(), 0); assert_ne!(multi_lines_comment.verify("", "/** *\n**zdnkcn\n*/").len(), 0); assert_ne!(multi_lines_comment.verify("", "/**\n**zdnkcn\n*/*").len(), 0); } #[test] fn goto() { let goto = Goto::new(); assert_eq!(goto.verify("", "zdnkcndccc").len(), 0); assert_eq!(goto.verify("", "go\nto\ngo\nto\n").len(), 0); assert_eq!(goto.verify("", "goto").len(), 1); assert_eq!(goto.verify("", "goto\nadezf\nvvrgotoded").len(), 2); } #[test] fn enum_rule() { let enum_rule = Enum::new(); assert_eq!(enum_rule.verify("", "enum{}").len(), 0); assert_eq!(enum_rule.verify("", "enum\n{\n}A").len(), 0); assert_eq!(enum_rule.verify("", "enum\n{\nVALUE\n}").len(), 0); assert_eq!(enum_rule.verify("", "enum\n{\nVALUE, \t\nVALUE2\n}").len(), 0); assert_eq!(enum_rule.verify("", "enum\n{\nvalue\n}").len(), 1); assert_eq!(enum_rule.verify("", "enum\n{\nValue\n}").len(), 1); assert_eq!(enum_rule.verify("", "enum\n{\nVALUE,VALUE2\n}").len(), 1); } #[test] fn static_variable() { let static_variable = StaticVariable::new(); assert_eq!(static_variable.verify("", "something;").len(), 0); assert_eq!(static_variable.verify("", "const something;").len(), 0); 
assert_eq!(static_variable.verify("", "static const something;").len(), 0); assert_eq!(static_variable.verify("", "static function(parameter...").len(), 0); assert_eq!(static_variable.verify("", "int some_static_name;").len(), 0); assert_eq!(static_variable.verify("", "//something static something").len(), 0); assert_eq!(static_variable.verify("", "static something;").len(), 1); assert_eq!(static_variable.verify("", "static var = function(parameter);").len(), 1); } }<|fim▁end|>
} if in_enum && line.to_uppercase() != line { errors.push(format!("[{}:{}]Enum values must be entirely capitalized. Expected '{}' got '{}'", filename, line_number, line.to_uppercase(), line)); }
<|file_name|>add_parameterized_profile.py<|end_file_name|><|fim▁begin|># IfcOpenShell - IFC toolkit and geometry engine # Copyright (C) 2021 Dion Moult <[email protected]> # # This file is part of IfcOpenShell. #<|fim▁hole|># IfcOpenShell is free software: you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # IfcOpenShell is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License # along with IfcOpenShell. If not, see <http://www.gnu.org/licenses/>. class Usecase: def __init__(self, file, **settings): self.file = file self.settings = {"ifc_class": None} for key, value in settings.items(): self.settings[key] = value def execute(self): return self.file.create_entity(self.settings["ifc_class"])<|fim▁end|>
<|file_name|>blend.rs<|end_file_name|><|fim▁begin|>extern crate noise; use noise::{utils::*, Blend, Fbm, Perlin, RidgedMulti}; fn main() { let perlin = Perlin::new(); let ridged = RidgedMulti::new(); let fbm = Fbm::new(); let blend = Blend::new(&perlin, &ridged, &fbm); PlaneMapBuilder::new(&blend) .build() .write_to_file("blend.png");<|fim▁hole|><|fim▁end|>
}
<|file_name|>Languages.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # Copyright (c) 2010 SubDownloader Developers - See COPYING - GPLv3 import languages.autodetect_lang as autodetect_lang import re import os.path import logging log = logging.getLogger("subdownloader.languages.Languages") import __builtin__ __builtin__._ = lambda x : x LANGUAGES = [{'locale':'sq', 'ISO639': 'sq', 'SubLanguageID': 'alb', 'LanguageName': _('Albanian')}, {'locale':'ar', 'ISO639': 'ar', 'SubLanguageID': 'ara', 'LanguageName': _('Arabic')}, {'locale':'hy', 'ISO639': 'hy', 'SubLanguageID': 'arm', 'LanguageName': _('Armenian')}, {'locale':'ms', 'ISO639': 'ms', 'SubLanguageID': 'may', 'LanguageName': _('Malay')}, {'locale':'bs', 'ISO639': 'bs', 'SubLanguageID': 'bos', 'LanguageName': _('Bosnian')}, {'locale':'bg', 'ISO639': 'bg', 'SubLanguageID': 'bul', 'LanguageName': _('Bulgarian')}, {'locale':'ca', 'ISO639': 'ca', 'SubLanguageID': 'cat', 'LanguageName': _('Catalan')}, {'locale':'eu', 'ISO639': 'eu', 'SubLanguageID': 'eus', 'LanguageName': _('Basque')}, {'locale':'zh_CN', 'ISO639': 'zh', 'SubLanguageID': 'chi', 'LanguageName': _('Chinese (China)')}, {'locale':'hr', 'ISO639': 'hr', 'SubLanguageID': 'hrv', 'LanguageName': _('Croatian')}, {'locale':'cs', 'ISO639': 'cs', 'SubLanguageID': 'cze', 'LanguageName': _('Czech')}, {'locale':'da', 'ISO639': 'da', 'SubLanguageID': 'dan', 'LanguageName': _('Danish')}, {'locale':'nl', 'ISO639': 'nl', 'SubLanguageID': 'dut', 'LanguageName': _('Dutch')}, {'locale':'en', 'ISO639': 'en', 'SubLanguageID': 'eng', 'LanguageName': _('English (US)')}, {'locale':'en_GB', 'ISO639': 'en', 'SubLanguageID': 'bre', 'LanguageName': _('English (UK)')}, {'locale':'eo', 'ISO639': 'eo', 'SubLanguageID': 'epo', 'LanguageName': _('Esperanto')}, {'locale':'et', 'ISO639': 'et', 'SubLanguageID': 'est', 'LanguageName': _('Estonian')}, {'locale':'fi', 'ISO639': 'fi', 'SubLanguageID': 'fin', 'LanguageName': _('Finnish')}, {'locale':'fr', 'ISO639': 'fr', 'SubLanguageID': 
'fre', 'LanguageName': _('French')},<|fim▁hole|> {'locale':'he', 'ISO639': 'he', 'SubLanguageID': 'heb', 'LanguageName': _('Hebrew')}, {'locale':'hu', 'ISO639': 'hu', 'SubLanguageID': 'hun', 'LanguageName': _('Hungarian')}, {'locale':'id', 'ISO639': 'id', 'SubLanguageID': 'ind', 'LanguageName': _('Indonesian')}, {'locale':'it', 'ISO639': 'it', 'SubLanguageID': 'ita', 'LanguageName': _('Italian')}, {'locale':'ja', 'ISO639': 'ja', 'SubLanguageID': 'jpn', 'LanguageName': _('Japanese')}, {'locale':'kk', 'ISO639': 'kk', 'SubLanguageID': 'kaz', 'LanguageName': _('Kazakh')}, {'locale':'ko', 'ISO639': 'ko', 'SubLanguageID': 'kor', 'LanguageName': _('Korean')}, {'locale':'lv', 'ISO639': 'lv', 'SubLanguageID': 'lav', 'LanguageName': _('Latvian')}, {'locale':'lt', 'ISO639': 'lt', 'SubLanguageID': 'lit', 'LanguageName': _('Lithuanian')}, {'locale':'lb', 'ISO639': 'lb', 'SubLanguageID': 'ltz', 'LanguageName': _('Luxembourgish')}, {'locale':'mk', 'ISO639': 'mk', 'SubLanguageID': 'mac', 'LanguageName': _('Macedonian')}, {'locale':'no', 'ISO639': 'no', 'SubLanguageID': 'nor', 'LanguageName': _('Norwegian')}, {'locale':'fa', 'ISO639': 'fa', 'SubLanguageID': 'per', 'LanguageName': _('Persian')}, {'locale':'pl', 'ISO639': 'pl', 'SubLanguageID': 'pol', 'LanguageName': _('Polish')}, {'locale':'pt_PT', 'ISO639': 'pt', 'SubLanguageID': 'por', 'LanguageName': _('Portuguese (Portugal)')}, {'locale':'pt_BR', 'ISO639': 'pb', 'SubLanguageID': 'pob', 'LanguageName': _('Portuguese (Brazil)')}, {'locale':'ro', 'ISO639': 'ro', 'SubLanguageID': 'rum', 'LanguageName': _('Romanian')}, {'locale':'ru', 'ISO639': 'ru', 'SubLanguageID': 'rus', 'LanguageName': _('Russian')}, {'locale':'sr', 'ISO639': 'sr', 'SubLanguageID': 'scc', 'LanguageName': _('Serbian')}, {'locale':'sk', 'ISO639': 'sk', 'SubLanguageID': 'slo', 'LanguageName': _('Slovak')}, {'locale':'sl', 'ISO639': 'sl', 'SubLanguageID': 'slv', 'LanguageName': _('Slovenian')}, {'locale':'es_ES', 'ISO639': 'es', 'SubLanguageID': 'spa', 
'LanguageName': _('Spanish (Spain)')}, {'locale':'sv', 'ISO639': 'sv', 'SubLanguageID': 'swe', 'LanguageName': _('Swedish')}, {'locale':'th', 'ISO639': 'th', 'SubLanguageID': 'tha', 'LanguageName': _('Thai')}, {'locale':'tr', 'ISO639': 'tr', 'SubLanguageID': 'tur', 'LanguageName': _('Turkish')}, {'locale':'uk', 'ISO639': 'uk', 'SubLanguageID': 'ukr', 'LanguageName': _('Ukrainian')}, {'locale':'vi', 'ISO639': 'vi', 'SubLanguageID': 'vie', 'LanguageName': _('Vietnamese')}] def ListAll_xx(): temp = [] for lang in LANGUAGES: temp.append(lang['ISO639']) return temp def ListAll_xxx(): temp = [] for lang in LANGUAGES: temp.append(lang['SubLanguageID']) return temp def ListAll_locale(): temp = [] for lang in LANGUAGES: temp.append(lang['locale']) return temp def ListAll_names(): temp = [] for lang in LANGUAGES: temp.append(lang['LanguageName']) return temp def xx2xxx(xx): for lang in LANGUAGES: if lang['ISO639'] == xx: return lang['SubLanguageID'] def xxx2xx(xxx): for lang in LANGUAGES: if lang['SubLanguageID'] == xxx: return lang['ISO639'] def xxx2name(xxx): for lang in LANGUAGES: if lang['SubLanguageID'] == xxx: return lang['LanguageName'] def locale2name(locale): for lang in LANGUAGES: if lang['locale'] == locale: return lang['LanguageName'] def xx2name(xx): for lang in LANGUAGES: if lang['ISO639'] == xx: return lang['LanguageName'] def name2xx(name): for lang in LANGUAGES: if lang['LanguageName'].lower() == name.lower(): return lang['ISO639'] def name2xxx(name): for lang in LANGUAGES: if lang['LanguageName'].lower() == name.lower(): return lang['SubLanguageID'] def CleanTagsFile(text): p = re.compile( '<.*?>') return p.sub('',text)<|fim▁end|>
{'locale':'gl', 'ISO639': 'gl', 'SubLanguageID': 'glg', 'LanguageName': _('Galician')}, {'locale':'ka', 'ISO639': 'ka', 'SubLanguageID': 'geo', 'LanguageName': _('Georgian')}, {'locale':'de', 'ISO639': 'de', 'SubLanguageID': 'ger', 'LanguageName': _('German')}, {'locale':'el', 'ISO639': 'el', 'SubLanguageID': 'ell', 'LanguageName': _('Greek')},
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from distutils.core import setup setup(<|fim▁hole|> name = 'ical_dict', packages = ['ical_dict'], version = '0.2', description = 'A Python library to convert an .ics file into a Dictionary object.', author = 'Jay Ravaliya', author_email = '[email protected]', url = 'https://github.com/jayrav13/ical_dict', download_url = 'https://github.com/jayrav13/ical_dict/tarball/0.2', keywords = ['calendar', 'ical', 'ics', 'json', 'dictionary', 'python'], classifiers = [], )<|fim▁end|>
<|file_name|>customizations_test.go<|end_file_name|><|fim▁begin|>package cognitoidentity_test import ( "testing" <|fim▁hole|> "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/awstesting/unit" "github.com/aws/aws-sdk-go/service/cognitoidentity" ) var svc = cognitoidentity.New(unit.Session) func TestUnsignedRequest_GetID(t *testing.T) { req, _ := svc.GetIdRequest(&cognitoidentity.GetIdInput{ IdentityPoolId: aws.String("IdentityPoolId"), }) err := req.Sign() if err != nil { t.Errorf("expected no error, but received %v", err) } if e, a := "", req.HTTPRequest.Header.Get("Authorization"); e != a { t.Errorf("expected empty value '%v', but received, %v", e, a) } } func TestUnsignedRequest_GetOpenIDToken(t *testing.T) { req, _ := svc.GetOpenIdTokenRequest(&cognitoidentity.GetOpenIdTokenInput{ IdentityId: aws.String("IdentityId"), }) err := req.Sign() if err != nil { t.Errorf("expected no error, but received %v", err) } if e, a := "", req.HTTPRequest.Header.Get("Authorization"); e != a { t.Errorf("expected empty value '%v', but received, %v", e, a) } } func TestUnsignedRequest_GetCredentialsForIdentity(t *testing.T) { req, _ := svc.GetCredentialsForIdentityRequest(&cognitoidentity.GetCredentialsForIdentityInput{ IdentityId: aws.String("IdentityId"), }) err := req.Sign() if err != nil { t.Errorf("expected no error, but received %v", err) } if e, a := "", req.HTTPRequest.Header.Get("Authorization"); e != a { t.Errorf("expected empty value '%v', but received, %v", e, a) } }<|fim▁end|>
<|file_name|>test_fields.py<|end_file_name|><|fim▁begin|>from rethinkengine.fields import * import unittest2 as unittest class PrimaryKeyFieldTestCase(unittest.TestCase): def test_default(self): f = ObjectIdField() self.assertEqual(f._default, None) with self.assertRaises(TypeError): ObjectIdField(default='') def test_required(self): with self.assertRaises(TypeError): ObjectIdField(required=False) def test_is_valid(self): f = ObjectIdField() self.assertTrue(f.is_valid('cdc14784-3327-492b-a1db-ad8a3b8abcef')) def test_too_short(self): f = ObjectIdField() self.assertFalse(f.is_valid('cdc14784-3327-492b-a1db-ad8a3b8abce')) def test_too_long(self): f = ObjectIdField() self.assertFalse(f.is_valid('cdc14784-3327-492b-a1db-ad8a3b8abcefa')) def test_wrong_chars(self): f = ObjectIdField() self.assertFalse(f.is_valid('zzzzzzzz-3327-492b-a1db-ad8a3b8abcef')) def test_wrong_type(self): f = ObjectIdField() self.assertFalse(f.is_valid(123)) class StringFieldTestCase(unittest.TestCase): def test_default(self): f = StringField() self.assertEqual(f._default, None) f = StringField(default='foo') self.assertEqual(f._default, 'foo') def test_none(self): f = StringField(required=False) self.assertTrue(f.is_valid(None))<|fim▁hole|> self.assertFalse(f.is_valid(None)) def test_is_valid(self): f = StringField() self.assertTrue(f.is_valid('foo')) self.assertTrue(f.is_valid('')) def test_wrong_type(self): f = StringField() self.assertFalse(f.is_valid(123)) class IntegerFieldTestCase(unittest.TestCase): def test_default(self): f = IntegerField() self.assertEqual(f._default, None) f = IntegerField(default=42) self.assertEqual(f._default, 42) def test_none(self): f = IntegerField(required=False) self.assertTrue(f.is_valid(None)) f = IntegerField(required=True) self.assertFalse(f.is_valid(None)) def test_is_valid(self): f = IntegerField() self.assertTrue(f.is_valid(123)) def test_wrong_type(self): f = IntegerField() self.assertFalse(f.is_valid('foo')) class FloatFieldTestCase(unittest.TestCase): 
def test_default(self): f = FloatField() self.assertEqual(f._default, None) f = FloatField(default=4.2) self.assertEqual(f._default, 4.2) def test_none(self): f = FloatField(required=False) self.assertTrue(f.is_valid(None)) f = FloatField(required=True) self.assertFalse(f.is_valid(None)) def test_is_valid(self): f = FloatField() self.assertTrue(f.is_valid(123.456)) def test_wrong_type(self): f = FloatField() self.assertFalse(f.is_valid('foo')) self.assertFalse(f.is_valid(0)) class ListFieldTestCase(unittest.TestCase): def test_default(self): f = ListField() self.assertEqual(f._default, None) f = ListField(default=[1, 2, 3]) self.assertEqual(f._default, [1, 2, 3]) def test_none(self): f = ListField(required=False) self.assertTrue(f.is_valid(None)) f = ListField(required=True) self.assertFalse(f.is_valid(None)) def test_is_valid(self): f = ListField() self.assertTrue(f.is_valid([1, 2, 3])) def test_is_valid_tuple(self): f = ListField() self.assertTrue(f.is_valid((1, 2, 3))) def test_wrong_type(self): f = ListField() self.assertFalse(f.is_valid('foo')) def test_element_type_string(self): f = ListField(StringField) self.assertEqual(f._element_type, StringField) def test_element_type_invalid(self): with self.assertRaises(TypeError): f = ListField(str) def test_element_type_is_valid(self): f = ListField(StringField) self.assertTrue(f.is_valid(['foo'])) def test_element_type_is_invalid(self): f = ListField(StringField) self.assertFalse(f.is_valid([42])) class DictFieldTestCase(unittest.TestCase): def test_default(self): f = DictField() self.assertEqual(f._default, None) f = DictField(default={'foo': 'bar'}) self.assertEqual(f._default, {'foo': 'bar'}) def test_none(self): f = DictField(required=False) self.assertTrue(f.is_valid(None)) f = DictField(required=True) self.assertFalse(f.is_valid(None)) def test_is_valid(self): f = DictField() self.assertTrue(f.is_valid({})) self.assertTrue(f.is_valid({'foo': 1, 'bar': 2})) def test_wrong_type(self): f = DictField() 
self.assertFalse(f.is_valid('foo')) class BooleanFieldTestCase(unittest.TestCase): def test_default(self): f = BooleanField() self.assertEqual(f._default, None) f = BooleanField(default=True) self.assertEqual(f._default, True) def test_none(self): f = BooleanField(required=False) self.assertTrue(f.is_valid(None)) f = BooleanField(required=True) self.assertFalse(f.is_valid(None)) def test_is_valid(self): f = BooleanField() self.assertTrue(f.is_valid(False)) self.assertTrue(f.is_valid(True)) def test_wrong_type(self): f = BooleanField() self.assertFalse(f.is_valid('foo'))<|fim▁end|>
f = StringField(required=True)
<|file_name|>libcore_io_Posix.cpp<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2011 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #define LOG_TAG "Posix" #include "AsynchronousCloseMonitor.h" #include "cutils/log.h" #include "ExecStrings.h" #include "JNIHelp.h" #include "JniConstants.h" #include "JniException.h" #include "NetworkUtilities.h" #include "Portability.h" #include "readlink.h" #include "../../bionic/libc/dns/include/resolv_netid.h" // For android_getaddrinfofornet. 
#include "ScopedBytes.h" #include "ScopedLocalRef.h" #include "ScopedPrimitiveArray.h" #include "ScopedUtfChars.h" #include "toStringArray.h" #include "UniquePtr.h" #include <arpa/inet.h> #include <errno.h> #include <fcntl.h> #include <net/if.h> #include <netdb.h> #include <netinet/in.h> #include <poll.h> #include <pwd.h> #include <signal.h> #include <stdlib.h> #include <sys/ioctl.h> #include <sys/mman.h> #ifndef __APPLE__ #include <sys/prctl.h> #endif #include <sys/socket.h> #include <sys/stat.h> #ifdef __APPLE__ #include <sys/statvfs.h> #endif #include <sys/syscall.h> #include <sys/time.h> #include <sys/types.h> #include <sys/uio.h> #include <sys/utsname.h> #include <sys/wait.h> #include <termios.h> #include <unistd.h> #ifndef __unused #define __unused __attribute__((__unused__)) #endif #define TO_JAVA_STRING(NAME, EXP) \ jstring NAME = env->NewStringUTF(EXP); \ if (NAME == NULL) return NULL; struct addrinfo_deleter { void operator()(addrinfo* p) const { if (p != NULL) { // bionic's freeaddrinfo(3) crashes when passed NULL. freeaddrinfo(p); } } }; /** * Used to retry networking system calls that can be interrupted with a signal. Unlike * TEMP_FAILURE_RETRY, this also handles the case where * AsynchronousCloseMonitor::signalBlockedThreads(fd) is used to signal a close() or * Thread.interrupt(). Other signals that result in an EINTR result are ignored and the system call * is retried. * * Returns the result of the system call though a Java exception will be pending if the result is * -1: a SocketException if signaled via AsynchronousCloseMonitor, or ErrnoException for other * failures. */ #define NET_FAILURE_RETRY(jni_env, return_type, syscall_name, java_fd, ...) 
({ \ return_type _rc = -1; \ do { \ bool _wasSignaled; \ int _syscallErrno; \ { \ int _fd = jniGetFDFromFileDescriptor(jni_env, java_fd); \ AsynchronousCloseMonitor _monitor(_fd); \ _rc = syscall_name(_fd, __VA_ARGS__); \ _syscallErrno = errno; \ _wasSignaled = _monitor.wasSignaled(); \ } \ if (_wasSignaled) { \ jniThrowException(jni_env, "java/net/SocketException", "Socket closed"); \ _rc = -1; \ break; \ } \ if (_rc == -1 && _syscallErrno != EINTR) { \ /* TODO: with a format string we could show the arguments too, like strace(1). */ \ throwErrnoException(jni_env, # syscall_name); \ break; \ } \ } while (_rc == -1); /* _syscallErrno == EINTR && !_wasSignaled */ \ _rc; }) /** * Used to retry system calls that can be interrupted with a signal. Unlike TEMP_FAILURE_RETRY, this * also handles the case where AsynchronousCloseMonitor::signalBlockedThreads(fd) is used to signal * a close() or Thread.interrupt(). Other signals that result in an EINTR result are ignored and the * system call is retried. * * Returns the result of the system call though a Java exception will be pending if the result is * -1: an IOException if the file descriptor is already closed, a InterruptedIOException if signaled * via AsynchronousCloseMonitor, or ErrnoException for other failures. */ #define IO_FAILURE_RETRY(jni_env, return_type, syscall_name, java_fd, ...) ({ \ return_type _rc = -1; \ do { \ bool _wasSignaled; \ int _syscallErrno; \ { \ int _fd = jniGetFDFromFileDescriptor(jni_env, java_fd); \ AsynchronousCloseMonitor _monitor(_fd); \ _rc = syscall_name(_fd, __VA_ARGS__); \ _syscallErrno = errno; \ _wasSignaled = _monitor.wasSignaled(); \ } \ if (_wasSignaled) { \ jniThrowException(jni_env, "java/io/InterruptedIOException", # syscall_name " interrupted"); \ _rc = -1; \ break; \ } \ if (_rc == -1 && _syscallErrno != EINTR) { \ /* TODO: with a format string we could show the arguments too, like strace(1). 
*/ \ throwErrnoException(jni_env, # syscall_name); \ break; \ } \ } while (_rc == -1); /* && _syscallErrno == EINTR && !_wasSignaled */ \ _rc; }) static void throwException(JNIEnv* env, jclass exceptionClass, jmethodID ctor3, jmethodID ctor2, const char* functionName, int error) { jthrowable cause = NULL; if (env->ExceptionCheck()) { cause = env->ExceptionOccurred(); env->ExceptionClear(); } ScopedLocalRef<jstring> detailMessage(env, env->NewStringUTF(functionName)); if (detailMessage.get() == NULL) { // Not really much we can do here. We're probably dead in the water, // but let's try to stumble on... env->ExceptionClear(); } jobject exception; if (cause != NULL) { exception = env->NewObject(exceptionClass, ctor3, detailMessage.get(), error, cause); } else { exception = env->NewObject(exceptionClass, ctor2, detailMessage.get(), error); } env->Throw(reinterpret_cast<jthrowable>(exception)); } static void throwErrnoException(JNIEnv* env, const char* functionName) { int error = errno; static jmethodID ctor3 = env->GetMethodID(JniConstants::errnoExceptionClass, "<init>", "(Ljava/lang/String;ILjava/lang/Throwable;)V"); static jmethodID ctor2 = env->GetMethodID(JniConstants::errnoExceptionClass, "<init>", "(Ljava/lang/String;I)V"); throwException(env, JniConstants::errnoExceptionClass, ctor3, ctor2, functionName, error); } static void throwGaiException(JNIEnv* env, const char* functionName, int error) { // Cache the methods ids before we throw, so we don't call GetMethodID with a pending exception. static jmethodID ctor3 = env->GetMethodID(JniConstants::gaiExceptionClass, "<init>", "(Ljava/lang/String;ILjava/lang/Throwable;)V"); static jmethodID ctor2 = env->GetMethodID(JniConstants::gaiExceptionClass, "<init>", "(Ljava/lang/String;I)V"); if (errno != 0) { // EAI_SYSTEM should mean "look at errno instead", but both glibc and bionic seem to // mess this up. 
In particular, if you don't have INTERNET permission, errno will be EACCES // but you'll get EAI_NONAME or EAI_NODATA. So we want our GaiException to have a // potentially-relevant ErrnoException as its cause even if error != EAI_SYSTEM. // http://code.google.com/p/android/issues/detail?id=15722 throwErrnoException(env, functionName); // Deliberately fall through to throw another exception... } throwException(env, JniConstants::gaiExceptionClass, ctor3, ctor2, functionName, error); } template <typename rc_t> static rc_t throwIfMinusOne(JNIEnv* env, const char* name, rc_t rc) { if (rc == rc_t(-1)) { throwErrnoException(env, name); } return rc; } template <typename ScopedT> class IoVec { public: IoVec(JNIEnv* env, size_t bufferCount) : mEnv(env), mBufferCount(bufferCount) { } bool init(jobjectArray javaBuffers, jintArray javaOffsets, jintArray javaByteCounts) { // We can't delete our local references until after the I/O, so make sure we have room. if (mEnv->PushLocalFrame(mBufferCount + 16) < 0) { return false; } ScopedIntArrayRO offsets(mEnv, javaOffsets); if (offsets.get() == NULL) { return false; } ScopedIntArrayRO byteCounts(mEnv, javaByteCounts); if (byteCounts.get() == NULL) { return false; } // TODO: Linux actually has a 1024 buffer limit. glibc works around this, and we should too. // TODO: you can query the limit at runtime with sysconf(_SC_IOV_MAX). for (size_t i = 0; i < mBufferCount; ++i) { jobject buffer = mEnv->GetObjectArrayElement(javaBuffers, i); // We keep this local ref. 
mScopedBuffers.push_back(new ScopedT(mEnv, buffer)); jbyte* ptr = const_cast<jbyte*>(mScopedBuffers.back()->get()); if (ptr == NULL) { return false; } struct iovec iov; iov.iov_base = reinterpret_cast<void*>(ptr + offsets[i]); iov.iov_len = byteCounts[i]; mIoVec.push_back(iov); } return true; } ~IoVec() { for (size_t i = 0; i < mScopedBuffers.size(); ++i) { delete mScopedBuffers[i]; } mEnv->PopLocalFrame(NULL); } iovec* get() { return &mIoVec[0]; } size_t size() { return mBufferCount; } private: JNIEnv* mEnv; size_t mBufferCount; std::vector<iovec> mIoVec; std::vector<ScopedT*> mScopedBuffers; }; static jobject makeSocketAddress(JNIEnv* env, const sockaddr_storage& ss) { jint port; jobject inetAddress = sockaddrToInetAddress(env, ss, &port); if (inetAddress == NULL) { return NULL; } static jmethodID ctor = env->GetMethodID(JniConstants::inetSocketAddressClass, "<init>", "(Ljava/net/InetAddress;I)V"); return env->NewObject(JniConstants::inetSocketAddressClass, ctor, inetAddress, port); } static jobject makeStructPasswd(JNIEnv* env, const struct passwd& pw) { TO_JAVA_STRING(pw_name, pw.pw_name); TO_JAVA_STRING(pw_dir, pw.pw_dir); TO_JAVA_STRING(pw_shell, pw.pw_shell); static jmethodID ctor = env->GetMethodID(JniConstants::structPasswdClass, "<init>", "(Ljava/lang/String;IILjava/lang/String;Ljava/lang/String;)V"); return env->NewObject(JniConstants::structPasswdClass, ctor, pw_name, static_cast<jint>(pw.pw_uid), static_cast<jint>(pw.pw_gid), pw_dir, pw_shell); } static jobject makeStructStat(JNIEnv* env, const struct stat& sb) { static jmethodID ctor = env->GetMethodID(JniConstants::structStatClass, "<init>", "(JJIJIIJJJJJJJ)V"); return env->NewObject(JniConstants::structStatClass, ctor, static_cast<jlong>(sb.st_dev), static_cast<jlong>(sb.st_ino), static_cast<jint>(sb.st_mode), static_cast<jlong>(sb.st_nlink), static_cast<jint>(sb.st_uid), static_cast<jint>(sb.st_gid), static_cast<jlong>(sb.st_rdev), static_cast<jlong>(sb.st_size), static_cast<jlong>(sb.st_atime), 
static_cast<jlong>(sb.st_mtime), static_cast<jlong>(sb.st_ctime), static_cast<jlong>(sb.st_blksize), static_cast<jlong>(sb.st_blocks)); } static jobject makeStructStatVfs(JNIEnv* env, const struct statvfs& sb) { #if defined(__APPLE__) // Mac OS has no f_namelen field in struct statfs. jlong max_name_length = 255; // __DARWIN_MAXNAMLEN #else jlong max_name_length = static_cast<jlong>(sb.f_namemax); #endif static jmethodID ctor = env->GetMethodID(JniConstants::structStatVfsClass, "<init>", "(JJJJJJJJJJJ)V"); return env->NewObject(JniConstants::structStatVfsClass, ctor, static_cast<jlong>(sb.f_bsize), static_cast<jlong>(sb.f_frsize), static_cast<jlong>(sb.f_blocks), static_cast<jlong>(sb.f_bfree), static_cast<jlong>(sb.f_bavail), static_cast<jlong>(sb.f_files), static_cast<jlong>(sb.f_ffree), static_cast<jlong>(sb.f_favail), static_cast<jlong>(sb.f_fsid), static_cast<jlong>(sb.f_flag), max_name_length); } static jobject makeStructLinger(JNIEnv* env, const struct linger& l) { static jmethodID ctor = env->GetMethodID(JniConstants::structLingerClass, "<init>", "(II)V"); return env->NewObject(JniConstants::structLingerClass, ctor, l.l_onoff, l.l_linger); } static jobject makeStructTimeval(JNIEnv* env, const struct timeval& tv) { static jmethodID ctor = env->GetMethodID(JniConstants::structTimevalClass, "<init>", "(JJ)V"); return env->NewObject(JniConstants::structTimevalClass, ctor, static_cast<jlong>(tv.tv_sec), static_cast<jlong>(tv.tv_usec)); } static jobject makeStructUcred(JNIEnv* env, const struct ucred& u __unused) { #ifdef __APPLE__ jniThrowException(env, "java/lang/UnsupportedOperationException", "unimplemented support for ucred on a Mac"); return NULL; #else static jmethodID ctor = env->GetMethodID(JniConstants::structUcredClass, "<init>", "(III)V"); return env->NewObject(JniConstants::structUcredClass, ctor, u.pid, u.uid, u.gid); #endif } static jobject makeStructUtsname(JNIEnv* env, const struct utsname& buf) { TO_JAVA_STRING(sysname, buf.sysname); 
TO_JAVA_STRING(nodename, buf.nodename); TO_JAVA_STRING(release, buf.release); TO_JAVA_STRING(version, buf.version); TO_JAVA_STRING(machine, buf.machine); static jmethodID ctor = env->GetMethodID(JniConstants::structUtsnameClass, "<init>", "(Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;)V"); return env->NewObject(JniConstants::structUtsnameClass, ctor, sysname, nodename, release, version, machine); }; static bool fillIfreq(JNIEnv* env, jstring javaInterfaceName, struct ifreq& req) { ScopedUtfChars interfaceName(env, javaInterfaceName); if (interfaceName.c_str() == NULL) { return false; } memset(&req, 0, sizeof(req)); strncpy(req.ifr_name, interfaceName.c_str(), sizeof(req.ifr_name)); req.ifr_name[sizeof(req.ifr_name) - 1] = '\0'; return true; } static bool fillInetSocketAddress(JNIEnv* env, jint rc, jobject javaInetSocketAddress, const sockaddr_storage& ss) { if (rc == -1 || javaInetSocketAddress == NULL) { return true; } // Fill out the passed-in InetSocketAddress with the sender's IP address and port number. jint port; jobject sender = sockaddrToInetAddress(env, ss, &port); if (sender == NULL) { return false;<|fim▁hole|> static jfieldID portFid = env->GetFieldID(JniConstants::inetSocketAddressClass, "port", "I"); env->SetObjectField(javaInetSocketAddress, addressFid, sender); env->SetIntField(javaInetSocketAddress, portFid, port); return true; } static jobject doStat(JNIEnv* env, jstring javaPath, bool isLstat) { ScopedUtfChars path(env, javaPath); if (path.c_str() == NULL) { return NULL; } struct stat sb; int rc = isLstat ? TEMP_FAILURE_RETRY(lstat(path.c_str(), &sb)) : TEMP_FAILURE_RETRY(stat(path.c_str(), &sb)); if (rc == -1) { throwErrnoException(env, isLstat ? 
"lstat" : "stat"); return NULL; } return makeStructStat(env, sb); } static jobject doGetSockName(JNIEnv* env, jobject javaFd, bool is_sockname) { int fd = jniGetFDFromFileDescriptor(env, javaFd); sockaddr_storage ss; sockaddr* sa = reinterpret_cast<sockaddr*>(&ss); socklen_t byteCount = sizeof(ss); memset(&ss, 0, byteCount); int rc = is_sockname ? TEMP_FAILURE_RETRY(getsockname(fd, sa, &byteCount)) : TEMP_FAILURE_RETRY(getpeername(fd, sa, &byteCount)); if (rc == -1) { throwErrnoException(env, is_sockname ? "getsockname" : "getpeername"); return NULL; } return makeSocketAddress(env, ss); } class Passwd { public: Passwd(JNIEnv* env) : mEnv(env), mResult(NULL) { mBufferSize = sysconf(_SC_GETPW_R_SIZE_MAX); mBuffer.reset(new char[mBufferSize]); } jobject getpwnam(const char* name) { return process("getpwnam_r", getpwnam_r(name, &mPwd, mBuffer.get(), mBufferSize, &mResult)); } jobject getpwuid(uid_t uid) { return process("getpwuid_r", getpwuid_r(uid, &mPwd, mBuffer.get(), mBufferSize, &mResult)); } struct passwd* get() { return mResult; } private: jobject process(const char* syscall, int error) { if (mResult == NULL) { errno = error; throwErrnoException(mEnv, syscall); return NULL; } return makeStructPasswd(mEnv, *mResult); } JNIEnv* mEnv; UniquePtr<char[]> mBuffer; size_t mBufferSize; struct passwd mPwd; struct passwd* mResult; }; static jobject Posix_accept(JNIEnv* env, jobject, jobject javaFd, jobject javaInetSocketAddress) { sockaddr_storage ss; socklen_t sl = sizeof(ss); memset(&ss, 0, sizeof(ss)); sockaddr* peer = (javaInetSocketAddress != NULL) ? reinterpret_cast<sockaddr*>(&ss) : NULL; socklen_t* peerLength = (javaInetSocketAddress != NULL) ? &sl : 0; jint clientFd = NET_FAILURE_RETRY(env, int, accept, javaFd, peer, peerLength); if (clientFd == -1 || !fillInetSocketAddress(env, clientFd, javaInetSocketAddress, ss)) { close(clientFd); return NULL; } return (clientFd != -1) ? 
            // (tail of Posix_accept from the previous chunk line)
            jniCreateFileDescriptor(env, clientFd) : NULL;
}

// access(2). Throws ErrnoException on error; returns whether access succeeded.
static jboolean Posix_access(JNIEnv* env, jobject, jstring javaPath, jint mode) {
    ScopedUtfChars path(env, javaPath);
    if (path.c_str() == NULL) {
        return JNI_FALSE;
    }
    int rc = TEMP_FAILURE_RETRY(access(path.c_str(), mode));
    if (rc == -1) {
        throwErrnoException(env, "access");
    }
    return (rc == 0);
}

// bind(2).
static void Posix_bind(JNIEnv* env, jobject, jobject javaFd, jobject javaAddress, jint port) {
    sockaddr_storage ss;
    socklen_t sa_len;
    if (!inetAddressToSockaddr(env, javaAddress, port, ss, sa_len)) {
        return;
    }
    const sockaddr* sa = reinterpret_cast<const sockaddr*>(&ss);
    // We don't need the return value because we'll already have thrown.
    (void) NET_FAILURE_RETRY(env, int, bind, javaFd, sa, sa_len);
}

// chmod(2).
static void Posix_chmod(JNIEnv* env, jobject, jstring javaPath, jint mode) {
    ScopedUtfChars path(env, javaPath);
    if (path.c_str() == NULL) {
        return;
    }
    throwIfMinusOne(env, "chmod", TEMP_FAILURE_RETRY(chmod(path.c_str(), mode)));
}

// chown(2).
static void Posix_chown(JNIEnv* env, jobject, jstring javaPath, jint uid, jint gid) {
    ScopedUtfChars path(env, javaPath);
    if (path.c_str() == NULL) {
        return;
    }
    throwIfMinusOne(env, "chown", TEMP_FAILURE_RETRY(chown(path.c_str(), uid, gid)));
}

// close(2). Clears the Java-side fd before closing and never retries on EINTR.
static void Posix_close(JNIEnv* env, jobject, jobject javaFd) {
    // Get the FileDescriptor's 'fd' field and clear it.
    // We need to do this before we can throw an IOException (http://b/3222087).
    int fd = jniGetFDFromFileDescriptor(env, javaFd);
    jniSetFileDescriptorOfFD(env, javaFd, -1);
    // Even if close(2) fails with EINTR, the fd will have been closed.
    // Using TEMP_FAILURE_RETRY will either lead to EBADF or closing someone else's fd.
    // http://lkml.indiana.edu/hypermail/linux/kernel/0509.1/0877.html
    throwIfMinusOne(env, "close", close(fd));
}

// connect(2).
static void Posix_connect(JNIEnv* env, jobject, jobject javaFd, jobject javaAddress, jint port) {
    sockaddr_storage ss;
    socklen_t sa_len;
    if (!inetAddressToSockaddr(env, javaAddress, port, ss, sa_len)) {
        return;
    }
    const sockaddr* sa = reinterpret_cast<const sockaddr*>(&ss);
    // We don't need the return value because we'll already have thrown.
    (void) NET_FAILURE_RETRY(env, int, connect, javaFd, sa, sa_len);
}

// dup(2).
static jobject Posix_dup(JNIEnv* env, jobject, jobject javaOldFd) {
    int oldFd = jniGetFDFromFileDescriptor(env, javaOldFd);
    int newFd = throwIfMinusOne(env, "dup", TEMP_FAILURE_RETRY(dup(oldFd)));
    return (newFd != -1) ? jniCreateFileDescriptor(env, newFd) : NULL;
}

// dup2(2).
static jobject Posix_dup2(JNIEnv* env, jobject, jobject javaOldFd, jint newFd) {
    int oldFd = jniGetFDFromFileDescriptor(env, javaOldFd);
    int fd = throwIfMinusOne(env, "dup2", TEMP_FAILURE_RETRY(dup2(oldFd, newFd)));
    return (fd != -1) ? jniCreateFileDescriptor(env, fd) : NULL;
}

// Returns the process environment as a String[].
static jobjectArray Posix_environ(JNIEnv* env, jobject) {
    extern char** environ; // Standard, but not in any header file.
    return toStringArray(env, environ);
}

// execve(2). Only returns on failure, with an ErrnoException pending.
static void Posix_execve(JNIEnv* env, jobject, jstring javaFilename, jobjectArray javaArgv, jobjectArray javaEnvp) {
    ScopedUtfChars path(env, javaFilename);
    if (path.c_str() == NULL) {
        return;
    }
    ExecStrings argv(env, javaArgv);
    ExecStrings envp(env, javaEnvp);
    execve(path.c_str(), argv.get(), envp.get());
    throwErrnoException(env, "execve");
}

// execv(2). Only returns on failure, with an ErrnoException pending.
static void Posix_execv(JNIEnv* env, jobject, jstring javaFilename, jobjectArray javaArgv) {
    ScopedUtfChars path(env, javaFilename);
    if (path.c_str() == NULL) {
        return;
    }
    ExecStrings argv(env, javaArgv);
    execv(path.c_str(), argv.get());
    throwErrnoException(env, "execv");
}

// fchmod(2).
static void Posix_fchmod(JNIEnv* env, jobject, jobject javaFd, jint mode) {
    int fd = jniGetFDFromFileDescriptor(env, javaFd);
    throwIfMinusOne(env, "fchmod", TEMP_FAILURE_RETRY(fchmod(fd, mode)));
}

// fchown(2).
static void Posix_fchown(JNIEnv* env, jobject, jobject javaFd, jint uid, jint gid) {
    int fd = jniGetFDFromFileDescriptor(env, javaFd);
    throwIfMinusOne(env, "fchown", TEMP_FAILURE_RETRY(fchown(fd, uid, gid)));
}

// fcntl(2) with no argument.
static jint Posix_fcntlVoid(JNIEnv* env, jobject, jobject javaFd, jint cmd) {
    int fd = jniGetFDFromFileDescriptor(env, javaFd);
    return throwIfMinusOne(env, "fcntl", TEMP_FAILURE_RETRY(fcntl(fd, cmd)));
}

// fcntl(2) with an integer argument.
static jint Posix_fcntlLong(JNIEnv* env, jobject, jobject javaFd, jint cmd, jlong arg) {
    int fd = jniGetFDFromFileDescriptor(env, javaFd);
    return throwIfMinusOne(env, "fcntl", TEMP_FAILURE_RETRY(fcntl(fd, cmd, arg)));
}

// fcntl(2) with a struct flock argument; copies the StructFlock in, and the
// kernel's answer back out (continues on the next chunk line).
static jint Posix_fcntlFlock(JNIEnv* env, jobject, jobject javaFd, jint cmd, jobject javaFlock) {
    static jfieldID typeFid = env->GetFieldID(JniConstants::structFlockClass, "l_type", "S");
    static jfieldID whenceFid = env->GetFieldID(JniConstants::structFlockClass, "l_whence", "S");
    static jfieldID startFid = env->GetFieldID(JniConstants::structFlockClass, "l_start", "J");
    static jfieldID lenFid = env->GetFieldID(JniConstants::structFlockClass, "l_len", "J");
    static jfieldID pidFid =
            // (tail of Posix_fcntlFlock from the previous chunk line)
            env->GetFieldID(JniConstants::structFlockClass, "l_pid", "I");
    struct flock64 lock;
    memset(&lock, 0, sizeof(lock));
    lock.l_type = env->GetShortField(javaFlock, typeFid);
    lock.l_whence = env->GetShortField(javaFlock, whenceFid);
    lock.l_start = env->GetLongField(javaFlock, startFid);
    lock.l_len = env->GetLongField(javaFlock, lenFid);
    lock.l_pid = env->GetIntField(javaFlock, pidFid);
    int rc = IO_FAILURE_RETRY(env, int, fcntl, javaFd, cmd, &lock);
    if (rc != -1) {
        // Copy the possibly-updated lock back into the Java StructFlock.
        env->SetShortField(javaFlock, typeFid, lock.l_type);
        env->SetShortField(javaFlock, whenceFid, lock.l_whence);
        env->SetLongField(javaFlock, startFid, lock.l_start);
        env->SetLongField(javaFlock, lenFid, lock.l_len);
        env->SetIntField(javaFlock, pidFid, lock.l_pid);
    }
    return rc;
}

// fdatasync(2).
static void Posix_fdatasync(JNIEnv* env, jobject, jobject javaFd) {
    int fd = jniGetFDFromFileDescriptor(env, javaFd);
    throwIfMinusOne(env, "fdatasync", TEMP_FAILURE_RETRY(fdatasync(fd)));
}

// fstat(2).
static jobject Posix_fstat(JNIEnv* env, jobject, jobject javaFd) {
    int fd = jniGetFDFromFileDescriptor(env, javaFd);
    struct stat sb;
    int rc = TEMP_FAILURE_RETRY(fstat(fd, &sb));
    if (rc == -1) {
        throwErrnoException(env, "fstat");
        return NULL;
    }
    return makeStructStat(env, sb);
}

// fstatvfs(3).
static jobject Posix_fstatvfs(JNIEnv* env, jobject, jobject javaFd) {
    int fd = jniGetFDFromFileDescriptor(env, javaFd);
    struct statvfs sb;
    int rc = TEMP_FAILURE_RETRY(fstatvfs(fd, &sb));
    if (rc == -1) {
        throwErrnoException(env, "fstatvfs");
        return NULL;
    }
    return makeStructStatVfs(env, sb);
}

// fsync(2).
static void Posix_fsync(JNIEnv* env, jobject, jobject javaFd) {
    int fd = jniGetFDFromFileDescriptor(env, javaFd);
    throwIfMinusOne(env, "fsync", TEMP_FAILURE_RETRY(fsync(fd)));
}

// ftruncate(2), via the 64-bit-offset variant.
static void Posix_ftruncate(JNIEnv* env, jobject, jobject javaFd, jlong length) {
    int fd = jniGetFDFromFileDescriptor(env, javaFd);
    throwIfMinusOne(env, "ftruncate", TEMP_FAILURE_RETRY(ftruncate64(fd, length)));
}

// gai_strerror(3).
static jstring Posix_gai_strerror(JNIEnv* env, jobject, jint error) {
    return env->NewStringUTF(gai_strerror(error));
}

// Name resolution via android_getaddrinfofornet; returns an InetAddress[]
// containing only the AF_INET/AF_INET6 results.
static jobjectArray Posix_android_getaddrinfo(JNIEnv* env, jobject, jstring javaNode, jobject javaHints, jint netId) {
    ScopedUtfChars node(env, javaNode);
    if (node.c_str() == NULL) {
        return NULL;
    }
    static jfieldID flagsFid = env->GetFieldID(JniConstants::structAddrinfoClass, "ai_flags", "I");
    static jfieldID familyFid = env->GetFieldID(JniConstants::structAddrinfoClass, "ai_family", "I");
    static jfieldID socktypeFid = env->GetFieldID(JniConstants::structAddrinfoClass, "ai_socktype", "I");
    static jfieldID protocolFid = env->GetFieldID(JniConstants::structAddrinfoClass, "ai_protocol", "I");
    addrinfo hints;
    memset(&hints, 0, sizeof(hints));
    hints.ai_flags = env->GetIntField(javaHints, flagsFid);
    hints.ai_family = env->GetIntField(javaHints, familyFid);
    hints.ai_socktype = env->GetIntField(javaHints, socktypeFid);
    hints.ai_protocol = env->GetIntField(javaHints, protocolFid);
    addrinfo* addressList = NULL;
    errno = 0;
    int rc = android_getaddrinfofornet(node.c_str(), NULL, &hints, netId, 0, &addressList);
    UniquePtr<addrinfo, addrinfo_deleter> addressListDeleter(addressList);
    if (rc != 0) {
        throwGaiException(env, "android_getaddrinfo", rc);
        return NULL;
    }
    // Count results so we know how to size the output array.
    int addressCount = 0;
    for (addrinfo* ai = addressList; ai != NULL; ai = ai->ai_next) {
        if (ai->ai_family == AF_INET || ai->ai_family == AF_INET6) {
            ++addressCount;
        } else {
            ALOGE("android_getaddrinfo unexpected ai_family %i", ai->ai_family);
        }
    }
    if (addressCount == 0) {
        return NULL;
    }
    // Prepare output array.
    jobjectArray result = env->NewObjectArray(addressCount, JniConstants::inetAddressClass, NULL);
    if (result == NULL) {
        return NULL;
    }
    // Examine returned addresses one by one, save them in the output array.
    int index = 0;
    for (addrinfo* ai = addressList; ai != NULL; ai = ai->ai_next) {
        if (ai->ai_family != AF_INET && ai->ai_family != AF_INET6) {
            // Unknown address family. Skip this address.
            ALOGE("android_getaddrinfo unexpected ai_family %i", ai->ai_family);
            continue;
        }
        // Convert each IP address into a Java byte array.
        sockaddr_storage& address = *reinterpret_cast<sockaddr_storage*>(ai->ai_addr);
        ScopedLocalRef<jobject> inetAddress(env, sockaddrToInetAddress(env, address, NULL));
        if (inetAddress.get() == NULL) {
            return NULL;
        }
        env->SetObjectArrayElement(result, index, inetAddress.get());
        ++index;
    }
    return result;
}

// getegid(2).
static jint Posix_getegid(JNIEnv*, jobject) {
    return getegid();
}

// geteuid(2).
static jint Posix_geteuid(JNIEnv*, jobject) {
    return geteuid();
}

// getgid(2).
static jint Posix_getgid(JNIEnv*, jobject) {
    return getgid();
}

// getenv(3). Returns NULL when the variable is unset.
static jstring Posix_getenv(JNIEnv* env, jobject, jstring javaName) {
    ScopedUtfChars name(env, javaName);
    if (name.c_str() == NULL) {
        return NULL;
    }
    return env->NewStringUTF(getenv(name.c_str()));
}

// getnameinfo(3) (the call itself continues on the next chunk line).
static jstring Posix_getnameinfo(JNIEnv* env, jobject, jobject javaAddress, jint flags) {
    sockaddr_storage ss;
    socklen_t sa_len;
    if (!inetAddressToSockaddrVerbatim(env, javaAddress, 0, ss, sa_len)) {
        return NULL;
    }
    char buf[NI_MAXHOST]; // NI_MAXHOST is longer than INET6_ADDRSTRLEN.
    // (tail of Posix_getnameinfo from the previous chunk line)
    errno = 0;
    int rc = getnameinfo(reinterpret_cast<sockaddr*>(&ss), sa_len, buf, sizeof(buf), NULL, 0, flags);
    if (rc != 0) {
        throwGaiException(env, "getnameinfo", rc);
        return NULL;
    }
    return env->NewStringUTF(buf);
}

// getpeername(2).
static jobject Posix_getpeername(JNIEnv* env, jobject, jobject javaFd) {
    return doGetSockName(env, javaFd, false);
}

// getpid(2).
static jint Posix_getpid(JNIEnv*, jobject) {
    return getpid();
}

// getppid(2).
static jint Posix_getppid(JNIEnv*, jobject) {
    return getppid();
}

// getpwnam(3), via the re-entrant Passwd helper.
static jobject Posix_getpwnam(JNIEnv* env, jobject, jstring javaName) {
    ScopedUtfChars name(env, javaName);
    if (name.c_str() == NULL) {
        return NULL;
    }
    return Passwd(env).getpwnam(name.c_str());
}

// getpwuid(3), via the re-entrant Passwd helper.
static jobject Posix_getpwuid(JNIEnv* env, jobject, jint uid) {
    return Passwd(env).getpwuid(uid);
}

// getsockname(2).
static jobject Posix_getsockname(JNIEnv* env, jobject, jobject javaFd) {
    return doGetSockName(env, javaFd, true);
}

// getsockopt(2) for single-byte option values.
static jint Posix_getsockoptByte(JNIEnv* env, jobject, jobject javaFd, jint level, jint option) {
    int fd = jniGetFDFromFileDescriptor(env, javaFd);
    u_char result = 0;
    socklen_t size = sizeof(result);
    throwIfMinusOne(env, "getsockopt", TEMP_FAILURE_RETRY(getsockopt(fd, level, option, &result, &size)));
    return result;
}

// getsockopt(2) returning an in_addr as an InetAddress
// (continues on the next chunk line).
static jobject Posix_getsockoptInAddr(JNIEnv* env, jobject, jobject javaFd, jint level, jint option) {
    int fd = jniGetFDFromFileDescriptor(env, javaFd);
    sockaddr_storage ss;
    memset(&ss, 0, sizeof(ss));
    ss.ss_family = AF_INET; // This is only for the IPv4-only IP_MULTICAST_IF.
    sockaddr_in* sa = reinterpret_cast<sockaddr_in*>(&ss);
    socklen_t size = sizeof(sa->sin_addr);
    int rc = TEMP_FAILURE_RETRY(getsockopt(fd, level, option, &sa->sin_addr, &size));
    if (rc == -1) {
        throwErrnoException(env, "getsockopt");
        return NULL;
    }
    return sockaddrToInetAddress(env, ss, NULL);
}

// getsockopt(2) for int option values.
static jint Posix_getsockoptInt(JNIEnv* env, jobject, jobject javaFd, jint level, jint option) {
    int fd = jniGetFDFromFileDescriptor(env, javaFd);
    jint result = 0;
    socklen_t size = sizeof(result);
    throwIfMinusOne(env, "getsockopt", TEMP_FAILURE_RETRY(getsockopt(fd, level, option, &result, &size)));
    return result;
}

// getsockopt(2) for struct linger option values.
static jobject Posix_getsockoptLinger(JNIEnv* env, jobject, jobject javaFd, jint level, jint option) {
    int fd = jniGetFDFromFileDescriptor(env, javaFd);
    struct linger l;
    socklen_t size = sizeof(l);
    memset(&l, 0, size);
    int rc = TEMP_FAILURE_RETRY(getsockopt(fd, level, option, &l, &size));
    if (rc == -1) {
        throwErrnoException(env, "getsockopt");
        return NULL;
    }
    return makeStructLinger(env, l);
}

// getsockopt(2) for struct timeval option values.
static jobject Posix_getsockoptTimeval(JNIEnv* env, jobject, jobject javaFd, jint level, jint option) {
    int fd = jniGetFDFromFileDescriptor(env, javaFd);
    struct timeval tv;
    socklen_t size = sizeof(tv);
    memset(&tv, 0, size);
    int rc = TEMP_FAILURE_RETRY(getsockopt(fd, level, option, &tv, &size));
    if (rc == -1) {
        throwErrnoException(env, "getsockopt");
        return NULL;
    }
    return makeStructTimeval(env, tv);
}

// getsockopt(2) for struct ucred option values.
static jobject Posix_getsockoptUcred(JNIEnv* env, jobject, jobject javaFd, jint level, jint option) {
    int fd = jniGetFDFromFileDescriptor(env, javaFd);
    struct ucred u;
    socklen_t size = sizeof(u);
    memset(&u, 0, size);
    int rc = TEMP_FAILURE_RETRY(getsockopt(fd, level, option, &u, &size));
    if (rc == -1) {
        throwErrnoException(env, "getsockopt");
        return NULL;
    }
    return makeStructUcred(env, u);
}

// Returns the calling thread's id (platform-specific; continues below).
static jint Posix_gettid(JNIEnv* env __unused, jobject) {
#if defined(__APPLE__)
    uint64_t owner;
    int rc = pthread_threadid_np(NULL, &owner); // Requires Mac OS 10.6
    if (rc != 0) {
        throwErrnoException(env, "gettid");
        return 0;
    }
    return static_cast<jint>(owner);
#else
    // Neither bionic nor glibc exposes gettid(2).
    return syscall(__NR_gettid);
#endif
}

// getuid(2).
static jint Posix_getuid(JNIEnv*, jobject) {
    return getuid();
}

// if_indextoname(3).
static jstring Posix_if_indextoname(JNIEnv* env, jobject, jint index) {
    char buf[IF_NAMESIZE];
    char* name = if_indextoname(index, buf);
    // if_indextoname(3) returns NULL on failure, which will come out of NewStringUTF unscathed.
    // There's no useful information in errno, so we don't bother throwing. Callers can null-check.
    return env->NewStringUTF(name);
}

// inet_pton(3). Returns NULL (no exception) when the string doesn't parse.
static jobject Posix_inet_pton(JNIEnv* env, jobject, jint family, jstring javaName) {
    ScopedUtfChars name(env, javaName);
    if (name.c_str() == NULL) {
        return NULL;
    }
    sockaddr_storage ss;
    memset(&ss, 0, sizeof(ss));
    // sockaddr_in and sockaddr_in6 are at the same address, so we can use either here.
    void* dst = &reinterpret_cast<sockaddr_in*>(&ss)->sin_addr;
    if (inet_pton(family, name.c_str(), dst) != 1) {
        return NULL;
    }
    ss.ss_family = family;
    return sockaddrToInetAddress(env, ss, NULL);
}

// ioctl(2) on an interface request; returns the address the kernel wrote
// into req.ifr_addr as an InetAddress.
static jobject Posix_ioctlInetAddress(JNIEnv* env, jobject, jobject javaFd, jint cmd, jstring javaInterfaceName) {
    struct ifreq req;
    if (!fillIfreq(env, javaInterfaceName, req)) {
        return NULL;
    }
    int fd = jniGetFDFromFileDescriptor(env, javaFd);
    int rc = throwIfMinusOne(env, "ioctl", TEMP_FAILURE_RETRY(ioctl(fd, cmd, &req)));
    if (rc == -1) {
        return NULL;
    }
    return sockaddrToInetAddress(env, reinterpret_cast<sockaddr_storage&>(req.ifr_addr), NULL);
}

// ioctl(2) with an in/out MutableInt argument (continues on the next chunk line).
static jint Posix_ioctlInt(JNIEnv* env, jobject, jobject javaFd, jint cmd, jobject javaArg) {
    // This is complicated because ioctls may return their result by updating their argument
    // or via their return value, so we need to support both.
    // (tail of Posix_ioctlInt from the previous chunk line)
    int fd = jniGetFDFromFileDescriptor(env, javaFd);
    static jfieldID valueFid = env->GetFieldID(JniConstants::mutableIntClass, "value", "I");
    jint arg = env->GetIntField(javaArg, valueFid);
    int rc = throwIfMinusOne(env, "ioctl", TEMP_FAILURE_RETRY(ioctl(fd, cmd, &arg)));
    if (!env->ExceptionCheck()) {
        env->SetIntField(javaArg, valueFid, arg);
    }
    return rc;
}

// isatty(3).
static jboolean Posix_isatty(JNIEnv* env, jobject, jobject javaFd) {
    int fd = jniGetFDFromFileDescriptor(env, javaFd);
    return TEMP_FAILURE_RETRY(isatty(fd)) == 1;
}

// kill(2).
static void Posix_kill(JNIEnv* env, jobject, jint pid, jint sig) {
    throwIfMinusOne(env, "kill", TEMP_FAILURE_RETRY(kill(pid, sig)));
}

// lchown(2).
static void Posix_lchown(JNIEnv* env, jobject, jstring javaPath, jint uid, jint gid) {
    ScopedUtfChars path(env, javaPath);
    if (path.c_str() == NULL) {
        return;
    }
    throwIfMinusOne(env, "lchown", TEMP_FAILURE_RETRY(lchown(path.c_str(), uid, gid)));
}

// link(2).
static void Posix_link(JNIEnv* env, jobject, jstring javaOldPath, jstring javaNewPath) {
    ScopedUtfChars oldPath(env, javaOldPath);
    if (oldPath.c_str() == NULL) {
        return;
    }
    ScopedUtfChars newPath(env, javaNewPath);
    if (newPath.c_str() == NULL) {
        return;
    }
    throwIfMinusOne(env, "link", TEMP_FAILURE_RETRY(link(oldPath.c_str(), newPath.c_str())));
}

// listen(2).
static void Posix_listen(JNIEnv* env, jobject, jobject javaFd, jint backlog) {
    int fd = jniGetFDFromFileDescriptor(env, javaFd);
    throwIfMinusOne(env, "listen", TEMP_FAILURE_RETRY(listen(fd, backlog)));
}

// lseek(2), via the 64-bit-offset variant.
static jlong Posix_lseek(JNIEnv* env, jobject, jobject javaFd, jlong offset, jint whence) {
    int fd = jniGetFDFromFileDescriptor(env, javaFd);
    return throwIfMinusOne(env, "lseek", TEMP_FAILURE_RETRY(lseek64(fd, offset, whence)));
}

// lstat(2).
static jobject Posix_lstat(JNIEnv* env, jobject, jstring javaPath) {
    return doStat(env, javaPath, true);
}

// mincore(2); writes per-page residency into the caller's byte[]
// (continues on the next chunk line).
static void Posix_mincore(JNIEnv* env, jobject, jlong address, jlong byteCount, jbyteArray javaVector) {
    ScopedByteArrayRW vector(env, javaVector);
    if (vector.get() == NULL) {
        return;
    }
    void* ptr = reinterpret_cast<void*>(static_cast<uintptr_t>(address));
    unsigned char* vec = reinterpret_cast<unsigned char*>(vector.get());
    throwIfMinusOne(env, "mincore", TEMP_FAILURE_RETRY(mincore(ptr, byteCount, vec)));
}

// mkdir(2).
static void Posix_mkdir(JNIEnv* env, jobject, jstring javaPath, jint mode) {
    ScopedUtfChars path(env, javaPath);
    if (path.c_str() == NULL) {
        return;
    }
    throwIfMinusOne(env, "mkdir", TEMP_FAILURE_RETRY(mkdir(path.c_str(), mode)));
}

// mkfifo(3).
static void Posix_mkfifo(JNIEnv* env, jobject, jstring javaPath, jint mode) {
    ScopedUtfChars path(env, javaPath);
    if (path.c_str() == NULL) {
        return;
    }
    throwIfMinusOne(env, "mkfifo", TEMP_FAILURE_RETRY(mkfifo(path.c_str(), mode)));
}

// mlock(2).
static void Posix_mlock(JNIEnv* env, jobject, jlong address, jlong byteCount) {
    void* ptr = reinterpret_cast<void*>(static_cast<uintptr_t>(address));
    throwIfMinusOne(env, "mlock", TEMP_FAILURE_RETRY(mlock(ptr, byteCount)));
}

// mmap(2). Returns the mapped address as a jlong; throws on MAP_FAILED.
static jlong Posix_mmap(JNIEnv* env, jobject, jlong address, jlong byteCount, jint prot, jint flags, jobject javaFd, jlong offset) {
    int fd = jniGetFDFromFileDescriptor(env, javaFd);
    void* suggestedPtr = reinterpret_cast<void*>(static_cast<uintptr_t>(address));
    void* ptr = mmap(suggestedPtr, byteCount, prot, flags, fd, offset);
    if (ptr == MAP_FAILED) {
        throwErrnoException(env, "mmap");
    }
    return static_cast<jlong>(reinterpret_cast<uintptr_t>(ptr));
}

// msync(2).
static void Posix_msync(JNIEnv* env, jobject, jlong address, jlong byteCount, jint flags) {
    void* ptr = reinterpret_cast<void*>(static_cast<uintptr_t>(address));
    throwIfMinusOne(env, "msync", TEMP_FAILURE_RETRY(msync(ptr, byteCount, flags)));
}

// munlock(2).
static void Posix_munlock(JNIEnv* env, jobject, jlong address, jlong byteCount) {
    void* ptr = reinterpret_cast<void*>(static_cast<uintptr_t>(address));
    throwIfMinusOne(env, "munlock", TEMP_FAILURE_RETRY(munlock(ptr, byteCount)));
}

// munmap(2).
static void Posix_munmap(JNIEnv* env, jobject, jlong address, jlong byteCount) {
    void* ptr = reinterpret_cast<void*>(static_cast<uintptr_t>(address));
    throwIfMinusOne(env, "munmap", TEMP_FAILURE_RETRY(munmap(ptr, byteCount)));
}

// open(2).
static jobject Posix_open(JNIEnv* env, jobject, jstring javaPath, jint flags, jint mode) {
    ScopedUtfChars path(env, javaPath);
    if (path.c_str() == NULL) {
        return NULL;
    }
    int fd = throwIfMinusOne(env, "open", TEMP_FAILURE_RETRY(open(path.c_str(), flags, mode)));
    return fd != -1 ? jniCreateFileDescriptor(env, fd) : NULL;
}

// pipe(2). Returns a two-element FileDescriptor array.
static jobjectArray Posix_pipe(JNIEnv* env, jobject) {
    int fds[2];
    throwIfMinusOne(env, "pipe", TEMP_FAILURE_RETRY(pipe(&fds[0])));
    jobjectArray result = env->NewObjectArray(2, JniConstants::fileDescriptorClass, NULL);
    if (result == NULL) {
        return NULL;
    }
    for (int i = 0; i < 2; ++i) {
        ScopedLocalRef<jobject> fd(env, jniCreateFileDescriptor(env, fds[i]));
        if (fd.get() == NULL) {
            return NULL;
        }
        env->SetObjectArrayElement(result, i, fd.get());
        if (env->ExceptionCheck()) {
            return NULL;
        }
    }
    return result;
}

// poll(2) over a StructPollfd[] (continues on the next chunk line).
static jint Posix_poll(JNIEnv* env, jobject, jobjectArray javaStructs, jint timeoutMs) {
    static jfieldID fdFid = env->GetFieldID(JniConstants::structPollfdClass, "fd", "Ljava/io/FileDescriptor;");
    static jfieldID eventsFid = env->GetFieldID(JniConstants::structPollfdClass, "events", "S");
    static jfieldID reventsFid = env->GetFieldID(JniConstants::structPollfdClass, "revents", "S");
    // Turn the Java android.system.StructPollfd[] into a C++ struct pollfd[].
    size_t arrayLength = env->GetArrayLength(javaStructs);
    UniquePtr<struct pollfd[]> fds(new struct pollfd[arrayLength]);
    memset(fds.get(), 0, sizeof(struct pollfd) * arrayLength);
    size_t count = 0;
    // Some trailing array elements may be irrelevant. (See below.)
    for (size_t i = 0; i < arrayLength; ++i) {
        ScopedLocalRef<jobject> javaStruct(env, env->GetObjectArrayElement(javaStructs, i));
        if (javaStruct.get() == NULL) {
            break; // We allow trailing nulls in the array for caller convenience.
        // (continuation of Posix_poll from the previous chunk line)
        }
        ScopedLocalRef<jobject> javaFd(env, env->GetObjectField(javaStruct.get(), fdFid));
        if (javaFd.get() == NULL) {
            break; // We also allow callers to just clear the fd field (this is what Selector does).
        }
        fds[count].fd = jniGetFDFromFileDescriptor(env, javaFd.get());
        fds[count].events = env->GetShortField(javaStruct.get(), eventsFid);
        ++count;
    }
    // Register each fd so a concurrent close can interrupt the poll.
    std::vector<AsynchronousCloseMonitor*> monitors;
    for (size_t i = 0; i < count; ++i) {
        monitors.push_back(new AsynchronousCloseMonitor(fds[i].fd));
    }
    int rc = poll(fds.get(), count, timeoutMs);
    for (size_t i = 0; i < monitors.size(); ++i) {
        delete monitors[i];
    }
    if (rc == -1) {
        throwErrnoException(env, "poll");
        return -1;
    }
    // Update the revents fields in the Java android.system.StructPollfd[].
    for (size_t i = 0; i < count; ++i) {
        ScopedLocalRef<jobject> javaStruct(env, env->GetObjectArrayElement(javaStructs, i));
        if (javaStruct.get() == NULL) {
            return -1;
        }
        env->SetShortField(javaStruct.get(), reventsFid, fds[i].revents);
    }
    return rc;
}

// posix_fallocate(3), via the 64-bit variant; unsupported on Mac OS.
static void Posix_posix_fallocate(JNIEnv* env, jobject, jobject javaFd __unused, jlong offset __unused, jlong length __unused) {
#ifdef __APPLE__
    jniThrowException(env, "java/lang/UnsupportedOperationException", "fallocate doesn't exist on a Mac");
#else
    int fd = jniGetFDFromFileDescriptor(env, javaFd);
    // posix_fallocate returns the error directly rather than setting errno.
    errno = TEMP_FAILURE_RETRY(posix_fallocate64(fd, offset, length));
    if (errno != 0) {
        throwErrnoException(env, "posix_fallocate");
    }
#endif
}

// prctl(2); unsupported on Mac OS.
static jint Posix_prctl(JNIEnv* env, jobject, jint option __unused, jlong arg2 __unused, jlong arg3 __unused, jlong arg4 __unused, jlong arg5 __unused) {
#ifdef __APPLE__
    jniThrowException(env, "java/lang/UnsupportedOperationException", "prctl doesn't exist on a Mac");
    return 0;
#else
    int result = prctl(static_cast<int>(option), static_cast<unsigned long>(arg2), static_cast<unsigned long>(arg3), static_cast<unsigned long>(arg4), static_cast<unsigned long>(arg5));
    return throwIfMinusOne(env, "prctl", result);
#endif
}

// pread64(2).
static jint Posix_preadBytes(JNIEnv* env, jobject, jobject javaFd, jobject javaBytes, jint byteOffset, jint byteCount, jlong offset) {
    ScopedBytesRW bytes(env, javaBytes);
    if (bytes.get() == NULL) {
        return -1;
    }
    return IO_FAILURE_RETRY(env, ssize_t, pread64, javaFd, bytes.get() + byteOffset, byteCount, offset);
}

// pwrite64(2).
static jint Posix_pwriteBytes(JNIEnv* env, jobject, jobject javaFd, jbyteArray javaBytes, jint byteOffset, jint byteCount, jlong offset) {
    ScopedBytesRO bytes(env, javaBytes);
    if (bytes.get() == NULL) {
        return -1;
    }
    return IO_FAILURE_RETRY(env, ssize_t, pwrite64, javaFd, bytes.get() + byteOffset, byteCount, offset);
}

// read(2).
static jint Posix_readBytes(JNIEnv* env, jobject, jobject javaFd, jobject javaBytes, jint byteOffset, jint byteCount) {
    ScopedBytesRW bytes(env, javaBytes);
    if (bytes.get() == NULL) {
        return -1;
    }
    return IO_FAILURE_RETRY(env, ssize_t, read, javaFd, bytes.get() + byteOffset, byteCount);
}

// readlink(2), via a std::string-returning helper overload.
static jstring Posix_readlink(JNIEnv* env, jobject, jstring javaPath) {
    ScopedUtfChars path(env, javaPath);
    if (path.c_str() == NULL) {
        return NULL;
    }
    std::string result;
    if (!readlink(path.c_str(), result)) {
        throwErrnoException(env, "readlink");
        return NULL;
    }
    return env->NewStringUTF(result.c_str());
}

// readv(2), using the IoVec helper to pin all the Java buffers.
static jint Posix_readv(JNIEnv* env, jobject, jobject javaFd, jobjectArray buffers, jintArray offsets, jintArray byteCounts) {
    IoVec<ScopedBytesRW> ioVec(env, env->GetArrayLength(buffers));
    if (!ioVec.init(buffers, offsets, byteCounts)) {
        return -1;
    }
    return IO_FAILURE_RETRY(env, ssize_t, readv, javaFd, ioVec.get(), ioVec.size());
}

// recvfrom(2); optionally fills in the sender's InetSocketAddress.
static jint Posix_recvfromBytes(JNIEnv* env, jobject, jobject javaFd, jobject javaBytes, jint byteOffset, jint byteCount, jint flags, jobject javaInetSocketAddress) {
    ScopedBytesRW bytes(env, javaBytes);
    if (bytes.get() == NULL) {
        return -1;
    }
    sockaddr_storage ss;
    socklen_t sl = sizeof(ss);
    memset(&ss, 0, sizeof(ss));
    sockaddr* from = (javaInetSocketAddress != NULL) ? reinterpret_cast<sockaddr*>(&ss) : NULL;
    socklen_t* fromLength = (javaInetSocketAddress != NULL) ? &sl : 0;
    jint recvCount = NET_FAILURE_RETRY(env, ssize_t, recvfrom, javaFd, bytes.get() + byteOffset, byteCount, flags, from, fromLength);
    fillInetSocketAddress(env, recvCount, javaInetSocketAddress, ss);
    return recvCount;
}

// remove(3).
static void Posix_remove(JNIEnv* env, jobject, jstring javaPath) {
    ScopedUtfChars path(env, javaPath);
    if (path.c_str() == NULL) {
        return;
    }
    throwIfMinusOne(env, "remove", TEMP_FAILURE_RETRY(remove(path.c_str())));
}

// rename(2).
static void Posix_rename(JNIEnv* env, jobject, jstring javaOldPath, jstring javaNewPath) {
    ScopedUtfChars oldPath(env, javaOldPath);
    if (oldPath.c_str() == NULL) {
        return;
    }
    ScopedUtfChars newPath(env, javaNewPath);
    if (newPath.c_str() == NULL) {
        return;
    }
    throwIfMinusOne(env, "rename", TEMP_FAILURE_RETRY(rename(oldPath.c_str(), newPath.c_str())));
}

// sendfile(2) with an optional in/out MutableLong offset.
static jlong Posix_sendfile(JNIEnv* env, jobject, jobject javaOutFd, jobject javaInFd, jobject javaOffset, jlong byteCount) {
    int outFd = jniGetFDFromFileDescriptor(env, javaOutFd);
    int inFd = jniGetFDFromFileDescriptor(env, javaInFd);
    static jfieldID valueFid = env->GetFieldID(JniConstants::mutableLongClass, "value", "J");
    off_t offset = 0;
    off_t* offsetPtr = NULL;
    if (javaOffset != NULL) {
        // TODO: fix bionic so we can have a 64-bit off_t!
        offset = env->GetLongField(javaOffset, valueFid);
        offsetPtr = &offset;
    }
    jlong result = throwIfMinusOne(env, "sendfile", TEMP_FAILURE_RETRY(sendfile(outFd, inFd, offsetPtr, byteCount)));
    if (javaOffset != NULL) {
        env->SetLongField(javaOffset, valueFid, offset);
    }
    return result;
}

// sendto(2) (continues on the next chunk line).
static jint Posix_sendtoBytes(JNIEnv* env, jobject, jobject javaFd, jobject javaBytes, jint byteOffset, jint byteCount, jint flags, jobject javaInetAddress, jint port) {
    ScopedBytesRO bytes(env, javaBytes);
    if (bytes.get() == NULL) {
        return -1;
    }
    sockaddr_storage ss;
    socklen_t sa_len = 0;
    if (javaInetAddress != NULL && !inetAddressToSockaddr(env, javaInetAddress, port, ss, sa_len)) {
        return -1;
    }
    const sockaddr* to = (javaInetAddress != NULL) ?
            // (tail of Posix_sendtoBytes from the previous chunk line)
            reinterpret_cast<const sockaddr*>(&ss) : NULL;
    return NET_FAILURE_RETRY(env, ssize_t, sendto, javaFd, bytes.get() + byteOffset, byteCount, flags, to, sa_len);
}

// setegid(2).
static void Posix_setegid(JNIEnv* env, jobject, jint egid) {
    throwIfMinusOne(env, "setegid", TEMP_FAILURE_RETRY(setegid(egid)));
}

// setenv(3).
static void Posix_setenv(JNIEnv* env, jobject, jstring javaName, jstring javaValue, jboolean overwrite) {
    ScopedUtfChars name(env, javaName);
    if (name.c_str() == NULL) {
        return;
    }
    ScopedUtfChars value(env, javaValue);
    if (value.c_str() == NULL) {
        return;
    }
    throwIfMinusOne(env, "setenv", setenv(name.c_str(), value.c_str(), overwrite));
}

// seteuid(2).
static void Posix_seteuid(JNIEnv* env, jobject, jint euid) {
    throwIfMinusOne(env, "seteuid", TEMP_FAILURE_RETRY(seteuid(euid)));
}

// setgid(2).
static void Posix_setgid(JNIEnv* env, jobject, jint gid) {
    throwIfMinusOne(env, "setgid", TEMP_FAILURE_RETRY(setgid(gid)));
}

// setsid(2).
static jint Posix_setsid(JNIEnv* env, jobject) {
    return throwIfMinusOne(env, "setsid", TEMP_FAILURE_RETRY(setsid()));
}

// setsockopt(2) for single-byte option values.
static void Posix_setsockoptByte(JNIEnv* env, jobject, jobject javaFd, jint level, jint option, jint value) {
    int fd = jniGetFDFromFileDescriptor(env, javaFd);
    u_char byte = value;
    throwIfMinusOne(env, "setsockopt", TEMP_FAILURE_RETRY(setsockopt(fd, level, option, &byte, sizeof(byte))));
}

// setsockopt(2) taking an interface name, marshalled through struct ifreq.
static void Posix_setsockoptIfreq(JNIEnv* env, jobject, jobject javaFd, jint level, jint option, jstring javaInterfaceName) {
    struct ifreq req;
    if (!fillIfreq(env, javaInterfaceName, req)) {
        return;
    }
    int fd = jniGetFDFromFileDescriptor(env, javaFd);
    throwIfMinusOne(env, "setsockopt", TEMP_FAILURE_RETRY(setsockopt(fd, level, option, &req, sizeof(req))));
}

// setsockopt(2) for int option values.
static void Posix_setsockoptInt(JNIEnv* env, jobject, jobject javaFd, jint level, jint option, jint value) {
    int fd = jniGetFDFromFileDescriptor(env, javaFd);
    throwIfMinusOne(env, "setsockopt", TEMP_FAILURE_RETRY(setsockopt(fd, level, option, &value, sizeof(value))));
}

#if defined(__APPLE__) && MAC_OS_X_VERSION_MAX_ALLOWED < 1070
// Mac OS didn't support modern multicast APIs until 10.7.
static void Posix_setsockoptIpMreqn(JNIEnv*, jobject, jobject, jint, jint, jint) { abort(); }
static void Posix_setsockoptGroupReq(JNIEnv*, jobject, jobject, jint, jint, jobject) { abort(); }
static void Posix_setsockoptGroupSourceReq(JNIEnv*, jobject, jobject, jint, jint, jobject) { abort(); }
#else
// setsockopt(2) taking an ip_mreqn with only the interface index populated.
static void Posix_setsockoptIpMreqn(JNIEnv* env, jobject, jobject javaFd, jint level, jint option, jint value) {
    ip_mreqn req;
    memset(&req, 0, sizeof(req));
    req.imr_ifindex = value;
    int fd = jniGetFDFromFileDescriptor(env, javaFd);
    throwIfMinusOne(env, "setsockopt", TEMP_FAILURE_RETRY(setsockopt(fd, level, option, &req, sizeof(req))));
}

// setsockopt(2) taking a struct group_req built from a Java StructGroupReq.
static void Posix_setsockoptGroupReq(JNIEnv* env, jobject, jobject javaFd, jint level, jint option, jobject javaGroupReq) {
    struct group_req req;
    memset(&req, 0, sizeof(req));
    static jfieldID grInterfaceFid = env->GetFieldID(JniConstants::structGroupReqClass, "gr_interface", "I");
    req.gr_interface = env->GetIntField(javaGroupReq, grInterfaceFid);
    // Get the IPv4 or IPv6 multicast address to join or leave.
    static jfieldID grGroupFid = env->GetFieldID(JniConstants::structGroupReqClass, "gr_group", "Ljava/net/InetAddress;");
    ScopedLocalRef<jobject> javaGroup(env, env->GetObjectField(javaGroupReq, grGroupFid));
    socklen_t sa_len;
    if (!inetAddressToSockaddrVerbatim(env, javaGroup.get(), 0, req.gr_group, sa_len)) {
        return;
    }
    int fd = jniGetFDFromFileDescriptor(env, javaFd);
    int rc = TEMP_FAILURE_RETRY(setsockopt(fd, level, option, &req, sizeof(req)));
    if (rc == -1 && errno == EINVAL) {
        // Maybe we're a 32-bit binary talking to a 64-bit kernel?
        // glibc doesn't automatically handle this.
        // http://sourceware.org/bugzilla/show_bug.cgi?id=12080
        struct group_req64 {
            uint32_t gr_interface;
            uint32_t my_padding;
            sockaddr_storage gr_group;
        };
        group_req64 req64;
        req64.gr_interface = req.gr_interface;
        memcpy(&req64.gr_group, &req.gr_group, sizeof(req.gr_group));
        rc = TEMP_FAILURE_RETRY(setsockopt(fd, level, option, &req64, sizeof(req64)));
    }
    throwIfMinusOne(env, "setsockopt", rc);
}

// setsockopt(2) taking a struct group_source_req built from a Java
// StructGroupSourceReq (group plus source-filter address).
static void Posix_setsockoptGroupSourceReq(JNIEnv* env, jobject, jobject javaFd, jint level, jint option, jobject javaGroupSourceReq) {
    socklen_t sa_len;
    struct group_source_req req;
    memset(&req, 0, sizeof(req));
    static jfieldID gsrInterfaceFid = env->GetFieldID(JniConstants::structGroupSourceReqClass, "gsr_interface", "I");
    req.gsr_interface = env->GetIntField(javaGroupSourceReq, gsrInterfaceFid);
    // Get the IPv4 or IPv6 multicast address to join or leave.
    static jfieldID gsrGroupFid = env->GetFieldID(JniConstants::structGroupSourceReqClass, "gsr_group", "Ljava/net/InetAddress;");
    ScopedLocalRef<jobject> javaGroup(env, env->GetObjectField(javaGroupSourceReq, gsrGroupFid));
    if (!inetAddressToSockaddrVerbatim(env, javaGroup.get(), 0, req.gsr_group, sa_len)) {
        return;
    }
    // Get the IPv4 or IPv6 multicast address to add to the filter.
    static jfieldID gsrSourceFid = env->GetFieldID(JniConstants::structGroupSourceReqClass, "gsr_source", "Ljava/net/InetAddress;");
    ScopedLocalRef<jobject> javaSource(env, env->GetObjectField(javaGroupSourceReq, gsrSourceFid));
    if (!inetAddressToSockaddrVerbatim(env, javaSource.get(), 0, req.gsr_source, sa_len)) {
        return;
    }
    int fd = jniGetFDFromFileDescriptor(env, javaFd);
    int rc = TEMP_FAILURE_RETRY(setsockopt(fd, level, option, &req, sizeof(req)));
    if (rc == -1 && errno == EINVAL) {
        // Maybe we're a 32-bit binary talking to a 64-bit kernel?
        // glibc doesn't automatically handle this.
        // http://sourceware.org/bugzilla/show_bug.cgi?id=12080
        struct group_source_req64 {
            uint32_t gsr_interface;
            uint32_t my_padding;
            sockaddr_storage gsr_group;
            sockaddr_storage gsr_source;
        };
        group_source_req64 req64;
        req64.gsr_interface = req.gsr_interface;
        memcpy(&req64.gsr_group, &req.gsr_group, sizeof(req.gsr_group));
        memcpy(&req64.gsr_source, &req.gsr_source, sizeof(req.gsr_source));
        rc = TEMP_FAILURE_RETRY(setsockopt(fd, level, option, &req64, sizeof(req64)));
    }
    throwIfMinusOne(env, "setsockopt", rc);
}
#endif

// setsockopt(2) for struct linger option values.
static void Posix_setsockoptLinger(JNIEnv* env, jobject, jobject javaFd, jint level, jint option, jobject javaLinger) {
    static jfieldID lOnoffFid = env->GetFieldID(JniConstants::structLingerClass, "l_onoff", "I");
    static jfieldID lLingerFid = env->GetFieldID(JniConstants::structLingerClass, "l_linger", "I");
    int fd = jniGetFDFromFileDescriptor(env, javaFd);
    struct linger value;
    value.l_onoff = env->GetIntField(javaLinger, lOnoffFid);
    value.l_linger = env->GetIntField(javaLinger, lLingerFid);
    throwIfMinusOne(env, "setsockopt", TEMP_FAILURE_RETRY(setsockopt(fd, level, option, &value, sizeof(value))));
}

// setsockopt(2) for struct timeval option values.
static void Posix_setsockoptTimeval(JNIEnv* env, jobject, jobject javaFd, jint level, jint option, jobject javaTimeval) {
    static jfieldID tvSecFid = env->GetFieldID(JniConstants::structTimevalClass, "tv_sec", "J");
    static jfieldID tvUsecFid = env->GetFieldID(JniConstants::structTimevalClass, "tv_usec", "J");
    int fd = jniGetFDFromFileDescriptor(env, javaFd);
    struct timeval value;
    value.tv_sec = env->GetLongField(javaTimeval, tvSecFid);
    value.tv_usec = env->GetLongField(javaTimeval, tvUsecFid);
    throwIfMinusOne(env, "setsockopt", TEMP_FAILURE_RETRY(setsockopt(fd, level, option, &value, sizeof(value))));
}

// setuid(2).
static void Posix_setuid(JNIEnv* env, jobject, jint uid) {
    throwIfMinusOne(env, "setuid", TEMP_FAILURE_RETRY(setuid(uid)));
}

// shutdown(2) (the body continues beyond this chunk).
static void Posix_shutdown(JNIEnv* env, jobject, jobject javaFd, jint how) {
    int fd = jniGetFDFromFileDescriptor(env, javaFd);
throwIfMinusOne(env, "shutdown", TEMP_FAILURE_RETRY(shutdown(fd, how))); } static jobject Posix_socket(JNIEnv* env, jobject, jint domain, jint type, jint protocol) { int fd = throwIfMinusOne(env, "socket", TEMP_FAILURE_RETRY(socket(domain, type, protocol))); return fd != -1 ? jniCreateFileDescriptor(env, fd) : NULL; } static void Posix_socketpair(JNIEnv* env, jobject, jint domain, jint type, jint protocol, jobject javaFd1, jobject javaFd2) { int fds[2]; int rc = throwIfMinusOne(env, "socketpair", TEMP_FAILURE_RETRY(socketpair(domain, type, protocol, fds))); if (rc != -1) { jniSetFileDescriptorOfFD(env, javaFd1, fds[0]); jniSetFileDescriptorOfFD(env, javaFd2, fds[1]); } } static jobject Posix_stat(JNIEnv* env, jobject, jstring javaPath) { return doStat(env, javaPath, false); } static jobject Posix_statvfs(JNIEnv* env, jobject, jstring javaPath) { ScopedUtfChars path(env, javaPath); if (path.c_str() == NULL) { return NULL; } struct statvfs sb; int rc = TEMP_FAILURE_RETRY(statvfs(path.c_str(), &sb)); if (rc == -1) { throwErrnoException(env, "statvfs"); return NULL; } return makeStructStatVfs(env, sb); } static jstring Posix_strerror(JNIEnv* env, jobject, jint errnum) { char buffer[BUFSIZ]; const char* message = jniStrError(errnum, buffer, sizeof(buffer)); return env->NewStringUTF(message); } static jstring Posix_strsignal(JNIEnv* env, jobject, jint signal) { return env->NewStringUTF(strsignal(signal)); } static void Posix_symlink(JNIEnv* env, jobject, jstring javaOldPath, jstring javaNewPath) { ScopedUtfChars oldPath(env, javaOldPath); if (oldPath.c_str() == NULL) { return; } ScopedUtfChars newPath(env, javaNewPath); if (newPath.c_str() == NULL) { return; } throwIfMinusOne(env, "symlink", TEMP_FAILURE_RETRY(symlink(oldPath.c_str(), newPath.c_str()))); } static jlong Posix_sysconf(JNIEnv* env, jobject, jint name) { // Since -1 is a valid result from sysconf(3), detecting failure is a little more awkward. 
errno = 0; long result = sysconf(name); if (result == -1L && errno == EINVAL) { throwErrnoException(env, "sysconf"); } return result; } static void Posix_tcdrain(JNIEnv* env, jobject, jobject javaFd) { int fd = jniGetFDFromFileDescriptor(env, javaFd); throwIfMinusOne(env, "tcdrain", TEMP_FAILURE_RETRY(tcdrain(fd))); } static void Posix_tcsendbreak(JNIEnv* env, jobject, jobject javaFd, jint duration) { int fd = jniGetFDFromFileDescriptor(env, javaFd); throwIfMinusOne(env, "tcsendbreak", TEMP_FAILURE_RETRY(tcsendbreak(fd, duration))); } static jint Posix_umaskImpl(JNIEnv*, jobject, jint mask) { return umask(mask); } static jobject Posix_uname(JNIEnv* env, jobject) { struct utsname buf; if (TEMP_FAILURE_RETRY(uname(&buf)) == -1) { return NULL; // Can't happen. } return makeStructUtsname(env, buf); } static void Posix_unsetenv(JNIEnv* env, jobject, jstring javaName) { ScopedUtfChars name(env, javaName); if (name.c_str() == NULL) { return; } throwIfMinusOne(env, "unsetenv", unsetenv(name.c_str())); } static jint Posix_waitpid(JNIEnv* env, jobject, jint pid, jobject javaStatus, jint options) { int status; int rc = throwIfMinusOne(env, "waitpid", TEMP_FAILURE_RETRY(waitpid(pid, &status, options))); if (rc != -1) { static jfieldID valueFid = env->GetFieldID(JniConstants::mutableIntClass, "value", "I"); env->SetIntField(javaStatus, valueFid, status); } return rc; } static jint Posix_writeBytes(JNIEnv* env, jobject, jobject javaFd, jbyteArray javaBytes, jint byteOffset, jint byteCount) { ScopedBytesRO bytes(env, javaBytes); if (bytes.get() == NULL) { return -1; } return IO_FAILURE_RETRY(env, ssize_t, write, javaFd, bytes.get() + byteOffset, byteCount); } static jint Posix_writev(JNIEnv* env, jobject, jobject javaFd, jobjectArray buffers, jintArray offsets, jintArray byteCounts) { IoVec<ScopedBytesRO> ioVec(env, env->GetArrayLength(buffers)); if (!ioVec.init(buffers, offsets, byteCounts)) { return -1; } return IO_FAILURE_RETRY(env, ssize_t, writev, javaFd, ioVec.get(), 
ioVec.size()); } static JNINativeMethod gMethods[] = { NATIVE_METHOD(Posix, accept, "(Ljava/io/FileDescriptor;Ljava/net/InetSocketAddress;)Ljava/io/FileDescriptor;"), NATIVE_METHOD(Posix, access, "(Ljava/lang/String;I)Z"), NATIVE_METHOD(Posix, android_getaddrinfo, "(Ljava/lang/String;Landroid/system/StructAddrinfo;I)[Ljava/net/InetAddress;"), NATIVE_METHOD(Posix, bind, "(Ljava/io/FileDescriptor;Ljava/net/InetAddress;I)V"), NATIVE_METHOD(Posix, chmod, "(Ljava/lang/String;I)V"), NATIVE_METHOD(Posix, chown, "(Ljava/lang/String;II)V"), NATIVE_METHOD(Posix, close, "(Ljava/io/FileDescriptor;)V"), NATIVE_METHOD(Posix, connect, "(Ljava/io/FileDescriptor;Ljava/net/InetAddress;I)V"), NATIVE_METHOD(Posix, dup, "(Ljava/io/FileDescriptor;)Ljava/io/FileDescriptor;"), NATIVE_METHOD(Posix, dup2, "(Ljava/io/FileDescriptor;I)Ljava/io/FileDescriptor;"), NATIVE_METHOD(Posix, environ, "()[Ljava/lang/String;"), NATIVE_METHOD(Posix, execv, "(Ljava/lang/String;[Ljava/lang/String;)V"), NATIVE_METHOD(Posix, execve, "(Ljava/lang/String;[Ljava/lang/String;[Ljava/lang/String;)V"), NATIVE_METHOD(Posix, fchmod, "(Ljava/io/FileDescriptor;I)V"), NATIVE_METHOD(Posix, fchown, "(Ljava/io/FileDescriptor;II)V"), NATIVE_METHOD(Posix, fcntlVoid, "(Ljava/io/FileDescriptor;I)I"), NATIVE_METHOD(Posix, fcntlLong, "(Ljava/io/FileDescriptor;IJ)I"), NATIVE_METHOD(Posix, fcntlFlock, "(Ljava/io/FileDescriptor;ILandroid/system/StructFlock;)I"), NATIVE_METHOD(Posix, fdatasync, "(Ljava/io/FileDescriptor;)V"), NATIVE_METHOD(Posix, fstat, "(Ljava/io/FileDescriptor;)Landroid/system/StructStat;"), NATIVE_METHOD(Posix, fstatvfs, "(Ljava/io/FileDescriptor;)Landroid/system/StructStatVfs;"), NATIVE_METHOD(Posix, fsync, "(Ljava/io/FileDescriptor;)V"), NATIVE_METHOD(Posix, ftruncate, "(Ljava/io/FileDescriptor;J)V"), NATIVE_METHOD(Posix, gai_strerror, "(I)Ljava/lang/String;"), NATIVE_METHOD(Posix, getegid, "()I"), NATIVE_METHOD(Posix, geteuid, "()I"), NATIVE_METHOD(Posix, getgid, "()I"), NATIVE_METHOD(Posix, getenv, 
"(Ljava/lang/String;)Ljava/lang/String;"), NATIVE_METHOD(Posix, getnameinfo, "(Ljava/net/InetAddress;I)Ljava/lang/String;"), NATIVE_METHOD(Posix, getpeername, "(Ljava/io/FileDescriptor;)Ljava/net/SocketAddress;"), NATIVE_METHOD(Posix, getpid, "()I"), NATIVE_METHOD(Posix, getppid, "()I"), NATIVE_METHOD(Posix, getpwnam, "(Ljava/lang/String;)Landroid/system/StructPasswd;"), NATIVE_METHOD(Posix, getpwuid, "(I)Landroid/system/StructPasswd;"), NATIVE_METHOD(Posix, getsockname, "(Ljava/io/FileDescriptor;)Ljava/net/SocketAddress;"), NATIVE_METHOD(Posix, getsockoptByte, "(Ljava/io/FileDescriptor;II)I"), NATIVE_METHOD(Posix, getsockoptInAddr, "(Ljava/io/FileDescriptor;II)Ljava/net/InetAddress;"), NATIVE_METHOD(Posix, getsockoptInt, "(Ljava/io/FileDescriptor;II)I"), NATIVE_METHOD(Posix, getsockoptLinger, "(Ljava/io/FileDescriptor;II)Landroid/system/StructLinger;"), NATIVE_METHOD(Posix, getsockoptTimeval, "(Ljava/io/FileDescriptor;II)Landroid/system/StructTimeval;"), NATIVE_METHOD(Posix, getsockoptUcred, "(Ljava/io/FileDescriptor;II)Landroid/system/StructUcred;"), NATIVE_METHOD(Posix, gettid, "()I"), NATIVE_METHOD(Posix, getuid, "()I"), NATIVE_METHOD(Posix, if_indextoname, "(I)Ljava/lang/String;"), NATIVE_METHOD(Posix, inet_pton, "(ILjava/lang/String;)Ljava/net/InetAddress;"), NATIVE_METHOD(Posix, ioctlInetAddress, "(Ljava/io/FileDescriptor;ILjava/lang/String;)Ljava/net/InetAddress;"), NATIVE_METHOD(Posix, ioctlInt, "(Ljava/io/FileDescriptor;ILandroid/util/MutableInt;)I"), NATIVE_METHOD(Posix, isatty, "(Ljava/io/FileDescriptor;)Z"), NATIVE_METHOD(Posix, kill, "(II)V"), NATIVE_METHOD(Posix, lchown, "(Ljava/lang/String;II)V"), NATIVE_METHOD(Posix, link, "(Ljava/lang/String;Ljava/lang/String;)V"), NATIVE_METHOD(Posix, listen, "(Ljava/io/FileDescriptor;I)V"), NATIVE_METHOD(Posix, lseek, "(Ljava/io/FileDescriptor;JI)J"), NATIVE_METHOD(Posix, lstat, "(Ljava/lang/String;)Landroid/system/StructStat;"), NATIVE_METHOD(Posix, mincore, "(JJ[B)V"), NATIVE_METHOD(Posix, mkdir, 
"(Ljava/lang/String;I)V"), NATIVE_METHOD(Posix, mkfifo, "(Ljava/lang/String;I)V"), NATIVE_METHOD(Posix, mlock, "(JJ)V"), NATIVE_METHOD(Posix, mmap, "(JJIILjava/io/FileDescriptor;J)J"), NATIVE_METHOD(Posix, msync, "(JJI)V"), NATIVE_METHOD(Posix, munlock, "(JJ)V"), NATIVE_METHOD(Posix, munmap, "(JJ)V"), NATIVE_METHOD(Posix, open, "(Ljava/lang/String;II)Ljava/io/FileDescriptor;"), NATIVE_METHOD(Posix, pipe, "()[Ljava/io/FileDescriptor;"), NATIVE_METHOD(Posix, poll, "([Landroid/system/StructPollfd;I)I"), NATIVE_METHOD(Posix, posix_fallocate, "(Ljava/io/FileDescriptor;JJ)V"), NATIVE_METHOD(Posix, prctl, "(IJJJJ)I"), NATIVE_METHOD(Posix, preadBytes, "(Ljava/io/FileDescriptor;Ljava/lang/Object;IIJ)I"), NATIVE_METHOD(Posix, pwriteBytes, "(Ljava/io/FileDescriptor;Ljava/lang/Object;IIJ)I"), NATIVE_METHOD(Posix, readBytes, "(Ljava/io/FileDescriptor;Ljava/lang/Object;II)I"), NATIVE_METHOD(Posix, readlink, "(Ljava/lang/String;)Ljava/lang/String;"), NATIVE_METHOD(Posix, readv, "(Ljava/io/FileDescriptor;[Ljava/lang/Object;[I[I)I"), NATIVE_METHOD(Posix, recvfromBytes, "(Ljava/io/FileDescriptor;Ljava/lang/Object;IIILjava/net/InetSocketAddress;)I"), NATIVE_METHOD(Posix, remove, "(Ljava/lang/String;)V"), NATIVE_METHOD(Posix, rename, "(Ljava/lang/String;Ljava/lang/String;)V"), NATIVE_METHOD(Posix, sendfile, "(Ljava/io/FileDescriptor;Ljava/io/FileDescriptor;Landroid/util/MutableLong;J)J"), NATIVE_METHOD(Posix, sendtoBytes, "(Ljava/io/FileDescriptor;Ljava/lang/Object;IIILjava/net/InetAddress;I)I"), NATIVE_METHOD(Posix, setegid, "(I)V"), NATIVE_METHOD(Posix, setenv, "(Ljava/lang/String;Ljava/lang/String;Z)V"), NATIVE_METHOD(Posix, seteuid, "(I)V"), NATIVE_METHOD(Posix, setgid, "(I)V"), NATIVE_METHOD(Posix, setsid, "()I"), NATIVE_METHOD(Posix, setsockoptByte, "(Ljava/io/FileDescriptor;III)V"), NATIVE_METHOD(Posix, setsockoptIfreq, "(Ljava/io/FileDescriptor;IILjava/lang/String;)V"), NATIVE_METHOD(Posix, setsockoptInt, "(Ljava/io/FileDescriptor;III)V"), NATIVE_METHOD(Posix, 
setsockoptIpMreqn, "(Ljava/io/FileDescriptor;III)V"), NATIVE_METHOD(Posix, setsockoptGroupReq, "(Ljava/io/FileDescriptor;IILandroid/system/StructGroupReq;)V"), NATIVE_METHOD(Posix, setsockoptGroupSourceReq, "(Ljava/io/FileDescriptor;IILandroid/system/StructGroupSourceReq;)V"), NATIVE_METHOD(Posix, setsockoptLinger, "(Ljava/io/FileDescriptor;IILandroid/system/StructLinger;)V"), NATIVE_METHOD(Posix, setsockoptTimeval, "(Ljava/io/FileDescriptor;IILandroid/system/StructTimeval;)V"), NATIVE_METHOD(Posix, setuid, "(I)V"), NATIVE_METHOD(Posix, shutdown, "(Ljava/io/FileDescriptor;I)V"), NATIVE_METHOD(Posix, socket, "(III)Ljava/io/FileDescriptor;"), NATIVE_METHOD(Posix, socketpair, "(IIILjava/io/FileDescriptor;Ljava/io/FileDescriptor;)V"), NATIVE_METHOD(Posix, stat, "(Ljava/lang/String;)Landroid/system/StructStat;"), NATIVE_METHOD(Posix, statvfs, "(Ljava/lang/String;)Landroid/system/StructStatVfs;"), NATIVE_METHOD(Posix, strerror, "(I)Ljava/lang/String;"), NATIVE_METHOD(Posix, strsignal, "(I)Ljava/lang/String;"), NATIVE_METHOD(Posix, symlink, "(Ljava/lang/String;Ljava/lang/String;)V"), NATIVE_METHOD(Posix, sysconf, "(I)J"), NATIVE_METHOD(Posix, tcdrain, "(Ljava/io/FileDescriptor;)V"), NATIVE_METHOD(Posix, tcsendbreak, "(Ljava/io/FileDescriptor;I)V"), NATIVE_METHOD(Posix, umaskImpl, "(I)I"), NATIVE_METHOD(Posix, uname, "()Landroid/system/StructUtsname;"), NATIVE_METHOD(Posix, unsetenv, "(Ljava/lang/String;)V"), NATIVE_METHOD(Posix, waitpid, "(ILandroid/util/MutableInt;I)I"), NATIVE_METHOD(Posix, writeBytes, "(Ljava/io/FileDescriptor;Ljava/lang/Object;II)I"), NATIVE_METHOD(Posix, writev, "(Ljava/io/FileDescriptor;[Ljava/lang/Object;[I[I)I"), }; void register_libcore_io_Posix(JNIEnv* env) { jniRegisterNativeMethods(env, "libcore/io/Posix", gMethods, NELEM(gMethods)); }<|fim▁end|>
} static jfieldID addressFid = env->GetFieldID(JniConstants::inetSocketAddressClass, "addr", "Ljava/net/InetAddress;");
<|file_name|>ServiceContentTags.java<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2019 Contentful GmbH * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.contentful.java.cma; import com.contentful.java.cma.model.CMAArray; import com.contentful.java.cma.model.CMATag; import io.reactivex.Flowable; import retrofit2.Response; import retrofit2.http.GET; import retrofit2.http.PUT; import retrofit2.http.Path; import retrofit2.http.QueryMap; import retrofit2.http.Body; import retrofit2.http.DELETE; import java.util.Map; /** * Spaces Service.<|fim▁hole|> Flowable<CMAArray<CMATag>> fetchAll( @Path("space_id") String spaceId, @Path("environment_id") String environmentID, @QueryMap Map<String, String> query ); @PUT("/spaces/{space_id}/environments/{environment_id}/tags/{tag_id}") Flowable<CMATag> create( @Path("space_id") String spaceId, @Path("environment_id") String environmentID, @Path("tag_id") String tagId, @Body CMATag tag); @GET("/spaces/{space_id}/environments/{environment_id}/tags/{tag_id}") Flowable<CMATag> fetchOne( @Path("space_id") String spaceId, @Path("environment_id") String environmentID, @Path("tag_id") String tagId ); @PUT("/spaces/{space_id}/environments/{environment_id}/tags/{tag_id}") Flowable<CMATag> update( @Path("space_id") String spaceId, @Path("environment_id") String environmentID, @Path("tag_id") String tagId, @Body CMATag tag); @DELETE("/spaces/{space_id}/environments/{environment_id}/tags/{tag_id}") Flowable<Response<Void>> delete( 
@Path("space_id") String spaceId, @Path("environment_id") String environmentID, @Path("tag_id") String tagId); }<|fim▁end|>
*/ interface ServiceContentTags { @GET("/spaces/{space_id}/environments/{environment_id}/tags")
<|file_name|>doc.go<|end_file_name|><|fim▁begin|>// Copyright 2015 The Vanadium Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // // Package featuretests implements black-box tests for various Syncbase<|fim▁hole|><|fim▁end|>
// features. package featuretests
<|file_name|>chief_adapter.rs<|end_file_name|><|fim▁begin|>use Chief; use confectioner::Confectioner; pub struct ChiefAdapter { confectioner: Confectioner } impl ChiefAdapter { pub fn new() -> ChiefAdapter { ChiefAdapter{ confectioner: Confectioner::new() } } } impl Chief for ChiefAdapter { fn get_cost(&self) -> u32 { self.confectioner.get_cost_for_dinner() } fn make_dinner(&self) { self.confectioner.make_a_dinner(); } fn take_money(&self, money: u32) { self.confectioner.take_money_for_dinner(money); }<|fim▁hole|><|fim▁end|>
}
<|file_name|>doc.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2019 Philipp Weber // Use of this source code is governed by the MIT license<|fim▁hole|>It supports all the RESTful service endpoints and parameters of API version 2. */ package hibpgo<|fim▁end|>
// which can be found in the repositorys LICENSE file. /* Package hibpgo provides access to the "Have I been Pwned?" API from Troy Hunt (https://haveibeenpwned.com).
<|file_name|>CollectionReceiver.java<|end_file_name|><|fim▁begin|>/* * Copyright 2016 TomeOkin * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.<|fim▁hole|> * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package app.receiver; import app.data.model.Collection; import org.slf4j.Logger; import org.slf4j.LoggerFactory; //@Component public class CollectionReceiver { private static final Logger logger = LoggerFactory.getLogger(CollectionReceiver.class); public void receiveMessage(Object message) { Collection collection = (Collection) message; logger.info("receive message: {}", collection.toString()); } }<|fim▁end|>
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0
<|file_name|>asn1_print.cpp<|end_file_name|><|fim▁begin|>/* * (C) 2014,2015,2017 Jack Lloyd * * Botan is released under the Simplified BSD License (see license.txt) */ #include <botan/asn1_print.h> #include <botan/bigint.h> #include <botan/hex.h> #include <botan/der_enc.h> #include <botan/ber_dec.h> #include <botan/asn1_time.h> #include <botan/asn1_str.h> #include <botan/oids.h> #include <iomanip> #include <sstream> #include <cctype> namespace Botan { namespace { bool all_printable_chars(const uint8_t bits[], size_t bits_len) { for(size_t i = 0; i != bits_len; ++i) { int c = bits[i]; if(c > 127) return false; if((std::isalnum(c) || c == '.' || c == ':' || c == '/' || c == '-') == false) return false; } return true; } /* * Special hack to handle GeneralName [2] and [6] (DNS name and URI) */ bool possibly_a_general_name(const uint8_t bits[], size_t bits_len) { if(bits_len <= 2) return false; if(bits[0] != 0x82 && bits[0] != 0x86) return false; if(bits[1] != bits_len - 2) return false; if(all_printable_chars(bits + 2, bits_len - 2) == false) return false; return true; } } std::string ASN1_Formatter::print(const uint8_t in[], size_t len) const { std::ostringstream output; print_to_stream(output, in, len); return output.str(); } void ASN1_Formatter::print_to_stream(std::ostream& output, const uint8_t in[], size_t len) const { BER_Decoder dec(in, len); decode(output, dec, 0); } void ASN1_Formatter::decode(std::ostream& output, BER_Decoder& decoder, size_t level) const { BER_Object obj = decoder.get_next_object(); while(obj.type_tag != NO_OBJECT) { const ASN1_Tag type_tag = obj.type_tag; const ASN1_Tag class_tag = obj.class_tag; const size_t length = obj.value.size(); /* hack to insert the tag+length back in front of the stuff now that we've gotten the type info */ DER_Encoder encoder; encoder.add_object(type_tag, class_tag, obj.value); const std::vector<uint8_t> bits = encoder.get_contents_unlocked(); BER_Decoder data(bits); if(class_tag & CONSTRUCTED) { BER_Decoder 
cons_info(obj.value); output << format(type_tag, class_tag, level, length, ""); decode(output, cons_info, level + 1); // recurse } else if((class_tag & APPLICATION) || (class_tag & CONTEXT_SPECIFIC)) { bool success_parsing_cs = false; if(m_print_context_specific) { try { if(possibly_a_general_name(bits.data(), bits.size())) { output << format(type_tag, class_tag, level, level, std::string(cast_uint8_ptr_to_char(&bits[2]), bits.size() - 2)); success_parsing_cs = true; } else { std::vector<uint8_t> inner_bits; data.decode(inner_bits, type_tag); BER_Decoder inner(inner_bits); std::ostringstream inner_data; decode(inner_data, inner, level + 1); // recurse output << inner_data.str(); success_parsing_cs = true; } } catch(...) { } } if(success_parsing_cs == false) { output << format(type_tag, class_tag, level, length, format_bin(type_tag, class_tag, bits)); } } else if(type_tag == OBJECT_ID) { OID oid; data.decode(oid); std::string out = OIDS::lookup(oid); if(out.empty()) { out = oid.as_string(); } else { out += " [" + oid.as_string() + "]"; } output << format(type_tag, class_tag, level, length, out); } else if(type_tag == INTEGER || type_tag == ENUMERATED) { BigInt number; if(type_tag == INTEGER) { data.decode(number); } else if(type_tag == ENUMERATED) { data.decode(number, ENUMERATED, class_tag); } const std::vector<uint8_t> rep = BigInt::encode(number, BigInt::Hexadecimal); std::string str; for(size_t i = 0; i != rep.size(); ++i) { str += static_cast<char>(rep[i]); } output << format(type_tag, class_tag, level, length, str); } else if(type_tag == BOOLEAN) {<|fim▁hole|> data.decode(boolean); output << format(type_tag, class_tag, level, length, (boolean ? 
"true" : "false")); } else if(type_tag == NULL_TAG) { output << format(type_tag, class_tag, level, length, ""); } else if(type_tag == OCTET_STRING || type_tag == BIT_STRING) { std::vector<uint8_t> decoded_bits; data.decode(decoded_bits, type_tag); try { BER_Decoder inner(decoded_bits); std::ostringstream inner_data; decode(inner_data, inner, level + 1); // recurse output << format(type_tag, class_tag, level, length, ""); output << inner_data.str(); } catch(...) { output << format(type_tag, class_tag, level, length, format_bin(type_tag, class_tag, decoded_bits)); } } else if(ASN1_String::is_string_type(type_tag)) { ASN1_String str; data.decode(str); output << format(type_tag, class_tag, level, length, str.value()); } else if(type_tag == UTC_TIME || type_tag == GENERALIZED_TIME) { X509_Time time; data.decode(time); output << format(type_tag, class_tag, level, length, time.readable_string()); } else { output << "Unknown ASN.1 tag class=" << static_cast<int>(class_tag) << " type=" << static_cast<int>(type_tag) << "\n";; } obj = decoder.get_next_object(); } } namespace { std::string format_type(ASN1_Tag type_tag, ASN1_Tag class_tag) { if(class_tag == UNIVERSAL) return asn1_tag_to_string(type_tag); if(class_tag == CONSTRUCTED && (type_tag == SEQUENCE || type_tag == SET)) return asn1_tag_to_string(type_tag); std::string name; if(class_tag & CONSTRUCTED) name += "cons "; name += "[" + std::to_string(type_tag) + "]"; if(class_tag & APPLICATION) { name += " appl"; } if(class_tag & CONTEXT_SPECIFIC) { name += " context"; } return name; } } std::string ASN1_Pretty_Printer::format(ASN1_Tag type_tag, ASN1_Tag class_tag, size_t level, size_t length, const std::string& value) const { bool should_skip = false; if(value.length() > m_print_limit) { should_skip = true; } if((type_tag == OCTET_STRING || type_tag == BIT_STRING) && value.length() > m_print_binary_limit) { should_skip = true; } level += m_initial_level; std::ostringstream oss; oss << " d=" << std::setw(2) << level << ", 
l=" << std::setw(4) << length << ":" << std::string(level + 1, ' ') << format_type(type_tag, class_tag); if(value != "" && !should_skip) { const size_t current_pos = static_cast<size_t>(oss.tellp()); const size_t spaces_to_align = (current_pos >= m_value_column) ? 1 : (m_value_column - current_pos); oss << std::string(spaces_to_align, ' ') << value; } oss << "\n"; return oss.str(); } std::string ASN1_Pretty_Printer::format_bin(ASN1_Tag /*type_tag*/, ASN1_Tag /*class_tag*/, const std::vector<uint8_t>& vec) const { if(all_printable_chars(vec.data(), vec.size())) { return std::string(cast_uint8_ptr_to_char(vec.data()), vec.size()); } else return hex_encode(vec); } }<|fim▁end|>
bool boolean;
<|file_name|>util.py<|end_file_name|><|fim▁begin|># urllib3/util.py # Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt) # # This module is part of urllib3 and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php from base64 import b64encode from collections import namedtuple from socket import error as SocketError from hashlib import md5, sha1 from binascii import hexlify, unhexlify try: from select import poll, POLLIN except ImportError: # `poll` doesn't exist on OSX and other platforms poll = False try: from select import select except ImportError: # `select` doesn't exist on AppEngine. select = False try: # Test for SSL features SSLContext = None HAS_SNI = False import ssl from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23 from ssl import SSLContext # Modern SSL? from ssl import HAS_SNI # Has SNI? except ImportError: pass from .packages import six from .exceptions import LocationParseError, SSLError class Url(namedtuple('Url', ['scheme', 'auth', 'host', 'port', 'path', 'query', 'fragment'])): """ Datastructure for representing an HTTP URL. Used as a return value for :func:`parse_url`. """ slots = () def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None, query=None, fragment=None): return super(Url, cls).__new__(cls, scheme, auth, host, port, path, query, fragment) @property def hostname(self): """For backwards-compatibility with urlparse. We're nice like that.""" return self.host @property def request_uri(self): """Absolute path including the query string.""" uri = self.path or '/' if self.query is not None: uri += '?' + self.query return uri def split_first(s, delims): """ Given a string and an iterable of delimiters, split on the first found delimiter. Return two split parts and the matched delimiter. If not found, then the first part is the full input string. 
Example: :: >>> split_first('foo/bar?baz', '?/=') ('foo', 'bar?baz', '/') >>> split_first('foo/bar?baz', '123') ('foo/bar?baz', '', None) Scales linearly with number of delims. Not ideal for large number of delims. """ min_idx = None min_delim = None for d in delims: idx = s.find(d) if idx < 0: continue if min_idx is None or idx < min_idx: min_idx = idx min_delim = d if min_idx is None or min_idx < 0: return s, '', None return s[:min_idx], s[min_idx+1:], min_delim def parse_url(url): """ Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is performed to parse incomplete urls. Fields not provided will be None. Partly backwards-compatible with :mod:`urlparse`. Example: :: >>> parse_url('http://google.com/mail/') Url(scheme='http', host='google.com', port=None, path='/', ...) >>> parse_url('google.com:80') Url(scheme=None, host='google.com', port=80, path=None, ...) >>> parse_url('/foo?bar') Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...) """ # While this code has overlap with stdlib's urlparse, it is much # simplified for our needs and less annoying. # Additionally, this imeplementations does silly things to be optimal # on CPython. scheme = None auth = None host = None port = None path = None fragment = None query = None # Scheme if '://' in url: scheme, url = url.split('://', 1) # Find the earliest Authority Terminator # (http://tools.ietf.org/html/rfc3986#section-3.2) url, path_, delim = split_first(url, ['/', '?', '#']) if delim: # Reassemble the path path = delim + path_ # Auth if '@' in url: auth, url = url.split('@', 1) # IPv6 if url and url[0] == '[': host, url = url[1:].split(']', 1) # Port if ':' in url: _host, port = url.split(':', 1) if not host:<|fim▁hole|> raise LocationParseError("Failed to parse: %s" % url) port = int(port) elif not host and url: host = url if not path: return Url(scheme, auth, host, port, path, query, fragment) # Fragment if '#' in path: path, fragment = path.split('#', 1) # Query if '?' 
in path: path, query = path.split('?', 1) return Url(scheme, auth, host, port, path, query, fragment) def get_host(url): """ Deprecated. Use :func:`.parse_url` instead. """ p = parse_url(url) return p.scheme or 'http', p.hostname, p.port def make_headers(keep_alive=None, accept_encoding=None, user_agent=None, basic_auth=None): """ Shortcuts for generating request headers. :param keep_alive: If ``True``, adds 'connection: keep-alive' header. :param accept_encoding: Can be a boolean, list, or string. ``True`` translates to 'gzip,deflate'. List will get joined by comma. String will be used as provided. :param user_agent: String representing the user-agent you want, such as "python-urllib3/0.6" :param basic_auth: Colon-separated username:password string for 'authorization: basic ...' auth header. Example: :: >>> make_headers(keep_alive=True, user_agent="Batman/1.0") {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'} >>> make_headers(accept_encoding=True) {'accept-encoding': 'gzip,deflate'} """ headers = {} if accept_encoding: if isinstance(accept_encoding, str): pass elif isinstance(accept_encoding, list): accept_encoding = ','.join(accept_encoding) else: accept_encoding = 'gzip,deflate' headers['accept-encoding'] = accept_encoding if user_agent: headers['user-agent'] = user_agent if keep_alive: headers['connection'] = 'keep-alive' if basic_auth: headers['authorization'] = 'Basic ' + \ b64encode(six.b(basic_auth)).decode('utf-8') return headers def is_connection_dropped(conn): # Platform-specific """ Returns True if the connection is dropped and should be closed. :param conn: :class:`httplib.HTTPConnection` object. Note: For platforms like AppEngine, this will always return ``False`` to let the platform handle connection recycling transparently for us. 
""" sock = getattr(conn, 'sock', False) if not sock: # Platform-specific: AppEngine return False if not poll: if not select: # Platform-specific: AppEngine return False try: return select([sock], [], [], 0.0)[0] except SocketError: return True # This version is better on platforms that support it. p = poll() p.register(sock, POLLIN) for (fno, ev) in p.poll(0.0): if fno == sock.fileno(): # Either data is buffered (bad), or the connection is dropped. return True def resolve_cert_reqs(candidate): """ Resolves the argument to a numeric constant, which can be passed to the wrap_socket function/method from the ssl module. Defaults to :data:`ssl.CERT_NONE`. If given a string it is assumed to be the name of the constant in the :mod:`ssl` module or its abbrevation. (So you can specify `REQUIRED` instead of `CERT_REQUIRED`. If it's neither `None` nor a string we assume it is already the numeric constant which can directly be passed to wrap_socket. """ if candidate is None: return CERT_NONE if isinstance(candidate, str): res = getattr(ssl, candidate, None) if res is None: res = getattr(ssl, 'CERT_' + candidate) return res return candidate def resolve_ssl_version(candidate): """ like resolve_cert_reqs """ if candidate is None: return PROTOCOL_SSLv23 if isinstance(candidate, str): res = getattr(ssl, candidate, None) if res is None: res = getattr(ssl, 'PROTOCOL_' + candidate) return res return candidate def assert_fingerprint(cert, fingerprint): """ Checks if given fingerprint matches the supplied certificate. :param cert: Certificate as bytes object. :param fingerprint: Fingerprint as string of hexdigits, can be interspersed by colons. """ # Maps the length of a digest to a possible hash function producing # this digest. 
hashfunc_map = { 16: md5, 20: sha1 } fingerprint = fingerprint.replace(':', '').lower() digest_length, rest = divmod(len(fingerprint), 2) if rest or digest_length not in hashfunc_map: raise SSLError('Fingerprint is of invalid length.') # We need encode() here for py32; works on py2 and p33. fingerprint_bytes = unhexlify(fingerprint.encode()) hashfunc = hashfunc_map[digest_length] cert_digest = hashfunc(cert).digest() if not cert_digest == fingerprint_bytes: raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".' .format(hexlify(fingerprint_bytes), hexlify(cert_digest))) def is_fp_closed(obj): """ Checks whether a given file-like object is closed. :param obj: The file-like object to check. """ if hasattr(obj, 'fp'): # Object is a container for another file-like object that gets released # on exhaustion (e.g. HTTPResponse) return obj.fp is None return obj.closed if SSLContext is not None: # Python 3.2+ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, ca_certs=None, server_hostname=None, ssl_version=None): """ All arguments except `server_hostname` have the same meaning as for :func:`ssl.wrap_socket` :param server_hostname: Hostname of the expected certificate """ context = SSLContext(ssl_version) context.verify_mode = cert_reqs if ca_certs: try: context.load_verify_locations(ca_certs) # Py32 raises IOError # Py33 raises FileNotFoundError except Exception as e: # Reraise as SSLError raise SSLError(e) if certfile: # FIXME: This block needs a test. context.load_cert_chain(certfile, keyfile) if HAS_SNI: # Platform-specific: OpenSSL with enabled SNI return context.wrap_socket(sock, server_hostname=server_hostname) return context.wrap_socket(sock) else: # Python 3.1 and earlier def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None, ca_certs=None, server_hostname=None, ssl_version=None): return wrap_socket(sock, keyfile=keyfile, certfile=certfile, ca_certs=ca_certs, cert_reqs=cert_reqs, ssl_version=ssl_version)<|fim▁end|>
host = _host if not port.isdigit():
<|file_name|>event.rs<|end_file_name|><|fim▁begin|>/* TOOD: Implement for other kqueue based systems */ use {Errno, Result}; #[cfg(not(target_os = "netbsd"))] use libc::{timespec, time_t, c_int, c_long, intptr_t, uintptr_t}; #[cfg(target_os = "netbsd")] use libc::{timespec, time_t, c_long, intptr_t, uintptr_t, size_t}; use libc; use std::os::unix::io::RawFd; use std::ptr; use std::mem; // Redefine kevent in terms of programmer-friendly enums and bitfields. #[derive(Clone, Copy)] #[repr(C)] #[allow(missing_debug_implementations)] pub struct KEvent { kevent: libc::kevent, } #[cfg(any(target_os = "dragonfly", target_os = "freebsd", target_os = "ios", target_os = "macos", target_os = "openbsd"))] type type_of_udata = *mut libc::c_void; #[cfg(any(target_os = "dragonfly", target_os = "freebsd", target_os = "ios", target_os = "macos"))] type type_of_data = intptr_t; #[cfg(any(target_os = "netbsd"))] type type_of_udata = intptr_t; #[cfg(any(target_os = "netbsd", target_os = "openbsd"))] type type_of_data = libc::int64_t; #[cfg(target_os = "netbsd")] type type_of_event_filter = u32; #[cfg(not(target_os = "netbsd"))] type type_of_event_filter = i16; libc_enum! { #[cfg_attr(target_os = "netbsd", repr(u32))] #[cfg_attr(not(target_os = "netbsd"), repr(i16))] pub enum EventFilter { EVFILT_AIO, /// Returns whenever there is no remaining data in the write buffer #[cfg(target_os = "freebsd")] EVFILT_EMPTY, #[cfg(target_os = "dragonfly")] EVFILT_EXCEPT, #[cfg(any(target_os = "dragonfly", target_os = "freebsd", target_os = "ios", target_os = "macos"))] EVFILT_FS, #[cfg(target_os = "freebsd")] EVFILT_LIO, #[cfg(any(target_os = "ios", target_os = "macos"))] EVFILT_MACHPORT, EVFILT_PROC, /// Returns events associated with the process referenced by a given /// process descriptor, created by `pdfork()`. The events to monitor are: /// /// - NOTE_EXIT: the process has exited. The exit status will be stored in data. 
#[cfg(target_os = "freebsd")] EVFILT_PROCDESC, EVFILT_READ, /// Returns whenever an asynchronous `sendfile()` call completes. #[cfg(target_os = "freebsd")] EVFILT_SENDFILE, EVFILT_SIGNAL, EVFILT_TIMER, #[cfg(any(target_os = "dragonfly", target_os = "freebsd", target_os = "ios", target_os = "macos"))] EVFILT_USER, #[cfg(any(target_os = "ios", target_os = "macos"))] EVFILT_VM, EVFILT_VNODE, EVFILT_WRITE, } } #[cfg(any(target_os = "dragonfly", target_os = "freebsd", target_os = "ios", target_os = "macos", target_os = "openbsd"))] pub type type_of_event_flag = u16; #[cfg(any(target_os = "netbsd"))] pub type type_of_event_flag = u32; libc_bitflags!{ pub struct EventFlag: type_of_event_flag { EV_ADD; EV_CLEAR; EV_DELETE; EV_DISABLE; // No released version of OpenBSD supports EV_DISPATCH or EV_RECEIPT. // These have been commited to the -current branch though and are // expected to be part of the OpenBSD 6.2 release in Nov 2017. // See: https://marc.info/?l=openbsd-tech&m=149621427511219&w=2 // https://github.com/rust-lang/libc/pull/613 #[cfg(any(target_os = "dragonfly", target_os = "freebsd", target_os = "ios", target_os = "macos", target_os = "netbsd"))] EV_DISPATCH; #[cfg(target_os = "freebsd")] EV_DROP; EV_ENABLE; EV_EOF; EV_ERROR; #[cfg(any(target_os = "macos", target_os = "ios"))] EV_FLAG0; EV_FLAG1; #[cfg(target_os = "dragonfly")] EV_NODATA; EV_ONESHOT; #[cfg(any(target_os = "macos", target_os = "ios"))] EV_OOBAND; #[cfg(any(target_os = "macos", target_os = "ios"))] EV_POLL; #[cfg(any(target_os = "dragonfly", target_os = "freebsd", target_os = "ios", target_os = "macos", target_os = "netbsd"))] EV_RECEIPT;<|fim▁hole|> libc_bitflags!( pub struct FilterFlag: u32 { #[cfg(any(target_os = "macos", target_os = "ios"))] NOTE_ABSOLUTE; NOTE_ATTRIB; NOTE_CHILD; NOTE_DELETE; #[cfg(target_os = "openbsd")] NOTE_EOF; NOTE_EXEC; NOTE_EXIT; #[cfg(any(target_os = "macos", target_os = "ios"))] NOTE_EXIT_REPARENTED; #[cfg(any(target_os = "macos", target_os = "ios"))] 
NOTE_EXITSTATUS; NOTE_EXTEND; #[cfg(any(target_os = "macos", target_os = "ios", target_os = "freebsd", target_os = "dragonfly"))] NOTE_FFAND; #[cfg(any(target_os = "macos", target_os = "ios", target_os = "freebsd", target_os = "dragonfly"))] NOTE_FFCOPY; #[cfg(any(target_os = "macos", target_os = "ios", target_os = "freebsd", target_os = "dragonfly"))] NOTE_FFCTRLMASK; #[cfg(any(target_os = "macos", target_os = "ios", target_os = "freebsd", target_os = "dragonfly"))] NOTE_FFLAGSMASK; #[cfg(any(target_os = "macos", target_os = "ios", target_os = "freebsd", target_os = "dragonfly"))] NOTE_FFNOP; #[cfg(any(target_os = "macos", target_os = "ios", target_os = "freebsd", target_os = "dragonfly"))] NOTE_FFOR; NOTE_FORK; NOTE_LINK; NOTE_LOWAT; #[cfg(target_os = "freebsd")] NOTE_MSECONDS; #[cfg(any(target_os = "macos", target_os = "ios"))] NOTE_NONE; #[cfg(any(target_os = "macos", target_os = "ios", target_os = "freebsd"))] NOTE_NSECONDS; #[cfg(target_os = "dragonfly")] NOTE_OOB; NOTE_PCTRLMASK; NOTE_PDATAMASK; #[cfg(any(target_os = "macos", target_os = "ios"))] NOTE_REAP; NOTE_RENAME; NOTE_REVOKE; #[cfg(any(target_os = "macos", target_os = "ios", target_os = "freebsd"))] NOTE_SECONDS; #[cfg(any(target_os = "macos", target_os = "ios"))] NOTE_SIGNAL; NOTE_TRACK; NOTE_TRACKERR; #[cfg(any(target_os = "macos", target_os = "ios", target_os = "freebsd", target_os = "dragonfly"))] NOTE_TRIGGER; #[cfg(target_os = "openbsd")] NOTE_TRUNCATE; #[cfg(any(target_os = "macos", target_os = "ios", target_os = "freebsd"))] NOTE_USECONDS; #[cfg(any(target_os = "macos", target_os = "ios"))] NOTE_VM_ERROR; #[cfg(any(target_os = "macos", target_os = "ios"))] NOTE_VM_PRESSURE; #[cfg(any(target_os = "macos", target_os = "ios"))] NOTE_VM_PRESSURE_SUDDEN_TERMINATE; #[cfg(any(target_os = "macos", target_os = "ios"))] NOTE_VM_PRESSURE_TERMINATE; NOTE_WRITE; } ); pub fn kqueue() -> Result<RawFd> { let res = unsafe { libc::kqueue() }; Errno::result(res) } // KEvent can't derive Send because on some 
operating systems, udata is defined // as a void*. However, KEvent's public API always treats udata as an intptr_t, // which is safe to Send. unsafe impl Send for KEvent { } impl KEvent { pub fn new(ident: uintptr_t, filter: EventFilter, flags: EventFlag, fflags:FilterFlag, data: intptr_t, udata: intptr_t) -> KEvent { KEvent { kevent: libc::kevent { ident: ident, filter: filter as type_of_event_filter, flags: flags.bits(), fflags: fflags.bits(), data: data as type_of_data, udata: udata as type_of_udata } } } pub fn ident(&self) -> uintptr_t { self.kevent.ident } pub fn filter(&self) -> EventFilter { unsafe { mem::transmute(self.kevent.filter as type_of_event_filter) } } pub fn flags(&self) -> EventFlag { EventFlag::from_bits(self.kevent.flags).unwrap() } pub fn fflags(&self) -> FilterFlag { FilterFlag::from_bits(self.kevent.fflags).unwrap() } pub fn data(&self) -> intptr_t { self.kevent.data as intptr_t } pub fn udata(&self) -> intptr_t { self.kevent.udata as intptr_t } } pub fn kevent(kq: RawFd, changelist: &[KEvent], eventlist: &mut [KEvent], timeout_ms: usize) -> Result<usize> { // Convert ms to timespec let timeout = timespec { tv_sec: (timeout_ms / 1000) as time_t, tv_nsec: ((timeout_ms % 1000) * 1_000_000) as c_long }; kevent_ts(kq, changelist, eventlist, Some(timeout)) } #[cfg(any(target_os = "macos", target_os = "ios", target_os = "freebsd", target_os = "dragonfly", target_os = "openbsd"))] type type_of_nchanges = c_int; #[cfg(target_os = "netbsd")] type type_of_nchanges = size_t; pub fn kevent_ts(kq: RawFd, changelist: &[KEvent], eventlist: &mut [KEvent], timeout_opt: Option<timespec>) -> Result<usize> { let res = unsafe { libc::kevent( kq, changelist.as_ptr() as *const libc::kevent, changelist.len() as type_of_nchanges, eventlist.as_mut_ptr() as *mut libc::kevent, eventlist.len() as type_of_nchanges, if let Some(ref timeout) = timeout_opt {timeout as *const timespec} else {ptr::null()}) }; Errno::result(res).map(|r| r as usize) } #[inline] pub fn 
ev_set(ev: &mut KEvent, ident: usize, filter: EventFilter, flags: EventFlag, fflags: FilterFlag, udata: intptr_t) { ev.kevent.ident = ident as uintptr_t; ev.kevent.filter = filter as type_of_event_filter; ev.kevent.flags = flags.bits(); ev.kevent.fflags = fflags.bits(); ev.kevent.data = 0; ev.kevent.udata = udata as type_of_udata; } #[test] fn test_struct_kevent() { let udata : intptr_t = 12345; let expected = libc::kevent{ident: 0xdead_beef, filter: libc::EVFILT_READ, flags: libc::EV_ONESHOT | libc::EV_ADD, fflags: libc::NOTE_CHILD | libc::NOTE_EXIT, data: 0x1337, udata: udata as type_of_udata}; let actual = KEvent::new(0xdead_beef, EventFilter::EVFILT_READ, EventFlag::EV_ONESHOT | EventFlag::EV_ADD, FilterFlag::NOTE_CHILD | FilterFlag::NOTE_EXIT, 0x1337, udata); assert!(expected.ident == actual.ident()); assert!(expected.filter == actual.filter() as type_of_event_filter); assert!(expected.flags == actual.flags().bits()); assert!(expected.fflags == actual.fflags().bits()); assert!(expected.data == actual.data() as type_of_data); assert!(expected.udata == actual.udata() as type_of_udata); assert!(mem::size_of::<libc::kevent>() == mem::size_of::<KEvent>()); }<|fim▁end|>
EV_SYSFLAGS; } }
<|file_name|>hash.rs<|end_file_name|><|fim▁begin|>#![feature(core)] extern crate core; #[cfg(test)] mod tests { use core::hash::SipHasher; use core::hash::Hasher; use core::hash::Hash; // pub trait FixedSizeArray<T> { // /// Converts the array to immutable slice // fn as_slice(&self) -> &[T]; // /// Converts the array to mutable slice // fn as_mut_slice(&mut self) -> &mut [T]; // } // macro_rules! array_impls { // ($($N:expr)+) => { // $( // #[unstable(feature = "core")] // impl<T> FixedSizeArray<T> for [T; $N] { // #[inline] // fn as_slice(&self) -> &[T] { // &self[..] // } // #[inline] // fn as_mut_slice(&mut self) -> &mut [T] { // &mut self[..] // } // } // // #[unstable(feature = "array_as_ref", // reason = "should ideally be implemented for all fixed-sized arrays")] // impl<T> AsRef<[T]> for [T; $N] { // #[inline] // fn as_ref(&self) -> &[T] { // &self[..] // } // } // // #[unstable(feature = "array_as_ref", // reason = "should ideally be implemented for all fixed-sized arrays")] // impl<T> AsMut<[T]> for [T; $N] { // #[inline] // fn as_mut(&mut self) -> &mut [T] { // &mut self[..] 
// }<|fim▁hole|> // fn clone(&self) -> [T; $N] { // *self // } // } // // #[stable(feature = "rust1", since = "1.0.0")] // impl<T: Hash> Hash for [T; $N] { // fn hash<H: hash::Hasher>(&self, state: &mut H) { // Hash::hash(&self[..], state) // } // } // // #[stable(feature = "rust1", since = "1.0.0")] // impl<T: fmt::Debug> fmt::Debug for [T; $N] { // fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { // fmt::Debug::fmt(&&self[..], f) // } // } // // #[stable(feature = "rust1", since = "1.0.0")] // impl<'a, T> IntoIterator for &'a [T; $N] { // type Item = &'a T; // type IntoIter = Iter<'a, T>; // // fn into_iter(self) -> Iter<'a, T> { // self.iter() // } // } // // #[stable(feature = "rust1", since = "1.0.0")] // impl<'a, T> IntoIterator for &'a mut [T; $N] { // type Item = &'a mut T; // type IntoIter = IterMut<'a, T>; // // fn into_iter(self) -> IterMut<'a, T> { // self.iter_mut() // } // } // // // NOTE: some less important impls are omitted to reduce code bloat // __impl_slice_eq1! { [A; $N], [B; $N] } // __impl_slice_eq2! { [A; $N], [B] } // __impl_slice_eq2! { [A; $N], &'b [B] } // __impl_slice_eq2! { [A; $N], &'b mut [B] } // // __impl_slice_eq2! { [A; $N], &'b [B; $N] } // // __impl_slice_eq2! 
{ [A; $N], &'b mut [B; $N] } // // #[stable(feature = "rust1", since = "1.0.0")] // impl<T:Eq> Eq for [T; $N] { } // // #[stable(feature = "rust1", since = "1.0.0")] // impl<T:PartialOrd> PartialOrd for [T; $N] { // #[inline] // fn partial_cmp(&self, other: &[T; $N]) -> Option<Ordering> { // PartialOrd::partial_cmp(&&self[..], &&other[..]) // } // #[inline] // fn lt(&self, other: &[T; $N]) -> bool { // PartialOrd::lt(&&self[..], &&other[..]) // } // #[inline] // fn le(&self, other: &[T; $N]) -> bool { // PartialOrd::le(&&self[..], &&other[..]) // } // #[inline] // fn ge(&self, other: &[T; $N]) -> bool { // PartialOrd::ge(&&self[..], &&other[..]) // } // #[inline] // fn gt(&self, other: &[T; $N]) -> bool { // PartialOrd::gt(&&self[..], &&other[..]) // } // } // // #[stable(feature = "rust1", since = "1.0.0")] // impl<T:Ord> Ord for [T; $N] { // #[inline] // fn cmp(&self, other: &[T; $N]) -> Ordering { // Ord::cmp(&&self[..], &&other[..]) // } // } // )+ // } // } // array_impls! { // 0 1 2 3 4 5 6 7 8 9 // 10 11 12 13 14 15 16 17 18 19 // 20 21 22 23 24 25 26 27 28 29 // 30 31 32 // } type T = i32; type H = SipHasher; // H: hash::Hasher #[test] fn hash_test1() { let mut state: H = <H>::new(); let finish: u64 = state.finish(); assert_eq!(finish, 0x1e924b9d737700d7); let array: [T; 27] = [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26 ]; array.hash::<H>(&mut state); let finish: u64 = state.finish(); assert_eq!(finish, 0x2a397bfd176cb36); } }<|fim▁end|>
// } // // #[stable(feature = "rust1", since = "1.0.0")] // impl<T:Copy> Clone for [T; $N] {
<|file_name|>SolutionVerifier.java<|end_file_name|><|fim▁begin|>/* * Licensed to GraphHopper GmbH under one or more contributor * license agreements. See the NOTICE file distributed with this work for * additional information regarding copyright ownership. * * GraphHopper GmbH licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and<|fim▁hole|>package com.graphhopper.jsprit.core.util; import java.util.Collection; import java.util.HashSet; import java.util.Set; import com.graphhopper.jsprit.core.algorithm.listener.AlgorithmEndsListener; import com.graphhopper.jsprit.core.problem.VehicleRoutingProblem; import com.graphhopper.jsprit.core.problem.job.Job; import com.graphhopper.jsprit.core.problem.solution.VehicleRoutingProblemSolution; import com.graphhopper.jsprit.core.problem.solution.route.VehicleRoute; public class SolutionVerifier implements AlgorithmEndsListener { @Override public void informAlgorithmEnds(VehicleRoutingProblem problem, Collection<VehicleRoutingProblemSolution> solutions) { for (VehicleRoutingProblemSolution solution : solutions) { Set<Job> jobsInSolution = new HashSet<Job>(); for (VehicleRoute route : solution.getRoutes()) { jobsInSolution.addAll(route.getTourActivities().getJobs()); } if (jobsInSolution.size() != problem.getJobs().size()) { throw new IllegalStateException("we are at the end of the algorithm and still have not found a valid solution." + "This cannot be."); } } } }<|fim▁end|>
* limitations under the License. */
<|file_name|>ModuleAutoTesterUtil.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- ''' Mepinta Copyright (c) 2011-2012, Joaquin G. Duo This file is part of Mepinta. Mepinta is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Mepinta is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with Mepinta. If not, see <http://www.gnu.org/licenses/>.<|fim▁hole|>from mepinta.testing.plugins_testing.base import ModuleAutoTesterBase class ModuleAutoTesterUtil(ModuleAutoTesterBase): pass def testModule(): from getDefaultContext import getDefaultContext context = getDefaultContext() if __name__ == "__main__": testModule()<|fim▁end|>
'''
<|file_name|>native.rs<|end_file_name|><|fim▁begin|>use std::fs::File; use std::io; use std::os::unix::fs::MetadataExt; use std::os::unix::io::AsRawFd; use nix::errno::Errno; use crate::util::io::io_err; mod sys { use nix::libc::c_int; <|fim▁hole|> #[link(name = "fallocate")] extern "C" { pub fn native_fallocate(fd: c_int, len: u64) -> c_int; } } pub fn is_sparse(f: &File) -> io::Result<bool> { let stat = f.metadata()?; Ok(stat.blocks() * stat.blksize() < stat.size()) } pub fn fallocate(f: &File, len: u64) -> io::Result<bool> { // We ignore the len here, if you actually have a u64 max, then you're kinda fucked either way. loop { match unsafe { sys::native_fallocate(f.as_raw_fd(), len) } { 0 => return Ok(true), -1 => match Errno::last() { Errno::EOPNOTSUPP | Errno::ENOSYS => { f.set_len(len)?; return Ok(false); } Errno::ENOSPC => { return io_err("Out of disk space!"); } Errno::EINTR => { continue; } e => { return io_err(e.desc()); } }, _ => unreachable!(), } } }<|fim▁end|>
<|file_name|>test_log_entries.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # This file is part of Shoop. # # Copyright (c) 2012-2015, Shoop Ltd. All rights reserved. # # This source code is licensed under the AGPLv3 license found in the # LICENSE file in the root directory of this source tree. import pytest from shoop.notify import Context from shoop_tests.notify.fixtures import get_initialized_test_event @pytest.mark.django_db def test_log_entries(): event = get_initialized_test_event()<|fim▁hole|> ctx.add_log_entry_on_log_target("blap", "blorr") order.add_log_entry("blep") assert ctx.log_entry_queryset.count() == n_log_entries + 2 # they got added assert order.log_entries.last().message == "blep" # it's what we added assert ctx.log_entry_queryset.last().message == "blep" # from this perspective too @pytest.mark.django_db @pytest.mark.parametrize("target_obj", (None, object())) def test_log_entry_on_unloggable_object(target_obj): event = get_initialized_test_event() event.variable_values["order"] = target_obj # invalidate log target _before_ creating context ctx = Context.from_event(event) n_log_entries = ctx.log_entry_queryset.count() ctx.add_log_entry_on_log_target("blap", "blorr") assert ctx.log_entry_queryset.count() == n_log_entries # couldn't add :(<|fim▁end|>
ctx = Context.from_event(event) order = ctx.get("order") n_log_entries = ctx.log_entry_queryset.count()
<|file_name|>custom-fields-edit-directive.js<|end_file_name|><|fim▁begin|>'use strict'; /** * @ngdoc directive * @name GO.Core.CustomFields.goCustomFieldsEdit * * @description * Prints custom fields form fieldsets. * * * @param {string} ngModel The customFields model property of the model the customFields belong to<|fim▁hole|> * * @example * <go-custom-fields-edit ng-model="contact.customFields" server-model="GO\Modules\GroupOffice\Contacts\Model\ContactCustomFields"></go-custom-fields-edit> */ angular.module('GO.Core').directive('goCustomFieldsEdit', [ '$templateCache', '$compile', 'GO.Core.Directives.CustomFields', function ($templateCache, $compile, CustomFields) { var buildTemplate = function (customFieldSetStore) { var tpl = ''; for (var i = 0, l = customFieldSetStore.items.length; i < l; i++) { var fieldSet = customFieldSetStore.items[i]; tpl += '<fieldset><h3>{{::"' + fieldSet.name + '" | goT}}</h3>'; for (var n = 0, cl = fieldSet.fields.length; n < cl; n++) { var field = fieldSet.fields[n]; tpl += buildFunctions[field.type](field); } tpl += '</fieldset>'; } return tpl; }; var buildFunctions = { formName: null, text: function (field) { return '<md-input-container class="md-block">\ <md-icon>star</md-icon>\ <label>{{::"' + field.name + '" | goT}}</label>\ <input name="' + field.databaseName + '" type="text" maxlength="' + field.data.maxLength + '" ng-model="goModel[\'' + field.databaseName + '\']" ng-required="' + (field.required ? 'true' : 'false') + '" />\ <md-hint>{{::"'+field.hintText+'" | goT}}</md-hint>\ <div ng-messages="formController.' 
+ field.databaseName + '.$error" role="alert">\ <div ng-message="required">\ {{::"This field is required" | goT}}\ </div>\ </div>\ </md-input-container>'; }, textarea: function (field) { return '<md-input-container class="md-block">\ <md-icon>star</md-icon>\ <label>{{::"' + field.name + '" | goT}}</label>\ <textarea id="' + field.databaseName + '" name="' + field.databaseName + '" maxlength="' + field.data.maxLength + '" ng-model="goModel[\'' + field.databaseName + '\']" ng-required="' + (field.required ? 'true' : 'false') + '"></textarea>\ <md-hint>{{::"'+field.hintText+'" | goT}}</md-hint>\ <div ng-messages="formController.' + field.databaseName + '.$error" role="alert">\ <div ng-message="required">\ {{::"This field is required" | goT}}\ </div>\ </div>\ </md-input-container>'; }, select: function (field) { var tpl = '<md-input-container class="md-block">\ <md-icon>star</md-icon>\ <label>{{::"' + field.name + '" | goT}}</label>\ <md-select name="' + field.databaseName + '" ng-model="goModel[\'' + field.databaseName + '\']" ng-required="' + (field.required ? 'true' : 'false') + '">'; for (var i = 0, l = field.data.options.length; i < l; i++) { tpl += '<md-option value="' + field.data.options[i] + '">{{::"' + field.data.options[i] + '" | goT}}</md-option>'; } tpl += '</md-select>\ <md-hint>{{::"'+field.hintText+'" | goT}}</md-hint>\ <div class="md-errors-spacer"></div>\ <div ng-messages="formController.' + field.databaseName + '.$error" role="alert">\ <div ng-message="required">\ {{::"This field is required" | goT}}\ </div>\ </div>'; tpl += '</md-input-container>'; return tpl; }, checkbox: function (field) { return '<md-input-container class="md-block">\ <md-checkbox id="cf_{{field.id}}" ng-model="goModel[\'' + field.databaseName + '\']" ng-required="' + (field.required ? 
'true' : 'false') + '"> {{::"' + field.name + '" | goT}}</md-checkbox>\ <md-hint>{{::"'+field.hintText+'" | goT}}</md-hint>\ </md-input-container>'; }, date: function (field) { return '<go-date-picker id="cf_{{field.id}}" name="dateOfBirth" hint="{{::\''+field.hintText+'\' | goT }}" label="' + field.name + '" ng-model="goModel[\'' + field.databaseName + '\']" ng-required="' + (field.required ? 'true' : 'false') + '"></go-date-picker>'; }, number: function (field) { return '<md-input-container class="md-block">\ <md-icon>star</md-icon>\ <label>{{::"' + field.name + '" | goT}}</label>\ <input go-number id="cf_{{field.id}}" name="' + field.databaseName + '" type="text" ng-model="goModel[\'' + field.databaseName + '\']" ng-required="' + (field.required ? 'true' : 'false') + '" />\ <md-hint>{{::"'+field.hintText+'" | goT}}</md-hint>\ <div ng-messages="formController.' + field.databaseName + '.$error" role="alert">\ <div ng-message="required">\ {{::"This field is required" | goT}}\ </div>\ </div>\ </md-input-container>'; } }; return { restrict: 'E', scope: { goModel: '=ngModel', serverModel: '@', formController: '=' }, link: function (scope, element, attrs) { var customFieldSetStore = CustomFields.getFieldSetStore(attrs.serverModel); //TODO load is called twice now customFieldSetStore.promise.then(function () { var tpl = buildTemplate(customFieldSetStore); element.html(tpl); $compile(element.contents())(scope); }); } }; }]);<|fim▁end|>
* @param {string} serverModel The custom fields server model.
<|file_name|>test_agents.py<|end_file_name|><|fim▁begin|># Copyright 2012 IBM Corp. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import webob.exc from nova.api.openstack.compute.contrib import agents from nova import context from nova import db from nova.db.sqlalchemy import models from nova import exception from nova import test fake_agents_list = [{'hypervisor': 'kvm', 'os': 'win', 'architecture': 'x86', 'version': '7.0', 'url': 'xxx://xxxx/xxx/xxx', 'md5hash': 'add6bb58e139be103324d04d82d8f545', 'id': 1}, {'hypervisor': 'kvm', 'os': 'linux', 'architecture': 'x86', 'version': '16.0', 'url': 'xxx://xxxx/xxx/xxx1', 'md5hash': 'add6bb58e139be103324d04d82d8f546', 'id': 2}, {'hypervisor': 'xen', 'os': 'linux', 'architecture': 'x86', 'version': '16.0', 'url': 'xxx://xxxx/xxx/xxx2', 'md5hash': 'add6bb58e139be103324d04d82d8f547', 'id': 3}, {'hypervisor': 'xen', 'os': 'win', 'architecture': 'power', 'version': '7.0', 'url': 'xxx://xxxx/xxx/xxx3', 'md5hash': 'add6bb58e139be103324d04d82d8f548', 'id': 4}, ] def fake_agent_build_get_all(context, hypervisor): agent_build_all = [] for agent in fake_agents_list: if hypervisor and hypervisor != agent['hypervisor']: continue agent_build_ref = models.AgentBuild() agent_build_ref.update(agent) agent_build_all.append(agent_build_ref) return agent_build_all def fake_agent_build_update(context, agent_build_id, values): pass def fake_agent_build_destroy(context, agent_update_id): pass def fake_agent_build_create(context, values): values['id'] = 1 
agent_build_ref = models.AgentBuild() agent_build_ref.update(values) return agent_build_ref class FakeRequest(object): environ = {"nova.context": context.get_admin_context()} GET = {} class FakeRequestWithHypervisor(object): environ = {"nova.context": context.get_admin_context()} GET = {'hypervisor': 'kvm'} class AgentsTest(test.NoDBTestCase): def setUp(self): super(AgentsTest, self).setUp() self.stubs.Set(db, "agent_build_get_all", fake_agent_build_get_all) self.stubs.Set(db, "agent_build_update", fake_agent_build_update) self.stubs.Set(db, "agent_build_destroy", fake_agent_build_destroy) self.stubs.Set(db, "agent_build_create", fake_agent_build_create) self.context = context.get_admin_context() self.controller = agents.AgentController() def test_agents_create(self): req = FakeRequest() body = {'agent': {'hypervisor': 'kvm', 'os': 'win', 'architecture': 'x86', 'version': '7.0', 'url': 'xxx://xxxx/xxx/xxx', 'md5hash': 'add6bb58e139be103324d04d82d8f545'}} response = {'agent': {'hypervisor': 'kvm', 'os': 'win', 'architecture': 'x86', 'version': '7.0', 'url': 'xxx://xxxx/xxx/xxx', 'md5hash': 'add6bb58e139be103324d04d82d8f545', 'agent_id': 1}} res_dict = self.controller.create(req, body) self.assertEqual(res_dict, response) def test_agents_create_key_error(self): req = FakeRequest() body = {'agent': {'hypervisordummy': 'kvm', 'os': 'win', 'architecture': 'x86', 'version': '7.0', 'url': 'xxx://xxxx/xxx/xxx', 'md5hash': 'add6bb58e139be103324d04d82d8f545'}} self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create, req, body) def test_agents_create_with_existed_agent(self): def fake_agent_build_create_with_exited_agent(context, values): raise exception.AgentBuildExists(**values) self.stubs.Set(db, 'agent_build_create', fake_agent_build_create_with_exited_agent) req = FakeRequest() body = {'agent': {'hypervisor': 'kvm', 'os': 'win', 'architecture': 'x86', 'version': '7.0', 'url': 'xxx://xxxx/xxx/xxx', 'md5hash': 'add6bb58e139be103324d04d82d8f545'}} 
self.assertRaises(webob.exc.HTTPConflict, self.controller.create, req, body=body) def _test_agents_create_with_invalid_length(self, key): req = FakeRequest() body = {'agent': {'hypervisor': 'kvm', 'os': 'win', 'architecture': 'x86', 'version': '7.0', 'url': 'xxx://xxxx/xxx/xxx', 'md5hash': 'add6bb58e139be103324d04d82d8f545'}} body['agent'][key] = 'x' * 256 self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create, req, body) def test_agents_create_with_invalid_length_hypervisor(self): self._test_agents_create_with_invalid_length('hypervisor') def test_agents_create_with_invalid_length_os(self): self._test_agents_create_with_invalid_length('os') def test_agents_create_with_invalid_length_architecture(self): self._test_agents_create_with_invalid_length('architecture') def test_agents_create_with_invalid_length_version(self): self._test_agents_create_with_invalid_length('version') def test_agents_create_with_invalid_length_url(self): self._test_agents_create_with_invalid_length('url') def test_agents_create_with_invalid_length_md5hash(self): self._test_agents_create_with_invalid_length('md5hash') def test_agents_delete(self): req = FakeRequest() self.controller.delete(req, 1) def test_agents_list(self): req = FakeRequest() res_dict = self.controller.index(req) agents_list = [{'hypervisor': 'kvm', 'os': 'win', 'architecture': 'x86', 'version': '7.0', 'url': 'xxx://xxxx/xxx/xxx',<|fim▁hole|> 'version': '16.0', 'url': 'xxx://xxxx/xxx/xxx1', 'md5hash': 'add6bb58e139be103324d04d82d8f546', 'agent_id': 2}, {'hypervisor': 'xen', 'os': 'linux', 'architecture': 'x86', 'version': '16.0', 'url': 'xxx://xxxx/xxx/xxx2', 'md5hash': 'add6bb58e139be103324d04d82d8f547', 'agent_id': 3}, {'hypervisor': 'xen', 'os': 'win', 'architecture': 'power', 'version': '7.0', 'url': 'xxx://xxxx/xxx/xxx3', 'md5hash': 'add6bb58e139be103324d04d82d8f548', 'agent_id': 4}, ] self.assertEqual(res_dict, {'agents': agents_list}) def test_agents_list_with_hypervisor(self): req = 
FakeRequestWithHypervisor() res_dict = self.controller.index(req) response = [{'hypervisor': 'kvm', 'os': 'win', 'architecture': 'x86', 'version': '7.0', 'url': 'xxx://xxxx/xxx/xxx', 'md5hash': 'add6bb58e139be103324d04d82d8f545', 'agent_id': 1}, {'hypervisor': 'kvm', 'os': 'linux', 'architecture': 'x86', 'version': '16.0', 'url': 'xxx://xxxx/xxx/xxx1', 'md5hash': 'add6bb58e139be103324d04d82d8f546', 'agent_id': 2}, ] self.assertEqual(res_dict, {'agents': response}) def test_agents_update(self): req = FakeRequest() body = {'para': {'version': '7.0', 'url': 'xxx://xxxx/xxx/xxx', 'md5hash': 'add6bb58e139be103324d04d82d8f545'}} response = {'agent': {'agent_id': 1, 'version': '7.0', 'url': 'xxx://xxxx/xxx/xxx', 'md5hash': 'add6bb58e139be103324d04d82d8f545'}} res_dict = self.controller.update(req, 1, body) self.assertEqual(res_dict, response) def test_agents_update_key_error(self): req = FakeRequest() body = {'para': {'versiondummy': '7.0', 'url': 'xxx://xxxx/xxx/xxx', 'md5hash': 'add6bb58e139be103324d04d82d8f545'}} self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update, req, 1, body) def test_agents_update_value_error(self): req = FakeRequest() body = {'para': {'version': '7.0', 'url': 1111, 'md5hash': 'add6bb58e139be103324d04d82d8f545'}} self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update, req, 1, body) def _test_agents_update_with_invalid_length(self, key): req = FakeRequest() body = {'para': {'version': '7.0', 'url': 'xxx://xxxx/xxx/xxx', 'md5hash': 'add6bb58e139be103324d04d82d8f545'}} body['para'][key] = 'x' * 256 self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update, req, 1, body) def test_agents_update_with_invalid_length_version(self): self._test_agents_update_with_invalid_length('version') def test_agents_update_with_invalid_length_url(self): self._test_agents_update_with_invalid_length('url') def test_agents_update_with_invalid_length_md5hash(self): self._test_agents_update_with_invalid_length('md5hash')<|fim▁end|>
'md5hash': 'add6bb58e139be103324d04d82d8f545', 'agent_id': 1}, {'hypervisor': 'kvm', 'os': 'linux', 'architecture': 'x86',
<|file_name|>test-amp-vk.js<|end_file_name|><|fim▁begin|>/** * Copyright 2017 The AMP HTML Authors. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS-IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ const POST_PARAMS = { 'embedtype': 'post', 'hash': 'Yc8_Z9pnpg8aKMZbVcD-jK45eAk', 'owner-id': '1', 'post-id': '45616', }; const POLL_PARAMS = { 'embedtype': 'poll', 'api-id': '6183531', 'poll-id': '274086843_1a2a465f60fff4699f', }; import '../amp-vk'; import {Layout} from '../../../../src/layout'; import {Resource} from '../../../../src/service/resource'; describes.realWin('amp-vk', { amp: { extensions: ['amp-vk'], }, }, env => { let win, doc; beforeEach(() => { win = env.win; doc = win.document; }); function createAmpVkElement(dataParams, layout) { const element = doc.createElement('amp-vk'); for (const param in dataParams) { element.setAttribute(`data-${param}`, dataParams[param]); } element.setAttribute('width', 500); element.setAttribute('height', 300); if (layout) { element.setAttribute('layout', layout); } doc.body.appendChild(element); return element.build().then(() => { const resource = Resource.forElement(element); resource.measure(); return element.layoutCallback(); }).then(() => element); } it('requires data-embedtype', () => { const params = Object.assign({}, POST_PARAMS); delete params['embedtype']; return createAmpVkElement(params).should.eventually.be.rejectedWith( /The data-embedtype attribute is required for/); }); it('removes iframe after unlayoutCallback', () => { return 
createAmpVkElement(POST_PARAMS).then(vkPost => { const iframe = vkPost.querySelector('iframe'); expect(iframe).to.not.be.null; const obj = vkPost.implementation_; obj.unlayoutCallback(); expect(vkPost.querySelector('iframe')).to.be.null; expect(obj.iframe_).to.be.null; expect(obj.unlayoutOnPause()).to.be.true; }); }); // Post tests it('post::requires data-hash', () => { const params = Object.assign({}, POST_PARAMS); delete params['hash']; return createAmpVkElement(params).should.eventually.be.rejectedWith( /The data-hash attribute is required for/); }); it('post::requires data-owner-id', () => { const params = Object.assign({}, POST_PARAMS); delete params['owner-id']; return createAmpVkElement(params).should.eventually.be.rejectedWith( /The data-owner-id attribute is required for/); }); it('post::requires data-post-id', () => { const params = Object.assign({}, POST_PARAMS); delete params['post-id']; return createAmpVkElement(params).should.eventually.be.rejectedWith( /The data-post-id attribute is required for/); }); it('post::renders iframe in amp-vk', () => { return createAmpVkElement(POST_PARAMS).then(vkPost => { const iframe = vkPost.querySelector('iframe'); expect(iframe).to.not.be.null; }); }); it('post::renders responsively', () => { return createAmpVkElement(POST_PARAMS, Layout.RESPONSIVE).then(vkPost => { const iframe = vkPost.querySelector('iframe'); expect(iframe).to.not.be.null; expect(iframe.className).to.match(/i-amphtml-fill-content/); }); }); it('post::sets correct src url to the vk iFrame', () => { return createAmpVkElement(POST_PARAMS, Layout.RESPONSIVE).then(vkPost => { const impl = vkPost.implementation_; const iframe = vkPost.querySelector('iframe'); const referrer = encodeURIComponent(vkPost.ownerDocument.referrer); const url = encodeURIComponent( vkPost.ownerDocument.location.href.replace(/#.*$/, '') ); impl.onLayoutMeasure(); const startWidth = impl.getLayoutWidth(); const correctIFrameSrc = 
`https://vk.com/widget_post.php?app=0&width=100%25\ &_ver=1&owner_id=1&post_id=45616&hash=Yc8_Z9pnpg8aKMZbVcD-jK45eAk&amp=1\ &startWidth=${startWidth}&url=${url}&referrer=${referrer}&title=AMP%20Post`; expect(iframe).to.not.be.null; const timeArgPosition = iframe.src.lastIndexOf('&'); const iframeSrcWithoutTime = iframe.src.substr(0, timeArgPosition); expect(iframeSrcWithoutTime).to.equal(correctIFrameSrc); }); }); // Poll tests it('poll::requires data-api-id', () => { const params = Object.assign({}, POLL_PARAMS); delete params['api-id'];<|fim▁hole|> }); it('poll::requires data-poll-id', () => { const params = Object.assign({}, POLL_PARAMS); delete params['poll-id']; return createAmpVkElement(params).should.eventually.be.rejectedWith( /The data-poll-id attribute is required for/); }); it('poll::renders iframe in amp-vk', () => { return createAmpVkElement(POLL_PARAMS).then(vkPoll => { const iframe = vkPoll.querySelector('iframe'); expect(iframe).to.not.be.null; }); }); it('poll::renders responsively', () => { return createAmpVkElement(POLL_PARAMS, Layout.RESPONSIVE).then(vkPoll => { const iframe = vkPoll.querySelector('iframe'); expect(iframe).to.not.be.null; expect(iframe.className).to.match(/i-amphtml-fill-content/); }); }); it('poll::sets correct src url to the vk iFrame', () => { return createAmpVkElement(POLL_PARAMS, Layout.RESPONSIVE).then(vkPoll => { const iframe = vkPoll.querySelector('iframe'); const referrer = encodeURIComponent(vkPoll.ownerDocument.referrer); const url = encodeURIComponent( vkPoll.ownerDocument.location.href.replace(/#.*$/, '') ); const correctIFrameSrc = `https://vk.com/al_widget_poll.php?\ app=6183531&width=100%25&_ver=1&poll_id=274086843_1a2a465f60fff4699f&amp=1\ &url=${url}&title=AMP%20Poll&description=&referrer=${referrer}`; expect(iframe).to.not.be.null; const timeArgPosition = iframe.src.lastIndexOf('&'); const iframeSrcWithoutTime = iframe.src.substr(0, timeArgPosition); expect(iframeSrcWithoutTime).to.equal(correctIFrameSrc); 
}); }); it('both::resizes amp-vk element in response to postmessages', () => { return createAmpVkElement(POLL_PARAMS).then(vkPoll => { const impl = vkPoll.implementation_; const iframe = vkPoll.querySelector('iframe'); const changeHeight = sandbox.spy(impl, 'changeHeight'); const fakeHeight = 555; expect(iframe).to.not.be.null; generatePostMessage(vkPoll, iframe, fakeHeight); expect(changeHeight).to.be.calledOnce; expect(changeHeight.firstCall.args[0]).to.equal(fakeHeight); }); }); function generatePostMessage(ins, iframe, height) { ins.implementation_.handleVkIframeMessage_({ origin: 'https://vk.com', source: iframe.contentWindow, data: JSON.stringify([ 'resize', [height], ]), }); } });<|fim▁end|>
return createAmpVkElement(params).should.eventually.be.rejectedWith( /The data-api-id attribute is required for/);
<|file_name|>exceptions.py<|end_file_name|><|fim▁begin|><|fim▁hole|> 400: "Bad request sent to search API ({0})", 401: "Incorrect API Key ({0})", 403: "Correct API but request refused ({0})", 404: "Bad request sent to search API ({0})"} class SearchException(Exception): """ Abstract class representing an ifind search exception. """ def __init__(self, module, message): """ SearchException constructor. Args: module (str): name of module/class that's raising exception message (str): exception message to be displayed Usage: raise SearchException("Test", "this is an error") """ message = "{0} - {1}".format(module, message) Exception.__init__(self, message) class EngineConnectionException(SearchException): """ Thrown when an Engine connectivity error occurs. Returns specific response message if status code specified. """ def __init__(self, engine, message, code=None): """ EngineException constructor. Args: engine (str): name of engine that's raising exception message (str): exception message to be displayed (ignored usually here) Kwargs: code (int): response status code of issued request Usage: raise EngineException("Bing", "", code=200) """ self.message = message self.code = code if code: self.message = ERROR.get(code, ERROR['default']).format(self.code) SearchException.__init__(self, engine, self.message) class EngineLoadException(SearchException): """ Thrown when an Engine can't be dynamically loaded. """ pass class EngineAPIKeyException(SearchException): """ Thrown when an Engine's API key hasn't been provided. """ pass class QueryParamException(SearchException): """ Thrown when a query parameters incompatible or missing. """ pass class CacheConnectionException(SearchException): """ Thrown when cache connectivity error occurs. """ pass class InvalidQueryException(SearchException): """ Thrown when an invalid query is passed to engine's search method. """ pass class RateLimitException(SearchException): """ Thrown when an engine's request rate limit has been exceeded. 
""" pass<|fim▁end|>
# TODO When raising an exception pass a lambda function, the function being the module/path/name thing ERROR = {'default': "Unknown engine error ({0})",
<|file_name|>ingress.go<|end_file_name|><|fim▁begin|>/* Copyright 2017 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ <|fim▁hole|>package v1beta1 import ( "k8s.io/apimachinery/pkg/api/errors" "k8s.io/apimachinery/pkg/labels" "k8s.io/client-go/tools/cache" v1beta1 "k8s.io/kubernetes/pkg/apis/extensions/v1beta1" ) // IngressLister helps list Ingresses. type IngressLister interface { // List lists all Ingresses in the indexer. List(selector labels.Selector) (ret []*v1beta1.Ingress, err error) // Ingresses returns an object that can list and get Ingresses. Ingresses(namespace string) IngressNamespaceLister IngressListerExpansion } // ingressLister implements the IngressLister interface. type ingressLister struct { indexer cache.Indexer } // NewIngressLister returns a new IngressLister. func NewIngressLister(indexer cache.Indexer) IngressLister { return &ingressLister{indexer: indexer} } // List lists all Ingresses in the indexer. func (s *ingressLister) List(selector labels.Selector) (ret []*v1beta1.Ingress, err error) { err = cache.ListAll(s.indexer, selector, func(m interface{}) { ret = append(ret, m.(*v1beta1.Ingress)) }) return ret, err } // Ingresses returns an object that can list and get Ingresses. func (s *ingressLister) Ingresses(namespace string) IngressNamespaceLister { return ingressNamespaceLister{indexer: s.indexer, namespace: namespace} } // IngressNamespaceLister helps list and get Ingresses. 
type IngressNamespaceLister interface { // List lists all Ingresses in the indexer for a given namespace. List(selector labels.Selector) (ret []*v1beta1.Ingress, err error) // Get retrieves the Ingress from the indexer for a given namespace and name. Get(name string) (*v1beta1.Ingress, error) IngressNamespaceListerExpansion } // ingressNamespaceLister implements the IngressNamespaceLister // interface. type ingressNamespaceLister struct { indexer cache.Indexer namespace string } // List lists all Ingresses in the indexer for a given namespace. func (s ingressNamespaceLister) List(selector labels.Selector) (ret []*v1beta1.Ingress, err error) { err = cache.ListAllByNamespace(s.indexer, s.namespace, selector, func(m interface{}) { ret = append(ret, m.(*v1beta1.Ingress)) }) return ret, err } // Get retrieves the Ingress from the indexer for a given namespace and name. func (s ingressNamespaceLister) Get(name string) (*v1beta1.Ingress, error) { obj, exists, err := s.indexer.GetByKey(s.namespace + "/" + name) if err != nil { return nil, err } if !exists { return nil, errors.NewNotFound(v1beta1.Resource("ingress"), name) } return obj.(*v1beta1.Ingress), nil }<|fim▁end|>
// This file was automatically generated by lister-gen
<|file_name|>instr_vdivpd.rs<|end_file_name|><|fim▁begin|>use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode}; use ::RegType::*; use ::instruction_def::*; use ::Operand::*; use ::Reg::*; use ::RegScale::*; use ::test::run_test; #[test] fn vdivpd_1() { run_test(&Instruction { mnemonic: Mnemonic::VDIVPD, operand1: Some(Direct(XMM7)), operand2: Some(Direct(XMM3)), operand3: Some(Direct(XMM4)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 225, 94, 252], OperandSize::Dword) } #[test] fn vdivpd_2() { run_test(&Instruction { mnemonic: Mnemonic::VDIVPD, operand1: Some(Direct(XMM4)), operand2: Some(Direct(XMM6)), operand3: Some(IndirectScaledIndexed(ECX, EAX, Eight, Some(OperandSize::Xmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 201, 94, 36, 193], OperandSize::Dword) } #[test] fn vdivpd_3() { run_test(&Instruction { mnemonic: Mnemonic::VDIVPD, operand1: Some(Direct(XMM3)), operand2: Some(Direct(XMM0)), operand3: Some(Direct(XMM2)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 249, 94, 218], OperandSize::Qword)<|fim▁hole|>} #[test] fn vdivpd_4() { run_test(&Instruction { mnemonic: Mnemonic::VDIVPD, operand1: Some(Direct(XMM1)), operand2: Some(Direct(XMM2)), operand3: Some(IndirectScaledIndexedDisplaced(RSI, RAX, Two, 1467073025, Some(OperandSize::Xmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 233, 94, 140, 70, 1, 194, 113, 87], OperandSize::Qword) } #[test] fn vdivpd_5() { run_test(&Instruction { mnemonic: Mnemonic::VDIVPD, operand1: Some(Direct(YMM6)), operand2: Some(Direct(YMM7)), operand3: Some(Direct(YMM0)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, 
broadcast: None }, &[197, 197, 94, 240], OperandSize::Dword) } #[test] fn vdivpd_6() { run_test(&Instruction { mnemonic: Mnemonic::VDIVPD, operand1: Some(Direct(YMM4)), operand2: Some(Direct(YMM4)), operand3: Some(IndirectScaledIndexedDisplaced(EDI, ECX, Four, 527411030, Some(OperandSize::Ymmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 221, 94, 164, 143, 86, 167, 111, 31], OperandSize::Dword) } #[test] fn vdivpd_7() { run_test(&Instruction { mnemonic: Mnemonic::VDIVPD, operand1: Some(Direct(YMM6)), operand2: Some(Direct(YMM7)), operand3: Some(Direct(YMM6)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 197, 94, 246], OperandSize::Qword) } #[test] fn vdivpd_8() { run_test(&Instruction { mnemonic: Mnemonic::VDIVPD, operand1: Some(Direct(YMM7)), operand2: Some(Direct(YMM3)), operand3: Some(IndirectScaledIndexed(RDX, RAX, Eight, Some(OperandSize::Ymmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[197, 229, 94, 60, 194], OperandSize::Qword) } #[test] fn vdivpd_9() { run_test(&Instruction { mnemonic: Mnemonic::VDIVPD, operand1: Some(Direct(XMM1)), operand2: Some(Direct(XMM5)), operand3: Some(Direct(XMM1)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K6), broadcast: None }, &[98, 241, 213, 142, 94, 201], OperandSize::Dword) } #[test] fn vdivpd_10() { run_test(&Instruction { mnemonic: Mnemonic::VDIVPD, operand1: Some(Direct(XMM4)), operand2: Some(Direct(XMM7)), operand3: Some(IndirectScaledIndexed(ECX, EAX, Four, Some(OperandSize::Xmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K7), broadcast: None }, &[98, 241, 197, 143, 94, 36, 129], OperandSize::Dword) } #[test] fn vdivpd_11() 
{ run_test(&Instruction { mnemonic: Mnemonic::VDIVPD, operand1: Some(Direct(XMM4)), operand2: Some(Direct(XMM5)), operand3: Some(Indirect(ECX, Some(OperandSize::Qword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K4), broadcast: Some(BroadcastMode::Broadcast1To2) }, &[98, 241, 213, 156, 94, 33], OperandSize::Dword) } #[test] fn vdivpd_12() { run_test(&Instruction { mnemonic: Mnemonic::VDIVPD, operand1: Some(Direct(XMM14)), operand2: Some(Direct(XMM20)), operand3: Some(Direct(XMM20)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K7), broadcast: None }, &[98, 49, 221, 135, 94, 244], OperandSize::Qword) } #[test] fn vdivpd_13() { run_test(&Instruction { mnemonic: Mnemonic::VDIVPD, operand1: Some(Direct(XMM21)), operand2: Some(Direct(XMM25)), operand3: Some(IndirectScaledDisplaced(RDX, Four, 1432505095, Some(OperandSize::Xmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K7), broadcast: None }, &[98, 225, 181, 135, 94, 44, 149, 7, 75, 98, 85], OperandSize::Qword) } #[test] fn vdivpd_14() { run_test(&Instruction { mnemonic: Mnemonic::VDIVPD, operand1: Some(Direct(XMM3)), operand2: Some(Direct(XMM2)), operand3: Some(IndirectScaledIndexed(RBX, RCX, Two, Some(OperandSize::Qword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K3), broadcast: Some(BroadcastMode::Broadcast1To2) }, &[98, 241, 237, 155, 94, 28, 75], OperandSize::Qword) } #[test] fn vdivpd_15() { run_test(&Instruction { mnemonic: Mnemonic::VDIVPD, operand1: Some(Direct(YMM5)), operand2: Some(Direct(YMM2)), operand3: Some(Direct(YMM4)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K7), broadcast: None }, &[98, 241, 237, 175, 94, 
236], OperandSize::Dword) } #[test] fn vdivpd_16() { run_test(&Instruction { mnemonic: Mnemonic::VDIVPD, operand1: Some(Direct(YMM4)), operand2: Some(Direct(YMM1)), operand3: Some(IndirectScaledDisplaced(ECX, Four, 10847994, Some(OperandSize::Ymmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K6), broadcast: None }, &[98, 241, 245, 174, 94, 36, 141, 250, 134, 165, 0], OperandSize::Dword) } #[test] fn vdivpd_17() { run_test(&Instruction { mnemonic: Mnemonic::VDIVPD, operand1: Some(Direct(YMM2)), operand2: Some(Direct(YMM6)), operand3: Some(IndirectScaledIndexed(EBX, EDI, Eight, Some(OperandSize::Qword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K2), broadcast: Some(BroadcastMode::Broadcast1To4) }, &[98, 241, 205, 186, 94, 20, 251], OperandSize::Dword) } #[test] fn vdivpd_18() { run_test(&Instruction { mnemonic: Mnemonic::VDIVPD, operand1: Some(Direct(YMM29)), operand2: Some(Direct(YMM27)), operand3: Some(Direct(YMM26)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K1), broadcast: None }, &[98, 1, 165, 161, 94, 234], OperandSize::Qword) } #[test] fn vdivpd_19() { run_test(&Instruction { mnemonic: Mnemonic::VDIVPD, operand1: Some(Direct(YMM8)), operand2: Some(Direct(YMM10)), operand3: Some(IndirectScaledIndexedDisplaced(RAX, RDX, Four, 993070894, Some(OperandSize::Ymmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K6), broadcast: None }, &[98, 113, 173, 174, 94, 132, 144, 46, 15, 49, 59], OperandSize::Qword) } #[test] fn vdivpd_20() { run_test(&Instruction { mnemonic: Mnemonic::VDIVPD, operand1: Some(Direct(YMM10)), operand2: Some(Direct(YMM6)), operand3: Some(IndirectDisplaced(RDX, 966615856, Some(OperandSize::Qword), None)), operand4: None, lock: 
false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K2), broadcast: Some(BroadcastMode::Broadcast1To4) }, &[98, 113, 205, 186, 94, 146, 48, 99, 157, 57], OperandSize::Qword) } #[test] fn vdivpd_21() { run_test(&Instruction { mnemonic: Mnemonic::VDIVPD, operand1: Some(Direct(ZMM3)), operand2: Some(Direct(ZMM5)), operand3: Some(Direct(ZMM0)), operand4: None, lock: false, rounding_mode: Some(RoundingMode::Zero), merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K4), broadcast: None }, &[98, 241, 213, 252, 94, 216], OperandSize::Dword) } #[test] fn vdivpd_22() { run_test(&Instruction { mnemonic: Mnemonic::VDIVPD, operand1: Some(Direct(ZMM7)), operand2: Some(Direct(ZMM4)), operand3: Some(IndirectDisplaced(ECX, 82570418, Some(OperandSize::Zmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K3), broadcast: None }, &[98, 241, 221, 203, 94, 185, 178, 236, 235, 4], OperandSize::Dword) } #[test] fn vdivpd_23() { run_test(&Instruction { mnemonic: Mnemonic::VDIVPD, operand1: Some(Direct(ZMM6)), operand2: Some(Direct(ZMM4)), operand3: Some(IndirectScaledDisplaced(EBX, Eight, 421201916, Some(OperandSize::Qword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K2), broadcast: Some(BroadcastMode::Broadcast1To8) }, &[98, 241, 221, 218, 94, 52, 221, 252, 7, 27, 25], OperandSize::Dword) } #[test] fn vdivpd_24() { run_test(&Instruction { mnemonic: Mnemonic::VDIVPD, operand1: Some(Direct(ZMM18)), operand2: Some(Direct(ZMM21)), operand3: Some(Direct(ZMM12)), operand4: None, lock: false, rounding_mode: Some(RoundingMode::Up), merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K1), broadcast: None }, &[98, 193, 213, 209, 94, 212], OperandSize::Qword) } #[test] fn vdivpd_25() { run_test(&Instruction { mnemonic: Mnemonic::VDIVPD, operand1: Some(Direct(ZMM27)), 
operand2: Some(Direct(ZMM14)), operand3: Some(Indirect(RSI, Some(OperandSize::Zmmword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K5), broadcast: None }, &[98, 97, 141, 205, 94, 30], OperandSize::Qword) } #[test] fn vdivpd_26() { run_test(&Instruction { mnemonic: Mnemonic::VDIVPD, operand1: Some(Direct(ZMM19)), operand2: Some(Direct(ZMM19)), operand3: Some(Indirect(RAX, Some(OperandSize::Qword), None)), operand4: None, lock: false, rounding_mode: None, merge_mode: Some(MergeMode::Zero), sae: false, mask: Some(MaskReg::K1), broadcast: Some(BroadcastMode::Broadcast1To8) }, &[98, 225, 229, 209, 94, 24], OperandSize::Qword) }<|fim▁end|>
<|file_name|>ml_tests.py<|end_file_name|><|fim▁begin|>from sklearn import preprocessing import numpy as np X = np.array([[ 1., -1., 2.], [ 2., 0., 0.], [ 2., 0., 0.],<|fim▁hole|> X_scaled = preprocessing.scale(X) print X_scaled<|fim▁end|>
[ 0., 1., -1.]]) print X
<|file_name|>S11.13.2_A7.6_T2.js<|end_file_name|><|fim▁begin|>// Copyright (C) 2015 André Bargull. All rights reserved. // This code is governed by the BSD license found in the LICENSE file. /*--- info: Compound Assignment Operator evaluates its operands from left to right. description: > The left-hand side expression is evaluated before the right-hand side. Left-hand side expression is MemberExpression: base[prop]. base is the undefined value. Check operator is "x <<= y". ---*/ function DummyError() { } assert.throws(DummyError, function() { var base = undefined; var prop = function() { throw new DummyError(); }; var expr = function() { $ERROR("right-hand side expression evaluated"); }; base[prop()] <<= expr(); });<|fim▁hole|> assert.throws(TypeError, function() { var base = undefined; var prop = { toString: function() { $ERROR("property key evaluated"); } }; var expr = function() { $ERROR("right-hand side expression evaluated"); }; base[prop] <<= expr(); });<|fim▁end|>
<|file_name|>EditDinnerPresenter.java<|end_file_name|><|fim▁begin|>package biz.golek.whattodofordinner.business.contract.presenters; import biz.golek.whattodofordinner.business.contract.entities.Dinner; /** * Created by Bartosz Gołek on 2016-02-10.<|fim▁hole|>public interface EditDinnerPresenter { void Show(Dinner dinner); }<|fim▁end|>
*/
<|file_name|>doc.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2016 ~ 2018, Alex Stocks. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package server /* // consumer: rpc client -> rpc stream -> rpc codec -> transport + codec // provider: rpc server -> rpc stream -> rpc codec -> transport + codec func (s *server) handlePkg(servo interface{}, sock transport.Socket) { sock.Recv(&pkg) // pkg = transport.Package // func (r *rpcStream) Recv(pkg interface{}) error { // r.codec.ReadRequestHeader(&req, false) // // func (c *rpcCodec) ReadRequestHeader(r *request, first bool) error // // c.socket.Recv(&tm) // tm(transport.Package) // // // func (h *httpTransportSocket) Recv(m *Message) error { // 读取全部reqeust,并赋值给m(transport.Package) // // // http.ReadRequest(h.buff) // // // ioutil.ReadAll(r.Body) // // // m.Target = m.Header["Path"] // // // } // // // // err := c.codec.ReadHeader(&m, codec.Request) // // // func (j *jsonCodec) ReadHeader(m *codec.Message, mt codec.MessageType) // // // case codec.Request: // // // return j.s.ReadHeader(m) // // // // func (c *serverCodec) ReadHeader(m *codec.Message) error { // serverCodec, github.com/AlexStocks/dubbogo/codec // // // // c.dec.Decode(&raw) // // // // json.Unmarshal(raw, &c.req) // 注意此处,c.req存储了请求的body // // // // m.Id = c.seq // // // // m.Method = c.req.Method // // // // m.Target = m.Header["Path"] // // // // } // // // } // // r.Service = m.Target // // r.Method = m.Method // // r.Seq = m.Id // // return err // // } 
// (c *rpcCodec) ReadRequestHeader // r.codec.ReadRequestBody(pkg) // } codecFunc, err = s.newCodec(contentType) // dubbogo.codec codec = newRPCCodec(&pkg, sock, codecFunc) rpc.serveRequest(ctx, codec, contentType) // func (server *server) serveRequest(ctx context.Context, codec serverCodec, ct string) error { // server.readRequest(codec) // // func (server *server) readRequest(codec serverCodec) { // // server.readRequestHeader(codec) // // // func (server *server) readRequestHeader(codec serverCodec) // // // err = codec.ReadRequestHeader(req, true) // 注意此时first为false,避免进行网络收发,只读取相关分析结果 // // // // func (c *rpcCodec) ReadRequestHeader(r *request, first bool) error { // // // // m := codec.Message{Header: c.req.Header} // // // // err := c.codec.ReadHeader(&m, codec.Request) // // // // r.Service = m.Target // // // // r.Method = m.Method // // // // r.Seq = m.Id // // // // return err // // // // } // // // service = server.serviceMap[req.Service] // 根据Service // // // mtype = service.method[req.Method] // 获取method, 供下面的call调用 // // // }<|fim▁hole|> // // // // json.Unmarshal(*c.req.Params, x) // decode request body, c.req value line 19 // // // // } // // // } // // } // service.call() // } } */<|fim▁end|>
// // codec.ReadRequestBody(argv.Interface()) // rpcCodec.ReadRequestBody // // // func (c *rpcCodec) ReadRequestBody(b interface{}) error { // // // return c.codec.ReadBody(b) // // // // func (c *serverCodec) ReadBody(x interface{}) error {
<|file_name|>instr_stos.rs<|end_file_name|><|fim▁begin|>use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode}; use ::RegType::*; use ::instruction_def::*; use ::Operand::*; use ::Reg::*; use ::RegScale::*; use ::test::run_test; #[test] fn stos_1() { run_test(&Instruction { mnemonic: Mnemonic::STOS, operand1: Some(IndirectScaledIndexedDisplaced(BX, DI, One, 203, Some(OperandSize::Byte), None)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[170], OperandSize::Word) }<|fim▁hole|>fn stos_2() { run_test(&Instruction { mnemonic: Mnemonic::STOS, operand1: Some(IndirectScaledDisplaced(EBX, Eight, 1152119332, Some(OperandSize::Byte), None)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[170], OperandSize::Dword) } #[test] fn stos_3() { run_test(&Instruction { mnemonic: Mnemonic::STOS, operand1: Some(IndirectScaledDisplaced(RDX, Eight, 358669770, Some(OperandSize::Byte), None)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[170], OperandSize::Qword) } #[test] fn stos_4() { run_test(&Instruction { mnemonic: Mnemonic::STOS, operand1: Some(IndirectScaledIndexed(BX, DI, One, Some(OperandSize::Word), None)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[171], OperandSize::Word) } #[test] fn stos_5() { run_test(&Instruction { mnemonic: Mnemonic::STOS, operand1: Some(IndirectScaledIndexedDisplaced(EDI, ECX, Eight, 2045177442, Some(OperandSize::Word), None)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 171], OperandSize::Dword) } #[test] fn 
stos_6() { run_test(&Instruction { mnemonic: Mnemonic::STOS, operand1: Some(IndirectDisplaced(RBX, 664731441, Some(OperandSize::Word), None)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 171], OperandSize::Qword) } #[test] fn stos_7() { run_test(&Instruction { mnemonic: Mnemonic::STOS, operand1: Some(IndirectDisplaced(DI, 18058, Some(OperandSize::Dword), None)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[102, 171], OperandSize::Word) } #[test] fn stos_8() { run_test(&Instruction { mnemonic: Mnemonic::STOS, operand1: Some(IndirectDisplaced(ECX, 128031086, Some(OperandSize::Dword), None)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[171], OperandSize::Dword) } #[test] fn stos_9() { run_test(&Instruction { mnemonic: Mnemonic::STOS, operand1: Some(IndirectScaledIndexed(RDI, RDX, Two, Some(OperandSize::Dword), None)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[171], OperandSize::Qword) } #[test] fn stos_10() { run_test(&Instruction { mnemonic: Mnemonic::STOS, operand1: Some(Indirect(RDI, Some(OperandSize::Qword), None)), operand2: None, operand3: None, operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[72, 171], OperandSize::Qword) }<|fim▁end|>
#[test]
<|file_name|>config.js<|end_file_name|><|fim▁begin|>'use strict'; module.exports = {<|fim▁hole|>};<|fim▁end|>
env: 'test'
<|file_name|>test_cli.py<|end_file_name|><|fim▁begin|>from pathlib import PosixPath import pytest from rpmlint.cli import process_lint_args from rpmlint.config import Config from rpmlint.lint import Lint @pytest.mark.parametrize('test_arguments', [['-c', 'rpmlint/configs/thisdoesntexist.toml']]) def test_parsing_non_existing_config_file(test_arguments): with pytest.raises(SystemExit) as exc: process_lint_args(test_arguments) assert exc.value.code == 2 @pytest.mark.parametrize('test_arguments', [['-c', 'rpmlint/configdefaults.toml']]) def test_parsing_config_file(test_arguments): parsed = process_lint_args(test_arguments) assert len(parsed['config']) == 1 assert parsed['config'][0] == PosixPath('rpmlint/configdefaults.toml') @pytest.mark.parametrize('test_arguments', [['-c', 'configs/openSUSE']]) def test_parsing_opensuse_conf(test_arguments): parsed = process_lint_args(test_arguments) assert len(parsed['config']) == 7 assert PosixPath('configs/openSUSE/opensuse.toml') in parsed['config'] assert PosixPath('configs/openSUSE/licenses.toml') in parsed['config'] assert PosixPath('configs/openSUSE/pie-executables.toml') in parsed['config'] defaultcfg = Config() lint = Lint(parsed) default_checks = defaultcfg.configuration['Checks'] checks = lint.config.configuration['Checks'] # Verify that all original Checks are enabled and some new are added for check in default_checks: assert check in checks assert len(checks) > len(default_checks) <|fim▁hole|> checks = set(lint.output.error_details.keys()) checks |= set(defaultcfg.configuration['Descriptions'].keys()) score_keys = lint.config.configuration['Scoring'].keys() for score_key in score_keys: if score_key.startswith('percent-in-'): continue assert score_key in checks<|fim▁end|>
# Verify that all scoring keys are a known checks
<|file_name|>main_test.go<|end_file_name|><|fim▁begin|>package main import "testing" func Test_isSameStack(t *testing.T) { type args struct { archiveStackID string currentStackID string } tests := []struct { name string args args want bool }{ { "Going from empty to iOS", args{archiveStackID: "", currentStackID: "osx-xcode-12.3.x"}, false, }, { "Going from iOS to empty", args{archiveStackID: "osx-xcode-12.3.x", currentStackID: ""}, false, }, { "Going from Gen2 to Gen1", args{archiveStackID: "osx-xcode-12.3.x-gen2-mmg4-12c-60gb-300gb-atl01-ded001", currentStackID: "osx-xcode-12.3.x"}, true, }, { "Going from Gen2 to Gen2 same machine", args{archiveStackID: "osx-xcode-12.3.x-gen2-mmg4-12c-60gb-300gb-atl01-ded001", currentStackID: "osx-xcode-12.3.x-gen2-mmg4-12c-60gb-300gb-atl01-ded001"}, true, }, { "Going from Gen2 to Gen2 different stack", args{archiveStackID: "osx-xcode-12.3.x-gen2-mmg4-12c-60gb-300gb-atl01-ded001", currentStackID: "osx-xcode-12.4.x-gen2-mmg4-12c-60gb-300gb-atl01-ded001"}, false, }, { "Going from Gen2 to Gen2 different machine", args{archiveStackID: "osx-xcode-12.3.x-gen2-mmg4-4c-20gb-300gb-atl01-ded001", currentStackID: "osx-xcode-12.3.x-gen2-mmg4-12c-60gb-300gb-atl01-ded001"}, true, }, { "Going from Gen1 to Gen2", args{archiveStackID: "osx-xcode-12.3.x", currentStackID: "osx-xcode-12.3.x-gen2-mmg4-12c-60gb-300gb-atl01-ded001"}, true, }, { "Going from Gen1 to Gen2 different stack", args{archiveStackID: "osx-xcode-12.4.x", currentStackID: "osx-xcode-12.3.x-gen2-mmg4-12c-60gb-300gb-atl01-ded001"}, false, }, { "Going from Gen2 to Gen1 different stack", args{archiveStackID: "osx-xcode-12.4.x-gen2-mmg4-12c-60gb-300gb-atl01-ded001", currentStackID: "osx-xcode-12.3.x"}, false, }, { "Going from Ubuntu to iOS", args{archiveStackID: "linux-docker-android", currentStackID: "osx-xcode-12.3.x"}, false, }, { "Going from Ubuntu to Ubuntu", args{archiveStackID: "linux-docker-android", currentStackID: "linux-docker-android"}, true, }, { "Going from iOS to Ubuntu", 
args{archiveStackID: "osx-xcode-12.3.x", currentStackID: "linux-docker-android"}, false, }, { "Going from iOS to iOS same stack", args{archiveStackID: "osx-xcode-12.3.x", currentStackID: "osx-xcode-12.3.x"}, true,<|fim▁hole|> }, { "Going from iOS to iOS different stack", args{archiveStackID: "osx-xcode-12.3.x", currentStackID: "osx-xcode-12.4.x"}, false, }, { "Going from Ubuntu to Ubuntu LTS", args{archiveStackID: "linux-docker-android", currentStackID: "linux-docker-android-lts"}, false, }, { "Going from Ubuntu LTS to Ubuntu", args{archiveStackID: "linux-docker-android-lts", currentStackID: "linux-docker-android"}, false, }, { "Going from Ubuntu to Gen2 iOS", args{archiveStackID: "linux-docker-android", currentStackID: "osx-xcode-12.3.x-gen2-mmg4-12c-60gb-300gb-atl01-ded001"}, false, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := isSameStack(tt.args.archiveStackID, tt.args.currentStackID); got != tt.want { t.Errorf("isSameStack() = %v, want %v", got, tt.want) } }) } }<|fim▁end|>
<|file_name|>wavpack.py<|end_file_name|><|fim▁begin|># A WavPack reader/tagger # # Copyright 2006 Joe Wreschnig <[email protected]> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License version 2 as # published by the Free Software Foundation. # # $Id: wavpack.py 4153 2007-08-05 07:07:49Z piman $ """WavPack reading and writing. WavPack is a lossless format that uses APEv2 tags. Read http://www.wavpack.com/ for more information. """ __all__ = ["WavPack", "Open", "delete"] from mutagen.apev2 import APEv2File, error, delete from mutagen._util import cdata<|fim▁hole|> class WavPackHeaderError(error): pass RATES = [6000, 8000, 9600, 11025, 12000, 16000, 22050, 24000, 32000, 44100, 48000, 64000, 88200, 96000, 192000] class WavPackInfo(object): """WavPack stream information. Attributes: channels - number of audio channels (1 or 2) length - file length in seconds, as a float sample_rate - audio sampling rate in Hz version - WavPack stream version """ def __init__(self, fileobj): header = fileobj.read(28) if len(header) != 28 or not header.startswith("wvpk"): raise WavPackHeaderError("not a WavPack file") samples = cdata.uint_le(header[12:16]) flags = cdata.uint_le(header[24:28]) self.version = cdata.short_le(header[8:10]) self.channels = bool(flags & 4) or 2 self.sample_rate = RATES[(flags >> 23) & 0xF] self.length = float(samples) / self.sample_rate def pprint(self): return "WavPack, %.2f seconds, %d Hz" % (self.length, self.sample_rate) class WavPack(APEv2File): _Info = WavPackInfo _mimes = ["audio/x-wavpack"] def score(filename, fileobj, header): return header.startswith("wvpk") * 2 score = staticmethod(score)<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>import os import inspect import vcr def build_path(function):<|fim▁hole|> function.__name__ + '.yml') vcr = vcr.config.VCR( func_path_generator=build_path, cassette_library_dir='tests/cassettes', match_on=['uri', 'method'], decode_compressed_response=True, record_mode='once' )<|fim▁end|>
return os.path.join(os.path.dirname(inspect.getfile(function)), 'cassettes', function.__module__.split('.')[1],
<|file_name|>0030_auto_20170707_1727.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Generated by Django 1.11.2 on 2017-07-07 17:27 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('climate_data', '0029_auto_20170628_1527'), ] operations = [ migrations.AlterField( model_name='datatype', name='short_name', field=models.CharField(db_index=True, max_length=20, unique=True),<|fim▁hole|><|fim▁end|>
), ]
<|file_name|>drivertest_celery.py<|end_file_name|><|fim▁begin|>resource_id = "celery-1" _install_script = """ [ { "id": "celery-1", "key": {"name": "Celery", "version": "2.3"}, "config_port": { "password": "engage_129", "username": "engage_celery", "vhost": "engage_celery_vhost" }, "input_ports": { "broker": { "BROKER_HOST": "${hostname}", "BROKER_PORT": "5672", "broker": "rabbitmqctl" }, "host": { "cpu_arch": "x86_64", "genforma_home": "${deployment_home}", "hostname": "${hostname}", "log_directory": "${deployment_home}/log", "os_type": "mac-osx", "os_user_name": "${username}", "private_ip": null, "sudo_password": "GenForma/${username}/sudo_password" },<|fim▁hole|> "pip": { "pipbin": "${deployment_home}/python/bin/pip" }, "python": { "PYTHONPATH": "${deployment_home}/python/lib/python2.7/site-packages/", "home": "${deployment_home}/python/bin/python", "python_bin_dir": "${deployment_home}/python/bin", "type": "python", "version": "2.7" }, "setuptools": { "easy_install": "${deployment_home}/python/bin/easy_install" } }, "output_ports": { "celery": { "broker": "rabbitmqctl", "password": "engage_129", "username": "engage_celery", "vhost": "engage_celery_vhost" } }, "inside": { "id": "${hostname}", "key": {"name": "mac-osx", "version": "10.6"}, "port_mapping": { "host": "host" } }, "environment": [ { "id": "rabbitmq-1", "key": {"name": "rabbitmq", "version": "2.4"}, "port_mapping": { "broker": "broker" } }, { "id": "python-1", "key": {"name": "python", "version": "2.7"}, "port_mapping": { "python": "python" } }, { "id": "__GF_inst_2", "key": {"name": "pip", "version": "any"}, "port_mapping": { "pip": "pip" } }, { "id": "setuptools-1", "key": {"name": "setuptools", "version": "0.6"}, "port_mapping": { "setuptools": "setuptools" } } ] } ] """ def get_install_script(): return _install_script def get_password_data(): return {}<|fim▁end|>
<|file_name|>preview.rs<|end_file_name|><|fim▁begin|>// Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html use crate::{ card::CardQueue, config::SchedulerVersion, prelude::*, scheduler::states::{CardState, IntervalKind, PreviewState}, }; use super::{CardStateUpdater, RevlogEntryPartial}; impl CardStateUpdater { // fixme: check learning card moved into preview // restores correctly in both learn and day-learn case pub(super) fn apply_preview_state( &mut self, current: CardState, next: PreviewState, ) -> Result<Option<RevlogEntryPartial>> { if next.finished { self.card .remove_from_filtered_deck_restoring_queue(SchedulerVersion::V2); return Ok(None); } self.card.queue = CardQueue::PreviewRepeat; let interval = next.interval_kind(); match interval { IntervalKind::InSecs(secs) => { self.card.due = self.now.0 as i32 + secs as i32; } IntervalKind::InDays(_days) => { // unsupported } } Ok(RevlogEntryPartial::maybe_new( current, next.into(), 0.0, self.secs_until_rollover(), )) } } #[cfg(test)] mod test { use crate::collection::open_test_collection; use super::*; use crate::{ card::CardType, scheduler::{ answering::{CardAnswer, Rating}, states::{CardState, FilteredState}, }, timestamp::TimestampMillis, }; #[test] fn preview() -> Result<()> { let mut col = open_test_collection(); let mut c = Card { deck_id: DeckID(1), ctype: CardType::Learn, queue: CardQueue::DayLearn, remaining_steps: 2, due: 123, ..Default::default() }; col.add_card(&mut c)?; // pull the card into a preview deck let mut filtered_deck = Deck::new_filtered(); filtered_deck.filtered_mut()?.reschedule = false; col.add_or_update_deck(&mut filtered_deck)?; assert_eq!(col.rebuild_filtered_deck(filtered_deck.id)?, 1); let next = col.get_next_card_states(c.id)?; assert!(matches!( next.current, CardState::Filtered(FilteredState::Preview(_)) ));<|fim▁hole|> assert!(matches!( next.easy, CardState::Filtered(FilteredState::Preview(PreviewState { 
scheduled_secs: 0, finished: true })) )); // use Again on the preview col.answer_card(&CardAnswer { card_id: c.id, current_state: next.current, new_state: next.again, rating: Rating::Again, answered_at: TimestampMillis::now(), milliseconds_taken: 0, })?; c = col.storage.get_card(c.id)?.unwrap(); assert_eq!(c.queue, CardQueue::PreviewRepeat); // hard let next = col.get_next_card_states(c.id)?; col.answer_card(&CardAnswer { card_id: c.id, current_state: next.current, new_state: next.hard, rating: Rating::Hard, answered_at: TimestampMillis::now(), milliseconds_taken: 0, })?; c = col.storage.get_card(c.id)?.unwrap(); assert_eq!(c.queue, CardQueue::PreviewRepeat); // good let next = col.get_next_card_states(c.id)?; col.answer_card(&CardAnswer { card_id: c.id, current_state: next.current, new_state: next.good, rating: Rating::Good, answered_at: TimestampMillis::now(), milliseconds_taken: 0, })?; c = col.storage.get_card(c.id)?.unwrap(); assert_eq!(c.queue, CardQueue::PreviewRepeat); // and then it should return to its old state once easy selected let next = col.get_next_card_states(c.id)?; col.answer_card(&CardAnswer { card_id: c.id, current_state: next.current, new_state: next.easy, rating: Rating::Easy, answered_at: TimestampMillis::now(), milliseconds_taken: 0, })?; c = col.storage.get_card(c.id)?.unwrap(); assert_eq!(c.queue, CardQueue::DayLearn); assert_eq!(c.due, 123); Ok(()) } }<|fim▁end|>
// the exit state should have a 0 second interval, which will show up as (end)
<|file_name|>install.rs<|end_file_name|><|fim▁begin|>use command_prelude::*;<|fim▁hole|>use cargo::util::ToUrl; pub fn cli() -> App { subcommand("install") .about("Install a Rust binary") .arg(Arg::with_name("crate").empty_values(false).multiple(true)) .arg( opt("version", "Specify a version to install from crates.io") .alias("vers") .value_name("VERSION"), ) .arg(opt("git", "Git URL to install the specified crate from").value_name("URL")) .arg(opt("branch", "Branch to use when installing from git").value_name("BRANCH")) .arg(opt("tag", "Tag to use when installing from git").value_name("TAG")) .arg(opt("rev", "Specific commit to use when installing from git").value_name("SHA")) .arg(opt("path", "Filesystem path to local crate to install").value_name("PATH")) .arg(opt( "list", "list all installed packages and their versions", )) .arg_jobs() .arg(opt("force", "Force overwriting existing crates or binaries").short("f")) .arg_features() .arg(opt("debug", "Build in debug mode instead of release mode")) .arg_targets_bins_examples( "Install only the specified binary", "Install all binaries", "Install only the specified example", "Install all examples", ) .arg(opt("root", "Directory to install packages into").value_name("DIR")) .after_help( "\ This command manages Cargo's local set of installed binary crates. Only packages which have [[bin]] targets can be installed, and all binaries are installed into the installation root's `bin` folder. The installation root is determined, in order of precedence, by `--root`, `$CARGO_INSTALL_ROOT`, the `install.root` configuration key, and finally the home directory (which is either `$CARGO_HOME` if set or `$HOME/.cargo` by default). There are multiple sources from which a crate can be installed. The default location is crates.io but the `--git` and `--path` flags can change this source. 
If the source contains more than one package (such as crates.io or a git repository with multiple crates) the `<crate>` argument is required to indicate which crate should be installed. Crates from crates.io can optionally specify the version they wish to install via the `--vers` flags, and similarly packages from git repositories can optionally specify the branch, tag, or revision that should be installed. If a crate has multiple binaries, the `--bin` argument can selectively install only one of them, and if you'd rather install examples the `--example` argument can be used as well. By default cargo will refuse to overwrite existing binaries. The `--force` flag enables overwriting existing binaries. Thus you can reinstall a crate with `cargo install --force <crate>`. As a special convenience, omitting the <crate> specification entirely will install the crate in the current directory. That is, `install` is equivalent to the more explicit `install --path .`. If the source is crates.io or `--git` then by default the crate will be built in a temporary target directory. To avoid this, the target directory can be specified by setting the `CARGO_TARGET_DIR` environment variable to a relative path. 
In particular, this can be useful for caching build artifacts on continuous integration systems.", ) } pub fn exec(config: &mut Config, args: &ArgMatches) -> CliResult { let mut compile_opts = args.compile_options(config, CompileMode::Build)?; compile_opts.build_config.release = !args.is_present("debug"); let krates = args.values_of("crate") .unwrap_or_default() .collect::<Vec<_>>(); let mut from_cwd = false; let source = if let Some(url) = args.value_of("git") { let url = url.to_url()?; let gitref = if let Some(branch) = args.value_of("branch") { GitReference::Branch(branch.to_string()) } else if let Some(tag) = args.value_of("tag") { GitReference::Tag(tag.to_string()) } else if let Some(rev) = args.value_of("rev") { GitReference::Rev(rev.to_string()) } else { GitReference::Branch("master".to_string()) }; SourceId::for_git(&url, gitref)? } else if let Some(path) = args.value_of_path("path", config) { SourceId::for_path(&path)? } else if krates.is_empty() { from_cwd = true; SourceId::for_path(config.cwd())? } else { SourceId::crates_io(config)? }; let version = args.value_of("version"); let root = args.value_of("root"); if args.is_present("list") { ops::install_list(root, config)?; } else { ops::install( root, krates, &source, from_cwd, version, &compile_opts, args.is_present("force"), )?; } Ok(()) }<|fim▁end|>
use cargo::core::{GitReference, SourceId}; use cargo::ops;
<|file_name|>index.ts<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
export * from './types' export * from './takeWhile'
<|file_name|>toctree.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ sphinx.environment.managers.toctree ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Toctree manager for sphinx.environment. :copyright: Copyright 2007-2016 by the Sphinx team, see AUTHORS. :license: BSD, see LICENSE for details. """ from six import iteritems from docutils import nodes from sphinx import addnodes from sphinx.util import url_re from sphinx.util.nodes import clean_astext, process_only_nodes from sphinx.transforms import SphinxContentsFilter from sphinx.environment.managers import EnvironmentManager class Toctree(EnvironmentManager): name = 'toctree' def __init__(self, env): super(Toctree, self).__init__(env) self.tocs = env.tocs self.toc_num_entries = env.toc_num_entries self.toc_secnumbers = env.toc_secnumbers self.toc_fignumbers = env.toc_fignumbers self.toctree_includes = env.toctree_includes self.files_to_rebuild = env.files_to_rebuild self.glob_toctrees = env.glob_toctrees self.numbered_toctrees = env.numbered_toctrees def clear_doc(self, docname): self.tocs.pop(docname, None) self.toc_secnumbers.pop(docname, None) self.toc_fignumbers.pop(docname, None) self.toc_num_entries.pop(docname, None) self.toctree_includes.pop(docname, None) self.glob_toctrees.discard(docname) self.numbered_toctrees.discard(docname) for subfn, fnset in list(self.files_to_rebuild.items()): fnset.discard(docname) if not fnset: del self.files_to_rebuild[subfn] def merge_other(self, docnames, other): for docname in docnames: self.tocs[docname] = other.tocs[docname] self.toc_num_entries[docname] = other.toc_num_entries[docname] if docname in other.toctree_includes: self.toctree_includes[docname] = other.toctree_includes[docname] if docname in other.glob_toctrees: self.glob_toctrees.add(docname) if docname in other.numbered_toctrees: self.numbered_toctrees.add(docname) for subfn, fnset in other.files_to_rebuild.items(): self.files_to_rebuild.setdefault(subfn, set()).update(fnset & docnames) def 
process_doc(self, docname, doctree): """Build a TOC from the doctree and store it in the inventory.""" numentries = [0] # nonlocal again... def traverse_in_section(node, cls): """Like traverse(), but stay within the same section.""" result = [] if isinstance(node, cls): result.append(node) for child in node.children: if isinstance(child, nodes.section): continue result.extend(traverse_in_section(child, cls)) return result def build_toc(node, depth=1): entries = [] for sectionnode in node: # find all toctree nodes in this section and add them # to the toc (just copying the toctree node which is then # resolved in self.get_and_resolve_doctree) if isinstance(sectionnode, addnodes.only): onlynode = addnodes.only(expr=sectionnode['expr']) blist = build_toc(sectionnode, depth) if blist: onlynode += blist.children entries.append(onlynode) continue if not isinstance(sectionnode, nodes.section): for toctreenode in traverse_in_section(sectionnode, addnodes.toctree): item = toctreenode.copy() entries.append(item) # important: do the inventory stuff self.note_toctree(docname, toctreenode) continue title = sectionnode[0] # copy the contents of the section title, but without references # and unnecessary stuff visitor = SphinxContentsFilter(doctree) title.walkabout(visitor) nodetext = visitor.get_entry_text() if not numentries[0]: # for the very first toc entry, don't add an anchor # as it is the file's title anyway anchorname = '' else: anchorname = '#' + sectionnode['ids'][0] numentries[0] += 1 # make these nodes: # list_item -> compact_paragraph -> reference reference = nodes.reference( '', '', internal=True, refuri=docname, anchorname=anchorname, *nodetext) para = addnodes.compact_paragraph('', '', reference) item = nodes.list_item('', para) sub_item = build_toc(sectionnode, depth + 1) item += sub_item entries.append(item) if entries: return nodes.bullet_list('', *entries) return [] toc = build_toc(doctree) if toc: self.tocs[docname] = toc else: self.tocs[docname] = 
nodes.bullet_list('') self.toc_num_entries[docname] = numentries[0] def note_toctree(self, docname, toctreenode): """Note a TOC tree directive in a document and gather information about file relations from it. """ if toctreenode['glob']: self.glob_toctrees.add(docname) if toctreenode.get('numbered'): self.numbered_toctrees.add(docname) includefiles = toctreenode['includefiles'] for includefile in includefiles: # note that if the included file is rebuilt, this one must be # too (since the TOC of the included file could have changed) self.files_to_rebuild.setdefault(includefile, set()).add(docname) self.toctree_includes.setdefault(docname, []).extend(includefiles) def get_toc_for(self, docname, builder): """Return a TOC nodetree -- for use on the same page only!""" tocdepth = self.env.metadata[docname].get('tocdepth', 0) try: toc = self.tocs[docname].deepcopy() self._toctree_prune(toc, 2, tocdepth) except KeyError: # the document does not exist anymore: return a dummy node that # renders to nothing return nodes.paragraph() process_only_nodes(toc, builder.tags, warn_node=self.env.warn_node) for node in toc.traverse(nodes.reference): node['refuri'] = node['anchorname'] or '#' return toc def get_toctree_for(self, docname, builder, collapse, **kwds): """Return the global TOC nodetree.""" doctree = self.env.get_doctree(self.env.config.master_doc) toctrees = [] if 'includehidden' not in kwds: kwds['includehidden'] = True if 'maxdepth' not in kwds: kwds['maxdepth'] = 0 kwds['collapse'] = collapse for toctreenode in doctree.traverse(addnodes.toctree): toctree = self.env.resolve_toctree(docname, builder, toctreenode, prune=True, **kwds) if toctree: toctrees.append(toctree) if not toctrees: return None result = toctrees[0] for toctree in toctrees[1:]: result.extend(toctree.children) return result def resolve_toctree(self, docname, builder, toctree, prune=True, maxdepth=0, titles_only=False, collapse=False, includehidden=False): """Resolve a *toctree* node into individual 
bullet lists with titles as items, returning None (if no containing titles are found) or a new node. If *prune* is True, the tree is pruned to *maxdepth*, or if that is 0, to the value of the *maxdepth* option on the *toctree* node. If *titles_only* is True, only toplevel document titles will be in the resulting tree. If *collapse* is True, all branches not containing docname will be collapsed. """ if toctree.get('hidden', False) and not includehidden: return None # For reading the following two helper function, it is useful to keep # in mind the node structure of a toctree (using HTML-like node names # for brevity): # # <ul> # <li> # <p><a></p> # <p><a></p> # ... # <ul> # ... # </ul> # </li> # </ul> # # The transformation is made in two passes in order to avoid # interactions between marking and pruning the tree (see bug #1046). toctree_ancestors = self.get_toctree_ancestors(docname) def _toctree_add_classes(node, depth): """Add 'toctree-l%d' and 'current' classes to the toctree.""" for subnode in node.children: if isinstance(subnode, (addnodes.compact_paragraph, nodes.list_item)): # for <p> and <li>, indicate the depth level and recurse subnode['classes'].append('toctree-l%d' % (depth-1)) _toctree_add_classes(subnode, depth) elif isinstance(subnode, nodes.bullet_list): # for <ul>, just recurse _toctree_add_classes(subnode, depth+1) elif isinstance(subnode, nodes.reference): # for <a>, identify which entries point to the current # document and therefore may not be collapsed if subnode['refuri'] == docname: if not subnode['anchorname']: # give the whole branch a 'current' class # (useful for styling it differently) branchnode = subnode while branchnode: branchnode['classes'].append('current') branchnode = branchnode.parent # mark the list_item as "on current page" if subnode.parent.parent.get('iscurrent'): # but only if it's not already done return while subnode: subnode['iscurrent'] = True subnode = subnode.parent def _entries_from_toctree(toctreenode, parents, 
separate=False, subtree=False): """Return TOC entries for a toctree node.""" refs = [(e[0], e[1]) for e in toctreenode['entries']] entries = [] for (title, ref) in refs: try: refdoc = None if url_re.match(ref): if title is None: title = ref reference = nodes.reference('', '', internal=False, refuri=ref, anchorname='', *[nodes.Text(title)]) para = addnodes.compact_paragraph('', '', reference) item = nodes.list_item('', para) toc = nodes.bullet_list('', item) elif ref == 'self': # 'self' refers to the document from which this # toctree originates ref = toctreenode['parent'] if not title: title = clean_astext(self.env.titles[ref]) reference = nodes.reference('', '', internal=True, refuri=ref, anchorname='', *[nodes.Text(title)]) para = addnodes.compact_paragraph('', '', reference) item = nodes.list_item('', para) # don't show subitems toc = nodes.bullet_list('', item) else: if ref in parents: self.env.warn(ref, 'circular toctree references ' 'detected, ignoring: %s <- %s' % (ref, ' <- '.join(parents))) continue refdoc = ref toc = self.tocs[ref].deepcopy() maxdepth = self.env.metadata[ref].get('tocdepth', 0) if ref not in toctree_ancestors or (prune and maxdepth > 0): self._toctree_prune(toc, 2, maxdepth, collapse) process_only_nodes(toc, builder.tags, warn_node=self.env.warn_node) if title and toc.children and len(toc.children) == 1: child = toc.children[0] for refnode in child.traverse(nodes.reference): if refnode['refuri'] == ref and \ not refnode['anchorname']: refnode.children = [nodes.Text(title)] if not toc.children: # empty toc means: no titles will show up in the toctree self.env.warn_node( 'toctree contains reference to document %r that ' 'doesn\'t have a title: no link will be generated' % ref, toctreenode) except KeyError: # this is raised if the included file does not exist self.env.warn_node( 'toctree contains reference to nonexisting document %r' % ref, toctreenode) else: # if titles_only is given, only keep the main title and # sub-toctrees if 
titles_only: # delete everything but the toplevel title(s) # and toctrees for toplevel in toc: # nodes with length 1 don't have any children anyway if len(toplevel) > 1: subtrees = toplevel.traverse(addnodes.toctree) if subtrees: toplevel[1][:] = subtrees else: toplevel.pop(1) # resolve all sub-toctrees for subtocnode in toc.traverse(addnodes.toctree): if not (subtocnode.get('hidden', False) and not includehidden): i = subtocnode.parent.index(subtocnode) + 1 for item in _entries_from_toctree( subtocnode, [refdoc] + parents, subtree=True): subtocnode.parent.insert(i, item) i += 1 subtocnode.parent.remove(subtocnode) if separate: entries.append(toc) else: entries.extend(toc.children) if not subtree and not separate: ret = nodes.bullet_list() ret += entries return [ret] return entries maxdepth = maxdepth or toctree.get('maxdepth', -1) if not titles_only and toctree.get('titlesonly', False): titles_only = True if not includehidden and toctree.get('includehidden', False): includehidden = True # NOTE: previously, this was separate=True, but that leads to artificial # separation when two or more toctree entries form a logical unit, so # separating mode is no longer used -- it's kept here for history's sake tocentries = _entries_from_toctree(toctree, [], separate=False) if not tocentries: return None newnode = addnodes.compact_paragraph('', '') caption = toctree.attributes.get('caption') if caption: caption_node = nodes.caption(caption, '', *[nodes.Text(caption)]) caption_node.line = toctree.line caption_node.source = toctree.source caption_node.rawsource = toctree['rawcaption'] if hasattr(toctree, 'uid'): # move uid to caption_node to translate it caption_node.uid = toctree.uid del toctree.uid newnode += caption_node newnode.extend(tocentries) newnode['toctree'] = True # prune the tree to maxdepth, also set toc depth and current classes _toctree_add_classes(newnode, 1) self._toctree_prune(newnode, 1, prune and maxdepth or 0, collapse) if len(newnode[-1]) == 0: # No titles 
found return None # set the target paths in the toctrees (they are not known at TOC # generation time) for refnode in newnode.traverse(nodes.reference): if not url_re.match(refnode['refuri']): refnode['refuri'] = builder.get_relative_uri( docname, refnode['refuri']) + refnode['anchorname'] return newnode def get_toctree_ancestors(self, docname): parent = {} for p, children in iteritems(self.toctree_includes): for child in children: parent[child] = p ancestors = [] d = docname while d in parent and d not in ancestors: ancestors.append(d) d = parent[d] return ancestors def _toctree_prune(self, node, depth, maxdepth, collapse=False): """Utility: Cut a TOC at a specified depth.""" for subnode in node.children[:]: if isinstance(subnode, (addnodes.compact_paragraph, nodes.list_item)): # for <p> and <li>, just recurse self._toctree_prune(subnode, depth, maxdepth, collapse) elif isinstance(subnode, nodes.bullet_list): # for <ul>, determine if the depth is too large or if the # entry is to be collapsed if maxdepth > 0 and depth > maxdepth: subnode.parent.replace(subnode, []) else: # cull sub-entries whose parents aren't 'current' if (collapse and depth > 1 and 'iscurrent' not in subnode.parent): subnode.parent.remove(subnode) else: # recurse on visible children self._toctree_prune(subnode, depth+1, maxdepth, collapse) def assign_section_numbers(self): """Assign a section number to each heading under a numbered toctree.""" # a list of all docnames whose section numbers changed rewrite_needed = [] assigned = set() old_secnumbers = self.toc_secnumbers self.toc_secnumbers = self.env.toc_secnumbers = {} def _walk_toc(node, secnums, depth, titlenode=None): # titlenode is the title of the document, it will get assigned a # secnumber too, so that it shows up in next/prev/parent rellinks for subnode in node.children: if isinstance(subnode, nodes.bullet_list): numstack.append(0) _walk_toc(subnode, secnums, depth-1, titlenode) numstack.pop() titlenode = None elif isinstance(subnode, 
nodes.list_item): _walk_toc(subnode, secnums, depth, titlenode) titlenode = None elif isinstance(subnode, addnodes.only): # at this stage we don't know yet which sections are going # to be included; just include all of them, even if it leads # to gaps in the numbering _walk_toc(subnode, secnums, depth, titlenode) titlenode = None elif isinstance(subnode, addnodes.compact_paragraph): numstack[-1] += 1 if depth > 0: number = tuple(numstack) else: number = None secnums[subnode[0]['anchorname']] = \ subnode[0]['secnumber'] = number if titlenode: titlenode['secnumber'] = number titlenode = None elif isinstance(subnode, addnodes.toctree): _walk_toctree(subnode, depth) def _walk_toctree(toctreenode, depth): if depth == 0: return for (title, ref) in toctreenode['entries']: if url_re.match(ref) or ref == 'self' or ref in assigned: # don't mess with those continue if ref in self.tocs: secnums = self.toc_secnumbers[ref] = {} assigned.add(ref) _walk_toc(self.tocs[ref], secnums, depth, self.env.titles.get(ref)) if secnums != old_secnumbers.get(ref): rewrite_needed.append(ref) for docname in self.numbered_toctrees: assigned.add(docname) doctree = self.env.get_doctree(docname) for toctreenode in doctree.traverse(addnodes.toctree): depth = toctreenode.get('numbered', 0) if depth: # every numbered toctree gets new numbering numstack = [0] _walk_toctree(toctreenode, depth) return rewrite_needed def assign_figure_numbers(self): """Assign a figure number to each figure under a numbered toctree.""" rewrite_needed = [] assigned = set() old_fignumbers = self.toc_fignumbers self.toc_fignumbers = self.env.toc_fignumbers = {} fignum_counter = {} def get_section_number(docname, section): anchorname = '#' + section['ids'][0] secnumbers = self.toc_secnumbers.get(docname, {}) if anchorname in secnumbers: secnum = secnumbers.get(anchorname) else: secnum = secnumbers.get('') return secnum or tuple() def get_next_fignumber(figtype, secnum): counter = fignum_counter.setdefault(figtype, {}) secnum = 
secnum[:self.env.config.numfig_secnum_depth] counter[secnum] = counter.get(secnum, 0) + 1 return secnum + (counter[secnum],) def register_fignumber(docname, secnum, figtype, fignode): self.toc_fignumbers.setdefault(docname, {}) fignumbers = self.toc_fignumbers[docname].setdefault(figtype, {}) figure_id = fignode['ids'][0] fignumbers[figure_id] = get_next_fignumber(figtype, secnum) def _walk_doctree(docname, doctree, secnum): for subnode in doctree.children: if isinstance(subnode, nodes.section): next_secnum = get_section_number(docname, subnode) if next_secnum: _walk_doctree(docname, subnode, next_secnum) else: _walk_doctree(docname, subnode, secnum) continue elif isinstance(subnode, addnodes.toctree): for title, subdocname in subnode['entries']: if url_re.match(subdocname) or subdocname == 'self': # don't mess with those continue _walk_doc(subdocname, secnum)<|fim▁hole|> continue figtype = self.env.domains['std'].get_figtype(subnode) if figtype and subnode['ids']: register_fignumber(docname, secnum, figtype, subnode) _walk_doctree(docname, subnode, secnum) def _walk_doc(docname, secnum): if docname not in assigned: assigned.add(docname) doctree = self.env.get_doctree(docname) _walk_doctree(docname, doctree, secnum) if self.env.config.numfig: _walk_doc(self.env.config.master_doc, tuple()) for docname, fignums in iteritems(self.toc_fignumbers): if fignums != old_fignumbers.get(docname): rewrite_needed.append(docname) return rewrite_needed<|fim▁end|>
<|file_name|>SoundBySpeciePaginationController.java<|end_file_name|><|fim▁begin|>package net.indrix.arara.servlets.pagination; import java.sql.SQLException; import java.util.List; import net.indrix.arara.dao.DatabaseDownException; public class SoundBySpeciePaginationController extends <|fim▁hole|> * Creates a new PaginationController object, with the given number of elements per page, and * with the flag identification * * @param soundsPerPage The amount of sounds per page * @param identification The flag for identification */ public SoundBySpeciePaginationController(int soundsPerPage, boolean identification) { super(soundsPerPage, identification); } @Override protected List retrieveAllData() throws DatabaseDownException, SQLException { logger.debug("SoundBySpeciePaginationController.retrieveAllData : retrieving all sounds..."); List listOfSounds = null; if (id != -1){ listOfSounds = model.retrieveIDsForSpecie(getId()); } else { listOfSounds = model.retrieveIDsForSpecieName(getText()); } logger.debug("SoundBySpeciePaginationController.retrieveAllData : " + listOfSounds.size() + " sounds retrieved..."); return listOfSounds; } }<|fim▁end|>
SoundPaginationController { /**
<|file_name|>issue-2735-3.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at<|fim▁hole|>// http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #[feature(managed_boxes)]; use std::cell::Cell; // This test should behave exactly like issue-2735-2 struct defer { b: @Cell<bool>, } #[unsafe_destructor] impl Drop for defer { fn drop(&mut self) { self.b.set(true); } } fn defer(b: @Cell<bool>) -> defer { defer { b: b } } pub fn main() { let dtor_ran = @Cell::new(false); defer(dtor_ran); assert!(dtor_ran.get()); }<|fim▁end|>
<|file_name|>EG_TextAutofit.go<|end_file_name|><|fim▁begin|>// Copyright 2017 Baliance. All rights reserved. // // DO NOT EDIT: generated by gooxml ECMA-376 generator // // Use of this source code is governed by the terms of the Affero GNU General // Public License version 3.0 as published by the Free Software Foundation and // appearing in the file LICENSE included in the packaging of this file. A // commercial license can be purchased by contacting [email protected]. package dml import ( "encoding/xml" "log" ) type EG_TextAutofit struct { NoAutofit *CT_TextNoAutofit NormAutofit *CT_TextNormalAutofit SpAutoFit *CT_TextShapeAutofit } func NewEG_TextAutofit() *EG_TextAutofit { ret := &EG_TextAutofit{} return ret } func (m *EG_TextAutofit) MarshalXML(e *xml.Encoder, start xml.StartElement) error { if m.NoAutofit != nil { senoAutofit := xml.StartElement{Name: xml.Name{Local: "a:noAutofit"}} e.EncodeElement(m.NoAutofit, senoAutofit) } if m.NormAutofit != nil { senormAutofit := xml.StartElement{Name: xml.Name{Local: "a:normAutofit"}} e.EncodeElement(m.NormAutofit, senormAutofit) } if m.SpAutoFit != nil { sespAutoFit := xml.StartElement{Name: xml.Name{Local: "a:spAutoFit"}} e.EncodeElement(m.SpAutoFit, sespAutoFit) } return nil }<|fim▁hole|>func (m *EG_TextAutofit) UnmarshalXML(d *xml.Decoder, start xml.StartElement) error { // initialize to default lEG_TextAutofit: for { tok, err := d.Token() if err != nil { return err } switch el := tok.(type) { case xml.StartElement: switch el.Name { case xml.Name{Space: "http://schemas.openxmlformats.org/drawingml/2006/main", Local: "noAutofit"}: m.NoAutofit = NewCT_TextNoAutofit() if err := d.DecodeElement(m.NoAutofit, &el); err != nil { return err } case xml.Name{Space: "http://schemas.openxmlformats.org/drawingml/2006/main", Local: "normAutofit"}: m.NormAutofit = NewCT_TextNormalAutofit() if err := d.DecodeElement(m.NormAutofit, &el); err != nil { return err } case xml.Name{Space: 
"http://schemas.openxmlformats.org/drawingml/2006/main", Local: "spAutoFit"}: m.SpAutoFit = NewCT_TextShapeAutofit() if err := d.DecodeElement(m.SpAutoFit, &el); err != nil { return err } default: log.Printf("skipping unsupported element on EG_TextAutofit %v", el.Name) if err := d.Skip(); err != nil { return err } } case xml.EndElement: break lEG_TextAutofit case xml.CharData: } } return nil } // Validate validates the EG_TextAutofit and its children func (m *EG_TextAutofit) Validate() error { return m.ValidateWithPath("EG_TextAutofit") } // ValidateWithPath validates the EG_TextAutofit and its children, prefixing error messages with path func (m *EG_TextAutofit) ValidateWithPath(path string) error { if m.NoAutofit != nil { if err := m.NoAutofit.ValidateWithPath(path + "/NoAutofit"); err != nil { return err } } if m.NormAutofit != nil { if err := m.NormAutofit.ValidateWithPath(path + "/NormAutofit"); err != nil { return err } } if m.SpAutoFit != nil { if err := m.SpAutoFit.ValidateWithPath(path + "/SpAutoFit"); err != nil { return err } } return nil }<|fim▁end|>
<|file_name|>issue-33187.rs<|end_file_name|><|fim▁begin|>// run-pass // ignore-compare-mode-chalk struct Foo<A: Repr>(<A as Repr>::Data); impl<A> Copy for Foo<A> where <A as Repr>::Data: Copy { } impl<A> Clone for Foo<A> where <A as Repr>::Data: Clone { fn clone(&self) -> Self { Foo(self.0.clone()) } } <|fim▁hole|>impl<A> Repr for A { type Data = u32; } fn main() { }<|fim▁end|>
trait Repr { type Data; }
<|file_name|>package.py<|end_file_name|><|fim▁begin|># Copyright 2013-2021 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) import os from spack import * class Vasp(MakefilePackage): """ The Vienna Ab initio Simulation Package (VASP) is a computer program for atomic scale materials modelling, e.g. electronic structure calculations and quantum-mechanical molecular dynamics, from first principles. """ homepage = "https://vasp.at" url = "file://{0}/vasp.5.4.4.pl2.tgz".format(os.getcwd()) manual_download = True version('6.2.0', sha256='49e7ba351bd634bc5f5f67a8ef1e38e64e772857a1c02f602828898a84197e25') version('6.1.1', sha256='e37a4dfad09d3ad0410833bcd55af6b599179a085299026992c2d8e319bf6927') version('5.4.4.pl2', sha256='98f75fd75399a23d76d060a6155f4416b340a1704f256a00146f89024035bc8e') version('5.4.4', sha256='5bd2449462386f01e575f9adf629c08cb03a13142806ffb6a71309ca4431cfb3') resource(name='vaspsol', git='https://github.com/henniggroup/VASPsol.git', tag='V1.0', when='+vaspsol') variant('openmp', default=False, description='Enable openmp build') variant('scalapack', default=False, description='Enables build with SCALAPACK') variant('cuda', default=False, description='Enables running on Nvidia GPUs') variant('vaspsol', default=False, description='Enable VASPsol implicit solvation model\n' 'https://github.com/henniggroup/VASPsol') depends_on('rsync', type='build') depends_on('blas') depends_on('lapack') depends_on('fftw-api') depends_on('mpi', type=('build', 'link', 'run')) depends_on('scalapack', when='+scalapack') depends_on('cuda', when='+cuda')<|fim▁hole|> conflicts('+openmp', when='@:6.1.1', msg='openmp support started from 6.2') parallel = False def edit(self, spec, prefix): if '%gcc' in spec: if '+openmp' in spec: make_include = join_path('arch', 'makefile.include.linux_gnu_omp') else: make_include = join_path('arch', 
'makefile.include.linux_gnu') elif '%nvhpc' in spec: make_include = join_path('arch', 'makefile.include.linux_pgi') filter_file('-pgc++libs', '-c++libs', make_include, string=True) filter_file('pgcc', spack_cc, make_include) filter_file('pgc++', spack_cxx, make_include, string=True) filter_file('pgfortran', spack_fc, make_include) filter_file('/opt/pgi/qd-2.3.17/install/include', spec['qd'].prefix.include, make_include) filter_file('/opt/pgi/qd-2.3.17/install/lib', spec['qd'].prefix.lib, make_include) elif '%aocc' in spec: if '+openmp' in spec: copy( join_path('arch', 'makefile.include.linux_gnu_omp'), join_path('arch', 'makefile.include.linux_aocc_omp') ) make_include = join_path('arch', 'makefile.include.linux_aocc_omp') else: copy( join_path('arch', 'makefile.include.linux_gnu'), join_path('arch', 'makefile.include.linux_aocc') ) make_include = join_path('arch', 'makefile.include.linux_aocc') filter_file( 'gcc', '{0} {1}'.format(spack_cc, '-Mfree'), make_include, string=True ) filter_file('g++', spack_cxx, make_include, string=True) filter_file('^CFLAGS_LIB[ ]{0,}=.*$', 'CFLAGS_LIB = -O3', make_include) filter_file('^FFLAGS_LIB[ ]{0,}=.*$', 'FFLAGS_LIB = -O2', make_include) filter_file('^OFLAG[ ]{0,}=.*$', 'OFLAG = -O3', make_include) filter_file('^FC[ ]{0,}=.*$', 'FC = {0}'.format(spec['mpi'].mpifc), make_include, string=True) filter_file('^FCL[ ]{0,}=.*$', 'FCL = {0}'.format(spec['mpi'].mpifc), make_include, string=True) else: if '+openmp' in spec: make_include = join_path('arch', 'makefile.include.linux_{0}_omp'. 
format(spec.compiler.name)) else: make_include = join_path('arch', 'makefile.include.linux_' + spec.compiler.name) os.rename(make_include, 'makefile.include') # This bunch of 'filter_file()' is to make these options settable # as environment variables filter_file('^CPP_OPTIONS[ ]{0,}=[ ]{0,}', 'CPP_OPTIONS ?= ', 'makefile.include') filter_file('^FFLAGS[ ]{0,}=[ ]{0,}', 'FFLAGS ?= ', 'makefile.include') filter_file('^LIBDIR[ ]{0,}=.*$', '', 'makefile.include') filter_file('^BLAS[ ]{0,}=.*$', 'BLAS ?=', 'makefile.include') filter_file('^LAPACK[ ]{0,}=.*$', 'LAPACK ?=', 'makefile.include') filter_file('^FFTW[ ]{0,}?=.*$', 'FFTW ?=', 'makefile.include') filter_file('^MPI_INC[ ]{0,}=.*$', 'MPI_INC ?=', 'makefile.include') filter_file('-DscaLAPACK.*$\n', '', 'makefile.include') filter_file('^SCALAPACK[ ]{0,}=.*$', 'SCALAPACK ?=', 'makefile.include') if '+cuda' in spec: filter_file('^OBJECTS_GPU[ ]{0,}=.*$', 'OBJECTS_GPU ?=', 'makefile.include') filter_file('^CPP_GPU[ ]{0,}=.*$', 'CPP_GPU ?=', 'makefile.include') filter_file('^CFLAGS[ ]{0,}=.*$', 'CFLAGS ?=', 'makefile.include') if '+vaspsol' in spec: copy('VASPsol/src/solvation.F', 'src/') def setup_build_environment(self, spack_env): spec = self.spec cpp_options = ['-DMPI -DMPI_BLOCK=8000', '-Duse_collective', '-DCACHE_SIZE=4000', '-Davoidalloc', '-Duse_bse_te', '-Dtbdyn', '-Duse_shmem'] if '%nvhpc' in self.spec: cpp_options.extend(['-DHOST=\\"LinuxPGI\\"', '-DPGI16', '-Dqd_emulate']) elif '%aocc' in self.spec: cpp_options.extend(['-DHOST=\\"LinuxGNU\\"', '-Dfock_dblbuf']) if '+openmp' in self.spec: cpp_options.extend(['-D_OPENMP']) else: cpp_options.append('-DHOST=\\"LinuxGNU\\"') if self.spec.satisfies('@6:'): cpp_options.append('-Dvasp6') cflags = ['-fPIC', '-DADD_'] fflags = [] if '%gcc' in spec or '%intel' in spec: fflags.append('-w') elif '%nvhpc' in spec: fflags.extend(['-Mnoupcase', '-Mbackslash', '-Mlarge_arrays']) elif '%aocc' in spec: fflags.extend(['-fno-fortran-main', '-Mbackslash', '-ffast-math']) 
spack_env.set('BLAS', spec['blas'].libs.ld_flags) spack_env.set('LAPACK', spec['lapack'].libs.ld_flags) spack_env.set('FFTW', spec['fftw-api'].prefix) spack_env.set('MPI_INC', spec['mpi'].prefix.include) if '%nvhpc' in spec: spack_env.set('QD', spec['qd'].prefix) if '+scalapack' in spec: cpp_options.append('-DscaLAPACK') spack_env.set('SCALAPACK', spec['scalapack'].libs.ld_flags) if '+cuda' in spec: cpp_gpu = ['-DCUDA_GPU', '-DRPROMU_CPROJ_OVERLAP', '-DCUFFT_MIN=28', '-DUSE_PINNED_MEMORY'] objects_gpu = ['fftmpiw.o', 'fftmpi_map.o', 'fft3dlib.o', 'fftw3d_gpu.o', 'fftmpiw_gpu.o'] cflags.extend(['-DGPUSHMEM=300', '-DHAVE_CUBLAS']) spack_env.set('CUDA_ROOT', spec['cuda'].prefix) spack_env.set('CPP_GPU', ' '.join(cpp_gpu)) spack_env.set('OBJECTS_GPU', ' '.join(objects_gpu)) if '+vaspsol' in spec: cpp_options.append('-Dsol_compat') if spec.satisfies('%gcc@10:'): fflags.append('-fallow-argument-mismatch') # Finally spack_env.set('CPP_OPTIONS', ' '.join(cpp_options)) spack_env.set('CFLAGS', ' '.join(cflags)) spack_env.set('FFLAGS', ' '.join(fflags)) def build(self, spec, prefix): if '+cuda' in self.spec: make('gpu', 'gpu_ncl') else: make('std', 'gam', 'ncl') def install(self, spec, prefix): install_tree('bin/', prefix.bin)<|fim▁end|>
depends_on('qd', when='%nvhpc') conflicts('%gcc@:8', msg='GFortran before 9.x does not support all features needed to build VASP') conflicts('+vaspsol', when='+cuda', msg='+vaspsol only available for CPU')
<|file_name|>mark.js<|end_file_name|><|fim▁begin|>/*!*************************************************** * mark.js v8.7.0 * https://github.com/julmot/mark.js * Copyright (c) 2014–2017, Julian Motz * Released under the MIT license https://git.io/vwTVl *****************************************************/ "use strict"; var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }(); var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) { return typeof obj; } : function (obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; }; function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } (function (factory, window, document) { if (typeof define === "function" && define.amd) { define([], function () { return factory(window, document); }); } else if ((typeof module === "undefined" ? 
"undefined" : _typeof(module)) === "object" && module.exports) { module.exports = factory(window, document); } else { factory(window, document); } })(function (window, document) { var Mark = function () { function Mark(ctx) { _classCallCheck(this, Mark); this.ctx = ctx; this.ie = false; var ua = window.navigator.userAgent;<|fim▁hole|> } _createClass(Mark, [{ key: "log", value: function log(msg) { var level = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : "debug"; var log = this.opt.log; if (!this.opt.debug) { return; } if ((typeof log === "undefined" ? "undefined" : _typeof(log)) === "object" && typeof log[level] === "function") { log[level]("mark.js: " + msg); } } }, { key: "escapeStr", value: function escapeStr(str) { return str.replace(/[\-\[\]\/\{\}\(\)\*\+\?\.\\\^\$\|]/g, "\\$&"); } }, { key: "createRegExp", value: function createRegExp(str) { str = this.escapeStr(str); if (Object.keys(this.opt.synonyms).length) { str = this.createSynonymsRegExp(str); } if (this.opt.ignoreJoiners) { str = this.setupIgnoreJoinersRegExp(str); } if (this.opt.diacritics) { str = this.createDiacriticsRegExp(str); } str = this.createMergedBlanksRegExp(str); if (this.opt.ignoreJoiners) { str = this.createIgnoreJoinersRegExp(str); } str = this.createAccuracyRegExp(str); return str; } }, { key: "createSynonymsRegExp", value: function createSynonymsRegExp(str) { var syn = this.opt.synonyms, sens = this.opt.caseSensitive ? 
"" : "i"; for (var index in syn) { if (syn.hasOwnProperty(index)) { var value = syn[index], k1 = this.escapeStr(index), k2 = this.escapeStr(value); str = str.replace(new RegExp("(" + k1 + "|" + k2 + ")", "gm" + sens), "(" + k1 + "|" + k2 + ")"); } } return str; } }, { key: "setupIgnoreJoinersRegExp", value: function setupIgnoreJoinersRegExp(str) { return str.replace(/[^(|)\\]/g, function (val, indx, original) { var nextChar = original.charAt(indx + 1); if (/[(|)\\]/.test(nextChar) || nextChar === "") { return val; } else { return val + "\0"; } }); } }, { key: "createIgnoreJoinersRegExp", value: function createIgnoreJoinersRegExp(str) { return str.split("\0").join("[\\u00ad|\\u200b|\\u200c|\\u200d]?"); } }, { key: "createDiacriticsRegExp", value: function createDiacriticsRegExp(str) { var sens = this.opt.caseSensitive ? "" : "i", dct = this.opt.caseSensitive ? ["aàáâãäåāąă", "AÀÁÂÃÄÅĀĄĂ", "cçćč", "CÇĆČ", "dđď", "DĐĎ", "eèéêëěēę", "EÈÉÊËĚĒĘ", "iìíîïī", "IÌÍÎÏĪ", "lł", "LŁ", "nñňń", "NÑŇŃ", "oòóôõöøō", "OÒÓÔÕÖØŌ", "rř", "RŘ", "sšśș", "SŠŚȘ", "tťț", "TŤȚ", "uùúûüůū", "UÙÚÛÜŮŪ", "yÿý", "YŸÝ", "zžżź", "ZŽŻŹ"] : ["aÀÁÂÃÄÅàáâãäåĀāąĄăĂ", "cÇçćĆčČ", "dđĐďĎ", "eÈÉÊËèéêëěĚĒēęĘ", "iÌÍÎÏìíîïĪī", "lłŁ", "nÑñňŇńŃ", "oÒÓÔÕÖØòóôõöøŌō", "rřŘ", "sŠšśŚșȘ", "tťŤțȚ", "uÙÚÛÜùúûüůŮŪū", "yŸÿýÝ", "zŽžżŻźŹ"]; var handled = []; str.split("").forEach(function (ch) { dct.every(function (dct) { if (dct.indexOf(ch) !== -1) { if (handled.indexOf(dct) > -1) { return false; } str = str.replace(new RegExp("[" + dct + "]", "gm" + sens), "[" + dct + "]"); handled.push(dct); } return true; }); }); return str; } }, { key: "createMergedBlanksRegExp", value: function createMergedBlanksRegExp(str) { return str.replace(/[\s]+/gmi, "[\\s]+"); } }, { key: "createAccuracyRegExp", value: function createAccuracyRegExp(str) { var _this = this; var acc = this.opt.accuracy, val = typeof acc === "string" ? acc : acc.value, ls = typeof acc === "string" ? 
[] : acc.limiters, lsJoin = ""; ls.forEach(function (limiter) { lsJoin += "|" + _this.escapeStr(limiter); }); switch (val) { case "partially": default: return "()(" + str + ")"; case "complementary": return "()([^\\s" + lsJoin + "]*" + str + "[^\\s" + lsJoin + "]*)"; case "exactly": return "(^|\\s" + lsJoin + ")(" + str + ")(?=$|\\s" + lsJoin + ")"; } } }, { key: "getSeparatedKeywords", value: function getSeparatedKeywords(sv) { var _this2 = this; var stack = []; sv.forEach(function (kw) { if (!_this2.opt.separateWordSearch) { if (kw.trim() && stack.indexOf(kw) === -1) { stack.push(kw); } } else { kw.split(" ").forEach(function (kwSplitted) { if (kwSplitted.trim() && stack.indexOf(kwSplitted) === -1) { stack.push(kwSplitted); } }); } }); return { "keywords": stack.sort(function (a, b) { return b.length - a.length; }), "length": stack.length }; } }, { key: "getTextNodes", value: function getTextNodes(cb) { var _this3 = this; var val = "", nodes = []; this.iterator.forEachNode(NodeFilter.SHOW_TEXT, function (node) { nodes.push({ start: val.length, end: (val += node.textContent).length, node: node }); }, function (node) { if (_this3.matchesExclude(node.parentNode)) { return NodeFilter.FILTER_REJECT; } else { return NodeFilter.FILTER_ACCEPT; } }, function () { cb({ value: val, nodes: nodes }); }); } }, { key: "matchesExclude", value: function matchesExclude(el) { return DOMIterator.matches(el, this.opt.exclude.concat(["script", "style", "title", "head", "html"])); } }, { key: "wrapRangeInTextNode", value: function wrapRangeInTextNode(node, start, end) { var hEl = !this.opt.element ? 
"mark" : this.opt.element, startNode = node.splitText(start), ret = startNode.splitText(end - start); var repl = document.createElement(hEl); repl.setAttribute("data-markjs", "true"); if (this.opt.className) { repl.setAttribute("class", this.opt.className); } repl.textContent = startNode.textContent; startNode.parentNode.replaceChild(repl, startNode); return ret; } }, { key: "wrapRangeInMappedTextNode", value: function wrapRangeInMappedTextNode(dict, start, end, filterCb, eachCb) { var _this4 = this; dict.nodes.every(function (n, i) { var sibl = dict.nodes[i + 1]; if (typeof sibl === "undefined" || sibl.start > start) { var _ret = function () { if (!filterCb(n.node)) { return { v: false }; } var s = start - n.start, e = (end > n.end ? n.end : end) - n.start, startStr = dict.value.substr(0, n.start), endStr = dict.value.substr(e + n.start); n.node = _this4.wrapRangeInTextNode(n.node, s, e); dict.value = startStr + endStr; dict.nodes.forEach(function (k, j) { if (j >= i) { if (dict.nodes[j].start > 0 && j !== i) { dict.nodes[j].start -= e; } dict.nodes[j].end -= e; } }); end -= e; eachCb(n.node.previousSibling, n.start); if (end > n.end) { start = n.end; } else { return { v: false }; } }(); if ((typeof _ret === "undefined" ? "undefined" : _typeof(_ret)) === "object") return _ret.v; } return true; }); } }, { key: "wrapMatches", value: function wrapMatches(regex, ignoreGroups, filterCb, eachCb, endCb) { var _this5 = this; var matchIdx = ignoreGroups === 0 ? 
0 : ignoreGroups + 1; this.getTextNodes(function (dict) { dict.nodes.forEach(function (node) { node = node.node; var match = void 0; while ((match = regex.exec(node.textContent)) !== null && match[matchIdx] !== "") { if (!filterCb(match[matchIdx], node)) { continue; } var pos = match.index; if (matchIdx !== 0) { for (var i = 1; i < matchIdx; i++) { pos += match[i].length; } } node = _this5.wrapRangeInTextNode(node, pos, pos + match[matchIdx].length); eachCb(node.previousSibling); regex.lastIndex = 0; } }); endCb(); }); } }, { key: "wrapMatchesAcrossElements", value: function wrapMatchesAcrossElements(regex, ignoreGroups, filterCb, eachCb, endCb) { var _this6 = this; var matchIdx = ignoreGroups === 0 ? 0 : ignoreGroups + 1; this.getTextNodes(function (dict) { var match = void 0; while ((match = regex.exec(dict.value)) !== null && match[matchIdx] !== "") { var start = match.index; if (matchIdx !== 0) { for (var i = 1; i < matchIdx; i++) { start += match[i].length; } } var end = start + match[matchIdx].length; _this6.wrapRangeInMappedTextNode(dict, start, end, function (node) { return filterCb(match[matchIdx], node); }, function (node, lastIndex) { regex.lastIndex = lastIndex; eachCb(node); }); } endCb(); }); } }, { key: "unwrapMatches", value: function unwrapMatches(node) { var parent = node.parentNode; var docFrag = document.createDocumentFragment(); while (node.firstChild) { docFrag.appendChild(node.removeChild(node.firstChild)); } parent.replaceChild(docFrag, node); if (!this.ie) { parent.normalize(); } else { this.normalizeTextNode(parent); } } }, { key: "normalizeTextNode", value: function normalizeTextNode(node) { if (!node) { return; } if (node.nodeType === 3) { while (node.nextSibling && node.nextSibling.nodeType === 3) { node.nodeValue += node.nextSibling.nodeValue; node.parentNode.removeChild(node.nextSibling); } } else { this.normalizeTextNode(node.firstChild); } this.normalizeTextNode(node.nextSibling); } }, { key: "markRegExp", value: function 
markRegExp(regexp, opt) { var _this7 = this; this.opt = opt; this.log("Searching with expression \"" + regexp + "\""); var totalMatches = 0, fn = "wrapMatches"; var eachCb = function eachCb(element) { totalMatches++; _this7.opt.each(element); }; if (this.opt.acrossElements) { fn = "wrapMatchesAcrossElements"; } this[fn](regexp, this.opt.ignoreGroups, function (match, node) { return _this7.opt.filter(node, match, totalMatches); }, eachCb, function () { if (totalMatches === 0) { _this7.opt.noMatch(regexp); } _this7.opt.done(totalMatches); }); } }, { key: "mark", value: function mark(sv, opt) { var _this8 = this; this.opt = opt; var totalMatches = 0, fn = "wrapMatches"; var _getSeparatedKeywords = this.getSeparatedKeywords(typeof sv === "string" ? [sv] : sv), kwArr = _getSeparatedKeywords.keywords, kwArrLen = _getSeparatedKeywords.length, sens = this.opt.caseSensitive ? "" : "i", handler = function handler(kw) { var regex = new RegExp(_this8.createRegExp(kw), "gm" + sens), matches = 0; _this8.log("Searching with expression \"" + regex + "\""); _this8[fn](regex, 1, function (term, node) { return _this8.opt.filter(node, kw, totalMatches, matches); }, function (element) { matches++; totalMatches++; _this8.opt.each(element); }, function () { if (matches === 0) { _this8.opt.noMatch(kw); } if (kwArr[kwArrLen - 1] === kw) { _this8.opt.done(totalMatches); } else { handler(kwArr[kwArr.indexOf(kw) + 1]); } }); }; if (this.opt.acrossElements) { fn = "wrapMatchesAcrossElements"; } if (kwArrLen === 0) { this.opt.done(totalMatches); } else { handler(kwArr[0]); } } }, { key: "unmark", value: function unmark(opt) { var _this9 = this; this.opt = opt; var sel = this.opt.element ? this.opt.element : "*"; sel += "[data-markjs]"; if (this.opt.className) { sel += "." 
+ this.opt.className; } this.log("Removal selector \"" + sel + "\""); this.iterator.forEachNode(NodeFilter.SHOW_ELEMENT, function (node) { _this9.unwrapMatches(node); }, function (node) { var matchesSel = DOMIterator.matches(node, sel), matchesExclude = _this9.matchesExclude(node); if (!matchesSel || matchesExclude) { return NodeFilter.FILTER_REJECT; } else { return NodeFilter.FILTER_ACCEPT; } }, this.opt.done); } }, { key: "opt", set: function set(val) { this._opt = _extends({}, { "element": "", "className": "", "exclude": [], "iframes": false, "separateWordSearch": true, "diacritics": true, "synonyms": {}, "accuracy": "partially", "acrossElements": false, "caseSensitive": false, "ignoreJoiners": false, "ignoreGroups": 0, "each": function each() {}, "noMatch": function noMatch() {}, "filter": function filter() { return true; }, "done": function done() {}, "debug": false, "log": window.console }, val); }, get: function get() { return this._opt; } }, { key: "iterator", get: function get() { if (!this._iterator) { this._iterator = new DOMIterator(this.ctx, this.opt.iframes, this.opt.exclude); } return this._iterator; } }]); return Mark; }(); var DOMIterator = function () { function DOMIterator(ctx) { var iframes = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : true; var exclude = arguments.length > 2 && arguments[2] !== undefined ? 
arguments[2] : []; _classCallCheck(this, DOMIterator); this.ctx = ctx; this.iframes = iframes; this.exclude = exclude; } _createClass(DOMIterator, [{ key: "getContexts", value: function getContexts() { var ctx = void 0, filteredCtx = []; if (typeof this.ctx === "undefined" || !this.ctx) { ctx = []; } else if (NodeList.prototype.isPrototypeOf(this.ctx)) { ctx = Array.prototype.slice.call(this.ctx); } else if (Array.isArray(this.ctx)) { ctx = this.ctx; } else if (typeof this.ctx === "string") { ctx = Array.prototype.slice.call(document.querySelectorAll(this.ctx)); } else { ctx = [this.ctx]; } ctx.forEach(function (ctx) { var isDescendant = filteredCtx.filter(function (contexts) { return contexts.contains(ctx); }).length > 0; if (filteredCtx.indexOf(ctx) === -1 && !isDescendant) { filteredCtx.push(ctx); } }); return filteredCtx; } }, { key: "getIframeContents", value: function getIframeContents(ifr, successFn) { var errorFn = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : function () {}; var doc = void 0; try { var ifrWin = ifr.contentWindow; doc = ifrWin.document; if (!ifrWin || !doc) { throw new Error("iframe inaccessible"); } } catch (e) { errorFn(); } if (doc) { successFn(doc); } } }, { key: "onIframeReady", value: function onIframeReady(ifr, successFn, errorFn) { var _this10 = this; try { (function () { var ifrWin = ifr.contentWindow, bl = "about:blank", compl = "complete", isBlank = function isBlank() { var src = ifr.getAttribute("src").trim(), href = ifrWin.location.href; return href === bl && src !== bl && src; }, observeOnload = function observeOnload() { var listener = function listener() { try { if (!isBlank()) { ifr.removeEventListener("load", listener); _this10.getIframeContents(ifr, successFn, errorFn); } } catch (e) { errorFn(); } }; ifr.addEventListener("load", listener); }; if (ifrWin.document.readyState === compl) { if (isBlank()) { observeOnload(); } else { _this10.getIframeContents(ifr, successFn, errorFn); } } else { 
observeOnload(); } })(); } catch (e) { errorFn(); } } }, { key: "waitForIframes", value: function waitForIframes(ctx, done) { var _this11 = this; var eachCalled = 0; this.forEachIframe(ctx, function () { return true; }, function (ifr) { eachCalled++; _this11.waitForIframes(ifr.querySelector("html"), function () { if (! --eachCalled) { done(); } }); }, function (handled) { if (!handled) { done(); } }); } }, { key: "forEachIframe", value: function forEachIframe(ctx, filter, each) { var _this12 = this; var end = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : function () {}; var ifr = ctx.querySelectorAll("iframe"), open = ifr.length, handled = 0; ifr = Array.prototype.slice.call(ifr); var checkEnd = function checkEnd() { if (--open <= 0) { end(handled); } }; if (!open) { checkEnd(); } ifr.forEach(function (ifr) { if (DOMIterator.matches(ifr, _this12.exclude)) { checkEnd(); } else { _this12.onIframeReady(ifr, function (con) { if (filter(ifr)) { handled++; each(con); } checkEnd(); }, checkEnd); } }); } }, { key: "createIterator", value: function createIterator(ctx, whatToShow, filter) { return document.createNodeIterator(ctx, whatToShow, filter, false); } }, { key: "createInstanceOnIframe", value: function createInstanceOnIframe(contents) { return new DOMIterator(contents.querySelector("html"), this.iframes); } }, { key: "compareNodeIframe", value: function compareNodeIframe(node, prevNode, ifr) { var compCurr = node.compareDocumentPosition(ifr), prev = Node.DOCUMENT_POSITION_PRECEDING; if (compCurr & prev) { if (prevNode !== null) { var compPrev = prevNode.compareDocumentPosition(ifr), after = Node.DOCUMENT_POSITION_FOLLOWING; if (compPrev & after) { return true; } } else { return true; } } return false; } }, { key: "getIteratorNode", value: function getIteratorNode(itr) { var prevNode = itr.previousNode(); var node = void 0; if (prevNode === null) { node = itr.nextNode(); } else { node = itr.nextNode() && itr.nextNode(); } return { prevNode: 
prevNode, node: node }; } }, { key: "checkIframeFilter", value: function checkIframeFilter(node, prevNode, currIfr, ifr) { var key = false, handled = false; ifr.forEach(function (ifrDict, i) { if (ifrDict.val === currIfr) { key = i; handled = ifrDict.handled; } }); if (this.compareNodeIframe(node, prevNode, currIfr)) { if (key === false && !handled) { ifr.push({ val: currIfr, handled: true }); } else if (key !== false && !handled) { ifr[key].handled = true; } return true; } if (key === false) { ifr.push({ val: currIfr, handled: false }); } return false; } }, { key: "handleOpenIframes", value: function handleOpenIframes(ifr, whatToShow, eCb, fCb) { var _this13 = this; ifr.forEach(function (ifrDict) { if (!ifrDict.handled) { _this13.getIframeContents(ifrDict.val, function (con) { _this13.createInstanceOnIframe(con).forEachNode(whatToShow, eCb, fCb); }); } }); } }, { key: "iterateThroughNodes", value: function iterateThroughNodes(whatToShow, ctx, eachCb, filterCb, doneCb) { var _this14 = this; var itr = this.createIterator(ctx, whatToShow, filterCb); var ifr = [], elements = [], node = void 0, prevNode = void 0, retrieveNodes = function retrieveNodes() { var _getIteratorNode = _this14.getIteratorNode(itr); prevNode = _getIteratorNode.prevNode; node = _getIteratorNode.node; return node; }; while (retrieveNodes()) { if (this.iframes) { this.forEachIframe(ctx, function (currIfr) { return _this14.checkIframeFilter(node, prevNode, currIfr, ifr); }, function (con) { _this14.createInstanceOnIframe(con).forEachNode(whatToShow, eachCb, filterCb); }); } elements.push(node); } elements.forEach(function (node) { eachCb(node); }); if (this.iframes) { this.handleOpenIframes(ifr, whatToShow, eachCb, filterCb); } doneCb(); } }, { key: "forEachNode", value: function forEachNode(whatToShow, each, filter) { var _this15 = this; var done = arguments.length > 3 && arguments[3] !== undefined ? 
arguments[3] : function () {}; var contexts = this.getContexts(); var open = contexts.length; if (!open) { done(); } contexts.forEach(function (ctx) { var ready = function ready() { _this15.iterateThroughNodes(whatToShow, ctx, each, filter, function () { if (--open <= 0) { done(); } }); }; if (_this15.iframes) { _this15.waitForIframes(ctx, ready); } else { ready(); } }); } }], [{ key: "matches", value: function matches(element, selector) { var selectors = typeof selector === "string" ? [selector] : selector, fn = element.matches || element.matchesSelector || element.msMatchesSelector || element.mozMatchesSelector || element.oMatchesSelector || element.webkitMatchesSelector; if (fn) { var match = false; selectors.every(function (sel) { if (fn.call(element, sel)) { match = true; return false; } return true; }); return match; } else { return false; } } }]); return DOMIterator; }(); window.Mark = function (ctx) { var _this16 = this; var instance = new Mark(ctx); this.mark = function (sv, opt) { instance.mark(sv, opt); return _this16; }; this.markRegExp = function (sv, opt) { instance.markRegExp(sv, opt); return _this16; }; this.unmark = function (opt) { instance.unmark(opt); return _this16; }; return this; }; return window.Mark; }, window, document);<|fim▁end|>
if (ua.indexOf("MSIE") > -1 || ua.indexOf("Trident") > -1) { this.ie = true; }
<|file_name|>snapshot.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 Pants project contributors (see CONTRIBUTORS.md). // Licensed under the Apache License, Version 2.0 (see LICENSE). use std::collections::HashMap; use std::fmt; use std::hash; use std::iter::Iterator; use std::path::{Path, PathBuf}; use std::sync::Arc; use deepsize::DeepSizeOf; use futures::future; use futures::FutureExt; use fs::{ DigestTrie, Dir, DirectoryDigest, File, GitignoreStyleExcludes, GlobMatching, PathStat, PosixFS, PreparedPathGlobs, SymlinkBehavior, EMPTY_DIGEST_TREE, }; use hashing::{Digest, EMPTY_DIGEST}; use protos::gen::build::bazel::remote::execution::v2 as remexec; use crate::Store; /// The listing of a DirectoryDigest. /// /// Similar to DirectoryDigest, the presence of the DigestTrie does _not_ guarantee that /// the contents of the Digest have been persisted to the Store. See that struct's docs. #[derive(Clone, DeepSizeOf)] pub struct Snapshot { pub digest: Digest, pub tree: DigestTrie, } impl Eq for Snapshot {} impl PartialEq for Snapshot { fn eq(&self, other: &Self) -> bool { self.digest == other.digest } } impl hash::Hash for Snapshot { fn hash<H: hash::Hasher>(&self, state: &mut H) { self.digest.hash(state); } } impl Snapshot { pub fn empty() -> Self { Self { digest: EMPTY_DIGEST, tree: EMPTY_DIGEST_TREE.clone(), } } pub async fn from_path_stats< S: StoreFileByDigest<Error> + Sized + Clone + Send + 'static, Error: fmt::Debug + 'static + Send, >( store: Store, file_digester: S, path_stats: Vec<PathStat>, ) -> Result<Snapshot, String> { let (paths, files): (Vec<_>, Vec<_>) = path_stats .iter() .filter_map(|ps| match ps { PathStat::File { path, stat } => Some((path.clone(), stat.clone())), _ => None, }) .unzip(); let file_digests = future::try_join_all( files .into_iter() .map(|file| file_digester.store_by_digest(file)) .collect::<Vec<_>>(), ) .await .map_err(|e| format!("Failed to digest inputs: {e:?}"))?; let file_digests_map = paths .into_iter() .zip(file_digests) 
.collect::<HashMap<_, _>>(); let tree = DigestTrie::from_path_stats(path_stats, &file_digests_map)?; // TODO: When "enough" intrinsics are ported to directly producing/consuming DirectoryDigests // this call to persist the tree to the store should be removed, and the tree will be in-memory // only (as allowed by the DirectoryDigest contract). See #13112. let directory_digest = store.record_digest_trie(tree.clone(), true).await?; Ok(Self { digest: directory_digest.as_digest(), tree, }) } pub async fn from_digest(store: Store, digest: DirectoryDigest) -> Result<Snapshot, String> { Ok(Self { digest: digest.as_digest(), tree: store.load_digest_trie(digest).await?, }) } pub async fn get_directory_or_err( store: Store, digest: Digest, ) -> Result<remexec::Directory, String> { let maybe_dir = store.load_directory(digest).await?; maybe_dir.ok_or_else(|| format!("{:?} was not known", digest)) } /// /// Capture a Snapshot of a presumed-immutable piece of the filesystem. /// /// Note that we don't use a Graph here, and don't cache any intermediate steps, we just place /// the resultant Snapshot into the store and return it. This is important, because we're reading /// things from arbitrary filepaths which we don't want to cache in the graph, as we don't watch /// them for changes. Because we're not caching things, we can safely configure the virtual /// filesystem to be symlink-oblivious. /// /// If the `digest_hint` is given, first attempt to load the Snapshot using that Digest, and only /// fall back to actually walking the filesystem if we don't have it (either due to garbage /// collection or Digest-oblivious legacy caching). 
/// pub async fn capture_snapshot_from_arbitrary_root<P: AsRef<Path> + Send + 'static>( store: Store, executor: task_executor::Executor, root_path: P, path_globs: PreparedPathGlobs, digest_hint: Option<DirectoryDigest>, ) -> Result<Snapshot, String> { // Attempt to use the digest hint to load a Snapshot without expanding the globs; otherwise, // expand the globs to capture a Snapshot. let snapshot_result = if let Some(digest) = digest_hint { Snapshot::from_digest(store.clone(), digest).await } else { Err("No digest hint provided.".to_string()) }; if let Ok(snapshot) = snapshot_result { Ok(snapshot) } else { let posix_fs = Arc::new(PosixFS::new_with_symlink_behavior( root_path, GitignoreStyleExcludes::create(vec![])?, executor, SymlinkBehavior::Oblivious, )?); let path_stats = posix_fs .expand_globs(path_globs, None) .await .map_err(|err| format!("Error expanding globs: {}", err))?; Snapshot::from_path_stats( store.clone(), OneOffStoreFileByDigest::new(store, posix_fs, true), path_stats, ) .await } } /// # Safety /// /// This should only be used for testing, as this will always create an invalid Snapshot. pub unsafe fn create_for_testing_ffi( digest: Digest, files: Vec<String>, dirs: Vec<String>, ) -> Result<Self, String> { // NB: All files receive the EMPTY_DIGEST. let file_digests = files .iter() .map(|s| (PathBuf::from(&s), EMPTY_DIGEST)) .collect(); let file_path_stats: Vec<PathStat> = files .into_iter() .map(|s| { PathStat::file( PathBuf::from(s.clone()), File { path: PathBuf::from(s), is_executable: false, }, ) }) .collect(); let dir_path_stats: Vec<PathStat> = dirs .into_iter() .map(|s| PathStat::dir(PathBuf::from(&s), Dir(PathBuf::from(s)))) .collect(); let tree = DigestTrie::from_path_stats([file_path_stats, dir_path_stats].concat(), &file_digests)?; Ok(Self { // NB: The DigestTrie's computed digest is ignored in favor of the given Digest. 
digest, tree, }) } } impl fmt::Debug for Snapshot { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!( f, "Snapshot(digest={:?}, entries={})", self.digest, self.tree.digests().len() ) } } impl From<Snapshot> for DirectoryDigest {<|fim▁hole|>} // StoreFileByDigest allows a File to be saved to an underlying Store, in such a way that it can be // looked up by the Digest produced by the store_by_digest method. // It is a separate trait so that caching implementations can be written which wrap the Store (used // to store the bytes) and Vfs (used to read the files off disk if needed). pub trait StoreFileByDigest<Error> { fn store_by_digest(&self, file: File) -> future::BoxFuture<'static, Result<Digest, Error>>; } /// /// A StoreFileByDigest which reads immutable files with a PosixFS and writes to a Store, with no /// caching. /// #[derive(Clone)] pub struct OneOffStoreFileByDigest { store: Store, posix_fs: Arc<PosixFS>, immutable: bool, } impl OneOffStoreFileByDigest { pub fn new(store: Store, posix_fs: Arc<PosixFS>, immutable: bool) -> OneOffStoreFileByDigest { OneOffStoreFileByDigest { store, posix_fs, immutable, } } } impl StoreFileByDigest<String> for OneOffStoreFileByDigest { fn store_by_digest(&self, file: File) -> future::BoxFuture<'static, Result<Digest, String>> { let store = self.store.clone(); let posix_fs = self.posix_fs.clone(); let immutable = self.immutable; let res = async move { let path = posix_fs.file_path(&file); store .store_file(true, immutable, move || std::fs::File::open(&path)) .await }; res.boxed() } }<|fim▁end|>
fn from(s: Snapshot) -> Self { Self::new(s.digest, s.tree) }
<|file_name|>slick.core.js<|end_file_name|><|fim▁begin|>/*** * Contains core SlickGrid classes. * @module Core * @namespace Slick */ (function ($) { // register namespace $.extend(true, window, { "Slick": { "Event": Event, "EventData": EventData, "EventHandler": EventHandler, "Range": Range, "NonDataRow": NonDataItem, "Group": Group, "GroupTotals": GroupTotals, "EditorLock": EditorLock, /*** * A global singleton editor lock. * @class GlobalEditorLock * @static * @constructor */ "GlobalEditorLock": new EditorLock(), "TreeColumns": TreeColumns } }); /*** * An event object for passing data to event handlers and letting them control propagation. * <p>This is pretty much identical to how W3C and jQuery implement events.</p> * @class EventData * @constructor */ function EventData() { var isPropagationStopped = false; var isImmediatePropagationStopped = false; /*** * Stops event from propagating up the DOM tree. * @method stopPropagation */ this.stopPropagation = function () { isPropagationStopped = true; }; /*** * Returns whether stopPropagation was called on this event object. * @method isPropagationStopped * @return {Boolean} */ this.isPropagationStopped = function () { return isPropagationStopped; }; /*** * Prevents the rest of the handlers from being executed. * @method stopImmediatePropagation */ this.stopImmediatePropagation = function () { isImmediatePropagationStopped = true; }; /*** * Returns whether stopImmediatePropagation was called on this event object.\ * @method isImmediatePropagationStopped * @return {Boolean} */ this.isImmediatePropagationStopped = function () { return isImmediatePropagationStopped; } } /*** * A simple publisher-subscriber implementation. * @class Event * @constructor */ function Event() { var handlers = []; /*** * Adds an event handler to be called when the event is fired. 
* <p>Event handler will receive two arguments - an <code>EventData</code> and the <code>data</code> * object the event was fired with.<p> * @method subscribe * @param fn {Function} Event handler. */ this.subscribe = function (fn) { handlers.push(fn); }; /*** * Removes an event handler added with <code>subscribe(fn)</code>. * @method unsubscribe * @param fn {Function} Event handler to be removed. */ this.unsubscribe = function (fn) { for (var i = handlers.length - 1; i >= 0; i--) { if (handlers[i] === fn) { handlers.splice(i, 1); } } }; /*** * Fires an event notifying all subscribers. * @method notify * @param args {Object} Additional data object to be passed to all handlers. * @param e {EventData} * Optional. * An <code>EventData</code> object to be passed to all handlers. * For DOM events, an existing W3C/jQuery event object can be passed in. * @param scope {Object} * Optional. * The scope ("this") within which the handler will be executed. * If not specified, the scope will be set to the <code>Event</code> instance. 
*/ this.notify = function (args, e, scope) { e = e || new EventData(); scope = scope || this; var returnValue; for (var i = 0; i < handlers.length && !(e.isPropagationStopped() || e.isImmediatePropagationStopped()); i++) { returnValue = handlers[i].call(scope, e, args); } return returnValue; }; } function EventHandler() { var handlers = []; this.subscribe = function (event, handler) { handlers.push({ event: event, handler: handler }); event.subscribe(handler); return this; // allow chaining }; this.unsubscribe = function (event, handler) { var i = handlers.length; while (i--) { if (handlers[i].event === event && handlers[i].handler === handler) { handlers.splice(i, 1); event.unsubscribe(handler); return; } } return this; // allow chaining }; this.unsubscribeAll = function () { var i = handlers.length; while (i--) { handlers[i].event.unsubscribe(handlers[i].handler); } handlers = []; return this; // allow chaining } } /*** * A structure containing a range of cells. * @class Range * @constructor * @param fromRow {Integer} Starting row. * @param fromCell {Integer} Starting cell. * @param toRow {Integer} Optional. Ending row. Defaults to <code>fromRow</code>. * @param toCell {Integer} Optional. Ending cell. Defaults to <code>fromCell</code>. */ function Range(fromRow, fromCell, toRow, toCell) { if (toRow === undefined && toCell === undefined) { toRow = fromRow; toCell = fromCell; } /*** * @property fromRow * @type {Integer} */ this.fromRow = Math.min(fromRow, toRow); /*** * @property fromCell * @type {Integer} */ this.fromCell = Math.min(fromCell, toCell); /*** * @property toRow * @type {Integer} */ this.toRow = Math.max(fromRow, toRow); /*** * @property toCell * @type {Integer} */ this.toCell = Math.max(fromCell, toCell); /*** * Returns whether a range represents a single row. * @method isSingleRow * @return {Boolean} */ this.isSingleRow = function () { return this.fromRow == this.toRow; }; /*** * Returns whether a range represents a single cell. 
* @method isSingleCell * @return {Boolean} */ this.isSingleCell = function () { return this.fromRow == this.toRow && this.fromCell == this.toCell; }; /*** * Returns whether a range contains a given cell. * @method contains * @param row {Integer} * @param cell {Integer} * @return {Boolean} */ this.contains = function (row, cell) { return row >= this.fromRow && row <= this.toRow && cell >= this.fromCell && cell <= this.toCell; }; /*** * Returns a readable representation of a range. * @method toString * @return {String} */ this.toString = function () { if (this.isSingleCell()) { return "(" + this.fromRow + ":" + this.fromCell + ")"; } else { return "(" + this.fromRow + ":" + this.fromCell + " - " + this.toRow + ":" + this.toCell + ")"; } } } /*** * A base class that all special / non-data rows (like Group and GroupTotals) derive from. * @class NonDataItem * @constructor */ function NonDataItem() { this.__nonDataRow = true; } /*** * Information about a group of rows. * @class Group * @extends Slick.NonDataItem * @constructor */ function Group() { this.__group = true; /** * Grouping level, starting with 0. * @property level * @type {Number} */ this.level = 0; /*** * Number of rows in the group. * @property count * @type {Integer} */ this.count = 0; /*** * Grouping value. * @property value * @type {Object} */ this.value = null; /*** * Formatted display value of the group. * @property title * @type {String} */ this.title = null; /*** * Whether a group is collapsed. * @property collapsed * @type {Boolean} */ this.collapsed = false; /*** * GroupTotals, if any. * @property totals * @type {GroupTotals} */ this.totals = null; /** * Rows that are part of the group. * @property rows * @type {Array} */ this.rows = []; /** * Sub-groups that are part of the group. * @property groups * @type {Array} */ this.groups = null; /** * A unique key used to identify the group. This key can be used in calls to DataView * collapseGroup() or expandGroup(). 
* @property groupingKey * @type {Object} */ this.groupingKey = null; } Group.prototype = new NonDataItem(); /*** * Compares two Group instances. * @method equals * @return {Boolean} * @param group {Group} Group instance to compare to. */ Group.prototype.equals = function (group) { return this.value === group.value && this.count === group.count && this.collapsed === group.collapsed && this.title === group.title; }; /*** * Information about group totals. * An instance of GroupTotals will be created for each totals row and passed to the aggregators * so that they can store arbitrary data in it. That data can later be accessed by group totals * formatters during the display. * @class GroupTotals * @extends Slick.NonDataItem * @constructor */ function GroupTotals() { this.__groupTotals = true; /*** * Parent Group. * @param group * @type {Group} */ this.group = null; /*** * Whether the totals have been fully initialized / calculated. * Will be set to false for lazy-calculated group totals. * @param initialized * @type {Boolean} */ this.initialized = false; } GroupTotals.prototype = new NonDataItem(); /*** * A locking helper to track the active edit controller and ensure that only a single controller * can be active at a time. This prevents a whole class of state and validation synchronization * issues. An edit controller (such as SlickGrid) can query if an active edit is in progress * and attempt a commit or cancel before proceeding. * @class EditorLock * @constructor */ function EditorLock() { var activeEditController = null; /*** * Returns true if a specified edit controller is active (has the edit lock). * If the parameter is not specified, returns true if any edit controller is active. * @method isActive * @param editController {EditController} * @return {Boolean} */ this.isActive = function (editController) { return (editController ? 
activeEditController === editController : activeEditController !== null); }; /*** * Sets the specified edit controller as the active edit controller (acquire edit lock). * If another edit controller is already active, and exception will be thrown. * @method activate * @param editController {EditController} edit controller acquiring the lock */ this.activate = function (editController) { if (editController === activeEditController) { // already activated? return; } if (activeEditController !== null) { throw "SlickGrid.EditorLock.activate: an editController is still active, can't activate another editController"; } if (!editController.commitCurrentEdit) { throw "SlickGrid.EditorLock.activate: editController must implement .commitCurrentEdit()"; } if (!editController.cancelCurrentEdit) { throw "SlickGrid.EditorLock.activate: editController must implement .cancelCurrentEdit()"; } activeEditController = editController; }; /*** * Unsets the specified edit controller as the active edit controller (release edit lock). * If the specified edit controller is not the active one, an exception will be thrown. * @method deactivate * @param editController {EditController} edit controller releasing the lock */ this.deactivate = function (editController) { if (activeEditController !== editController) { throw "SlickGrid.EditorLock.deactivate: specified editController is not the currently active one"; } activeEditController = null; }; /*** * Attempts to commit the current edit by calling "commitCurrentEdit" method on the active edit * controller and returns whether the commit attempt was successful (commit may fail due to validation * errors, etc.). Edit controller's "commitCurrentEdit" must return true if the commit has succeeded * and false otherwise. If no edit controller is active, returns true. * @method commitCurrentEdit * @return {Boolean} */ this.commitCurrentEdit = function () { return (activeEditController ? 
activeEditController.commitCurrentEdit() : true); }; /*** * Attempts to cancel the current edit by calling "cancelCurrentEdit" method on the active edit * controller and returns whether the edit was successfully cancelled. If no edit controller is * active, returns true. * @method cancelCurrentEdit * @return {Boolean} */ this.cancelCurrentEdit = function cancelCurrentEdit() { return (activeEditController ? activeEditController.cancelCurrentEdit() : true); }; } /** * * @param {Array} treeColumns Array com levels of columns * @returns {{hasDepth: 'hasDepth', getTreeColumns: 'getTreeColumns', extractColumns: 'extractColumns', getDepth: 'getDepth', getColumnsInDepth: 'getColumnsInDepth', getColumnsInGroup: 'getColumnsInGroup', visibleColumns: 'visibleColumns', filter: 'filter', reOrder: reOrder}} * @constructor */ function TreeColumns(treeColumns) { var columnsById = {}; function init() { mapToId(treeColumns); } function mapToId(columns) { columns .forEach(function (column) { columnsById[column.id] = column; if (column.columns) mapToId(column.columns); }); } function filter(node, condition) { return node.filter(function (column) { var valid = condition.call(column); if (valid && column.columns) column.columns = filter(column.columns, condition); return valid && (!column.columns || column.columns.length); }); } function sort(columns, grid) { columns .sort(function (a, b) { var indexA = getOrDefault(grid.getColumnIndex(a.id)), indexB = getOrDefault(grid.getColumnIndex(b.id)); return indexA - indexB; }) .forEach(function (column) { if (column.columns) sort(column.columns, grid); }); } function getOrDefault(value) { return typeof value === 'undefined' ? 
-1 : value; } function getDepth(node) { if (node.length) for (var i in node) return getDepth(node[i]); else if (node.columns) return 1 + getDepth(node.columns); else return 1; } function getColumnsInDepth(node, depth, current) { var columns = []; current = current || 0; if (depth == current) { if (node.length) node.forEach(function(n) { if (n.columns) n.extractColumns = function() { return extractColumns(n); }; }); return node; } else for (var i in node) if (node[i].columns) { columns = columns.concat(getColumnsInDepth(node[i].columns, depth, current + 1)); } return columns; } function extractColumns(node) { var result = []; if (node.hasOwnProperty('length')) { for (var i = 0; i < node.length; i++) result = result.concat(extractColumns(node[i])); } else { if (node.hasOwnProperty('columns')) result = result.concat(extractColumns(node.columns)); else return node; } return result; } function cloneTreeColumns() { return $.extend(true, [], treeColumns); } init(); this.hasDepth = function () { for (var i in treeColumns) if (treeColumns[i].hasOwnProperty('columns')) return true; return false;<|fim▁hole|> }; this.extractColumns = function () { return this.hasDepth()? extractColumns(treeColumns): treeColumns; }; this.getDepth = function () { return getDepth(treeColumns); }; this.getColumnsInDepth = function (depth) { return getColumnsInDepth(treeColumns, depth); }; this.getColumnsInGroup = function (groups) { return extractColumns(groups); }; this.visibleColumns = function () { return filter(cloneTreeColumns(), function () { return this.visible; }); }; this.filter = function (condition) { return filter(cloneTreeColumns(), condition); }; this.reOrder = function (grid) { return sort(treeColumns, grid); }; this.getById = function (id) { return columnsById[id]; } this.getInIds = function (ids) { return ids.map(function (id) { return columnsById[id]; }); } } })(jQuery);<|fim▁end|>
}; this.getTreeColumns = function () { return treeColumns;
<|file_name|>Teacher.java<|end_file_name|><|fim▁begin|>/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package com.hoang.fu; /** * * @author hoangpt */ public class Teacher extends Employee implements ITeacher { Teacher(String name) { this.name = name; } @Override float calculateSalary(){ return 500f; } @Override public int calculateBonus() { throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates. }<|fim▁hole|> @Override public float calculateAllowance() { throw new UnsupportedOperationException("Not supported yet."); //To change body of generated methods, choose Tools | Templates. } }<|fim▁end|>
<|file_name|>AnimationState.py<|end_file_name|><|fim▁begin|># # The Multiverse Platform is made available under the MIT License. # # Copyright (c) 2012 The Multiverse Foundation # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation # files (the "Software"), to deal in the Software without restriction, # including without limitation the rights to use, copy, modify, # merge, publish, distribute, sublicense, and/or sell copies # of the Software, and to permit persons to whom the Software # is furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES # OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT # HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE # OR OTHER DEALINGS IN THE SOFTWARE. 
# # import ClientAPI class AnimationState: # # Constructor # def __init__(self): assert False # # Property Getters # def _get_Time(self): return self._state.State.Time def __getattr__(self, attrname): if attrname in self._getters: return self._getters[attrname](self) else: raise AttributeError, attrname #<|fim▁hole|> def __setattr__(self, attrname, value): if attrname in self._setters: self._setters[attrname](self, value) else: raise AttributeError, attrname _getters = { 'Time' : _get_Time } _setters = { } # # Methods # def AddTime(self, t): return self._state.AddTime(t) def RegisterTimeEventHandler(self, time, handler): AnimationStateEventWrapper(self, handler, time) # # This class is just another way of making an AnimationState, with a different constructor, # since we don't have constructor overloading within a single class. This should only # be used internally by the API. # class _ExistingAnimationState(AnimationState): # # Constructor # def __init__(self, state): self.__dict__['_state'] = state def __setattr__(self, attrname, value): AnimationState.__setattr__(self, attrname, value) class AnimationStateEventWrapper: def __init__(self, state, handler, triggerTime): self.animState = state self.realHandler = handler state._state.RegisterTimeEventHandler(triggerTime, self.Handler) def Handler(self, axiomState, triggerTime): self.realHandler(self.animState, triggerTime)<|fim▁end|>
# Property Setters #
<|file_name|>binance-batch-orders.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- import os import sys root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) sys.path.append(root + '/python') import ccxt # noqa: E402 exchange = ccxt.binance({ "apiKey": "YOUR_API_KEY", "secret": "YOUR_SECRET", 'enableRateLimit': True, }) orders = [ { "symbol" : "BTCUSDT", "side" : "BUY", "positionSide" : "LONG", "type" : "MARKET",<|fim▁hole|> orders = [exchange.encode_uri_component(exchange.json(order), safe=",") for order in orders] response = exchange.fapiPrivatePostBatchOrders({ 'batchOrders': '[' + ','.join(orders) + ']' }) print(response)<|fim▁end|>
"quantity": float(0.005) } ]
<|file_name|>resources.pb.cc<|end_file_name|><|fim▁begin|>// Generated by the protocol buffer compiler. DO NOT EDIT! // source: tensorflow_serving/resources/resources.proto #include "tensorflow_serving/resources/resources.pb.h" #include <algorithm> #include <google/protobuf/stubs/common.h> #include <google/protobuf/stubs/port.h> #include <google/protobuf/io/coded_stream.h> #include <google/protobuf/wire_format_lite_inl.h> #include <google/protobuf/descriptor.h> #include <google/protobuf/generated_message_reflection.h> #include <google/protobuf/reflection_ops.h> #include <google/protobuf/wire_format.h> // This is a temporary google only hack #ifdef GOOGLE_PROTOBUF_ENFORCE_UNIQUENESS #include "third_party/protobuf/version.h" #endif // @@protoc_insertion_point(includes) namespace protobuf_google_2fprotobuf_2fwrappers_2eproto { extern PROTOBUF_INTERNAL_EXPORT_protobuf_google_2fprotobuf_2fwrappers_2eproto ::google::protobuf::internal::SCCInfo<0> scc_info_UInt32Value; } // namespace protobuf_google_2fprotobuf_2fwrappers_2eproto namespace protobuf_tensorflow_5fserving_2fresources_2fresources_2eproto { extern PROTOBUF_INTERNAL_EXPORT_protobuf_tensorflow_5fserving_2fresources_2fresources_2eproto ::google::protobuf::internal::SCCInfo<1> scc_info_Resource; extern PROTOBUF_INTERNAL_EXPORT_protobuf_tensorflow_5fserving_2fresources_2fresources_2eproto ::google::protobuf::internal::SCCInfo<1> scc_info_ResourceAllocation_Entry; } // namespace protobuf_tensorflow_5fserving_2fresources_2fresources_2eproto namespace tensorflow { namespace serving { class ResourceDefaultTypeInternal { public: ::google::protobuf::internal::ExplicitlyConstructed<Resource> _instance; } _Resource_default_instance_; class ResourceAllocation_EntryDefaultTypeInternal { public: ::google::protobuf::internal::ExplicitlyConstructed<ResourceAllocation_Entry> _instance; } _ResourceAllocation_Entry_default_instance_; class ResourceAllocationDefaultTypeInternal { public: 
::google::protobuf::internal::ExplicitlyConstructed<ResourceAllocation> _instance; } _ResourceAllocation_default_instance_; } // namespace serving } // namespace tensorflow namespace protobuf_tensorflow_5fserving_2fresources_2fresources_2eproto { static void InitDefaultsResource() { GOOGLE_PROTOBUF_VERIFY_VERSION; { void* ptr = &::tensorflow::serving::_Resource_default_instance_; new (ptr) ::tensorflow::serving::Resource(); ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); } ::tensorflow::serving::Resource::InitAsDefaultInstance(); } ::google::protobuf::internal::SCCInfo<1> scc_info_Resource = {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsResource}, { &protobuf_google_2fprotobuf_2fwrappers_2eproto::scc_info_UInt32Value.base,}}; static void InitDefaultsResourceAllocation_Entry() { GOOGLE_PROTOBUF_VERIFY_VERSION; { void* ptr = &::tensorflow::serving::_ResourceAllocation_Entry_default_instance_; new (ptr) ::tensorflow::serving::ResourceAllocation_Entry(); ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); } ::tensorflow::serving::ResourceAllocation_Entry::InitAsDefaultInstance(); } ::google::protobuf::internal::SCCInfo<1> scc_info_ResourceAllocation_Entry = {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsResourceAllocation_Entry}, { &protobuf_tensorflow_5fserving_2fresources_2fresources_2eproto::scc_info_Resource.base,}}; static void InitDefaultsResourceAllocation() { GOOGLE_PROTOBUF_VERIFY_VERSION; { void* ptr = &::tensorflow::serving::_ResourceAllocation_default_instance_; new (ptr) ::tensorflow::serving::ResourceAllocation(); ::google::protobuf::internal::OnShutdownDestroyMessage(ptr); } ::tensorflow::serving::ResourceAllocation::InitAsDefaultInstance(); } ::google::protobuf::internal::SCCInfo<1> scc_info_ResourceAllocation = {{ATOMIC_VAR_INIT(::google::protobuf::internal::SCCInfoBase::kUninitialized), 1, InitDefaultsResourceAllocation}, { 
&protobuf_tensorflow_5fserving_2fresources_2fresources_2eproto::scc_info_ResourceAllocation_Entry.base,}}; void InitDefaults() { ::google::protobuf::internal::InitSCC(&scc_info_Resource.base); ::google::protobuf::internal::InitSCC(&scc_info_ResourceAllocation_Entry.base); ::google::protobuf::internal::InitSCC(&scc_info_ResourceAllocation.base); } ::google::protobuf::Metadata file_level_metadata[3]; const ::google::protobuf::uint32 TableStruct::offsets[] GOOGLE_PROTOBUF_ATTRIBUTE_SECTION_VARIABLE(protodesc_cold) = { ~0u, // no _has_bits_ GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::tensorflow::serving::Resource, _internal_metadata_), ~0u, // no _extensions_ ~0u, // no _oneof_case_ ~0u, // no _weak_field_map_ GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::tensorflow::serving::Resource, device_), GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::tensorflow::serving::Resource, device_instance_), GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::tensorflow::serving::Resource, kind_), ~0u, // no _has_bits_ GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::tensorflow::serving::ResourceAllocation_Entry, _internal_metadata_), ~0u, // no _extensions_ ~0u, // no _oneof_case_ ~0u, // no _weak_field_map_ GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::tensorflow::serving::ResourceAllocation_Entry, resource_), GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::tensorflow::serving::ResourceAllocation_Entry, quantity_), ~0u, // no _has_bits_ GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::tensorflow::serving::ResourceAllocation, _internal_metadata_), ~0u, // no _extensions_ ~0u, // no _oneof_case_ ~0u, // no _weak_field_map_ GOOGLE_PROTOBUF_GENERATED_MESSAGE_FIELD_OFFSET(::tensorflow::serving::ResourceAllocation, resource_quantities_), }; static const ::google::protobuf::internal::MigrationSchema schemas[] GOOGLE_PROTOBUF_ATTRIBUTE_SECTION_VARIABLE(protodesc_cold) = { { 0, -1, sizeof(::tensorflow::serving::Resource)}, { 8, -1, 
sizeof(::tensorflow::serving::ResourceAllocation_Entry)}, { 15, -1, sizeof(::tensorflow::serving::ResourceAllocation)}, }; static ::google::protobuf::Message const * const file_default_instances[] = { reinterpret_cast<const ::google::protobuf::Message*>(&::tensorflow::serving::_Resource_default_instance_), reinterpret_cast<const ::google::protobuf::Message*>(&::tensorflow::serving::_ResourceAllocation_Entry_default_instance_), reinterpret_cast<const ::google::protobuf::Message*>(&::tensorflow::serving::_ResourceAllocation_default_instance_), }; void protobuf_AssignDescriptors() { AddDescriptors(); AssignDescriptors( "tensorflow_serving/resources/resources.proto", schemas, file_default_instances, TableStruct::offsets, file_level_metadata, NULL, NULL); } void protobuf_AssignDescriptorsOnce() { static ::google::protobuf::internal::once_flag once; ::google::protobuf::internal::call_once(once, protobuf_AssignDescriptors); } void protobuf_RegisterTypes(const ::std::string&) GOOGLE_PROTOBUF_ATTRIBUTE_COLD; void protobuf_RegisterTypes(const ::std::string&) { protobuf_AssignDescriptorsOnce(); ::google::protobuf::internal::RegisterAllTypes(file_level_metadata, 3); } void AddDescriptorsImpl() { InitDefaults(); static const char descriptor[] GOOGLE_PROTOBUF_ATTRIBUTE_SECTION_VARIABLE(protodesc_cold) = { "\n,tensorflow_serving/resources/resources" ".proto\022\022tensorflow.serving\032\036google/proto" "buf/wrappers.proto\"_\n\010Resource\022\016\n\006device" "\030\001 \001(\t\0225\n\017device_instance\030\002 \001(\0132\034.google" ".protobuf.UInt32Value\022\014\n\004kind\030\003 \001(\t\"\252\001\n\022" "ResourceAllocation\022I\n\023resource_quantitie" "s\030\001 \003(\0132,.tensorflow.serving.ResourceAll" "ocation.Entry\032I\n\005Entry\022.\n\010resource\030\001 \001(\013" "2\034.tensorflow.serving.Resource\022\020\n\010quanti" "ty\030\002 \001(\004b\006proto3" }; ::google::protobuf::DescriptorPool::InternalAddGeneratedFile( descriptor, 376); 
::google::protobuf::MessageFactory::InternalRegisterGeneratedFile( "tensorflow_serving/resources/resources.proto", &protobuf_RegisterTypes); ::protobuf_google_2fprotobuf_2fwrappers_2eproto::AddDescriptors(); } void AddDescriptors() { static ::google::protobuf::internal::once_flag once; ::google::protobuf::internal::call_once(once, AddDescriptorsImpl); } // Force AddDescriptors() to be called at dynamic initialization time. struct StaticDescriptorInitializer { StaticDescriptorInitializer() { AddDescriptors(); } } static_descriptor_initializer; } // namespace protobuf_tensorflow_5fserving_2fresources_2fresources_2eproto namespace tensorflow { namespace serving { // =================================================================== void Resource::InitAsDefaultInstance() { ::tensorflow::serving::_Resource_default_instance_._instance.get_mutable()->device_instance_ = const_cast< ::google::protobuf::UInt32Value*>( ::google::protobuf::UInt32Value::internal_default_instance()); } void Resource::clear_device_instance() { if (GetArenaNoVirtual() == NULL && device_instance_ != NULL) { delete device_instance_; } device_instance_ = NULL; } #if !defined(_MSC_VER) || _MSC_VER >= 1900 const int Resource::kDeviceFieldNumber; const int Resource::kDeviceInstanceFieldNumber; const int Resource::kKindFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 Resource::Resource() : ::google::protobuf::Message(), _internal_metadata_(NULL) { ::google::protobuf::internal::InitSCC( &protobuf_tensorflow_5fserving_2fresources_2fresources_2eproto::scc_info_Resource.base); SharedCtor(); // @@protoc_insertion_point(constructor:tensorflow.serving.Resource) } Resource::Resource(const Resource& from) : ::google::protobuf::Message(), _internal_metadata_(NULL) { _internal_metadata_.MergeFrom(from._internal_metadata_); device_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); if (from.device().size() > 0) { 
device_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.device_); } kind_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited());<|fim▁hole|> if (from.has_device_instance()) { device_instance_ = new ::google::protobuf::UInt32Value(*from.device_instance_); } else { device_instance_ = NULL; } // @@protoc_insertion_point(copy_constructor:tensorflow.serving.Resource) } void Resource::SharedCtor() { device_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); kind_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); device_instance_ = NULL; } Resource::~Resource() { // @@protoc_insertion_point(destructor:tensorflow.serving.Resource) SharedDtor(); } void Resource::SharedDtor() { device_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); kind_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); if (this != internal_default_instance()) delete device_instance_; } void Resource::SetCachedSize(int size) const { _cached_size_.Set(size); } const ::google::protobuf::Descriptor* Resource::descriptor() { ::protobuf_tensorflow_5fserving_2fresources_2fresources_2eproto::protobuf_AssignDescriptorsOnce(); return ::protobuf_tensorflow_5fserving_2fresources_2fresources_2eproto::file_level_metadata[kIndexInFileMessages].descriptor; } const Resource& Resource::default_instance() { ::google::protobuf::internal::InitSCC(&protobuf_tensorflow_5fserving_2fresources_2fresources_2eproto::scc_info_Resource.base); return *internal_default_instance(); } void Resource::Clear() { // @@protoc_insertion_point(message_clear_start:tensorflow.serving.Resource) ::google::protobuf::uint32 cached_has_bits = 0; // Prevent compiler warnings about cached_has_bits being unused (void) cached_has_bits; device_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); 
kind_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited()); if (GetArenaNoVirtual() == NULL && device_instance_ != NULL) { delete device_instance_; } device_instance_ = NULL; _internal_metadata_.Clear(); } bool Resource::MergePartialFromCodedStream( ::google::protobuf::io::CodedInputStream* input) { #define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure ::google::protobuf::uint32 tag; // @@protoc_insertion_point(parse_start:tensorflow.serving.Resource) for (;;) { ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); tag = p.first; if (!p.second) goto handle_unusual; switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { // string device = 1; case 1: { if (static_cast< ::google::protobuf::uint8>(tag) == static_cast< ::google::protobuf::uint8>(10u /* 10 & 0xFF */)) { DO_(::google::protobuf::internal::WireFormatLite::ReadString( input, this->mutable_device())); DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( this->device().data(), static_cast<int>(this->device().length()), ::google::protobuf::internal::WireFormatLite::PARSE, "tensorflow.serving.Resource.device")); } else { goto handle_unusual; } break; } // .google.protobuf.UInt32Value device_instance = 2; case 2: { if (static_cast< ::google::protobuf::uint8>(tag) == static_cast< ::google::protobuf::uint8>(18u /* 18 & 0xFF */)) { DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( input, mutable_device_instance())); } else { goto handle_unusual; } break; } // string kind = 3; case 3: { if (static_cast< ::google::protobuf::uint8>(tag) == static_cast< ::google::protobuf::uint8>(26u /* 26 & 0xFF */)) { DO_(::google::protobuf::internal::WireFormatLite::ReadString( input, this->mutable_kind())); DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String( this->kind().data(), static_cast<int>(this->kind().length()), ::google::protobuf::internal::WireFormatLite::PARSE, 
"tensorflow.serving.Resource.kind")); } else { goto handle_unusual; } break; } default: { handle_unusual: if (tag == 0) { goto success; } DO_(::google::protobuf::internal::WireFormat::SkipField( input, tag, _internal_metadata_.mutable_unknown_fields())); break; } } } success: // @@protoc_insertion_point(parse_success:tensorflow.serving.Resource) return true; failure: // @@protoc_insertion_point(parse_failure:tensorflow.serving.Resource) return false; #undef DO_ } void Resource::SerializeWithCachedSizes( ::google::protobuf::io::CodedOutputStream* output) const { // @@protoc_insertion_point(serialize_start:tensorflow.serving.Resource) ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; // string device = 1; if (this->device().size() > 0) { ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( this->device().data(), static_cast<int>(this->device().length()), ::google::protobuf::internal::WireFormatLite::SERIALIZE, "tensorflow.serving.Resource.device"); ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( 1, this->device(), output); } // .google.protobuf.UInt32Value device_instance = 2; if (this->has_device_instance()) { ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( 2, this->_internal_device_instance(), output); } // string kind = 3; if (this->kind().size() > 0) { ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( this->kind().data(), static_cast<int>(this->kind().length()), ::google::protobuf::internal::WireFormatLite::SERIALIZE, "tensorflow.serving.Resource.kind"); ::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased( 3, this->kind(), output); } if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( (::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? 
_internal_metadata_.unknown_fields() : _internal_metadata_.default_instance()), output); } // @@protoc_insertion_point(serialize_end:tensorflow.serving.Resource) } ::google::protobuf::uint8* Resource::InternalSerializeWithCachedSizesToArray( bool deterministic, ::google::protobuf::uint8* target) const { (void)deterministic; // Unused // @@protoc_insertion_point(serialize_to_array_start:tensorflow.serving.Resource) ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; // string device = 1; if (this->device().size() > 0) { ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( this->device().data(), static_cast<int>(this->device().length()), ::google::protobuf::internal::WireFormatLite::SERIALIZE, "tensorflow.serving.Resource.device"); target = ::google::protobuf::internal::WireFormatLite::WriteStringToArray( 1, this->device(), target); } // .google.protobuf.UInt32Value device_instance = 2; if (this->has_device_instance()) { target = ::google::protobuf::internal::WireFormatLite:: InternalWriteMessageToArray( 2, this->_internal_device_instance(), deterministic, target); } // string kind = 3; if (this->kind().size() > 0) { ::google::protobuf::internal::WireFormatLite::VerifyUtf8String( this->kind().data(), static_cast<int>(this->kind().length()), ::google::protobuf::internal::WireFormatLite::SERIALIZE, "tensorflow.serving.Resource.kind"); target = ::google::protobuf::internal::WireFormatLite::WriteStringToArray( 3, this->kind(), target); } if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) { target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( (::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? 
_internal_metadata_.unknown_fields() : _internal_metadata_.default_instance()), target); } // @@protoc_insertion_point(serialize_to_array_end:tensorflow.serving.Resource) return target; } size_t Resource::ByteSizeLong() const { // @@protoc_insertion_point(message_byte_size_start:tensorflow.serving.Resource) size_t total_size = 0; if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) { total_size += ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( (::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? _internal_metadata_.unknown_fields() : _internal_metadata_.default_instance())); } // string device = 1; if (this->device().size() > 0) { total_size += 1 + ::google::protobuf::internal::WireFormatLite::StringSize( this->device()); } // string kind = 3; if (this->kind().size() > 0) { total_size += 1 + ::google::protobuf::internal::WireFormatLite::StringSize( this->kind()); } // .google.protobuf.UInt32Value device_instance = 2; if (this->has_device_instance()) { total_size += 1 + ::google::protobuf::internal::WireFormatLite::MessageSize( *device_instance_); } int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); SetCachedSize(cached_size); return total_size; } void Resource::MergeFrom(const ::google::protobuf::Message& from) { // @@protoc_insertion_point(generalized_merge_from_start:tensorflow.serving.Resource) GOOGLE_DCHECK_NE(&from, this); const Resource* source = ::google::protobuf::internal::DynamicCastToGenerated<const Resource>( &from); if (source == NULL) { // @@protoc_insertion_point(generalized_merge_from_cast_fail:tensorflow.serving.Resource) ::google::protobuf::internal::ReflectionOps::Merge(from, this); } else { // @@protoc_insertion_point(generalized_merge_from_cast_success:tensorflow.serving.Resource) MergeFrom(*source); } } void Resource::MergeFrom(const Resource& from) { // 
@@protoc_insertion_point(class_specific_merge_from_start:tensorflow.serving.Resource) GOOGLE_DCHECK_NE(&from, this); _internal_metadata_.MergeFrom(from._internal_metadata_); ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; if (from.device().size() > 0) { device_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.device_); } if (from.kind().size() > 0) { kind_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.kind_); } if (from.has_device_instance()) { mutable_device_instance()->::google::protobuf::UInt32Value::MergeFrom(from.device_instance()); } } void Resource::CopyFrom(const ::google::protobuf::Message& from) { // @@protoc_insertion_point(generalized_copy_from_start:tensorflow.serving.Resource) if (&from == this) return; Clear(); MergeFrom(from); } void Resource::CopyFrom(const Resource& from) { // @@protoc_insertion_point(class_specific_copy_from_start:tensorflow.serving.Resource) if (&from == this) return; Clear(); MergeFrom(from); } bool Resource::IsInitialized() const { return true; } void Resource::Swap(Resource* other) { if (other == this) return; InternalSwap(other); } void Resource::InternalSwap(Resource* other) { using std::swap; device_.Swap(&other->device_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual()); kind_.Swap(&other->kind_, &::google::protobuf::internal::GetEmptyStringAlreadyInited(), GetArenaNoVirtual()); swap(device_instance_, other->device_instance_); _internal_metadata_.Swap(&other->_internal_metadata_); } ::google::protobuf::Metadata Resource::GetMetadata() const { protobuf_tensorflow_5fserving_2fresources_2fresources_2eproto::protobuf_AssignDescriptorsOnce(); return ::protobuf_tensorflow_5fserving_2fresources_2fresources_2eproto::file_level_metadata[kIndexInFileMessages]; } // =================================================================== void ResourceAllocation_Entry::InitAsDefaultInstance() { 
::tensorflow::serving::_ResourceAllocation_Entry_default_instance_._instance.get_mutable()->resource_ = const_cast< ::tensorflow::serving::Resource*>( ::tensorflow::serving::Resource::internal_default_instance()); } #if !defined(_MSC_VER) || _MSC_VER >= 1900 const int ResourceAllocation_Entry::kResourceFieldNumber; const int ResourceAllocation_Entry::kQuantityFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 ResourceAllocation_Entry::ResourceAllocation_Entry() : ::google::protobuf::Message(), _internal_metadata_(NULL) { ::google::protobuf::internal::InitSCC( &protobuf_tensorflow_5fserving_2fresources_2fresources_2eproto::scc_info_ResourceAllocation_Entry.base); SharedCtor(); // @@protoc_insertion_point(constructor:tensorflow.serving.ResourceAllocation.Entry) } ResourceAllocation_Entry::ResourceAllocation_Entry(const ResourceAllocation_Entry& from) : ::google::protobuf::Message(), _internal_metadata_(NULL) { _internal_metadata_.MergeFrom(from._internal_metadata_); if (from.has_resource()) { resource_ = new ::tensorflow::serving::Resource(*from.resource_); } else { resource_ = NULL; } quantity_ = from.quantity_; // @@protoc_insertion_point(copy_constructor:tensorflow.serving.ResourceAllocation.Entry) } void ResourceAllocation_Entry::SharedCtor() { ::memset(&resource_, 0, static_cast<size_t>( reinterpret_cast<char*>(&quantity_) - reinterpret_cast<char*>(&resource_)) + sizeof(quantity_)); } ResourceAllocation_Entry::~ResourceAllocation_Entry() { // @@protoc_insertion_point(destructor:tensorflow.serving.ResourceAllocation.Entry) SharedDtor(); } void ResourceAllocation_Entry::SharedDtor() { if (this != internal_default_instance()) delete resource_; } void ResourceAllocation_Entry::SetCachedSize(int size) const { _cached_size_.Set(size); } const ::google::protobuf::Descriptor* ResourceAllocation_Entry::descriptor() { ::protobuf_tensorflow_5fserving_2fresources_2fresources_2eproto::protobuf_AssignDescriptorsOnce(); return 
::protobuf_tensorflow_5fserving_2fresources_2fresources_2eproto::file_level_metadata[kIndexInFileMessages].descriptor; } const ResourceAllocation_Entry& ResourceAllocation_Entry::default_instance() { ::google::protobuf::internal::InitSCC(&protobuf_tensorflow_5fserving_2fresources_2fresources_2eproto::scc_info_ResourceAllocation_Entry.base); return *internal_default_instance(); } void ResourceAllocation_Entry::Clear() { // @@protoc_insertion_point(message_clear_start:tensorflow.serving.ResourceAllocation.Entry) ::google::protobuf::uint32 cached_has_bits = 0; // Prevent compiler warnings about cached_has_bits being unused (void) cached_has_bits; if (GetArenaNoVirtual() == NULL && resource_ != NULL) { delete resource_; } resource_ = NULL; quantity_ = GOOGLE_ULONGLONG(0); _internal_metadata_.Clear(); } bool ResourceAllocation_Entry::MergePartialFromCodedStream( ::google::protobuf::io::CodedInputStream* input) { #define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure ::google::protobuf::uint32 tag; // @@protoc_insertion_point(parse_start:tensorflow.serving.ResourceAllocation.Entry) for (;;) { ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); tag = p.first; if (!p.second) goto handle_unusual; switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { // .tensorflow.serving.Resource resource = 1; case 1: { if (static_cast< ::google::protobuf::uint8>(tag) == static_cast< ::google::protobuf::uint8>(10u /* 10 & 0xFF */)) { DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( input, mutable_resource())); } else { goto handle_unusual; } break; } // uint64 quantity = 2; case 2: { if (static_cast< ::google::protobuf::uint8>(tag) == static_cast< ::google::protobuf::uint8>(16u /* 16 & 0xFF */)) { DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive< ::google::protobuf::uint64, ::google::protobuf::internal::WireFormatLite::TYPE_UINT64>( input, &quantity_))); } else { goto 
handle_unusual; } break; } default: { handle_unusual: if (tag == 0) { goto success; } DO_(::google::protobuf::internal::WireFormat::SkipField( input, tag, _internal_metadata_.mutable_unknown_fields())); break; } } } success: // @@protoc_insertion_point(parse_success:tensorflow.serving.ResourceAllocation.Entry) return true; failure: // @@protoc_insertion_point(parse_failure:tensorflow.serving.ResourceAllocation.Entry) return false; #undef DO_ } void ResourceAllocation_Entry::SerializeWithCachedSizes( ::google::protobuf::io::CodedOutputStream* output) const { // @@protoc_insertion_point(serialize_start:tensorflow.serving.ResourceAllocation.Entry) ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; // .tensorflow.serving.Resource resource = 1; if (this->has_resource()) { ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( 1, this->_internal_resource(), output); } // uint64 quantity = 2; if (this->quantity() != 0) { ::google::protobuf::internal::WireFormatLite::WriteUInt64(2, this->quantity(), output); } if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( (::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? 
_internal_metadata_.unknown_fields() : _internal_metadata_.default_instance()), output); } // @@protoc_insertion_point(serialize_end:tensorflow.serving.ResourceAllocation.Entry) } ::google::protobuf::uint8* ResourceAllocation_Entry::InternalSerializeWithCachedSizesToArray( bool deterministic, ::google::protobuf::uint8* target) const { (void)deterministic; // Unused // @@protoc_insertion_point(serialize_to_array_start:tensorflow.serving.ResourceAllocation.Entry) ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; // .tensorflow.serving.Resource resource = 1; if (this->has_resource()) { target = ::google::protobuf::internal::WireFormatLite:: InternalWriteMessageToArray( 1, this->_internal_resource(), deterministic, target); } // uint64 quantity = 2; if (this->quantity() != 0) { target = ::google::protobuf::internal::WireFormatLite::WriteUInt64ToArray(2, this->quantity(), target); } if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) { target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( (::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? _internal_metadata_.unknown_fields() : _internal_metadata_.default_instance()), target); } // @@protoc_insertion_point(serialize_to_array_end:tensorflow.serving.ResourceAllocation.Entry) return target; } size_t ResourceAllocation_Entry::ByteSizeLong() const { // @@protoc_insertion_point(message_byte_size_start:tensorflow.serving.ResourceAllocation.Entry) size_t total_size = 0; if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) { total_size += ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( (::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? 
_internal_metadata_.unknown_fields() : _internal_metadata_.default_instance())); } // .tensorflow.serving.Resource resource = 1; if (this->has_resource()) { total_size += 1 + ::google::protobuf::internal::WireFormatLite::MessageSize( *resource_); } // uint64 quantity = 2; if (this->quantity() != 0) { total_size += 1 + ::google::protobuf::internal::WireFormatLite::UInt64Size( this->quantity()); } int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); SetCachedSize(cached_size); return total_size; } void ResourceAllocation_Entry::MergeFrom(const ::google::protobuf::Message& from) { // @@protoc_insertion_point(generalized_merge_from_start:tensorflow.serving.ResourceAllocation.Entry) GOOGLE_DCHECK_NE(&from, this); const ResourceAllocation_Entry* source = ::google::protobuf::internal::DynamicCastToGenerated<const ResourceAllocation_Entry>( &from); if (source == NULL) { // @@protoc_insertion_point(generalized_merge_from_cast_fail:tensorflow.serving.ResourceAllocation.Entry) ::google::protobuf::internal::ReflectionOps::Merge(from, this); } else { // @@protoc_insertion_point(generalized_merge_from_cast_success:tensorflow.serving.ResourceAllocation.Entry) MergeFrom(*source); } } void ResourceAllocation_Entry::MergeFrom(const ResourceAllocation_Entry& from) { // @@protoc_insertion_point(class_specific_merge_from_start:tensorflow.serving.ResourceAllocation.Entry) GOOGLE_DCHECK_NE(&from, this); _internal_metadata_.MergeFrom(from._internal_metadata_); ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; if (from.has_resource()) { mutable_resource()->::tensorflow::serving::Resource::MergeFrom(from.resource()); } if (from.quantity() != 0) { set_quantity(from.quantity()); } } void ResourceAllocation_Entry::CopyFrom(const ::google::protobuf::Message& from) { // @@protoc_insertion_point(generalized_copy_from_start:tensorflow.serving.ResourceAllocation.Entry) if (&from == this) return; Clear(); MergeFrom(from); } void 
ResourceAllocation_Entry::CopyFrom(const ResourceAllocation_Entry& from) { // @@protoc_insertion_point(class_specific_copy_from_start:tensorflow.serving.ResourceAllocation.Entry) if (&from == this) return; Clear(); MergeFrom(from); } bool ResourceAllocation_Entry::IsInitialized() const { return true; } void ResourceAllocation_Entry::Swap(ResourceAllocation_Entry* other) { if (other == this) return; InternalSwap(other); } void ResourceAllocation_Entry::InternalSwap(ResourceAllocation_Entry* other) { using std::swap; swap(resource_, other->resource_); swap(quantity_, other->quantity_); _internal_metadata_.Swap(&other->_internal_metadata_); } ::google::protobuf::Metadata ResourceAllocation_Entry::GetMetadata() const { protobuf_tensorflow_5fserving_2fresources_2fresources_2eproto::protobuf_AssignDescriptorsOnce(); return ::protobuf_tensorflow_5fserving_2fresources_2fresources_2eproto::file_level_metadata[kIndexInFileMessages]; } // =================================================================== void ResourceAllocation::InitAsDefaultInstance() { } #if !defined(_MSC_VER) || _MSC_VER >= 1900 const int ResourceAllocation::kResourceQuantitiesFieldNumber; #endif // !defined(_MSC_VER) || _MSC_VER >= 1900 ResourceAllocation::ResourceAllocation() : ::google::protobuf::Message(), _internal_metadata_(NULL) { ::google::protobuf::internal::InitSCC( &protobuf_tensorflow_5fserving_2fresources_2fresources_2eproto::scc_info_ResourceAllocation.base); SharedCtor(); // @@protoc_insertion_point(constructor:tensorflow.serving.ResourceAllocation) } ResourceAllocation::ResourceAllocation(const ResourceAllocation& from) : ::google::protobuf::Message(), _internal_metadata_(NULL), resource_quantities_(from.resource_quantities_) { _internal_metadata_.MergeFrom(from._internal_metadata_); // @@protoc_insertion_point(copy_constructor:tensorflow.serving.ResourceAllocation) } void ResourceAllocation::SharedCtor() { } ResourceAllocation::~ResourceAllocation() { // 
@@protoc_insertion_point(destructor:tensorflow.serving.ResourceAllocation) SharedDtor(); } void ResourceAllocation::SharedDtor() { } void ResourceAllocation::SetCachedSize(int size) const { _cached_size_.Set(size); } const ::google::protobuf::Descriptor* ResourceAllocation::descriptor() { ::protobuf_tensorflow_5fserving_2fresources_2fresources_2eproto::protobuf_AssignDescriptorsOnce(); return ::protobuf_tensorflow_5fserving_2fresources_2fresources_2eproto::file_level_metadata[kIndexInFileMessages].descriptor; } const ResourceAllocation& ResourceAllocation::default_instance() { ::google::protobuf::internal::InitSCC(&protobuf_tensorflow_5fserving_2fresources_2fresources_2eproto::scc_info_ResourceAllocation.base); return *internal_default_instance(); } void ResourceAllocation::Clear() { // @@protoc_insertion_point(message_clear_start:tensorflow.serving.ResourceAllocation) ::google::protobuf::uint32 cached_has_bits = 0; // Prevent compiler warnings about cached_has_bits being unused (void) cached_has_bits; resource_quantities_.Clear(); _internal_metadata_.Clear(); } bool ResourceAllocation::MergePartialFromCodedStream( ::google::protobuf::io::CodedInputStream* input) { #define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure ::google::protobuf::uint32 tag; // @@protoc_insertion_point(parse_start:tensorflow.serving.ResourceAllocation) for (;;) { ::std::pair<::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u); tag = p.first; if (!p.second) goto handle_unusual; switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) { // repeated .tensorflow.serving.ResourceAllocation.Entry resource_quantities = 1; case 1: { if (static_cast< ::google::protobuf::uint8>(tag) == static_cast< ::google::protobuf::uint8>(10u /* 10 & 0xFF */)) { DO_(::google::protobuf::internal::WireFormatLite::ReadMessage( input, add_resource_quantities())); } else { goto handle_unusual; } break; } default: { handle_unusual: if (tag == 0) { goto 
success; } DO_(::google::protobuf::internal::WireFormat::SkipField( input, tag, _internal_metadata_.mutable_unknown_fields())); break; } } } success: // @@protoc_insertion_point(parse_success:tensorflow.serving.ResourceAllocation) return true; failure: // @@protoc_insertion_point(parse_failure:tensorflow.serving.ResourceAllocation) return false; #undef DO_ } void ResourceAllocation::SerializeWithCachedSizes( ::google::protobuf::io::CodedOutputStream* output) const { // @@protoc_insertion_point(serialize_start:tensorflow.serving.ResourceAllocation) ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; // repeated .tensorflow.serving.ResourceAllocation.Entry resource_quantities = 1; for (unsigned int i = 0, n = static_cast<unsigned int>(this->resource_quantities_size()); i < n; i++) { ::google::protobuf::internal::WireFormatLite::WriteMessageMaybeToArray( 1, this->resource_quantities(static_cast<int>(i)), output); } if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) { ::google::protobuf::internal::WireFormat::SerializeUnknownFields( (::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? 
_internal_metadata_.unknown_fields() : _internal_metadata_.default_instance()), output); } // @@protoc_insertion_point(serialize_end:tensorflow.serving.ResourceAllocation) } ::google::protobuf::uint8* ResourceAllocation::InternalSerializeWithCachedSizesToArray( bool deterministic, ::google::protobuf::uint8* target) const { (void)deterministic; // Unused // @@protoc_insertion_point(serialize_to_array_start:tensorflow.serving.ResourceAllocation) ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; // repeated .tensorflow.serving.ResourceAllocation.Entry resource_quantities = 1; for (unsigned int i = 0, n = static_cast<unsigned int>(this->resource_quantities_size()); i < n; i++) { target = ::google::protobuf::internal::WireFormatLite:: InternalWriteMessageToArray( 1, this->resource_quantities(static_cast<int>(i)), deterministic, target); } if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) { target = ::google::protobuf::internal::WireFormat::SerializeUnknownFieldsToArray( (::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? _internal_metadata_.unknown_fields() : _internal_metadata_.default_instance()), target); } // @@protoc_insertion_point(serialize_to_array_end:tensorflow.serving.ResourceAllocation) return target; } size_t ResourceAllocation::ByteSizeLong() const { // @@protoc_insertion_point(message_byte_size_start:tensorflow.serving.ResourceAllocation) size_t total_size = 0; if ((_internal_metadata_.have_unknown_fields() && ::google::protobuf::internal::GetProto3PreserveUnknownsDefault())) { total_size += ::google::protobuf::internal::WireFormat::ComputeUnknownFieldsSize( (::google::protobuf::internal::GetProto3PreserveUnknownsDefault() ? 
_internal_metadata_.unknown_fields() : _internal_metadata_.default_instance())); } // repeated .tensorflow.serving.ResourceAllocation.Entry resource_quantities = 1; { unsigned int count = static_cast<unsigned int>(this->resource_quantities_size()); total_size += 1UL * count; for (unsigned int i = 0; i < count; i++) { total_size += ::google::protobuf::internal::WireFormatLite::MessageSize( this->resource_quantities(static_cast<int>(i))); } } int cached_size = ::google::protobuf::internal::ToCachedSize(total_size); SetCachedSize(cached_size); return total_size; } void ResourceAllocation::MergeFrom(const ::google::protobuf::Message& from) { // @@protoc_insertion_point(generalized_merge_from_start:tensorflow.serving.ResourceAllocation) GOOGLE_DCHECK_NE(&from, this); const ResourceAllocation* source = ::google::protobuf::internal::DynamicCastToGenerated<const ResourceAllocation>( &from); if (source == NULL) { // @@protoc_insertion_point(generalized_merge_from_cast_fail:tensorflow.serving.ResourceAllocation) ::google::protobuf::internal::ReflectionOps::Merge(from, this); } else { // @@protoc_insertion_point(generalized_merge_from_cast_success:tensorflow.serving.ResourceAllocation) MergeFrom(*source); } } void ResourceAllocation::MergeFrom(const ResourceAllocation& from) { // @@protoc_insertion_point(class_specific_merge_from_start:tensorflow.serving.ResourceAllocation) GOOGLE_DCHECK_NE(&from, this); _internal_metadata_.MergeFrom(from._internal_metadata_); ::google::protobuf::uint32 cached_has_bits = 0; (void) cached_has_bits; resource_quantities_.MergeFrom(from.resource_quantities_); } void ResourceAllocation::CopyFrom(const ::google::protobuf::Message& from) { // @@protoc_insertion_point(generalized_copy_from_start:tensorflow.serving.ResourceAllocation) if (&from == this) return; Clear(); MergeFrom(from); } void ResourceAllocation::CopyFrom(const ResourceAllocation& from) { // @@protoc_insertion_point(class_specific_copy_from_start:tensorflow.serving.ResourceAllocation) 
if (&from == this) return; Clear(); MergeFrom(from); } bool ResourceAllocation::IsInitialized() const { return true; } void ResourceAllocation::Swap(ResourceAllocation* other) { if (other == this) return; InternalSwap(other); } void ResourceAllocation::InternalSwap(ResourceAllocation* other) { using std::swap; CastToBase(&resource_quantities_)->InternalSwap(CastToBase(&other->resource_quantities_)); _internal_metadata_.Swap(&other->_internal_metadata_); } ::google::protobuf::Metadata ResourceAllocation::GetMetadata() const { protobuf_tensorflow_5fserving_2fresources_2fresources_2eproto::protobuf_AssignDescriptorsOnce(); return ::protobuf_tensorflow_5fserving_2fresources_2fresources_2eproto::file_level_metadata[kIndexInFileMessages]; } // @@protoc_insertion_point(namespace_scope) } // namespace serving } // namespace tensorflow namespace google { namespace protobuf { template<> GOOGLE_PROTOBUF_ATTRIBUTE_NOINLINE ::tensorflow::serving::Resource* Arena::CreateMaybeMessage< ::tensorflow::serving::Resource >(Arena* arena) { return Arena::CreateInternal< ::tensorflow::serving::Resource >(arena); } template<> GOOGLE_PROTOBUF_ATTRIBUTE_NOINLINE ::tensorflow::serving::ResourceAllocation_Entry* Arena::CreateMaybeMessage< ::tensorflow::serving::ResourceAllocation_Entry >(Arena* arena) { return Arena::CreateInternal< ::tensorflow::serving::ResourceAllocation_Entry >(arena); } template<> GOOGLE_PROTOBUF_ATTRIBUTE_NOINLINE ::tensorflow::serving::ResourceAllocation* Arena::CreateMaybeMessage< ::tensorflow::serving::ResourceAllocation >(Arena* arena) { return Arena::CreateInternal< ::tensorflow::serving::ResourceAllocation >(arena); } } // namespace protobuf } // namespace google // @@protoc_insertion_point(global_scope)<|fim▁end|>
if (from.kind().size() > 0) { kind_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.kind_); }
<|file_name|>TypeCastingException.java<|end_file_name|><|fim▁begin|>/*************************************************************************** * Copyright (C) 2011-2015 by Fabrizio Montesi <[email protected]> * * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU Library General Public License as * * published by the Free Software Foundation; either version 2 of the * * License, or (at your option) any later version. * * * * This program is distributed in the hope that it will be useful, *<|fim▁hole|> * GNU General Public License for more details. * * * * You should have received a copy of the GNU Library General Public * * License along with this program; if not, write to the * * Free Software Foundation, Inc., * * 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. * * * * For details about the authors of this software, see the AUTHORS file. * ***************************************************************************/ package jolie.runtime.typing; import jolie.lang.Constants; /** * * @author Fabrizio Montesi */ public class TypeCastingException extends Exception { public final static long serialVersionUID = Constants.serialVersionUID(); public TypeCastingException() { super(); } public TypeCastingException( String message ) { super( message ); } /* * @Override public Throwable fillInStackTrace() { return this; } */ }<|fim▁end|>
* but WITHOUT ANY WARRANTY; without even the implied warranty of * * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
<|file_name|>ar.js<|end_file_name|><|fim▁begin|>/*! * froala_editor v3.1.1 (https://www.froala.com/wysiwyg-editor) * License https://froala.com/wysiwyg-editor/terms/ * Copyright 2014-2020 Froala Labs */ (function (global, factory) { typeof exports === 'object' && typeof module !== 'undefined' ? factory(require('froala-editor')) : typeof define === 'function' && define.amd ? define(['froala-editor'], factory) : (factory(global.FroalaEditor)); }(this, (function (FE) { 'use strict'; FE = FE && FE.hasOwnProperty('default') ? FE['default'] : FE; /** * Arabic */ FE.LANGUAGE['ar'] = { translation: { // Place holder 'Type something': "\u0627\u0643\u062A\u0628 \u0634\u064A\u0626\u0627", // Basic formatting 'Bold': "\u063A\u0627\u0645\u0642", 'Italic': "\u0645\u0627\u0626\u0644", 'Underline': "\u062A\u0633\u0637\u064A\u0631", 'Strikethrough': "\u064A\u062A\u0648\u0633\u0637 \u062E\u0637", // Main buttons 'Insert': "\u0625\u062F\u0631\u0627\u062C", 'Delete': "\u062D\u0630\u0641", 'Cancel': "\u0625\u0644\u063A\u0627\u0621", 'OK': "\u0645\u0648\u0627\u0641\u0642", 'Back': "\u0638\u0647\u0631", 'Remove': "\u0625\u0632\u0627\u0644\u0629", 'More': "\u0623\u0643\u062B\u0631", 'Update': "\u0627\u0644\u062A\u062D\u062F\u064A\u062B", 'Style': "\u0623\u0633\u0644\u0648\u0628", // Font 'Font Family': "\u0639\u0627\u0626\u0644\u0629 \u0627\u0644\u062E\u0637", 'Font Size': "\u062D\u062C\u0645 \u0627\u0644\u062E\u0637", // Colors 'Colors': "\u0627\u0644\u0623\u0644\u0648\u0627\u0646", 'Background': "\u0627\u0644\u062E\u0644\u0641\u064A\u0629", 'Text': "\u0627\u0644\u0646\u0635", 'HEX Color': 'عرافة اللون', // Paragraphs 'Paragraph Format': "\u062A\u0646\u0633\u064A\u0642 \u0627\u0644\u0641\u0642\u0631\u0629", 'Normal': "\u0637\u0628\u064A\u0639\u064A", 'Code': "\u0643\u0648\u062F", 'Heading 1': "\u0627\u0644\u0639\u0646\u0627\u0648\u064A\u0646 1", 'Heading 2': "\u0627\u0644\u0639\u0646\u0627\u0648\u064A\u0646 2", 'Heading 3': "\u0627\u0644\u0639\u0646\u0627\u0648\u064A\u0646 3", 'Heading 
4': "\u0627\u0644\u0639\u0646\u0627\u0648\u064A\u0646 4", // Style 'Paragraph Style': "\u0646\u0645\u0637 \u0627\u0644\u0641\u0642\u0631\u0629", 'Inline Style': "\u0627\u0644\u0646\u0645\u0637 \u0627\u0644\u0645\u0636\u0645\u0646", // Alignment 'Align': "\u0645\u062D\u0627\u0630\u0627\u0629", 'Align Left': "\u0645\u062D\u0627\u0630\u0627\u0629 \u0627\u0644\u0646\u0635 \u0644\u0644\u064A\u0633\u0627\u0631", 'Align Center': "\u062A\u0648\u0633\u064A\u0637", 'Align Right': "\u0645\u062D\u0627\u0630\u0627\u0629 \u0627\u0644\u0646\u0635 \u0644\u0644\u064A\u0645\u064A\u0646", 'Align Justify': "\u0636\u0628\u0637", 'None': "\u0644\u0627 \u0634\u064A\u0621", // Lists 'Ordered List': "\u0642\u0627\u0626\u0645\u0629 \u0645\u0631\u062A\u0628\u0629", 'Default': 'الافتراضي', 'Lower Alpha': 'أقل ألفا', 'Lower Greek': 'أقل اليونانية', 'Lower Roman': 'انخفاض الروماني', 'Upper Alpha': 'العلوي ألفا', 'Upper Roman': 'الروماني العلوي', 'Unordered List': "\u0642\u0627\u0626\u0645\u0629 \u063A\u064A\u0631 \u0645\u0631\u062A\u0628\u0629", 'Circle': 'دائرة', 'Disc': 'القرص', 'Square': 'ميدان', // Line height 'Line Height': 'ارتفاع خط', 'Single': 'غير مرتبطة', 'Double': 'مزدوج', // Indent 'Decrease Indent': "\u0627\u0646\u062E\u0641\u0627\u0636 \u0627\u0644\u0645\u0633\u0627\u0641\u0629 \u0627\u0644\u0628\u0627\u062F\u0626\u0629", 'Increase Indent': "\u0632\u064A\u0627\u062F\u0629 \u0627\u0644\u0645\u0633\u0627\u0641\u0629 \u0627\u0644\u0628\u0627\u062F\u0626\u0629", // Links 'Insert Link': "\u0625\u062F\u0631\u0627\u062C \u0631\u0627\u0628\u0637", 'Open in new tab': "\u0641\u062A\u062D \u0641\u064A \u0639\u0644\u0627\u0645\u0629 \u062A\u0628\u0648\u064A\u0628 \u062C\u062F\u064A\u062F\u0629", 'Open Link': "\u0627\u0641\u062A\u062D \u0627\u0644\u0631\u0627\u0628\u0637", 'Edit Link': "\u0627\u0631\u062A\u0628\u0627\u0637 \u062A\u062D\u0631\u064A\u0631", 'Unlink': "\u062D\u0630\u0641 \u0627\u0644\u0631\u0627\u0628\u0637", 'Choose Link': "\u0627\u062E\u062A\u064A\u0627\u0631 
\u0635\u0644\u0629", // Images 'Insert Image': "\u0625\u062F\u0631\u0627\u062C \u0635\u0648\u0631\u0629", 'Upload Image': "\u062A\u062D\u0645\u064A\u0644 \u0635\u0648\u0631\u0629", 'By URL': "\u0628\u0648\u0627\u0633\u0637\u0629 URL", 'Browse': "\u062A\u0635\u0641\u062D", 'Drop image': "\u0625\u0633\u0642\u0627\u0637 \u0635\u0648\u0631\u0629", 'or click': "\u0623\u0648 \u0627\u0646\u0642\u0631 \u0641\u0648\u0642",<|fim▁hole|> 'Tags': "\u0627\u0644\u0643\u0644\u0645\u0627\u062A", 'Are you sure? Image will be deleted.': "\u0647\u0644 \u0623\u0646\u062A \u0645\u062A\u0623\u0643\u062F\u061F \u0633\u064A\u062A\u0645 \u062D\u0630\u0641 \u0627\u0644\u0635\u0648\u0631\u0629.", 'Replace': "\u0627\u0633\u062A\u0628\u062F\u0627\u0644", 'Uploading': "\u062A\u062D\u0645\u064A\u0644", 'Loading image': "\u0635\u0648\u0631\u0629 \u062A\u062D\u0645\u064A\u0644", 'Display': "\u0639\u0631\u0636", 'Inline': "\u0641\u064A \u062E\u0637", 'Break Text': "\u0646\u0635 \u0627\u0633\u062A\u0631\u0627\u062D\u0629", 'Alternative Text': "\u0646\u0635 \u0628\u062F\u064A\u0644", 'Change Size': "\u062A\u063A\u064A\u064A\u0631 \u062D\u062C\u0645", 'Width': "\u0639\u0631\u0636", 'Height': "\u0627\u0631\u062A\u0641\u0627\u0639", 'Something went wrong. Please try again.': ".\u062D\u062F\u062B \u062E\u0637\u0623 \u0645\u0627. 
\u062D\u0627\u0648\u0644 \u0645\u0631\u0629 \u0627\u062E\u0631\u0649", 'Image Caption': 'تعليق على الصورة', 'Advanced Edit': 'تعديل متقدم', // Video 'Insert Video': "\u0625\u062F\u0631\u0627\u062C \u0641\u064A\u062F\u064A\u0648", 'Embedded Code': "\u0627\u0644\u062A\u0639\u0644\u064A\u0645\u0627\u062A \u0627\u0644\u0628\u0631\u0645\u062C\u064A\u0629 \u0627\u0644\u0645\u0636\u0645\u0646\u0629", 'Paste in a video URL': 'لصق في عنوان ورل للفيديو', 'Drop video': 'انخفاض الفيديو', 'Your browser does not support HTML5 video.': 'متصفحك لا يدعم فيديو HTML5.', 'Upload Video': 'رفع فيديو', // Tables 'Insert Table': "\u0625\u062F\u0631\u0627\u062C \u062C\u062F\u0648\u0644", 'Table Header': "\u0631\u0623\u0633 \u0627\u0644\u062C\u062F\u0648\u0644", 'Remove Table': "\u0625\u0632\u0627\u0644\u0629 \u0627\u0644\u062C\u062F\u0648\u0644", 'Table Style': "\u0646\u0645\u0637 \u0627\u0644\u062C\u062F\u0648\u0644", 'Horizontal Align': "\u0645\u062D\u0627\u0630\u0627\u0629 \u0623\u0641\u0642\u064A\u0629", 'Row': "\u0635\u0641", 'Insert row above': "\u0625\u062F\u0631\u0627\u062C \u0635\u0641 \u0644\u0644\u0623\u0639\u0644\u0649", 'Insert row below': "\u0625\u062F\u0631\u0627\u062C \u0635\u0641 \u0644\u0644\u0623\u0633\u0641\u0644", 'Delete row': "\u062D\u0630\u0641 \u0635\u0641", 'Column': "\u0639\u0645\u0648\u062F", 'Insert column before': "\u0625\u062F\u0631\u0627\u062C \u0639\u0645\u0648\u062F \u0644\u0644\u064A\u0633\u0627\u0631", 'Insert column after': "\u0625\u062F\u0631\u0627\u062C \u0639\u0645\u0648\u062F \u0644\u0644\u064A\u0645\u064A\u0646", 'Delete column': "\u062D\u0630\u0641 \u0639\u0645\u0648\u062F", 'Cell': "\u062E\u0644\u064A\u0629", 'Merge cells': "\u062F\u0645\u062C \u062E\u0644\u0627\u064A\u0627", 'Horizontal split': "\u0627\u0646\u0642\u0633\u0627\u0645 \u0623\u0641\u0642\u064A", 'Vertical split': "\u0627\u0644\u0627\u0646\u0642\u0633\u0627\u0645 \u0627\u0644\u0639\u0645\u0648\u062F\u064A", 'Cell Background': "\u062E\u0644\u0641\u064A\u0629 
\u0627\u0644\u062E\u0644\u064A\u0629", 'Vertical Align': "\u0645\u062D\u0627\u0630\u0627\u0629 \u0639\u0645\u0648\u062F\u064A\u0629", 'Top': "\u0623\u0639\u0644\u0649", 'Middle': "\u0648\u0633\u0637", 'Bottom': "\u0623\u0633\u0641\u0644", 'Align Top': "\u0645\u062D\u0627\u0630\u0627\u0629 \u0623\u0639\u0644\u0649", 'Align Middle': "\u0645\u062D\u0627\u0630\u0627\u0629 \u0648\u0633\u0637", 'Align Bottom': "\u0645\u062D\u0627\u0630\u0627\u0629 \u0627\u0644\u0623\u0633\u0641\u0644", 'Cell Style': "\u0646\u0645\u0637 \u0627\u0644\u062E\u0644\u064A\u0629", // Files 'Upload File': "\u062A\u062D\u0645\u064A\u0644 \u0627\u0644\u0645\u0644\u0641", 'Drop file': "\u0627\u0646\u062E\u0641\u0627\u0636 \u0627\u0644\u0645\u0644\u0641", // Emoticons 'Emoticons': "\u0627\u0644\u0645\u0634\u0627\u0639\u0631", 'Grinning face': "\u064A\u0643\u0634\u0631 \u0648\u062C\u0647\u0647", 'Grinning face with smiling eyes': "\u0645\u0628\u062A\u0633\u0645\u0627 \u0648\u062C\u0647 \u0645\u0639 \u064A\u0628\u062A\u0633\u0645 \u0627\u0644\u0639\u064A\u0646", 'Face with tears of joy': "\u0648\u062C\u0647 \u0645\u0639 \u062F\u0645\u0648\u0639 \u0627\u0644\u0641\u0631\u062D", 'Smiling face with open mouth': "\u0627\u0644\u0648\u062C\u0647 \u0627\u0644\u0645\u0628\u062A\u0633\u0645 \u0645\u0639 \u0641\u062A\u062D \u0627\u0644\u0641\u0645", 'Smiling face with open mouth and smiling eyes': "\u0627\u0644\u0648\u062C\u0647 \u0627\u0644\u0645\u0628\u062A\u0633\u0645 \u0645\u0639 \u0641\u062A\u062D \u0627\u0644\u0641\u0645 \u0648\u0627\u0644\u0639\u064A\u0646\u064A\u0646 \u064A\u0628\u062A\u0633\u0645", 'Smiling face with open mouth and cold sweat': "\u0627\u0644\u0648\u062C\u0647 \u0627\u0644\u0645\u0628\u062A\u0633\u0645 \u0645\u0639 \u0641\u062A\u062D \u0627\u0644\u0641\u0645 \u0648\u0627\u0644\u0639\u0631\u0642 \u0627\u0644\u0628\u0627\u0631\u062F", 'Smiling face with open mouth and tightly-closed eyes': "\u0627\u0644\u0648\u062C\u0647 \u0627\u0644\u0645\u0628\u062A\u0633\u0645 \u0645\u0639 
\u0641\u062A\u062D \u0627\u0644\u0641\u0645 \u0648\u0627\u0644\u0639\u064A\u0646\u064A\u0646 \u0645\u063A\u0644\u0642\u0629 \u0628\u0625\u062D\u0643\u0627\u0645", 'Smiling face with halo': "\u0627\u0644\u0648\u062C\u0647 \u0627\u0644\u0645\u0628\u062A\u0633\u0645 \u0645\u0639 \u0647\u0627\u0644\u0629", 'Smiling face with horns': "\u0627\u0644\u0648\u062C\u0647 \u0627\u0644\u0645\u0628\u062A\u0633\u0645 \u0628\u0642\u0631\u0648\u0646", 'Winking face': "\u0627\u0644\u063A\u0645\u0632 \u0648\u062C\u0647", 'Smiling face with smiling eyes': "\u064A\u0628\u062A\u0633\u0645 \u0648\u062C\u0647 \u0645\u0639 \u0639\u064A\u0648\u0646 \u062A\u0628\u062A\u0633\u0645", 'Face savoring delicious food': "\u064A\u0648\u0627\u062C\u0647 \u0644\u0630\u064A\u0630 \u0627\u0644\u0645\u0630\u0627\u0642 \u0644\u0630\u064A\u0630 \u0627\u0644\u0637\u0639\u0627\u0645", 'Relieved face': "\u0648\u062C\u0647 \u0628\u0627\u0644\u0627\u0631\u062A\u064A\u0627\u062D", 'Smiling face with heart-shaped eyes': "\u0627\u0644\u0648\u062C\u0647 \u0627\u0644\u0645\u0628\u062A\u0633\u0645 \u0628\u0639\u064A\u0646\u064A\u0646 \u0639\u0644\u0649 \u0634\u0643\u0644 \u0642\u0644\u0628", 'Smiling face with sunglasses': "\u0627\u0644\u0648\u062C\u0647 \u0627\u0644\u0645\u0628\u062A\u0633\u0645 \u0645\u0639 \u0627\u0644\u0646\u0638\u0627\u0631\u0627\u062A \u0627\u0644\u0634\u0645\u0633\u064A\u0629", 'Smirking face': "\u0633\u0645\u064A\u0631\u0643\u064A\u0646\u062C \u0627\u0644\u0648\u062C\u0647", 'Neutral face': "\u0645\u062D\u0627\u064A\u062F \u0627\u0644\u0648\u062C\u0647", 'Expressionless face': "\u0648\u062C\u0647 \u0627\u0644\u062A\u0639\u0627\u0628\u064A\u0631", 'Unamused face': "\u0644\u0627 \u0645\u0633\u0644\u064A\u0627 \u0627\u0644\u0648\u062C\u0647", 'Face with cold sweat': "\u0648\u062C\u0647 \u0645\u0639 \u0639\u0631\u0642 \u0628\u0627\u0631\u062F", 'Pensive face': "\u0648\u062C\u0647 \u0645\u062A\u0623\u0645\u0644", 'Confused face': "\u0648\u062C\u0647 \u0627\u0644\u062E\u0644\u0637", 'Confounded 
face': "\u0648\u062C\u0647 \u0645\u0631\u062A\u0628\u0643", 'Kissing face': "\u062A\u0642\u0628\u064A\u0644 \u0627\u0644\u0648\u062C\u0647", 'Face throwing a kiss': "\u0645\u0648\u0627\u062C\u0647\u0629 \u0631\u0645\u064A \u0642\u0628\u0644\u0629", 'Kissing face with smiling eyes': "\u062A\u0642\u0628\u064A\u0644 \u0648\u062C\u0647 \u0645\u0639 \u0639\u064A\u0648\u0646 \u062A\u0628\u062A\u0633\u0645", 'Kissing face with closed eyes': "\u062A\u0642\u0628\u064A\u0644 \u0648\u062C\u0647 \u0645\u0639 \u0639\u064A\u0648\u0646 \u0645\u063A\u0644\u0642\u0629", 'Face with stuck out tongue': "\u0627\u0644\u0648\u062C\u0647 \u0645\u0639 \u062A\u0645\u0633\u0643 \u0628\u0647\u0627 \u0627\u0644\u0644\u0633\u0627\u0646", 'Face with stuck out tongue and winking eye': "\u0627\u0644\u0648\u062C\u0647 \u0645\u0639 \u062A\u0645\u0633\u0643 \u0628\u0647\u0627 \u0627\u0644\u0644\u0633\u0627\u0646 \u0648\u0627\u0644\u0639\u064A\u0646 \u0627\u0644\u062A\u063A\u0627\u0636\u064A", 'Face with stuck out tongue and tightly-closed eyes': "\u0627\u0644\u0648\u062C\u0647 \u0645\u0639 \u062A\u0645\u0633\u0643 \u0628\u0647\u0627 \u0627\u0644\u0644\u0633\u0627\u0646 \u0648\u0627\u0644\u0639\u064A\u0648\u0646 \u0645\u063A\u0644\u0642\u0629 \u0628\u0623\u062D\u0643\u0627\u0645-", 'Disappointed face': "\u0648\u062C\u0647\u0627 \u062E\u064A\u0628\u0629 \u0623\u0645\u0644", 'Worried face': "\u0648\u062C\u0647\u0627 \u0627\u0644\u0642\u0644\u0642\u0648\u0646", 'Angry face': "\u0648\u062C\u0647 \u063A\u0627\u0636\u0628", 'Pouting face': "\u0627\u0644\u0639\u0628\u0648\u0633 \u0648\u062C\u0647", 'Crying face': "\u0627\u0644\u0628\u0643\u0627\u0621 \u0627\u0644\u0648\u062C\u0647", 'Persevering face': "\u0627\u0644\u0645\u062B\u0627\u0628\u0631\u0629 \u0648\u062C\u0647\u0647", 'Face with look of triumph': "\u0648\u0627\u062C\u0647 \u0645\u0639 \u0646\u0638\u0631\u0629 \u0627\u0646\u062A\u0635\u0627\u0631", 'Disappointed but relieved face': "\u0628\u062E\u064A\u0628\u0629 \u0623\u0645\u0644 
\u0648\u0644\u0643\u0646 \u064A\u0639\u0641\u0649 \u0648\u062C\u0647", 'Frowning face with open mouth': "\u0645\u0642\u0637\u0628 \u0627\u0644\u0648\u062C\u0647 \u0645\u0639 \u0641\u062A\u062D \u0627\u0644\u0641\u0645", 'Anguished face': "\u0627\u0644\u0648\u062C\u0647 \u0627\u0644\u0645\u0624\u0644\u0645", 'Fearful face': "\u0627\u0644\u0648\u062C\u0647 \u0627\u0644\u0645\u062E\u064A\u0641", 'Weary face': "\u0648\u062C\u0647\u0627 \u0628\u0627\u0644\u0636\u062C\u0631", 'Sleepy face': "\u0648\u062C\u0647 \u0646\u0639\u0633\u0627\u0646", 'Tired face': "\u0648\u062C\u0647 \u0645\u062A\u0639\u0628", 'Grimacing face': "\u0648\u062E\u0631\u062C \u0633\u064A\u0633 \u0627\u0644\u0648\u062C\u0647", 'Loudly crying face': "\u0627\u0644\u0628\u0643\u0627\u0621 \u0628\u0635\u0648\u062A \u0639\u0627\u0644 \u0648\u062C\u0647\u0647", 'Face with open mouth': "\u0648\u0627\u062C\u0647 \u0645\u0639 \u0641\u062A\u062D \u0627\u0644\u0641\u0645", 'Hushed face': "\u0648\u062C\u0647\u0627 \u0627\u0644\u062A\u0643\u062A\u0645", 'Face with open mouth and cold sweat': "\u0648\u0627\u062C\u0647 \u0645\u0639 \u0641\u062A\u062D \u0627\u0644\u0641\u0645 \u0648\u0627\u0644\u0639\u0631\u0642 \u0627\u0644\u0628\u0627\u0631\u062F", 'Face screaming in fear': "\u0648\u0627\u062C\u0647 \u064A\u0635\u0631\u062E \u0641\u064A \u062E\u0648\u0641", 'Astonished face': "\u0648\u062C\u0647\u0627 \u062F\u0647\u0634", 'Flushed face': "\u0627\u062D\u0645\u0631\u0627\u0631 \u0627\u0644\u0648\u062C\u0647", 'Sleeping face': "\u0627\u0644\u0646\u0648\u0645 \u0627\u0644\u0648\u062C\u0647", 'Dizzy face': "\u0648\u062C\u0647\u0627 \u0628\u0627\u0644\u062F\u0648\u0627\u0631", 'Face without mouth': "\u0648\u0627\u062C\u0647 \u062F\u0648\u0646 \u0627\u0644\u0641\u0645", 'Face with medical mask': "\u0648\u0627\u062C\u0647 \u0645\u0639 \u0642\u0646\u0627\u0639 \u0627\u0644\u0637\u0628\u064A\u0629", // Line breaker 'Break': "\u0627\u0644\u0627\u0646\u0642\u0633\u0627\u0645", // Math 'Subscript': 
"\u0645\u0646\u062E\u0641\u0636", 'Superscript': "\u062D\u0631\u0641 \u0641\u0648\u0642\u064A", // Full screen 'Fullscreen': "\u0643\u0627\u0645\u0644 \u0627\u0644\u0634\u0627\u0634\u0629", // Horizontal line 'Insert Horizontal Line': "\u0625\u062F\u0631\u0627\u062C \u062E\u0637 \u0623\u0641\u0642\u064A", // Clear formatting 'Clear Formatting': "\u0625\u0632\u0627\u0644\u0629 \u0627\u0644\u062A\u0646\u0633\u064A\u0642", // Save 'Save': "\u062D\u0641\u0638", // Undo, redo 'Undo': "\u062A\u0631\u0627\u062C\u0639", 'Redo': "\u0625\u0639\u0627\u062F\u0629", // Select all 'Select All': "\u062A\u062D\u062F\u064A\u062F \u0627\u0644\u0643\u0644", // Code view 'Code View': "\u0639\u0631\u0636 \u0627\u0644\u062A\u0639\u0644\u064A\u0645\u0627\u062A \u0627\u0644\u0628\u0631\u0645\u062C\u064A\u0629", // Quote 'Quote': "\u0627\u0642\u062A\u0628\u0633", 'Increase': "\u0632\u064A\u0627\u062F\u0629", 'Decrease': "\u0627\u0646\u062E\u0641\u0627\u0636", // Quick Insert 'Quick Insert': "\u0625\u062F\u0631\u0627\u062C \u0633\u0631\u064A\u0639", // Spcial Characters 'Special Characters': 'أحرف خاصة', 'Latin': 'لاتينية', 'Greek': 'الإغريقي', 'Cyrillic': 'السيريلية', 'Punctuation': 'علامات ترقيم', 'Currency': 'دقة', 'Arrows': 'السهام', 'Math': 'الرياضيات', 'Misc': 'متفرقات', // Print. 'Print': 'طباعة', // Spell Checker. 
'Spell Checker': 'مدقق املائي', // Help 'Help': 'مساعدة', 'Shortcuts': 'اختصارات', 'Inline Editor': 'محرر مضمنة', 'Show the editor': 'عرض المحرر', 'Common actions': 'الإجراءات المشتركة', 'Copy': 'نسخ', 'Cut': 'يقطع', 'Paste': 'معجون', 'Basic Formatting': 'التنسيق الأساسي', 'Increase quote level': 'زيادة مستوى الاقتباس', 'Decrease quote level': 'انخفاض مستوى الاقتباس', 'Image / Video': 'صورة / فيديو', 'Resize larger': 'تغيير حجم أكبر', 'Resize smaller': 'تغيير حجم أصغر', 'Table': 'الطاولة', 'Select table cell': 'حدد خلية الجدول', 'Extend selection one cell': 'توسيع اختيار خلية واحدة', 'Extend selection one row': 'تمديد اختيار صف واحد', 'Navigation': 'التنقل', 'Focus popup / toolbar': 'التركيز المنبثقة / شريط الأدوات', 'Return focus to previous position': 'عودة التركيز إلى الموقف السابق', // Embed.ly 'Embed URL': 'تضمين عنوان ورل', 'Paste in a URL to embed': 'الصق في عنوان ورل لتضمينه', // Word Paste. 'The pasted content is coming from a Microsoft Word document. Do you want to keep the format or clean it up?': 'المحتوى الذي تم لصقه قادم من وثيقة كلمة ميكروسوفت. هل تريد الاحتفاظ بالتنسيق أو تنظيفه؟', 'Keep': 'احتفظ', 'Clean': 'نظيف', 'Word Paste Detected': 'تم اكتشاف معجون الكلمات', // Character Counter 'Characters': 'الشخصيات', // More Buttons 'More Text': 'المزيد من النص', 'More Paragraph': ' المزيد من الفقرة', 'More Rich': ' أكثر ثراء', 'More Misc': ' أكثر متفرقات' }, direction: 'rtl' }; }))); //# sourceMappingURL=ar.js.map<|fim▁end|>
'Manage Images': "\u0625\u062F\u0627\u0631\u0629 \u0627\u0644\u0635\u0648\u0631", 'Loading': "\u062A\u062D\u0645\u064A\u0644", 'Deleting': "\u062D\u0630\u0641",
<|file_name|>plain.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from behave.formatter.base import Formatter class PlainFormatter(Formatter): """ Provides a simple plain formatter without coloring/formatting. In addition, multi-line text and tables are not shown in output (SAD). """ name = 'plain' description = 'Very basic formatter with maximum compatibility' def __init__(self, stream, config): super(PlainFormatter, self).__init__(stream, config) self.steps = [] self.show_timings = config.show_timings def reset_steps(self): self.steps = [] def feature(self, feature): self.reset_steps() self.stream.write(u'%s: %s\n' % (feature.keyword, feature.name)) def background(self, background): self.stream.write(u'%s: %s\n' % (background.keyword, background.name)) def scenario(self, scenario): self.reset_steps() self.stream.write(u'%11s: %s\n' % (scenario.keyword, scenario.name)) def scenario_outline(self, outline): self.reset_steps() self.stream.write(u' %s: %s\n' % (outline.keyword, outline.name)) def step(self, step): self.steps.append(step) def result(self, result): step = self.steps.pop(0) # TODO right-align the keyword to maximum keyword width? self.stream.write(u'%12s %s ... ' % (step.keyword, step.name)) status = result.status if self.show_timings: status += " in %0.2fs" % step.duration <|fim▁hole|> if result.error_message: self.stream.write(u'%s\n%s\n' % (status, result.error_message)) else: self.stream.write(u'%s\n' % status)<|fim▁end|>
<|file_name|>scheduler.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- encoding: utf-8 -*- # vim: set et sw=4 ts=4 sts=4 ff=unix fenc=utf8: # Author: Binux<[email protected]> # http://binux.me # Created on 2014-02-07 17:05:11 import itertools import json import logging import os import time from collections import deque from six import iteritems, itervalues from six.moves import queue as Queue from pyspider.libs import counter, utils from pyspider.libs.base_handler import BaseHandler from .task_queue import TaskQueue logger = logging.getLogger('scheduler') class Project(object): ''' project for scheduler ''' def __init__(self, scheduler, project_info): ''' ''' self.scheduler = scheduler self.active_tasks = deque(maxlen=scheduler.ACTIVE_TASKS) self.task_queue = TaskQueue() self.task_loaded = False self._selected_tasks = False # selected tasks after recent pause self._send_finished_event_wait = 0 # wait for scheduler.FAIL_PAUSE_NUM loop steps before sending the event self.md5sum = None self._send_on_get_info = False self.waiting_get_info = True self._paused = False self._paused_time = 0 self._unpause_last_seen = None self.update(project_info) @property def paused(self): # unpaused --(last FAIL_PAUSE_NUM task failed)--> paused --(PAUSE_TIME)--> unpause_checking # unpaused <--(last UNPAUSE_CHECK_NUM task have success)--| # paused <--(last UNPAUSE_CHECK_NUM task no success)--| if not self._paused: fail_cnt = 0 for _, task in self.active_tasks: # ignore select task if task.get('type') == self.scheduler.TASK_PACK: continue if 'process' not in task['track']: logger.error('process not in task, %r', task) if task['track']['process']['ok']: break else: fail_cnt += 1 if fail_cnt >= self.scheduler.FAIL_PAUSE_NUM: break if fail_cnt >= self.scheduler.FAIL_PAUSE_NUM: self._paused = True self._paused_time = time.time() elif self._paused is True and (self._paused_time + self.scheduler.PAUSE_TIME < time.time()): self._paused = 'checking' self._unpause_last_seen = 
self.active_tasks[0][1] if len(self.active_tasks) else None elif self._paused == 'checking': cnt = 0 fail_cnt = 0 for _, task in self.active_tasks: if task is self._unpause_last_seen: break # ignore select task if task.get('type') == self.scheduler.TASK_PACK: continue cnt += 1 if task['track']['process']['ok']: # break with enough check cnt cnt = max(cnt, self.scheduler.UNPAUSE_CHECK_NUM) break else: fail_cnt += 1 if cnt >= self.scheduler.UNPAUSE_CHECK_NUM: if fail_cnt == cnt: self._paused = True self._paused_time = time.time() else: self._paused = False return self._paused is True def update(self, project_info): self.project_info = project_info self.name = project_info['name'] self.group = project_info['group'] self.db_status = project_info['status'] self.updatetime = project_info['updatetime'] md5sum = utils.md5string(project_info['script']) if (self.md5sum != md5sum or self.waiting_get_info) and self.active: self._send_on_get_info = True self.waiting_get_info = True self.md5sum = md5sum if self.active: self.task_queue.rate = project_info['rate'] self.task_queue.burst = project_info['burst'] else: self.task_queue.rate = 0 self.task_queue.burst = 0 logger.info('project %s updated, status:%s, paused:%s, %d tasks', self.name, self.db_status, self.paused, len(self.task_queue)) def on_get_info(self, info): self.waiting_get_info = False self.min_tick = info.get('min_tick', 0) self.retry_delay = info.get('retry_delay', {}) self.crawl_config = info.get('crawl_config', {}) @property def active(self): return self.db_status in ('RUNNING', 'DEBUG') class Scheduler(object): UPDATE_PROJECT_INTERVAL = 5 * 60 default_schedule = { 'priority': 0, 'retries': 3, 'exetime': 0, 'age': -1, 'itag': None, } LOOP_LIMIT = 1000 LOOP_INTERVAL = 0.1 ACTIVE_TASKS = 100 INQUEUE_LIMIT = 0 EXCEPTION_LIMIT = 3 DELETE_TIME = 24 * 60 * 60 DEFAULT_RETRY_DELAY = { 0: 30, 1: 1*60*60, 2: 6*60*60, 3: 12*60*60, '': 24*60*60 } FAIL_PAUSE_NUM = 10 PAUSE_TIME = 5*60 UNPAUSE_CHECK_NUM = 3 TASK_PACK = 1 
STATUS_PACK = 2 # current not used REQUEST_PACK = 3 # current not used def __init__(self, taskdb, projectdb, newtask_queue, status_queue, out_queue, data_path='./data', resultdb=None): self.taskdb = taskdb self.projectdb = projectdb self.resultdb = resultdb self.newtask_queue = newtask_queue self.status_queue = status_queue self.out_queue = out_queue self.data_path = data_path self._send_buffer = deque() self._quit = False self._exceptions = 0 self.projects = dict() self._force_update_project = False self._last_update_project = 0 self._last_tick = int(time.time()) self._postpone_request = [] self._cnt = { "5m_time": counter.CounterManager( lambda: counter.TimebaseAverageEventCounter(30, 10)), "5m": counter.CounterManager( lambda: counter.TimebaseAverageWindowCounter(30, 10)), "1h": counter.CounterManager( lambda: counter.TimebaseAverageWindowCounter(60, 60)), "1d": counter.CounterManager( lambda: counter.TimebaseAverageWindowCounter(10 * 60, 24 * 6)), "all": counter.CounterManager( lambda: counter.TotalCounter()), } self._cnt['1h'].load(os.path.join(self.data_path, 'scheduler.1h')) self._cnt['1d'].load(os.path.join(self.data_path, 'scheduler.1d')) self._cnt['all'].load(os.path.join(self.data_path, 'scheduler.all')) self._last_dump_cnt = 0 def _update_projects(self): '''Check project update''' now = time.time() if ( not self._force_update_project and self._last_update_project + self.UPDATE_PROJECT_INTERVAL > now ): return for project in self.projectdb.check_update(self._last_update_project): self._update_project(project) logger.debug("project: %s updated.", project['name']) self._force_update_project = False self._last_update_project = now get_info_attributes = ['min_tick', 'retry_delay', 'crawl_config'] def _update_project(self, project): '''update one project''' if project['name'] not in self.projects: self.projects[project['name']] = Project(self, project) else: self.projects[project['name']].update(project) project = self.projects[project['name']] if 
project._send_on_get_info: # update project runtime info from processor by sending a _on_get_info # request, result is in status_page.track.save project._send_on_get_info = False self.on_select_task({ 'taskid': '_on_get_info', 'project': project.name, 'url': 'data:,_on_get_info', 'status': self.taskdb.SUCCESS, 'fetch': { 'save': self.get_info_attributes, }, 'process': { 'callback': '_on_get_info', }, }) # load task queue when project is running and delete task_queue when project is stoped if project.active: if not project.task_loaded: self._load_tasks(project) project.task_loaded = True else: if project.task_loaded: project.task_queue = TaskQueue() project.task_loaded = False if project not in self._cnt['all']: self._update_project_cnt(project.name) scheduler_task_fields = ['taskid', 'project', 'schedule', ] def _load_tasks(self, project): '''load tasks from database''' task_queue = project.task_queue for task in self.taskdb.load_tasks( self.taskdb.ACTIVE, project.name, self.scheduler_task_fields ): taskid = task['taskid'] _schedule = task.get('schedule', self.default_schedule) priority = _schedule.get('priority', self.default_schedule['priority']) exetime = _schedule.get('exetime', self.default_schedule['exetime']) task_queue.put(taskid, priority, exetime) project.task_loaded = True logger.debug('project: %s loaded %d tasks.', project.name, len(task_queue)) if project not in self._cnt['all']: self._update_project_cnt(project) self._cnt['all'].value((project.name, 'pending'), len(project.task_queue)) def _update_project_cnt(self, project_name): status_count = self.taskdb.status_count(project_name) self._cnt['all'].value( (project_name, 'success'), status_count.get(self.taskdb.SUCCESS, 0) ) self._cnt['all'].value( (project_name, 'failed'), status_count.get(self.taskdb.FAILED, 0) + status_count.get(self.taskdb.BAD, 0) ) self._cnt['all'].value( (project_name, 'pending'), status_count.get(self.taskdb.ACTIVE, 0) ) def task_verify(self, task): ''' return False if any of 
'taskid', 'project', 'url' is not in task dict or project in not in task_queue ''' for each in ('taskid', 'project', 'url', ): if each not in task or not task[each]: logger.error('%s not in task: %.200r', each, task) return False if task['project'] not in self.projects: logger.error('unknown project: %s', task['project']) return False project = self.projects[task['project']] if not project.active: logger.error('project %s not started, please set status to RUNNING or DEBUG', task['project']) return False return True def insert_task(self, task): '''insert task into database''' return self.taskdb.insert(task['project'], task['taskid'], task) def update_task(self, task): '''update task in database''' return self.taskdb.update(task['project'], task['taskid'], task) def put_task(self, task): '''put task to task queue''' _schedule = task.get('schedule', self.default_schedule) self.projects[task['project']].task_queue.put( task['taskid'], priority=_schedule.get('priority', self.default_schedule['priority']), exetime=_schedule.get('exetime', self.default_schedule['exetime']) ) def send_task(self, task, force=True): ''' dispatch task to fetcher out queue may have size limit to prevent block, a send_buffer is used ''' try: self.out_queue.put_nowait(task) except Queue.Full: if force: self._send_buffer.appendleft(task) else: raise def _check_task_done(self): '''Check status queue''' cnt = 0 try: while True: task = self.status_queue.get_nowait() # check _on_get_info result here if task.get('taskid') == '_on_get_info' and 'project' in task and 'track' in task: if task['project'] not in self.projects: continue project = self.projects[task['project']] project.on_get_info(task['track'].get('save') or {}) logger.info( '%s on_get_info %r', task['project'], task['track'].get('save', {}) ) continue elif not self.task_verify(task): continue self.on_task_status(task) cnt += 1 except Queue.Empty: pass return cnt merge_task_fields = ['taskid', 'project', 'url', 'status', 'schedule', 
'lastcrawltime'] def _check_request(self): '''Check new task queue''' # check _postpone_request first todo = [] for task in self._postpone_request: if task['project'] not in self.projects: continue if self.projects[task['project']].task_queue.is_processing(task['taskid']): todo.append(task) else: self.on_request(task) self._postpone_request = todo tasks = {} while len(tasks) < self.LOOP_LIMIT: try: task = self.newtask_queue.get_nowait() except Queue.Empty: break if isinstance(task, list): _tasks = task else: _tasks = (task, ) for task in _tasks: if not self.task_verify(task): continue if task['taskid'] in self.projects[task['project']].task_queue: if not task.get('schedule', {}).get('force_update', False): logger.debug('ignore newtask %(project)s:%(taskid)s %(url)s', task) continue if task['taskid'] in tasks: if not task.get('schedule', {}).get('force_update', False): continue tasks[task['taskid']] = task for task in itervalues(tasks): self.on_request(task) return len(tasks) def _check_cronjob(self): """Check projects cronjob tick, return True when a new tick is sended""" now = time.time() self._last_tick = int(self._last_tick) if now - self._last_tick < 1: return False self._last_tick += 1 for project in itervalues(self.projects): if not project.active: continue if project.waiting_get_info: continue if project.min_tick == 0: continue if self._last_tick % int(project.min_tick) != 0: continue self.on_select_task({ 'taskid': '_on_cronjob', 'project': project.name, 'url': 'data:,_on_cronjob', 'status': self.taskdb.SUCCESS, 'fetch': { 'save': { 'tick': self._last_tick, }, }, 'process': { 'callback': '_on_cronjob', }, }) return True request_task_fields = [ 'taskid', 'project', 'url', 'status', 'schedule', 'fetch', 'process', 'track', 'lastcrawltime' ] def _check_select(self): '''Select task to fetch & process''' while self._send_buffer: _task = self._send_buffer.pop() try: # use force=False here to prevent automatic send_buffer append and get exception 
self.send_task(_task, False) except Queue.Full: self._send_buffer.append(_task) break if self.out_queue.full(): return {} taskids = [] cnt = 0 cnt_dict = dict() limit = self.LOOP_LIMIT for project in itervalues(self.projects): if not project.active: continue # only check project pause when select new tasks, cronjob and new request still working if project.paused: continue if project.waiting_get_info: continue if cnt >= limit: break # task queue task_queue = project.task_queue task_queue.check_update() project_cnt = 0 # check send_buffer here. when not empty, out_queue may blocked. Not sending tasks while cnt < limit and project_cnt < limit / 10: taskid = task_queue.get() if not taskid: break taskids.append((project.name, taskid)) if taskid != 'on_finished': project_cnt += 1 cnt += 1 cnt_dict[project.name] = project_cnt if project_cnt: project._selected_tasks = True project._send_finished_event_wait = 0 # check and send finished event to project if not project_cnt and len(task_queue) == 0 and project._selected_tasks: # wait for self.FAIL_PAUSE_NUM steps to make sure all tasks in queue have been processed if project._send_finished_event_wait < self.FAIL_PAUSE_NUM: project._send_finished_event_wait += 1 else: project._selected_tasks = False project._send_finished_event_wait = 0 self.newtask_queue.put({ 'project': project.name, 'taskid': 'on_finished', 'url': 'data:,on_finished', 'process': { 'callback': 'on_finished', }, "schedule": { "age": 0, "priority": 9, "force_update": True, }, }) for project, taskid in taskids: self._load_put_task(project, taskid) return cnt_dict def _load_put_task(self, project, taskid): try: task = self.taskdb.get_task(project, taskid, fields=self.request_task_fields) except ValueError: logger.error('bad task pack %s:%s', project, taskid) return if not task: return task = self.on_select_task(task) def _print_counter_log(self): # print top 5 active counters keywords = ('pending', 'success', 'retry', 'failed') total_cnt = {} project_actives = 
[] project_fails = [] for key in keywords: total_cnt[key] = 0 for project, subcounter in iteritems(self._cnt['5m']): actives = 0 for key in keywords: cnt = subcounter.get(key, None) if cnt: cnt = cnt.sum total_cnt[key] += cnt actives += cnt project_actives.append((actives, project)) fails = subcounter.get('failed', None) if fails: project_fails.append((fails.sum, project)) top_2_fails = sorted(project_fails, reverse=True)[:2] top_3_actives = sorted([x for x in project_actives if x[1] not in top_2_fails], reverse=True)[:5 - len(top_2_fails)] log_str = ("in 5m: new:%(pending)d,success:%(success)d," "retry:%(retry)d,failed:%(failed)d" % total_cnt) for _, project in itertools.chain(top_3_actives, top_2_fails): subcounter = self._cnt['5m'][project].to_dict(get_value='sum') log_str += " %s:%d,%d,%d,%d" % (project, subcounter.get('pending', 0), subcounter.get('success', 0), subcounter.get('retry', 0), subcounter.get('failed', 0)) logger.info(log_str) def _dump_cnt(self): '''Dump counters to file''' self._cnt['1h'].dump(os.path.join(self.data_path, 'scheduler.1h')) self._cnt['1d'].dump(os.path.join(self.data_path, 'scheduler.1d')) self._cnt['all'].dump(os.path.join(self.data_path, 'scheduler.all')) def _try_dump_cnt(self): '''Dump counters every 60 seconds''' now = time.time() if now - self._last_dump_cnt > 60: self._last_dump_cnt = now self._dump_cnt() self._print_counter_log() def _check_delete(self): '''Check project delete''' now = time.time() for project in list(itervalues(self.projects)): if project.db_status != 'STOP': continue if now - project.updatetime < self.DELETE_TIME: continue if 'delete' not in self.projectdb.split_group(project.group): continue logger.warning("deleting project: %s!", project.name) del self.projects[project.name] self.taskdb.drop(project.name) self.projectdb.drop(project.name) if self.resultdb: self.resultdb.drop(project.name) for each in self._cnt.values(): del each[project.name] def __len__(self): return sum(len(x.task_queue) for x in 
itervalues(self.projects)) def quit(self): '''Set quit signal''' self._quit = True # stop xmlrpc server if hasattr(self, 'xmlrpc_server'): self.xmlrpc_ioloop.add_callback(self.xmlrpc_server.stop) self.xmlrpc_ioloop.add_callback(self.xmlrpc_ioloop.stop) def run_once(self): '''comsume queues and feed tasks to fetcher, once''' self._update_projects() self._check_task_done() self._check_request() while self._check_cronjob(): pass self._check_select() self._check_delete() self._try_dump_cnt() def run(self): '''Start scheduler loop''' logger.info("scheduler starting...") while not self._quit: try: time.sleep(self.LOOP_INTERVAL) self.run_once() self._exceptions = 0 except KeyboardInterrupt: break except Exception as e: logger.exception(e) self._exceptions += 1 if self._exceptions > self.EXCEPTION_LIMIT: break continue logger.info("scheduler exiting...") self._dump_cnt() def trigger_on_start(self, project): '''trigger an on_start callback of project''' self.newtask_queue.put({ "project": project, "taskid": "on_start", "url": "data:,on_start", "process": { "callback": "on_start", }, }) def xmlrpc_run(self, port=23333, bind='127.0.0.1', logRequests=False): '''Start xmlrpc interface''' from pyspider.libs.wsgi_xmlrpc import WSGIXMLRPCApplication application = WSGIXMLRPCApplication() application.register_function(self.quit, '_quit') application.register_function(self.__len__, 'size') def dump_counter(_time, _type): try: return self._cnt[_time].to_dict(_type) except: logger.exception('') application.register_function(dump_counter, 'counter') def new_task(task): if self.task_verify(task): self.newtask_queue.put(task) return True return False application.register_function(new_task, 'newtask') def send_task(task): '''dispatch task to fetcher''' self.send_task(task) return True application.register_function(send_task, 'send_task') def update_project(): self._force_update_project = True application.register_function(update_project, 'update_project') def get_active_tasks(project=None, 
limit=100): allowed_keys = set(( 'type', 'taskid', 'project', 'status', 'url', 'lastcrawltime', 'updatetime', 'track', )) track_allowed_keys = set(( 'ok', 'time', 'follows', 'status_code', )) iters = [iter(x.active_tasks) for k, x in iteritems(self.projects) if x and (k == project if project else True)] tasks = [next(x, None) for x in iters] result = [] while len(result) < limit and tasks and not all(x is None for x in tasks): updatetime, task = t = max(t for t in tasks if t) i = tasks.index(t) tasks[i] = next(iters[i], None) for key in list(task): if key == 'track': for k in list(task[key].get('fetch', [])): if k not in track_allowed_keys: del task[key]['fetch'][k] for k in list(task[key].get('process', [])): if k not in track_allowed_keys: del task[key]['process'][k] if key in allowed_keys: continue del task[key] result.append(t) # fix for "<type 'exceptions.TypeError'>:dictionary key must be string" # have no idea why return json.loads(json.dumps(result)) application.register_function(get_active_tasks, 'get_active_tasks') def get_projects_pause_status(): result = {} for project_name, project in iteritems(self.projects): result[project_name] = project.paused return result application.register_function(get_projects_pause_status, 'get_projects_pause_status') def webui_update(): return { 'pause_status': get_projects_pause_status(), 'counter': { '5m_time': dump_counter('5m_time', 'avg'), '5m': dump_counter('5m', 'sum'), '1h': dump_counter('1h', 'sum'), '1d': dump_counter('1d', 'sum'), 'all': dump_counter('all', 'sum'), }, } application.register_function(webui_update, 'webui_update') import tornado.wsgi import tornado.ioloop import tornado.httpserver container = tornado.wsgi.WSGIContainer(application) self.xmlrpc_ioloop = tornado.ioloop.IOLoop() self.xmlrpc_server = tornado.httpserver.HTTPServer(container, io_loop=self.xmlrpc_ioloop) self.xmlrpc_server.listen(port=port, address=bind) logger.info('scheduler.xmlrpc listening on %s:%s', bind, port) 
self.xmlrpc_ioloop.start() def on_request(self, task): if self.INQUEUE_LIMIT and len(self.projects[task['project']].task_queue) >= self.INQUEUE_LIMIT: logger.debug('overflow task %(project)s:%(taskid)s %(url)s', task) return oldtask = self.taskdb.get_task(task['project'], task['taskid'], fields=self.merge_task_fields) if oldtask: return self.on_old_request(task, oldtask) else: return self.on_new_request(task) def on_new_request(self, task): '''Called when a new request is arrived''' task['status'] = self.taskdb.ACTIVE self.insert_task(task) self.put_task(task) project = task['project'] self._cnt['5m'].event((project, 'pending'), +1) self._cnt['1h'].event((project, 'pending'), +1) self._cnt['1d'].event((project, 'pending'), +1) self._cnt['all'].event((project, 'pending'), +1) logger.info('new task %(project)s:%(taskid)s %(url)s', task) return task def on_old_request(self, task, old_task): '''Called when a crawled task is arrived''' now = time.time() _schedule = task.get('schedule', self.default_schedule) old_schedule = old_task.get('schedule', {}) if _schedule.get('force_update') and self.projects[task['project']].task_queue.is_processing(task['taskid']): # when a task is in processing, the modify may conflict with the running task. # postpone the modify after task finished. 
logger.info('postpone modify task %(project)s:%(taskid)s %(url)s', task) self._postpone_request.append(task) return restart = False schedule_age = _schedule.get('age', self.default_schedule['age']) if _schedule.get('itag') and _schedule['itag'] != old_schedule.get('itag'): restart = True elif schedule_age >= 0 and schedule_age + (old_task.get('lastcrawltime', 0) or 0) < now: restart = True elif _schedule.get('force_update'): restart = True if not restart: logger.debug('ignore newtask %(project)s:%(taskid)s %(url)s', task) return if _schedule.get('cancel'): logger.info('cancel task %(project)s:%(taskid)s %(url)s', task) task['status'] = self.taskdb.BAD self.update_task(task) self.projects[task['project']].task_queue.delete(task['taskid']) return task task['status'] = self.taskdb.ACTIVE self.update_task(task) self.put_task(task) project = task['project'] if old_task['status'] != self.taskdb.ACTIVE: self._cnt['5m'].event((project, 'pending'), +1) self._cnt['1h'].event((project, 'pending'), +1) self._cnt['1d'].event((project, 'pending'), +1) if old_task['status'] == self.taskdb.SUCCESS: self._cnt['all'].event((project, 'success'), -1).event((project, 'pending'), +1) elif old_task['status'] == self.taskdb.FAILED: self._cnt['all'].event((project, 'failed'), -1).event((project, 'pending'), +1) logger.info('restart task %(project)s:%(taskid)s %(url)s', task) return task def on_task_status(self, task): '''Called when a status pack is arrived''' try: procesok = task['track']['process']['ok'] if not self.projects[task['project']].task_queue.done(task['taskid']): logging.error('not processing pack: %(project)s:%(taskid)s %(url)s', task) return None except KeyError as e: logger.error("Bad status pack: %s", e) return None if procesok: ret = self.on_task_done(task) else: ret = self.on_task_failed(task) if task['track']['fetch'].get('time'): self._cnt['5m_time'].event((task['project'], 'fetch_time'), task['track']['fetch']['time']) if 
task['track']['process'].get('time'):<|fim▁hole|> return ret def on_task_done(self, task): '''Called when a task is done and success, called by `on_task_status`''' task['status'] = self.taskdb.SUCCESS task['lastcrawltime'] = time.time() if 'schedule' in task: if task['schedule'].get('auto_recrawl') and 'age' in task['schedule']: task['status'] = self.taskdb.ACTIVE next_exetime = task['schedule'].get('age') task['schedule']['exetime'] = time.time() + next_exetime self.put_task(task) else: del task['schedule'] self.update_task(task) project = task['project'] self._cnt['5m'].event((project, 'success'), +1) self._cnt['1h'].event((project, 'success'), +1) self._cnt['1d'].event((project, 'success'), +1) self._cnt['all'].event((project, 'success'), +1).event((project, 'pending'), -1) logger.info('task done %(project)s:%(taskid)s %(url)s', task) return task def on_task_failed(self, task): '''Called when a task is failed, called by `on_task_status`''' if 'schedule' not in task: old_task = self.taskdb.get_task(task['project'], task['taskid'], fields=['schedule']) if old_task is None: logging.error('unknown status pack: %s' % task) return task['schedule'] = old_task.get('schedule', {}) retries = task['schedule'].get('retries', self.default_schedule['retries']) retried = task['schedule'].get('retried', 0) project_info = self.projects[task['project']] retry_delay = project_info.retry_delay or self.DEFAULT_RETRY_DELAY next_exetime = retry_delay.get(retried, retry_delay.get('', self.DEFAULT_RETRY_DELAY[''])) if task['schedule'].get('auto_recrawl') and 'age' in task['schedule']: next_exetime = min(next_exetime, task['schedule'].get('age')) else: if retried >= retries: next_exetime = -1 elif 'age' in task['schedule'] and next_exetime > task['schedule'].get('age'): next_exetime = task['schedule'].get('age') if next_exetime < 0: task['status'] = self.taskdb.FAILED task['lastcrawltime'] = time.time() self.update_task(task) project = task['project'] self._cnt['5m'].event((project, 
'failed'), +1) self._cnt['1h'].event((project, 'failed'), +1) self._cnt['1d'].event((project, 'failed'), +1) self._cnt['all'].event((project, 'failed'), +1).event((project, 'pending'), -1) logger.info('task failed %(project)s:%(taskid)s %(url)s' % task) return task else: task['schedule']['retried'] = retried + 1 task['schedule']['exetime'] = time.time() + next_exetime task['lastcrawltime'] = time.time() self.update_task(task) self.put_task(task) project = task['project'] self._cnt['5m'].event((project, 'retry'), +1) self._cnt['1h'].event((project, 'retry'), +1) self._cnt['1d'].event((project, 'retry'), +1) # self._cnt['all'].event((project, 'retry'), +1) logger.info('task retry %d/%d %%(project)s:%%(taskid)s %%(url)s' % ( retried, retries), task) return task def on_select_task(self, task): '''Called when a task is selected to fetch & process''' # inject informations about project logger.info('select %(project)s:%(taskid)s %(url)s', task) project_info = self.projects.get(task['project']) assert project_info, 'no such project' task['type'] = self.TASK_PACK task['group'] = project_info.group task['project_md5sum'] = project_info.md5sum task['project_updatetime'] = project_info.updatetime # lazy join project.crawl_config if getattr(project_info, 'crawl_config', None): task = BaseHandler.task_join_crawl_config(task, project_info.crawl_config) project_info.active_tasks.appendleft((time.time(), task)) self.send_task(task) return task from tornado import gen class OneScheduler(Scheduler): """ Scheduler Mixin class for one mode overwirted send_task method call processor.on_task(fetcher.fetch(task)) instead of consuming queue """ def _check_select(self): """ interactive mode of select tasks """ if not self.interactive: return super(OneScheduler, self)._check_select() # waiting for running tasks if self.running_task > 0: return is_crawled = [] def run(project=None): return crawl('on_start', project=project) def crawl(url, project=None, **kwargs): """ Crawl given url, same 
parameters as BaseHandler.crawl url - url or taskid, parameters will be used if in taskdb project - can be ignored if only one project exists. """ # looking up the project instance if project is None: if len(self.projects) == 1: project = list(self.projects.keys())[0] else: raise LookupError('You need specify the project: %r' % list(self.projects.keys())) project_data = self.processor.project_manager.get(project) if not project_data: raise LookupError('no such project: %s' % project) # get task package instance = project_data['instance'] instance._reset() task = instance.crawl(url, **kwargs) if isinstance(task, list): raise Exception('url list is not allowed in interactive mode') # check task in taskdb if not kwargs: dbtask = self.taskdb.get_task(task['project'], task['taskid'], fields=self.request_task_fields) if not dbtask: dbtask = self.taskdb.get_task(task['project'], task['url'], fields=self.request_task_fields) if dbtask: task = dbtask # select the task self.on_select_task(task) is_crawled.append(True) shell.ask_exit() def quit_interactive(): '''Quit interactive mode''' is_crawled.append(True) self.interactive = False shell.ask_exit() def quit_pyspider(): '''Close pyspider''' is_crawled[:] = [] shell.ask_exit() shell = utils.get_python_console() banner = ( 'pyspider shell - Select task\n' 'crawl(url, project=None, **kwargs) - same parameters as BaseHandler.crawl\n' 'quit_interactive() - Quit interactive mode\n' 'quit_pyspider() - Close pyspider' ) if hasattr(shell, 'show_banner'): shell.show_banner(banner) shell.interact() else: shell.interact(banner) if not is_crawled: self.ioloop.add_callback(self.ioloop.stop) def __getattr__(self, name): """patch for crawl(url, callback=self.index_page) API""" if self.interactive: return name raise AttributeError(name) def on_task_status(self, task): """Ignore not processing error in interactive mode""" if not self.interactive: super(OneScheduler, self).on_task_status(task) try: procesok = task['track']['process']['ok'] 
except KeyError as e: logger.error("Bad status pack: %s", e) return None if procesok: ret = self.on_task_done(task) else: ret = self.on_task_failed(task) if task['track']['fetch'].get('time'): self._cnt['5m_time'].event((task['project'], 'fetch_time'), task['track']['fetch']['time']) if task['track']['process'].get('time'): self._cnt['5m_time'].event((task['project'], 'process_time'), task['track']['process'].get('time')) self.projects[task['project']].active_tasks.appendleft((time.time(), task)) return ret def init_one(self, ioloop, fetcher, processor, result_worker=None, interactive=False): self.ioloop = ioloop self.fetcher = fetcher self.processor = processor self.result_worker = result_worker self.interactive = interactive self.running_task = 0 @gen.coroutine def do_task(self, task): self.running_task += 1 result = yield gen.Task(self.fetcher.fetch, task) type, task, response = result.args self.processor.on_task(task, response) # do with message while not self.processor.inqueue.empty(): _task, _response = self.processor.inqueue.get() self.processor.on_task(_task, _response) # do with results while not self.processor.result_queue.empty(): _task, _result = self.processor.result_queue.get() if self.result_worker: self.result_worker.on_result(_task, _result) self.running_task -= 1 def send_task(self, task, force=True): if self.fetcher.http_client.free_size() <= 0: if force: self._send_buffer.appendleft(task) else: raise self.outqueue.Full self.ioloop.add_future(self.do_task(task), lambda x: x.result()) def run(self): import tornado.ioloop tornado.ioloop.PeriodicCallback(self.run_once, 100, io_loop=self.ioloop).start() self.ioloop.start() def quit(self): self.ioloop.stop() logger.info("scheduler exiting...") import random import threading from pyspider.database.sqlite.sqlitebase import SQLiteMixin class ThreadBaseScheduler(Scheduler): def __init__(self, threads=4, *args, **kwargs): self.local = threading.local() super(ThreadBaseScheduler, self).__init__(*args, 
**kwargs) if isinstance(self.taskdb, SQLiteMixin): self.threads = 1 else: self.threads = threads self._taskdb = self.taskdb self._projectdb = self.projectdb self._resultdb = self.resultdb self.thread_objs = [] self.thread_queues = [] self._start_threads() assert len(self.thread_queues) > 0 @property def taskdb(self): if not hasattr(self.local, 'taskdb'): self.taskdb = self._taskdb.copy() return self.local.taskdb @taskdb.setter def taskdb(self, taskdb): self.local.taskdb = taskdb @property def projectdb(self): if not hasattr(self.local, 'projectdb'): self.projectdb = self._projectdb.copy() return self.local.projectdb @projectdb.setter def projectdb(self, projectdb): self.local.projectdb = projectdb @property def resultdb(self): if not hasattr(self.local, 'resultdb'): self.resultdb = self._resultdb.copy() return self.local.resultdb @resultdb.setter def resultdb(self, resultdb): self.local.resultdb = resultdb def _start_threads(self): for i in range(self.threads): queue = Queue.Queue() thread = threading.Thread(target=self._thread_worker, args=(queue, )) thread.daemon = True thread.start() self.thread_objs.append(thread) self.thread_queues.append(queue) def _thread_worker(self, queue): while True: method, args, kwargs = queue.get() try: method(*args, **kwargs) except Exception as e: logger.exception(e) def _run_in_thread(self, method, *args, **kwargs): i = kwargs.pop('_i', None) block = kwargs.pop('_block', False) if i is None: while True: for queue in self.thread_queues: if queue.empty(): break else: if block: time.sleep(0.1) continue else: queue = self.thread_queues[random.randint(0, len(self.thread_queues)-1)] break else: queue = self.thread_queues[i % len(self.thread_queues)] queue.put((method, args, kwargs)) if block: self._wait_thread() def _wait_thread(self): while True: if all(queue.empty() for queue in self.thread_queues): break time.sleep(0.1) def _update_project(self, project): self._run_in_thread(Scheduler._update_project, self, project) def 
on_task_status(self, task): i = hash(task['taskid']) self._run_in_thread(Scheduler.on_task_status, self, task, _i=i) def on_request(self, task): i = hash(task['taskid']) self._run_in_thread(Scheduler.on_request, self, task, _i=i) def _load_put_task(self, project, taskid): i = hash(taskid) self._run_in_thread(Scheduler._load_put_task, self, project, taskid, _i=i) def run_once(self): super(ThreadBaseScheduler, self).run_once() self._wait_thread()<|fim▁end|>
self._cnt['5m_time'].event((task['project'], 'process_time'), task['track']['process'].get('time')) self.projects[task['project']].active_tasks.appendleft((time.time(), task))
<|file_name|>executer.rs<|end_file_name|><|fim▁begin|>use std::error; use std::process; use crate::command::error::CommandError; use crate::config; use crate::output; <|fim▁hole|> config: &'c config::command::ParamsConfig, } impl<'c> Executer<'c> { pub fn from_config(config: &'c config::command::ParamsConfig) -> Self { trace!("command::params::exec::Executer::from_config"); Executer { config: config } } pub async fn run( &self, program: &'c Program<'c>, args: &'c Arguments<'c>, ) -> Result<(), Box<dyn error::Error>> { trace!("command::params::exec::Executer::run"); info!("exec: {} {}", program, args.join(" ")); let params = self.params().await?; let mut cmd = process::Command::new(program); cmd.args(args); for param in params.iter() { if let (Some(name_with_path), Some(value)) = (param.name.as_ref(), param.value.as_ref()) { if let Ok(name) = self.strip_path(name_with_path) { cmd.env(name, value); } } } // TODO: Handle signals let mut child = cmd.spawn()?; let output = child.wait()?; if output.success() { Ok(()) } else { output::PrintLine::error(&format!( "Command exit with status code: {}", output.code().unwrap_or(0) )); Err(Box::new(CommandError::Unknown)) } } } impl<'c> ParamsExecuter for Executer<'c> { fn config(&self) -> &config::command::ParamsConfig { &self.config } }<|fim▁end|>
use super::super::Executer as ParamsExecuter; use super::{Arguments, Program}; pub struct Executer<'c> {
<|file_name|>bin_spike.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- # Licensed under a 3-clause BSD style license - see LICENSE.rst """ """ import numpy as np from numpy import ma def bin_spike(x, l): """ l is the number of points used for comparison, thus l=2 means that each point will be compared only against the previous and following measurements. l=2 is is probably not a good choice, too small. Maybe use pstsd instead? Dummy way to avoid warnings when x[ini:fin] are all masked. Improve this in the future. """ assert x.ndim == 1, "I'm not ready to deal with multidimensional x" assert l%2 == 0, "l must be an even integer"<|fim▁hole|> bin = ma.masked_all(N) # bin_std = ma.masked_all(N) half_window = int(l/2) idx = (i for i in range(half_window, N - half_window) if np.isfinite(x[i])) for i in idx: ini = max(0, i - half_window) fin = min(N, i + half_window) # At least 3 valid points if ma.compressed(x[ini:fin]).size >= 3: bin[i] = x[i] - ma.median(x[ini:fin]) # bin_std[i] = (np.append(x[ini:i], x[i+1:fin+1])).std() bin[i] /= (np.append(x[ini:i], x[i+1:fin+1])).std() return bin class Bin_Spike(object): def __init__(self, data, varname, cfg, autoflag=True): self.data = data self.varname = varname self.cfg = cfg self.set_features() if autoflag: self.test() def keys(self): return self.features.keys() + \ ["flag_%s" % f for f in self.flags.keys()] def set_features(self): self.features = {'bin_spike': bin_spike(self.data[self.varname], self.cfg['l'])} def test(self): self.flags = {} try: threshold = self.cfg['threshold'] except: print("Deprecated cfg format. It should contain a threshold item.") threshold = self.cfg try: flag_good = self.cfg['flag_good'] flag_bad = self.cfg['flag_bad'] except: print("Deprecated cfg format. 
It should contain flag_good & flag_bad.") flag_good = 1 flag_bad = 3 assert (np.size(threshold) == 1) and \ (threshold is not None) and \ (np.isfinite(threshold)) flag = np.zeros(self.data[self.varname].shape, dtype='i1') flag[np.nonzero(self.features['bin_spike'] > threshold)] = flag_bad flag[np.nonzero(self.features['bin_spike'] <= threshold)] = flag_good flag[ma.getmaskarray(self.data[self.varname])] = 9 self.flags['bin_spike'] = flag<|fim▁end|>
N = len(x)
<|file_name|>verse_scanner.py<|end_file_name|><|fim▁begin|>"""Parent class and utility class for producing a scansion pattern for a line of Latin verse. Some useful methods * Perform a conservative i to j transformation * Performs elisions * Accents vowels by position * Breaks the line into a list of syllables by calling a Syllabifier class which may be injected into this classes constructor. """ import logging import re from typing import Any, Dict, List import cltk.prosody.lat.string_utils as string_utils from cltk.prosody.lat.metrical_validator import MetricalValidator from cltk.prosody.lat.scansion_constants import ScansionConstants from cltk.prosody.lat.scansion_formatter import ScansionFormatter from cltk.prosody.lat.syllabifier import Syllabifier from cltk.prosody.lat.verse import Verse LOG = logging.getLogger(__name__) LOG.addHandler(logging.NullHandler()) __author__ = ["Todd Cook <[email protected]>"] __license__ = "MIT License" class VerseScanner: """ The scansion symbols used can be configured by passing a suitable constants class to the constructor. 
""" def __init__( self, constants=ScansionConstants(), syllabifier=Syllabifier(), **kwargs ): self.constants = constants self.remove_punct_map = string_utils.remove_punctuation_dict() self.punctuation_substitutions = string_utils.punctuation_for_spaces_dict() self.metrical_validator = MetricalValidator(constants) self.formatter = ScansionFormatter(constants) self.syllabifier = syllabifier self.inverted_amphibrach_re = re.compile( r"{}\s*{}\s*{}".format( self.constants.STRESSED, self.constants.UNSTRESSED, self.constants.STRESSED, ) ) self.syllable_matcher = re.compile( r"[{}]".format( self.constants.VOWELS + self.constants.ACCENTED_VOWELS + self.constants.LIQUIDS + self.constants.MUTES ) ) def transform_i_to_j(self, line: str) -> str: """ Transform instances of consonantal i to j :param line: :return: >>> print(VerseScanner().transform_i_to_j("iactātus")) jactātus >>> print(VerseScanner().transform_i_to_j("bracchia")) bracchia """ words = line.split(" ") space_list = string_utils.space_list(line) corrected_words = [] for word in words: found = False for prefix in self.constants.PREFIXES: if word.startswith(prefix) and word != prefix: corrected_words.append( self.syllabifier.convert_consonantal_i(prefix) ) corrected_words.append( self.syllabifier.convert_consonantal_i(word[len(prefix) :]) ) found = True break if not found: corrected_words.append(self.syllabifier.convert_consonantal_i(word)) new_line = string_utils.join_syllables_spaces(corrected_words, space_list) char_list = string_utils.overwrite( list(new_line), r"\b[iī][{}]".format( self.constants.VOWELS + self.constants.ACCENTED_VOWELS ), "j", ) char_list = string_utils.overwrite( char_list, r"\b[I][{}]".format(self.constants.VOWELS_WO_I), "J" ) char_list = string_utils.overwrite( char_list, r"[{}][i][{}]".format(self.constants.VOWELS_WO_I, self.constants.VOWELS), "j", 1, ) return "".join(char_list) def transform_i_to_j_optional(self, line: str) -> str: """ Sometimes for the demands of meter a more permissive i 
to j transformation is warranted. :param line: :return: >>> print(VerseScanner().transform_i_to_j_optional("Italiam")) Italjam >>> print(VerseScanner().transform_i_to_j_optional("Lāvīniaque")) Lāvīnjaque >>> print(VerseScanner().transform_i_to_j_optional("omnium")) omnjum """ words = line.split(" ") space_list = string_utils.space_list(line) corrected_words = [] for word in words: found = False for prefix in self.constants.PREFIXES: if word.startswith(prefix) and word != prefix: corrected_words.append( self.syllabifier.convert_consonantal_i(prefix) ) corrected_words.append( self.syllabifier.convert_consonantal_i(word[len(prefix) :]) ) found = True break if not found: corrected_words.append(self.syllabifier.convert_consonantal_i(word)) new_line = string_utils.join_syllables_spaces(corrected_words, space_list) # the following two may be tunable and subject to improvement char_list = string_utils.overwrite( list(new_line), "[bcdfgjkmpqrstvwxzBCDFGHJKMPQRSTVWXZ][i][{}]".format( self.constants.VOWELS_WO_I ), "j", 1, ) char_list = string_utils.overwrite( char_list, "[{}][iI][{}]".format(self.constants.LIQUIDS, self.constants.VOWELS_WO_I), "j", 1, ) return "".join(char_list) def accent_by_position(self, verse_line: str) -> str: """ Accent vowels according to the rules of scansion. :param verse_line: a line of unaccented verse :return: the same line with vowels accented by position >>> print(VerseScanner().accent_by_position( ... "Arma virumque cano, Troiae qui primus ab oris").lstrip()) Ārma virūmque canō Trojae qui primus ab oris """ line = verse_line.translate(self.punctuation_substitutions) line = self.transform_i_to_j(line) marks = list(line) # locate and save dipthong positions since we don't want them being accented dipthong_positions = [] for dipth in self.constants.DIPTHONGS: if dipth in line: dipthong_positions.append(line.find(dipth)) # Vowels followed by 2 consonants # The digraphs ch, ph, th, qu and sometimes gu and su count as single consonants. 
# see http://people.virginia.edu/~jdk3t/epicintrog/scansion.htm marks = string_utils.overwrite( marks, "[{}][{}][{}]".format( self.constants.VOWELS, self.constants.CONSONANTS, self.constants.CONSONANTS_WO_H, ), self.constants.STRESSED, ) # one space (or more for 'dropped' punctuation may intervene) marks = string_utils.overwrite( marks, r"[{}][{}]\s*[{}]".format( self.constants.VOWELS, self.constants.CONSONANTS, self.constants.CONSONANTS_WO_H, ), self.constants.STRESSED, ) # ... if both consonants are in the next word, the vowel may be long # .... but it could be short if the vowel is not on the thesis/emphatic part of the foot # ... see Gildersleeve and Lodge p.446 marks = string_utils.overwrite( marks, r"[{}]\s*[{}][{}]".format( self.constants.VOWELS, self.constants.CONSONANTS, self.constants.CONSONANTS_WO_H, ), self.constants.STRESSED, ) # x is considered as two letters marks = string_utils.overwrite( marks, "[{}][xX]".format(self.constants.VOWELS), self.constants.STRESSED ) # z is considered as two letters marks = string_utils.overwrite( marks, r"[{}][zZ]".format(self.constants.VOWELS), self.constants.STRESSED ) original_verse = list(line) for idx, word in enumerate(original_verse): if marks[idx] == self.constants.STRESSED: original_verse[idx] = self.constants.VOWELS_TO_ACCENTS[ original_verse[idx] ] # make sure dipthongs aren't accented for idx in dipthong_positions: if original_verse[idx + 1] in self.constants.ACCENTS_TO_VOWELS: original_verse[idx + 1] = self.constants.ACCENTS_TO_VOWELS[ original_verse[idx + 1] ] return "".join(original_verse) def elide_all(self, line: str) -> str: """ Given a string of space separated syllables, erase with spaces the syllable portions that would disappear according to the rules of elision. 
:param line: :return: """ marks = list(line.translate(self.remove_punct_map)) all_vowels = self.constants.VOWELS + self.constants.ACCENTED_VOWELS tmp = "".join(marks) # Elision rules are compound but not cummulative: we place all elision edits into a list # of candidates, and then merge, taking the least of each section of the line. candidates = [ tmp, self.elide( tmp, r"[{}][{}]\s+[{}]".format( self.constants.CONSONANTS, all_vowels, all_vowels ), 1, 1, ), self.elide( tmp, r"[{}][{}]\s+[hH]".format(self.constants.CONSONANTS, all_vowels), 1, 1, ), self.elide(tmp, r"[aāuū]m\s+[{}]".format(all_vowels), 2), self.elide(tmp, r"ae\s+[{}]".format(all_vowels), 2), self.elide(tmp, r"[{}]\s+[{}]".format(all_vowels, all_vowels), 1), self.elide(tmp, r"[uū]m\s+h", 2), ] results = string_utils.merge_elisions(candidates) return results def calc_offset(self, syllables_spaces: List[str]) -> Dict[int, int]: """ Calculate a dictionary of accent positions from a list of syllables with spaces. :param syllables_spaces: :return: """ line = string_utils.flatten(syllables_spaces) mydict = {} # type: Dict[int, int] # #defaultdict(int) #type: Dict[int, int] for idx, syl in enumerate(syllables_spaces): target_syllable = syllables_spaces[idx] skip_qu = string_utils.starts_with_qu(target_syllable) matches = list(self.syllable_matcher.finditer(target_syllable)) for position, possible in enumerate(matches): if skip_qu: skip_qu = False continue (start, end) = possible.span() if ( target_syllable[start:end] in self.constants.VOWELS + self.constants.ACCENTED_VOWELS ): part = line[: len("".join(syllables_spaces[:idx]))] offset = len(part) + start if ( line[offset] not in self.constants.VOWELS + self.constants.ACCENTED_VOWELS ): LOG.error("Problem at line {} offset {}".format(line, offset)) mydict[idx] = offset return mydict def produce_scansion( self, stresses: list, syllables_wspaces: List[str], offset_map: Dict[int, int] ) -> str: """ Create a scansion string that has stressed and unstressed 
syllable positions in locations that correspond with the original texts syllable vowels. :param stresses list of syllable positions :param syllables_wspaces list of syllables with spaces escaped for punctuation or elision :param offset_map dictionary of syllable positions, and an offset amount which is the number of spaces to skip in the original line before inserting the accent. """ scansion = list(" " * len(string_utils.flatten(syllables_wspaces))) unstresses = string_utils.get_unstresses(stresses, len(syllables_wspaces)) try: for idx in unstresses: location = offset_map.get(idx) if location is not None: scansion[location] = self.constants.UNSTRESSED for idx in stresses: location = offset_map.get(idx) if location is not None: scansion[location] = self.constants.STRESSED except Exception as e: LOG.error( "problem with syllables; check syllabification {}, {}".format( syllables_wspaces, e ) ) return "".join(scansion) def flag_dipthongs(self, syllables: List[str]) -> List[int]: """ Return a list of syllables that contain a dipthong :param syllables: :return: """ long_positions = [] for idx, syl in enumerate(syllables): for dipthong in self.constants.DIPTHONGS: if dipthong in syllables[idx]: if not string_utils.starts_with_qu(syllables[idx]): long_positions.append(idx) return long_positions def elide(self, line: str, regexp: str, quantity: int = 1, offset: int = 0) -> str: """ Erase a section of a line, matching on a regex, pushing in a quantity of blank spaces, and jumping forward with an offset if necessary. If the elided vowel was strong, the vowel merged with takes on the stress. 
:param line: :param regexp: :param quantity: :param offset: :return: >>> print(VerseScanner().elide("uvae avaritia", r"[e]\s*[a]")) uv āvaritia >>> print(VerseScanner().elide("mare avaritia", r"[e]\s*[a]")) mar avaritia """ matcher = re.compile(regexp) positions = matcher.finditer(line) new_line = line for match in positions: (start, end) = match.span() # pylint: disable=unused-variable if (start > 0) and new_line[ start - 1 : start + 1 ] in self.constants.DIPTHONGS: vowel_to_coerce = new_line[end - 1] new_line = ( new_line[: (start - 1) + offset] + (" " * (quantity + 2)) + self.constants.stress_accent_dict[vowel_to_coerce] + new_line[end:] ) else: new_line = ( new_line[: start + offset] + (" " * quantity) + new_line[start + quantity + offset :] ) return new_line def correct_invalid_start(self, scansion: str) -> str: """ If a hexameter, hendecasyllables, or pentameter scansion starts with spondee, an unstressed syllable in the third position must actually be stressed, so we will convert it: - - | U -> - - | - :param scansion: :return: >>> print(VerseScanner().correct_invalid_start( ... " - - U U - - U U U U U U - -").strip()) - - - - - - U U U U U U - - """ mark_list = string_utils.mark_list(scansion) raw_scansion = scansion.replace(" ", "")<|fim▁hole|> corrected = "".join(new_scansion) new_sequence = list(" " * len(scansion)) for idx, car in enumerate(corrected): new_sequence[mark_list[idx]] = car return "".join(new_sequence) return scansion def correct_first_two_dactyls(self, scansion: str) -> str: """ If a hexameter or pentameter starts with spondee, an unstressed syllable in the third position must actually be stressed, so we will convert it: - - | U -> - - | - And/or if the starting pattern is spondee + trochee + stressed, then the unstressed trochee can be corrected: - - | - u | - -> - - | - -| - :param scansion: :return: >>> print(VerseScanner().correct_first_two_dactyls( ... 
" - - U U - - U U U U U U - -")) # doctest: +NORMALIZE_WHITESPACE - - - - - - U U U U U U - - """ mark_list = string_utils.mark_list(scansion) new_line = self.correct_invalid_start(scansion) raw_scansion = new_line.replace(" ", "") if raw_scansion.startswith( self.constants.SPONDEE + self.constants.TROCHEE + self.constants.STRESSED ): new_scansion = list( self.constants.SPONDEE + self.constants.SPONDEE + self.constants.STRESSED + raw_scansion[5:] ) corrected = "".join(new_scansion) new_sequence = list(" " * len(scansion)) for idx, car in enumerate(corrected): new_sequence[mark_list[idx]] = car return "".join(new_sequence) return new_line def assign_candidate(self, verse: Verse, candidate: str) -> Verse: """ Helper method; make sure that the verse object is properly packaged. :param verse: :param candidate: :return: """ verse.scansion = candidate verse.valid = True verse.accented = self.formatter.merge_line_scansion( verse.original, verse.scansion ) return verse<|fim▁end|>
if raw_scansion.startswith(self.constants.SPONDEE + self.constants.UNSTRESSED): new_scansion = list( self.constants.SPONDEE + self.constants.SPONDEE + raw_scansion[4:] )
<|file_name|>resource_scale_factors.py<|end_file_name|><|fim▁begin|># Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Presubmit script for Chromium browser resources. See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for more details about the presubmit API built into depot_tools, and see http://www.chromium.org/developers/web-development-style-guide for the rules we're checking against here. """ import os import struct class InvalidPNGException(Exception): pass class ResourceScaleFactors(object): """Verifier of image dimensions for Chromium resources. This class verifies the image dimensions of resources in the various resource subdirectories. Attributes: paths: An array of tuples giving the folders to check and their relevant scale factors. For example: [(100, 'default_100_percent'), (200, 'default_200_percent')] """ def __init__(self, input_api, output_api, paths): """ Initializes ResourceScaleFactors with paths.""" self.input_api = input_api self.output_api = output_api self.paths = paths def RunChecks(self): """Verifies the scale factors of resources being added or modified. <|fim▁hole|> An array of presubmit errors if any images were detected not having the correct dimensions. """ def ImageSize(filename): with open(filename, 'rb', buffering=0) as f: data = f.read(24) if data[:8] != '\x89PNG\r\n\x1A\n' or data[12:16] != 'IHDR': raise InvalidPNGException return struct.unpack('>ii', data[16:24]) # Returns a list of valid scaled image sizes. The valid sizes are the # floor and ceiling of (base_size * scale_percent / 100). This is equivalent # to requiring that the actual scaled size is less than one pixel away from # the exact scaled size. 
def ValidSizes(base_size, scale_percent): return sorted(set([(base_size * scale_percent) / 100, (base_size * scale_percent + 99) / 100])) repository_path = self.input_api.os_path.relpath( self.input_api.PresubmitLocalPath(), self.input_api.change.RepositoryRoot()) results = [] # Check for affected files in any of the paths specified. affected_files = self.input_api.AffectedFiles(include_deletes=False) files = [] for f in affected_files: for path_spec in self.paths: path_root = self.input_api.os_path.join( repository_path, path_spec[1]) if (f.LocalPath().endswith('.png') and f.LocalPath().startswith(path_root)): # Only save the relative path from the resource directory. relative_path = self.input_api.os_path.relpath(f.LocalPath(), path_root) if relative_path not in files: files.append(relative_path) corrupt_png_error = ('Corrupt PNG in file %s. Note that binaries are not ' 'correctly uploaded to the code review tool and must be directly ' 'submitted using the dcommit command.') for f in files: base_image = self.input_api.os_path.join(self.paths[0][1], f) if not os.path.exists(base_image): results.append(self.output_api.PresubmitError( 'Base image %s does not exist' % self.input_api.os_path.join( repository_path, base_image))) continue try: base_dimensions = ImageSize(base_image) except InvalidPNGException: results.append(self.output_api.PresubmitError(corrupt_png_error % self.input_api.os_path.join(repository_path, base_image))) continue # Find all scaled versions of the base image and verify their sizes. for i in range(1, len(self.paths)): image_path = self.input_api.os_path.join(self.paths[i][1], f) if not os.path.exists(image_path): continue # Ensure that each image for a particular scale factor is the # correct scale of the base image. 
try: scaled_dimensions = ImageSize(image_path) except InvalidPNGException: results.append(self.output_api.PresubmitError(corrupt_png_error % self.input_api.os_path.join(repository_path, image_path))) continue for dimension_name, base_size, scaled_size in zip( ('width', 'height'), base_dimensions, scaled_dimensions): valid_sizes = ValidSizes(base_size, self.paths[i][0]) if scaled_size not in valid_sizes: results.append(self.output_api.PresubmitError( 'Image %s has %s %d, expected to be %s' % ( self.input_api.os_path.join(repository_path, image_path), dimension_name, scaled_size, ' or '.join(map(str, valid_sizes))))) return results<|fim▁end|>
Returns:
<|file_name|>treeplot.js<|end_file_name|><|fim▁begin|>var margin = {top: 0, right: 0, bottom: 0, left: 130}, width = 1500 - margin.right - margin.left, height = 470 - margin.top - margin.bottom; var i = 0, duration = 750, root; var tree = d3.layout.tree() .size([height, width]); var diagonal = d3.svg.diagonal() .projection(function(d) { return [d.y, d.x]; }); var svg = d3.select("#treeplot").append("svg") .attr("width", width + margin.right + margin.left) .attr("height", height + margin.top + margin.bottom) .append("g") .attr("transform", "translate(" + margin.left + "," + margin.top + ")"); d3.json("/pattern_discovery/data?id={{selections.current_dataset}}", function(error, flare) { if (error) throw error; $("#wait").empty(); root = flare; root.x0 = height / 2; root.y0 = 0; function collapse(d) { if (d.children) { d._children = d.children; d._children.forEach(collapse); d.children = null; } } root.children.forEach(collapse); update(root); }); d3.select(self.frameElement).style("height", "800px"); function update(source) { // Compute the new tree layout. var nodes = tree.nodes(root).reverse(), links = tree.links(nodes); // Normalize for fixed-depth. nodes.forEach(function(d) { d.y = d.depth * 300; }); // Update the nodes… var node = svg.selectAll("g.node") .data(nodes, function(d) { return d.id || (d.id = ++i); }); // Enter any new nodes at the parent's previous position. var nodeEnter = node.enter().append("g") .attr("class", "node") .attr("transform", function(d) { return "translate(" + source.y0 + "," + source.x0 + ")"; }) .on("click", click); nodeEnter.append("circle") .attr("r", 1e-6) .style("fill", function(d) { return d._children ? "lightsteelblue" : "#fff"; }); nodeEnter.append("text") .attr("x", function(d) { return d.children || d._children ? -10 : 10; }) .attr("dy", ".35em") .attr("text-anchor", function(d) { return d.children || d._children ? 
"end" : "start"; }) .text(function(d) { return d.name; }) .style("fill-opacity", 1e-6); // Transition nodes to their new position. var nodeUpdate = node.transition() .duration(duration) .attr("transform", function(d) { return "translate(" + d.y + "," + d.x + ")"; }); <|fim▁hole|> nodeUpdate.select("text") .style("fill-opacity", 1); // Transition exiting nodes to the parent's new position. var nodeExit = node.exit().transition() .duration(duration) .attr("transform", function(d) { return "translate(" + source.y + "," + source.x + ")"; }) .remove(); nodeExit.select("circle") .attr("r", 1e-6); nodeExit.select("text") .style("fill-opacity", 1e-6); // Update the links… var link = svg.selectAll("path.link") .data(links, function(d) { return d.target.id; }); // Enter any new links at the parent's previous position. link.enter().insert("path", "g") .attr("class", "link") .attr("d", function(d) { var o = {x: source.x0, y: source.y0}; return diagonal({source: o, target: o}); }); // Transition links to their new position. link.transition() .duration(duration) .attr("d", diagonal); // Transition exiting nodes to the parent's new position. link.exit().transition() .duration(duration) .attr("d", function(d) { var o = {x: source.x, y: source.y}; return diagonal({source: o, target: o}); }) .remove(); // Stash the old positions for transition. nodes.forEach(function(d) { d.x0 = d.x; d.y0 = d.y; }); } // Toggle children on click. function click(d) { if (d.children) { d._children = d.children; d.children = null; } else { d.children = d._children; d._children = null; } update(d); }<|fim▁end|>
nodeUpdate.select("circle") .attr("r", 4.5) .style("fill", function(d) { return d._children ? "lightsteelblue" : "#fff"; });
<|file_name|>graph_loader.py<|end_file_name|><|fim▁begin|>""" Provided code for Application portion of Module 1 Imports physics citation graph """ ################################### # Code for loading citation graph CITATION_URL = "phys-cite_graph.txt" def load_graph(graph_url): """ Function that loads a graph given the URL<|fim▁hole|> Returns a dictionary that models a graph """ graph_file = open(graph_url) graph_text = graph_file.read() graph_lines = graph_text.split('\n') graph_lines = graph_lines[ : -1] print "Loaded graph with", len(graph_lines), "nodes" answer_graph = {} for line in graph_lines: neighbors = line.split(' ') node = int(neighbors[0]) answer_graph[node] = set([]) for neighbor in neighbors[1 : -1]: answer_graph[node].add(int(neighbor)) return answer_graph citation_graph = load_graph(CITATION_URL)<|fim▁end|>
for a text representation of the graph
<|file_name|>elasticsearch_new.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Copyright 2014 Michael Malocha <[email protected]> # # Expanded from the work by Julien Duponchelle <[email protected]>. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Elastic Search Pipeline for scrappy expanded with support for multiple items""" from pyes import ES import hashlib import types import json class ElasticSearchPipeline(object): settings = None es = None @classmethod def from_crawler(cls, crawler): ext = cls() ext.settings = crawler.settings basic_auth = {} if ext.settings['ELASTICSEARCH_USERNAME']: basic_auth['username'] = ext.settings['ELASTICSEARCH_USERNAME'] if ext.settings['ELASTICSEARCH_PASSWORD']: basic_auth['password'] = ext.settings['ELASTICSEARCH_PASSWORD'] if ext.settings['ELASTICSEARCH_PORT']: uri = "%s:%d" % (ext.settings['ELASTICSEARCH_SERVER'], ext.settings['ELASTICSEARCH_PORT']) else: uri = "%s" % (ext.settings['ELASTICSEARCH_SERVER']) if ext.settings['ELASTICSEARCH_MAPPING']: mapping = json.loads(ext.settings['ELASTICSEARCH_MAPPING']) ext.es = ES([uri], basic_auth=basic_auth) return ext def open_spider(self, spider): def index_item(self, item, spider): if self.settings.get('ELASTICSEARCH_UNIQ_KEY'):<|fim▁hole|> local_id = hashlib.sha1(item[uniq_key]).hexdigest() spider.logger.info("Generated unique key %s", local_id) op_type = 'index' else: op_type = 'create' local_id = item['id'] self.es.index(dict(item), self.settings.get('ELASTICSEARCH_INDEX'), 
self.settings.get('ELASTICSEARCH_TYPE'), id=local_id, op_type=op_type) def process_item(self, item, spider): if isinstance(item, types.GeneratorType) or isinstance(item, types.ListType): for each in item: self.process_item(each, spider) else: self.index_item(item, spider) spider.logger.info("Item sent to Elastic Search %s" % (self.settings.get('ELASTICSEARCH_INDEX'))) return item<|fim▁end|>
uniq_key = self.settings.get('ELASTICSEARCH_UNIQ_KEY')
<|file_name|>ShowRoute.js<|end_file_name|><|fim▁begin|>import React from 'react'; import Helmet from 'react-helmet'; import { Route } from '../../core/router'; import { Model as Waste } from '../../entities/Waste'; import { Deferred } from '../../util/utils'; import NavLink from '../../components/NavLink'; import Progress from 'react-progress-2'; import { PageHeader, Row, Col, Panel, Label } from 'react-bootstrap'; import radio from 'backbone.radio'; const router = radio.channel('router'); export default class WasteShowRoute extends Route { breadcrumb({ params }) { const dfd = new Deferred; const waste = new Waste({ fid: params.wfid }); waste.forSubjectParam(params.fid); waste.fetch({ success: m => dfd.resolve(m.get('title')) }); return dfd.promise; } <|fim▁hole|> this.waste = new Waste({ fid: params.wfid }); this.waste.forSubjectParam(params.fid).expandParam('subtype'); return this.waste.fetch(); } render() { const waste = this.waste.toJSON(); return ( <div> <Helmet title={waste.title} /> <PageHeader>{waste.title}</PageHeader> <Row> <Col md={12}> <ul className="nav menu-nav-pills"> <li> <NavLink to={`/companies/${this.companyFid}/waste/${waste.fid}/edit`} > <i className="fa fa-pencil-square-o" /> Редактировать </NavLink> </li> <li> <a href="javascript:;" onClick={() => { Progress.show(); this.waste.destroy({ success: () => { Progress.hide(); router.request('navigate', `companies/${this.companyFid}`); }, }); }} > <i className="fa fa-ban" aria-hidden="true" /> Удалить </a> </li> </ul> </Col> </Row> <Row> <Col md={12}> <Panel> <h4><Label>Название</Label>{' '} {waste.title} </h4> <h4><Label>Вид отходов</Label>{' '} <NavLink to={`/waste-types/${waste.subtype.fid}`}>{waste.subtype.title}</NavLink> </h4> <h4><Label>Количество</Label>{' '} {waste.amount} т </h4> </Panel> </Col> </Row> </div> ); } }<|fim▁end|>
fetch({ params }) { this.companyFid = params.fid;
<|file_name|>set_version.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2 # -*- coding: iso-8859-1 -*- # WebKOM - a web based LysKOM client # # Copyright (C) 2000 by Peter Åstrand # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; version 2 # of the License. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. <|fim▁hole|>template = open("webkom.spec.template") new = open("webkom.spec", "w") while 1: line = template.readline() if not line: break if line.find("Version:") != -1: line = "Version: " + VERSION + "\n" new.write(line)<|fim▁end|>
from webkom_constants import VERSION
<|file_name|>process.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 # process.py # This script consists of all core functions. # Author: Orhan Odabasi (0rh.odabasi[at]gmail.com) import locale import csv import os from PIL import Image import re from collections import Counter def scanDir(path): # scan the path and collect media data for copy process while os.path.exists(path) and os.path.isdir(path): photos_dataset, totalsize, folder_count, videos_dataset = listphotos(path) p_count = len(photos_dataset) p_size = "{:.2f} MB".format(float(totalsize/1000000)) return p_count, p_size, folder_count, photos_dataset, videos_dataset def saveReport(photo_datas, video_datas, target_path): # save summary data to a csv file report_dest_p = os.path.join(target_path, "photo_list.csv") report_dest_v = os.path.join(target_path, "video_list.csv") with open(report_dest_p, "w") as f: w = csv.writer(f, delimiter="\t") w.writerows(photo_datas) f.close() with open(report_dest_v, "w") as f: w = csv.writer(f, delimiter="\t") w.writerows(video_datas) f.close() def listphotos(path): # Listing all files in target directory photos_dataset = [] videos_dataset = [] for root, dirs, files in os.walk(path): for name in files: p_data_list = [] v_data_list = [] # filename name [0] file_name = name # file path [1] file_path = os.path.join(root, file_name)<|fim▁hole|> file_size = os.path.getsize(file_path) try: # date taken [3] date_taken = Image.open(file_path)._getexif()[36867] # year/month/day format required ymd_format = re.match("(\d{4}):(\d{2}):(\d{2})", date_taken) # year taken [4] year = ymd_format.group(1) # month taken [5] month = ymd_format.group(2) # day taken [6] day = ymd_format.group(3) # date info will be our new folder name date_info = "{0}-{1}".format(year, month) except: date_taken = "NOT_FOUND" day = "NOT_FOUND" year = "NOT_FOUND" month = "NOT_FOUND" # destination folder name [7] date_info = "NOT_FOUND" if name.lower().endswith((".jpeg", ".jpg", ".png", ".dng")): 
p_data_list.extend([file_name, file_path, file_size, date_taken, year, month, day, date_info]) photos_dataset.append(p_data_list) elif name.lower().endswith((".mov", ".mkv", ".mp4", ".3gp", ".wmv", ".avi")): v_data_list.extend([file_name, file_path, file_size, date_taken, year, month, day, date_info]) videos_dataset.append(v_data_list) # total size of photos archive (only jpeg and png files) totalsize = 0 for s in photos_dataset: totalsize += int(s[2]) #total file count dirs = [] for x in photos_dataset: dirs.append(x[7]) foldercount = len(Counter(dirs).most_common()) return photos_dataset, totalsize, foldercount, videos_dataset<|fim▁end|>
# file size [2]
<|file_name|>cooperation.js<|end_file_name|><|fim▁begin|>/** * Created by cin on 1/18/14. */ /** * Created by cin on 1/18/14. */ var _ = require('underscore'), chance = new (require('chance'))(), syBookshelf = require('./base'), User = require('./user'), CoStatus = require('./co-status'), UserCooperation = require('./user-cooperation'), UserCooperations = UserCooperation.Set, Picture = require('./picture'), Pictures = Picture.Set, CoComment = require('./co-comment'), Cooperation, Cooperations, config = require('../config'), tbCooperation = 'cooperations', fkStatus = 'statusid', fkCooperation = 'cooperationid', fkOwner = 'ownerid'; Cooperation = module.exports = syBookshelf.Model.extend({ tableName: tbCooperation, fields: [ 'id', 'name', 'description', 'company', 'avatar', 'statusid', 'ownerid', 'isprivate', 'regdeadline', 'createtime' ], appended: ['user', 'status'], fieldToAssets: { avatar: 'cooperations' }, defaults: function () { return { createtime: new Date() } }, toJSON: function () { var self = this, Model = this.constructor, ret = Model.__super__.toJSON.apply(this, arguments); _.each(this.fieldToAssets, function (type, field) { if (self.get(field) != null) { var file = self.getAssetPath(type); ret[field] = config.toStaticURI(file) + '?t=' + ret[field]; } }); return ret; }, saving: function () { return Cooperation.__super__ .saving.apply(this, arguments); }, usership: function () { return this.hasMany(UserCooperations, fkCooperation); }, fetched: function (model, attrs, options) { return Cooperation.__super__.fetched.apply(this, arguments) .return(model) .call('countComments') .call('countUsership') .call('countPictures') .then(function (cooperation) { return model.related('pictures').fetch(); }) .then(function () { if (!options['detailed']) return; return model.related('cocomments') .query(function (qb) { qb.orderBy('id', 'desc'); }).fetch(); }) }, countUsership: function () { var self = this; return this.usership().fetch() .then(function (userships) { var 
numUserships = userships.length; return self.data('numUserships', numUserships); }) }, status: function () { return this.belongsTo(CoStatus, fkStatus); }, user: function () { return this.belongsTo(require('./user'), fkOwner); }, cocomments: function () { return this.hasMany(CoComment, 'cooperationid'); }, pictures: function () { return this.hasMany(Picture, 'cooperationid'); }, countComments: function () { var self = this; return this.cocomments().fetch() .then(function (cocomments) { var numComments = cocomments.length; return self.data('numComments', numComments); }); }, countPictures: function () { var self = this; return Pictures.forge().query() .where(fkCooperation, '=', self.id) .count('id') .then(function (d) {<|fim▁hole|> }); } }, { randomForge: function () { var status = _.random(1, 2); return Cooperation.forge({ 'name': chance.word(), 'description': chance.paragraph(), 'ownerid': chance.integer({ min: 1, max: 20 }), 'company': chance.word(), 'avatar': chance.word(), 'statusid': status, 'isprivate': chance.bool(), 'regdeadline': chance.date({ year: 2013 }) }); } }); Cooperations = Cooperation.Set = syBookshelf.Collection.extend({ model: Cooperation, lister: function (req, qb) { var query = req.query; this.qbWhere(qb, req, query, ['id', 'statusid', 'ownerid', 'isprivate'], tbCooperation); if (!req.query['fuzzy']) { this.qbWhere(qb, req, query, ['name', 'company'], tbCooperation); } else { this.qbWhereLike(qb, req, query, ['name', 'description', 'company'], tbCooperation); } } });<|fim▁end|>
return self.data('numPictures', d[0]["count(`id`)"]);
<|file_name|>country_info.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors # License: MIT. See LICENSE # all country info import os, json, frappe from frappe.utils.momentjs import get_all_timezones def get_country_info(country=None): data = get_all() data = frappe._dict(data.get(country, {})) if 'date_format' not in data: data.date_format = "dd-mm-yyyy" if 'time_format' not in data:<|fim▁hole|> data.time_format = "HH:mm:ss" return data def get_all(): with open(os.path.join(os.path.dirname(__file__), "country_info.json"), "r") as local_info: all_data = json.loads(local_info.read()) return all_data @frappe.whitelist() def get_country_timezone_info(): return { "country_info": get_all(), "all_timezones": get_all_timezones() } def get_translated_dict(): from babel.dates import get_timezone, get_timezone_name, Locale translated_dict = {} locale = Locale.parse(frappe.local.lang, sep="-") # timezones for tz in get_all_timezones(): timezone_name = get_timezone_name(get_timezone(tz), locale=locale, width='short') if timezone_name: translated_dict[tz] = timezone_name + ' - ' + tz # country names && currencies for country, info in get_all().items(): country_name = locale.territories.get((info.get("code") or "").upper()) if country_name: translated_dict[country] = country_name currency = info.get("currency") currency_name = locale.currencies.get(currency) if currency_name: translated_dict[currency] = currency_name return translated_dict def update(): with open(os.path.join(os.path.dirname(__file__), "currency_info.json"), "r") as nformats: nformats = json.loads(nformats.read()) all_data = get_all() for country in all_data: data = all_data[country] data["number_format"] = nformats.get(data.get("currency", "default"), nformats.get("default"))["display"] with open(os.path.join(os.path.dirname(__file__), "country_info.json"), "w") as local_info: local_info.write(json.dumps(all_data, indent=1))<|fim▁end|>
<|file_name|>blockchain.js<|end_file_name|><|fim▁begin|>/* globals Promise:true */ var _ = require('lodash') var EventEmitter = require('events').EventEmitter var inherits = require('util').inherits var LRU = require('lru-cache') var Promise = require('bluebird') var Snapshot = require('./snapshot') var errors = require('../errors') var util = require('../util') /** * @event Blockchain#error * @param {Error} error */ /** * @event Blockchain#syncStart */ /** * @event Blockchain#syncStop */ /** * @event Blockchain#newBlock * @param {string} hash * @param {number} height */ /** * @event Blockchain#touchAddress * @param {string} address */ /** * @class Blockchain * @extends events.EventEmitter * * @param {Connector} connector * @param {Object} [opts] * @param {string} [opts.networkName=livenet] * @param {number} [opts.txCacheSize=100] */ function Blockchain (connector, opts) { var self = this EventEmitter.call(self) opts = _.extend({ networkName: 'livenet', txCacheSize: 100 }, opts) self.connector = connector self.networkName = opts.networkName self.latest = {hash: util.zfill('', 64), height: -1} self._txCache = LRU({max: opts.txCacheSize, allowSlate: true}) self._isSyncing = false<|fim▁hole|> inherits(Blockchain, EventEmitter) Blockchain.prototype._syncStart = function () { if (!this.isSyncing()) this.emit('syncStart') } Blockchain.prototype._syncStop = function () { if (this.isSyncing()) this.emit('syncStop') } /** * @param {errors.Connector} err * @throws {errors.Connector} */ Blockchain.prototype._rethrow = function (err) { var nerr switch (err.name) { case 'ErrorBlockchainJSConnectorHeaderNotFound': nerr = new errors.Blockchain.HeaderNotFound() break case 'ErrorBlockchainJSConnectorTxNotFound': nerr = new errors.Blockchain.TxNotFound() break case 'ErrorBlockchainJSConnectorTxSendError': nerr = new errors.Blockchain.TxSendError() break default: nerr = err break } nerr.message = err.message throw nerr } /** * Return current syncing status * * @return {boolean} */ 
Blockchain.prototype.isSyncing = function () { return this._isSyncing } /** * @return {Promise<Snapshot>} */ Blockchain.prototype.getSnapshot = function () { return Promise.resolve(new Snapshot(this)) } /** * @abstract * @param {(number|string)} id height or hash * @return {Promise<Connector~HeaderObject>} */ Blockchain.prototype.getHeader = function () { return Promise.reject(new errors.NotImplemented('Blockchain.getHeader')) } /** * @abstract * @param {string} txid * @return {Promise<string>} */ Blockchain.prototype.getTx = function () { return Promise.reject(new errors.NotImplemented('Blockchain.getTx')) } /** * @typedef {Object} Blockchain~TxBlockHashObject * @property {string} source `blocks` or `mempool` * @property {Object} [block] defined only when source is blocks * @property {string} data.hash * @property {number} data.height */ /** * @abstract * @param {string} txid * @return {Promise<Blockchain~TxBlockHashObject>} */ Blockchain.prototype.getTxBlockHash = function () { return Promise.reject(new errors.NotImplemented('Blockchain.getTxBlockHash')) } /** * @abstract * @param {string} rawtx * @return {Promise<string>} */ Blockchain.prototype.sendTx = function () { return Promise.reject(new errors.NotImplemented('Blockchain.sendTx')) } /** * @abstract * @param {string[]} addresses * @param {Object} [opts] * @param {string} [opts.source] `blocks` or `mempool` * @param {(string|number)} [opts.from] `hash` or `height` * @param {(string|number)} [opts.to] `hash` or `height` * @param {string} [opts.status] * @return {Promise<Connector~AddressesQueryObject>} */ Blockchain.prototype.addressesQuery = function () { return Promise.reject(new errors.NotImplemented('Blockchain.addressesQuery')) } /** * @abstract * @param {string} address * @return {Promise} */ Blockchain.prototype.subscribeAddress = function () { return Promise.reject(new errors.NotImplemented('Blockchain.subscribeAddress')) } module.exports = Blockchain<|fim▁end|>
self.on('syncStart', function () { self._isSyncing = true }) self.on('syncStop', function () { self._isSyncing = false }) }
<|file_name|>black-menu.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit, Input, Output, EventEmitter } from '@angular/core'; @Component({ selector: 'black-menu-component',<|fim▁hole|> styleUrls: ['./black-menu.component.scss'] }) export class BlackMenuComponent implements OnInit { @Input() private menuInfo; @Output() private blockMenuConfirm: EventEmitter<any> = new EventEmitter(); @Output() private delSingleBlack: EventEmitter<any> = new EventEmitter(); constructor() { // pass } public ngOnInit() { // pass } private blockMenuEmit() { this.blockMenuConfirm.emit(); } private delSingleBlackAction(item) { this.delSingleBlack.emit(item); } }<|fim▁end|>
templateUrl: './black-menu.component.html',
<|file_name|>test_nbpy.py<|end_file_name|><|fim▁begin|># -*- coding: utf8 -*- from unittest import TestCase from . import formattest from .. import nbpy from .nbexamples import nb0, nb0_py class TestPy(formattest.NBFormatTest, TestCase): nb0_ref = nb0_py ext = 'py' mod = nbpy ignored_keys = ['collapsed', 'outputs', 'prompt_number', 'metadata'] def assertSubset(self, da, db): """assert that da is a subset of db, ignoring self.ignored_keys. Called recursively on containers, ultimately comparing individual elements. """ if isinstance(da, dict): for k,v in da.iteritems(): if k in self.ignored_keys: continue self.assertTrue(k in db) self.assertSubset(v, db[k]) elif isinstance(da, list): for a,b in zip(da, db): self.assertSubset(a,b) else:<|fim▁hole|> da = da.strip('\n') db = db.strip('\n') self.assertEquals(da, db) return True def assertNBEquals(self, nba, nbb): # since roundtrip is lossy, only compare keys that are preserved # assumes nba is read from my file format return self.assertSubset(nba, nbb)<|fim▁end|>
if isinstance(da, basestring) and isinstance(db, basestring): # pyfile is not sensitive to preserving leading/trailing # newlines in blocks through roundtrip
<|file_name|>test_basic.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from __future__ import unicode_literals import base from misc import GetPageInfo from models import PageIdentifier from category import GetSubcategoryInfos from revisions import GetCurrentContent, GetPageRevisionInfos from meta import GetSourceInfo def test_unicode_title(): get_beyonce = GetCurrentContent("Beyoncé Knowles") assert get_beyonce() def test_coercion_basic(): pid = PageIdentifier(title='Africa', page_id=123, ns=4, source='enwp') get_subcats = GetSubcategoryInfos(pid) assert get_subcats.input_param == 'Category:Africa' def test_web_request(): url = 'http://upload.wikimedia.org/wikipedia/commons/d/d2/Mcgregor.jpg' get_photo = base.WebRequestOperation(url) res = get_photo() text = res[0] assert len(text) == 16408 def test_get_html(): get_africa_html = base.GetPageHTML('Africa') res = get_africa_html() text = res[0] assert len(text) > 350000 def test_missing_revisions(): get_revs = GetPageRevisionInfos('Coffee_lololololol')<|fim▁hole|> ''' Should return 'missing' and negative pageid ''' assert len(rev_list) == 0 def test_get_meta(): get_source_info = GetSourceInfo() meta = get_source_info() assert meta def test_client_passed_to_subops(): # This tests whether the client object given to the initial operation # is passed to its sub-operations. # Use just enough titles to force multiplexing so that we can get # sub ops to test. titles = ['a'] * (base.DEFAULT_QUERY_LIMIT.get_limit() + 1) client = base.MockClient() op = GetPageInfo(titles, client=client) assert id(op.subop_queues[0].peek().client) == id(client)<|fim▁end|>
rev_list = get_revs()
<|file_name|>test_bounce.py<|end_file_name|><|fim▁begin|># Copyright (c) Twisted Matrix Laboratories. # See LICENSE for details. """Test cases for bounce message generation """ from twisted.trial import unittest from twisted.mail import bounce import cStringIO<|fim▁hole|> """ testcases for bounce message generation """ def testBounceFormat(self): from_, to, s = bounce.generateBounce(cStringIO.StringIO('''\ From: Moshe Zadka <[email protected]> To: [email protected] Subject: test '''), '[email protected]', '[email protected]') self.assertEqual(from_, '') self.assertEqual(to, '[email protected]') emailParser = email.parser.Parser() mess = emailParser.parse(cStringIO.StringIO(s)) self.assertEqual(mess['To'], '[email protected]') self.assertEqual(mess['From'], '[email protected]') self.assertEqual(mess['subject'], 'Returned Mail: see transcript for details') def testBounceMIME(self): pass<|fim▁end|>
import email.message import email.parser class BounceTests(unittest.TestCase):
<|file_name|>main.py<|end_file_name|><|fim▁begin|>#------------------------------------------------------------------------------- # Name: Main.py # Purpose: This script creates chainages from a single or mutile line # # Author: smithc5 # # Created: 10/02/2015 # Copyright: (c) smithc5 2015 # Licence: <your licence> #------------------------------------------------------------------------------ import os import arcpy import sys import traceback from modules import create_chainages<|fim▁hole|> source_align_location = arcpy.GetParameterAsText(0) # Variable to store the location of the original source alignment. database_location = arcpy.GetParameterAsText(1) # Variable to store the location where the database is created to store the. # feature classes. chainage_distance = arcpy.GetParameterAsText(2) new_fc_name = os.path.basename(source_align_location[:-4]) # New name for the copied feature class. Original name minus file extension database_name = "{}.gdb".format(new_fc_name) # Variable to store the name of the .gdb to store the feature classes. DATABASE_FLIEPATH = os.path.join(database_location, database_name) new_fc_filepath = os.path.join(DATABASE_FLIEPATH, new_fc_name) # New file path to the copied feature class new_fc_filepath_with_m = "{0}_M".format(new_fc_filepath) # New file path to the copied feature class chainage_feature_class = "{0}_Chainages".format(new_fc_filepath) # This is the output feature class to store the chainages. 
def main(): try: create_chainages.check_if_gdb_exist(DATABASE_FLIEPATH) create_chainages.create_gdb(database_location, database_name) create_chainages.copy_features(source_align_location, new_fc_filepath) create_chainages.create_route(new_fc_filepath, "Name", new_fc_filepath_with_m) create_chainages.create_chainages(new_fc_filepath_with_m, chainage_distance, database_location, new_fc_filepath_with_m, DATABASE_FLIEPATH, chainage_feature_class) except: tb = sys.exc_info()[2] tbinfo = traceback.format_tb(tb)[0] pymsg = "PYTHON ERRORS:\nTraceback Info:\n{0}\nError Info:\n {1}: {2}\n".format(tbinfo, str(sys.exc_type), str(sys.exc_value)) msgs = "ARCPY ERRORS:\n{}\n".format(arcpy.GetMessages(2)) arcpy.AddError(msgs) arcpy.AddError(pymsg) print msgs print pymsg arcpy.AddMessage(arcpy.GetMessages(1)) print arcpy.GetMessages(1) if __name__ == '__main__': main()<|fim▁end|>
<|file_name|>flowfield.js<|end_file_name|><|fim▁begin|>/* * Background sketch * Author: Uriel Sade * Date: Feb. 22, 2017 */ var canvas; var time_x, time_y, time_z, time_inc; var field = []; var particles = []; var rows, cols; var scl = 20; function setup() { canvas = createCanvas(windowWidth, windowHeight); canvas.position(0,0);<|fim▁hole|> background(0,0,0,0); rows = 25; scl = floor(height/rows); cols = floor(width/scl); time_inc = 0.2; time_x = time_y = time_z = 0; for(var i = 0; i < 20; i++){ particles[i] = new Particle(); } } function draw(){ background(0,0,0,10); fill(255); // text("by Uriel Sade", width/40, height- height/40); noFill(); field = []; time_y = 0; for(var y = 0; y < rows; y++){ time_x = 0; for(var x = 0; x < cols; x++){ push(); translate(x*scl + scl/2, y*scl + scl/2); var direction_vector = p5.Vector.fromAngle(noise(time_x, time_y, time_z)*2*PI + PI); rotate(direction_vector.heading()); stroke(0,255,0, 7); strokeWeight(1); line(-scl/6,0,scl/6,0); pop(); field[y* cols + x] = direction_vector; time_x += time_inc; } time_y += time_inc; time_z += 0.0002; } updateParticles(); } function updateParticles(){ for(var i = 0; i < particles.length; i++){ particles[i].accelerate(field); } } function windowResized(){ setup(); }<|fim▁end|>
canvas.style('z-value', '-1'); canvas.style('opacity', '0.99');
<|file_name|>mitkAccessByItkTest.cpp<|end_file_name|><|fim▁begin|>/*=================================================================== The Medical Imaging Interaction Toolkit (MITK) Copyright (c) German Cancer Research Center, Division of Medical and Biological Informatics. All rights reserved. This software is distributed WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See LICENSE.txt or http://www.mitk.org for details. ===================================================================*/ #include <stdexcept> #include "mitkTestingMacros.h" #include <mitkITKImageImport.h> #include <mitkImageAccessByItk.h> #define TestImageType(type, dim) \ MITK_TEST_CONDITION(typeid(type) == typeid(TPixel) && dim == VDimension, \ "Checking for correct type itk::Image<" #type "," #dim ">") #define TestVectorImageType(type, dim) \ MITK_TEST_CONDITION(typeid(type) == typeid(TPixel) && dim == VDimension && \ typeid(itk::VariableLengthVector<type>) == typeid(typename ImageType::PixelType), \ "Checking for correct type itk::VectorImage<" #type "," #dim ">") class AccessByItkTest { public: typedef AccessByItkTest Self; typedef itk::Image<int, 2> IntImage2D; typedef itk::Image<int, 3> IntImage3D; typedef itk::Image<float, 2> FloatImage2D; typedef itk::Image<float, 3> FloatImage3D; typedef itk::VectorImage<int, 3> IntVectorImage3D; enum EImageType { Unknown = 0, Int2D, Int3D, Float2D, Float3D }; void testAccessByItk() { mitk::Image::Pointer mitkIntImage2D = createMitkImage<IntImage2D>(); mitk::Image::ConstPointer mitkIntImage3D(createMitkImage<IntImage3D>()); mitk::Image::ConstPointer mitkFloatImage2D(createMitkImage<FloatImage2D>()); mitk::Image::Pointer mitkFloatImage3D = createMitkImage<FloatImage3D>(); AccessByItk(mitkIntImage2D, AccessItkImage); AccessByItk(mitkIntImage3D, AccessItkImage); AccessByItk(mitkFloatImage2D, AccessItkImage); AccessByItk(mitkFloatImage3D, AccessItkImage); AccessByItk_n(mitkIntImage2D, 
AccessItkImage, (Int2D, 2)); AccessByItk_n(mitkIntImage3D, AccessItkImage, (Int3D, 2)); AccessByItk_n(mitkFloatImage2D, AccessItkImage, (Float2D, 2)); AccessByItk_n(mitkFloatImage3D, AccessItkImage, (Float3D, 2)); mitk::Image::Pointer mitkIntVectorImage3D = createMitkImage<IntVectorImage3D>(2); // Test for wrong pixel type (the AccessByItk macro multi-plexes integral // types only by default) MITK_TEST_FOR_EXCEPTION_BEGIN(const mitk::AccessByItkException &) AccessByItk(mitkIntVectorImage3D, AccessItkImage); MITK_TEST_FOR_EXCEPTION_END(const mitk::AccessByItkException &) // Test for correct handling of vector images AccessVectorPixelTypeByItk(mitkIntVectorImage3D, AccessItkImage); AccessVectorPixelTypeByItk_n(mitkIntVectorImage3D, AccessItkImage, (Int3D, 2)); } void testAccessFixedDimensionByItk() { mitk::Image::Pointer mitkIntImage2D = createMitkImage<IntImage2D>(); mitk::Image::ConstPointer mitkIntImage3D(createMitkImage<IntImage3D>()); mitk::Image::ConstPointer mitkFloatImage2D(createMitkImage<FloatImage2D>()); mitk::Image::Pointer mitkFloatImage3D = createMitkImage<FloatImage3D>(); AccessFixedDimensionByItk(mitkIntImage2D, AccessItkImage, 2); AccessFixedDimensionByItk(mitkIntImage3D, AccessItkImage, 3); AccessFixedDimensionByItk(mitkFloatImage2D, AccessItkImage, 2); AccessFixedDimensionByItk(mitkFloatImage3D, AccessItkImage, 3); AccessFixedDimensionByItk_n(mitkIntImage2D, AccessItkImage, 2, (Int2D, 2)); AccessFixedDimensionByItk_n(mitkIntImage3D, AccessItkImage, 3, (Int3D, 2)); AccessFixedDimensionByItk_n(mitkFloatImage2D, AccessItkImage, 2, (Float2D, 2)); AccessFixedDimensionByItk_n(mitkFloatImage3D, AccessItkImage, 3, (Float3D, 2)); // Test for wrong dimension MITK_TEST_FOR_EXCEPTION_BEGIN(const mitk::AccessByItkException &) AccessFixedDimensionByItk(mitkFloatImage3D, AccessItkImage, 2); MITK_TEST_FOR_EXCEPTION_END(const mitk::AccessByItkException &) MITK_TEST_FOR_EXCEPTION_BEGIN(const mitk::AccessByItkException &) AccessFixedDimensionByItk_n(mitkFloatImage3D, 
AccessItkImage, 2, (Float3D, 2)); MITK_TEST_FOR_EXCEPTION_END(const mitk::AccessByItkException &) } void testAccessFixedPixelTypeByItk() { mitk::Image::Pointer mitkIntImage2D = createMitkImage<IntImage2D>(); mitk::Image::ConstPointer mitkIntImage3D(createMitkImage<IntImage3D>()); mitk::Image::ConstPointer mitkFloatImage2D(createMitkImage<FloatImage2D>()); mitk::Image::Pointer mitkFloatImage3D = createMitkImage<FloatImage3D>(); AccessFixedPixelTypeByItk(mitkIntImage2D, AccessItkImage, (int)(float)); AccessFixedPixelTypeByItk(mitkIntImage3D, AccessItkImage, (int)(float)); AccessFixedPixelTypeByItk(mitkFloatImage2D, AccessItkImage, (int)(float)); AccessFixedPixelTypeByItk(mitkFloatImage3D, AccessItkImage, (int)(float)); AccessFixedPixelTypeByItk_n(mitkIntImage2D, AccessItkImage, (int)(float), (Int2D, 2)); AccessFixedPixelTypeByItk_n(mitkIntImage3D, AccessItkImage, (int)(float), (Int3D, 2)); AccessFixedPixelTypeByItk_n(mitkFloatImage2D, AccessItkImage, (int)(float), (Float2D, 2)); AccessFixedPixelTypeByItk_n(mitkFloatImage3D, AccessItkImage, (int)(float), (Float3D, 2)); // Test for wrong pixel type MITK_TEST_FOR_EXCEPTION_BEGIN(const mitk::AccessByItkException &) AccessFixedPixelTypeByItk(mitkFloatImage3D, AccessItkImage, (int)); MITK_TEST_FOR_EXCEPTION_END(const mitk::AccessByItkException &) MITK_TEST_FOR_EXCEPTION_BEGIN(const mitk::AccessByItkException &) AccessFixedPixelTypeByItk_n(mitkFloatImage3D, AccessItkImage, (int), (Float3D, 2)); MITK_TEST_FOR_EXCEPTION_END(const mitk::AccessByItkException &) } void testAccessFixedTypeByItk() { mitk::Image::Pointer mitkIntImage2D = createMitkImage<IntImage2D>(); mitk::Image::ConstPointer mitkIntImage3D(createMitkImage<IntImage3D>()); mitk::Image::ConstPointer mitkFloatImage2D(createMitkImage<FloatImage2D>()); mitk::Image::Pointer mitkFloatImage3D = createMitkImage<FloatImage3D>(); AccessFixedTypeByItk(mitkIntImage2D, AccessItkImage, (int)(float), (2)(3)); AccessFixedTypeByItk(mitkIntImage3D, AccessItkImage, (int)(float), 
(2)(3)); AccessFixedTypeByItk(mitkFloatImage2D, AccessItkImage, (int)(float), (2)(3)); AccessFixedTypeByItk(mitkFloatImage3D, AccessItkImage, (int)(float), (2)(3)); AccessFixedTypeByItk_n(mitkIntImage2D, AccessItkImage, (int)(float), (2)(3), (Int2D, 2)); AccessFixedTypeByItk_n(mitkIntImage3D, AccessItkImage, (int)(float), (2)(3), (Int3D, 2)); AccessFixedTypeByItk_n(mitkFloatImage2D, AccessItkImage, (int)(float), (2)(3), (Float2D, 2)); AccessFixedTypeByItk_n(mitkFloatImage3D, AccessItkImage, (int)(float), (2)(3), (Float3D, 2)); // Test for wrong dimension MITK_TEST_FOR_EXCEPTION_BEGIN(const mitk::AccessByItkException &) AccessFixedTypeByItk(mitkFloatImage3D, AccessItkImage, (float), (2)); MITK_TEST_FOR_EXCEPTION_END(const mitk::AccessByItkException &) MITK_TEST_FOR_EXCEPTION_BEGIN(const mitk::AccessByItkException &) AccessFixedTypeByItk_n(mitkFloatImage3D, AccessItkImage, (float), (2), (Float3D, 2)); MITK_TEST_FOR_EXCEPTION_END(const mitk::AccessByItkException &) // Test for wrong pixel type MITK_TEST_FOR_EXCEPTION_BEGIN(const mitk::AccessByItkException &) AccessFixedTypeByItk(mitkFloatImage3D, AccessItkImage, (int), (3)); MITK_TEST_FOR_EXCEPTION_END(const mitk::AccessByItkException &) MITK_TEST_FOR_EXCEPTION_BEGIN(const mitk::AccessByItkException &) AccessFixedTypeByItk_n(mitkFloatImage3D, AccessItkImage, (int), (3), (Float3D, 2)); MITK_TEST_FOR_EXCEPTION_END(const mitk::AccessByItkException &) } void testAccessTwoImagesFixedDimensionByItk() { mitk::Image::Pointer mitkIntImage2D = createMitkImage<IntImage2D>(); mitk::Image::ConstPointer mitkFloatImage2D(createMitkImage<FloatImage2D>()); AccessTwoImagesFixedDimensionByItk(mitkIntImage2D, mitkFloatImage2D, AccessTwoItkImages, 2); } template <typename TPixel, unsigned int VDimension> void AccessItkImage(const itk::Image<TPixel, VDimension> *, EImageType param1 = Unknown, int param2 = 0, int param3 = 0) { switch (param1) { case Int2D: TestImageType(int, 2) break; case Int3D: TestImageType(int, 3) break; case Float2D: 
TestImageType(float, 2) break; case Float3D: TestImageType(float, 3) break; default: break; } if (param2) { MITK_TEST_CONDITION(param2 == 2, "Checking for correct second parameter") } if (param3) { MITK_TEST_CONDITION(param3 == 3, "Checking for correct third parameter") } } template <typename TPixel, unsigned int VDimension> void AccessItkImage(itk::VectorImage<TPixel, VDimension> *, EImageType param1 = Unknown, int param2 = 0, int param3 = 0) { typedef itk::VectorImage<TPixel, VDimension> ImageType; switch (param1) { case Int2D: TestVectorImageType(int, 2) break; case Int3D: TestVectorImageType(int, 3) break; case Float2D:<|fim▁hole|> default: break; } if (param2) { MITK_TEST_CONDITION(param2 == 2, "Checking for correct second parameter") } if (param3) { MITK_TEST_CONDITION(param3 == 3, "Checking for correct third parameter") } } private: template <typename TPixel1, unsigned int VDimension1, typename TPixel2, unsigned int VDimension2> void AccessTwoItkImages(itk::Image<TPixel1, VDimension1> * /*itkImage1*/, itk::Image<TPixel2, VDimension2> * /*itkImage2*/) { if (!(typeid(int) == typeid(TPixel1) && typeid(float) == typeid(TPixel2) && VDimension1 == 2 && VDimension2 == 2)) { throw std::runtime_error("Image type mismatch"); } } template <typename ImageType> mitk::Image::Pointer createMitkImage() { typename ImageType::Pointer itkImage = ImageType::New(); typename ImageType::IndexType start; start.Fill(0); typename ImageType::SizeType size; size.Fill(3); typename ImageType::RegionType region; region.SetSize(size); region.SetIndex(start); itkImage->SetRegions(region); itkImage->Allocate(); return mitk::GrabItkImageMemory(itkImage); } template <typename ImageType> mitk::Image::Pointer createMitkImage(std::size_t vectorLength) { typename ImageType::Pointer itkImage = ImageType::New(); typename ImageType::IndexType start; start.Fill(0); typename ImageType::SizeType size; size.Fill(3); typename ImageType::RegionType region; region.SetSize(size); region.SetIndex(start); 
itkImage->SetRegions(region); itkImage->SetVectorLength(vectorLength); itkImage->Allocate(); return mitk::GrabItkImageMemory(itkImage); } }; int mitkAccessByItkTest(int /*argc*/, char * /*argv*/ []) { MITK_TEST_BEGIN("AccessByItk") AccessByItkTest accessTest; MITK_TEST_OUTPUT(<< "Testing AccessByItk macro") accessTest.testAccessByItk(); MITK_TEST_OUTPUT(<< "Testing AccessFixedDimensionByItk macro") accessTest.testAccessFixedDimensionByItk(); MITK_TEST_OUTPUT(<< "Testing AccessFixedTypeByItk macro") accessTest.testAccessFixedTypeByItk(); MITK_TEST_OUTPUT(<< "Testing AccessFixedPixelTypeByItk macro") accessTest.testAccessFixedPixelTypeByItk(); MITK_TEST_OUTPUT(<< "Testing AccessTwoImagesFixedDimensionByItk macro") accessTest.testAccessTwoImagesFixedDimensionByItk(); MITK_TEST_END() }<|fim▁end|>
TestVectorImageType(float, 2) break; case Float3D: TestVectorImageType(float, 3) break;
<|file_name|>TestCase_Web_Baspd.py<|end_file_name|><|fim▁begin|>#coding=utf-8 #author='Shichao-Dong' import unittest import Web_Method_Baspd import Public_Base_Method import requests import time import HTMLTestRunner class ST_Bas_pd(unittest.TestCase): u'商品功能性测试' def setUp(self): global cookie r = Public_Base_Method.login_func("172.31.3.73:6020", "dongshichao", "dong", "a111111") cookie="WQSESSIONID="+"".join(r.cookies["WQSESSIONID"]) print cookie def tearDown(self): pass def test_pd_queryData_all(self): u'查询所有商品' r=Web_Method_Baspd.pd_query(cookie) print r.status_code print r.content self.assertTrue('r.status_code==200',msg='获取商品数据失败') def test_pd_queryData_onsale(self): u'查询在售商品' r=Web_Method_Baspd.pd_query_onsale(cookie) print r.status_code print r.content self.assertTrue('r.status_code==200',msg='获取在售商品失败') def test_pd_queryData_offsale(self): u'查询停售商品' r=Web_Method_Baspd.pd_query_offsale(cookie) print r.status_code<|fim▁hole|> if __name__=="__main__": suite=unittest.TestSuite() suite.addTest(ST_Bas_pd("test_pd_queryData_all")) suite.addTest(ST_Bas_pd("test_pd_queryData_onsale")) suite.addTest(ST_Bas_pd("test_pd_queryData_offsale")) runner=unittest.TextTestRunner() runner.run(suite) # #定义报告路径及名称 # now = time.strftime("%Y-%m-%d-%H-%M-%S",time.localtime(time.time())) # filename=r'D://Ptest/Report/'+now+'result.html' # fp=open(filename,'wb') # runner=HTMLTestRunner.HTMLTestRunner(stream=fp,title='test result',description=u'测试情况:') # # #执行测试 # runner.run(suite) # fp.close()<|fim▁end|>
self.assertTrue('r.status_code==200',msg='获取停售商品失败')
<|file_name|>description.py<|end_file_name|><|fim▁begin|># ---------------------------------------------------------------------- # Numenta Platform for Intelligent Computing (NuPIC) # Copyright (C) 2013, Numenta, Inc. Unless you have purchased from # Numenta, Inc. a separate commercial license for this software code, the # following terms and conditions apply: # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 as # published by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # See the GNU General Public License for more details. #<|fim▁hole|># along with this program. If not, see http://www.gnu.org/licenses. # # http://numenta.org/licenses/ # ---------------------------------------------------------------------- """ Tests the following set of sequences: z-a-b-c: (1X) a-b-c: (6X) a-d-e: (2X) a-f-g-a-h: (1X) We want to insure that when we see 'a', that we predict 'b' with highest confidence, then 'd', then 'f' and 'h' with equally low confidence. We expect the following prediction scores: inputPredScore_at1 : 0.7 inputPredScore_at2 : 1.0 inputPredScore_at3 : 1.0 inputPredScore_at4 : 1.0 """ from nupic.frameworks.prediction.helpers import importBaseDescription config = dict( sensorVerbosity=0, spVerbosity=0, tpVerbosity=0, ppVerbosity=2, filenameTrain = 'confidence/confidence2.csv', filenameTest = 'confidence/confidence2.csv', iterationCountTrain=None, iterationCountTest=None, trainTPRepeats = 5, trainTP=True, ) mod = importBaseDescription('../base/description.py', config) locals().update(mod.__dict__)<|fim▁end|>
# You should have received a copy of the GNU General Public License
<|file_name|>viewImpl.ts<|end_file_name|><|fim▁begin|>/*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ 'use strict'; import { onUnexpectedError } from 'vs/base/common/errors'; import { IDisposable } from 'vs/base/common/lifecycle'; import * as dom from 'vs/base/browser/dom'; import { FastDomNode, createFastDomNode } from 'vs/base/browser/fastDomNode'; import { ICommandService } from 'vs/platform/commands/common/commands'; import { Range } from 'vs/editor/common/core/range'; import { ViewEventHandler } from 'vs/editor/common/viewModel/viewEventHandler'; import { Configuration } from 'vs/editor/browser/config/configuration'; import { TextAreaHandler, ITextAreaHandlerHelper } from 'vs/editor/browser/controller/textAreaHandler'; import { PointerHandler } from 'vs/editor/browser/controller/pointerHandler'; import * as editorBrowser from 'vs/editor/browser/editorBrowser'; import { ViewController, ExecCoreEditorCommandFunc } from 'vs/editor/browser/view/viewController'; import { ViewEventDispatcher } from 'vs/editor/common/view/viewEventDispatcher'; import { ContentViewOverlays, MarginViewOverlays } from 'vs/editor/browser/view/viewOverlays'; import { ViewContentWidgets } from 'vs/editor/browser/viewParts/contentWidgets/contentWidgets'; import { CurrentLineHighlightOverlay } from 'vs/editor/browser/viewParts/currentLineHighlight/currentLineHighlight'; import { CurrentLineMarginHighlightOverlay } from 'vs/editor/browser/viewParts/currentLineMarginHighlight/currentLineMarginHighlight'; import { DecorationsOverlay } from 'vs/editor/browser/viewParts/decorations/decorations'; import { GlyphMarginOverlay } from 'vs/editor/browser/viewParts/glyphMargin/glyphMargin'; import { 
LineNumbersOverlay } from 'vs/editor/browser/viewParts/lineNumbers/lineNumbers'; import { IndentGuidesOverlay } from 'vs/editor/browser/viewParts/indentGuides/indentGuides'; import { ViewLines } from 'vs/editor/browser/viewParts/lines/viewLines'; import { Margin } from 'vs/editor/browser/viewParts/margin/margin'; import { LinesDecorationsOverlay } from 'vs/editor/browser/viewParts/linesDecorations/linesDecorations'; import { MarginViewLineDecorationsOverlay } from 'vs/editor/browser/viewParts/marginDecorations/marginDecorations'; import { ViewOverlayWidgets } from 'vs/editor/browser/viewParts/overlayWidgets/overlayWidgets'; import { DecorationsOverviewRuler } from 'vs/editor/browser/viewParts/overviewRuler/decorationsOverviewRuler'; import { OverviewRuler } from 'vs/editor/browser/viewParts/overviewRuler/overviewRuler'; import { Rulers } from 'vs/editor/browser/viewParts/rulers/rulers'; import { ScrollDecorationViewPart } from 'vs/editor/browser/viewParts/scrollDecoration/scrollDecoration'; import { SelectionsOverlay } from 'vs/editor/browser/viewParts/selections/selections'; import { ViewCursors } from 'vs/editor/browser/viewParts/viewCursors/viewCursors'; import { ViewZones } from 'vs/editor/browser/viewParts/viewZones/viewZones'; import { ViewPart, PartFingerprint, PartFingerprints } from 'vs/editor/browser/view/viewPart'; import { ViewContext } from 'vs/editor/common/view/viewContext'; import { IViewModel } from 'vs/editor/common/viewModel/viewModel'; import { RenderingContext } from 'vs/editor/common/view/renderingContext'; import { IPointerHandlerHelper } from 'vs/editor/browser/controller/mouseHandler'; import { ViewOutgoingEvents } from 'vs/editor/browser/view/viewOutgoingEvents'; import { ViewportData } from 'vs/editor/common/viewLayout/viewLinesViewportData'; import { EditorScrollbar } from 'vs/editor/browser/viewParts/editorScrollbar/editorScrollbar'; import { Minimap } from 'vs/editor/browser/viewParts/minimap/minimap'; import * as viewEvents from 
'vs/editor/common/view/viewEvents'; import { IThemeService, getThemeTypeSelector } from 'vs/platform/theme/common/themeService'; import { Cursor } from 'vs/editor/common/controller/cursor'; import { IMouseEvent } from 'vs/base/browser/mouseEvent'; export interface IContentWidgetData { widget: editorBrowser.IContentWidget; position: editorBrowser.IContentWidgetPosition; } export interface IOverlayWidgetData { widget: editorBrowser.IOverlayWidget; position: editorBrowser.IOverlayWidgetPosition; } export class View extends ViewEventHandler { private eventDispatcher: ViewEventDispatcher; private _scrollbar: EditorScrollbar; private _context: ViewContext; private _cursor: Cursor; // The view lines private viewLines: ViewLines; // These are parts, but we must do some API related calls on them, so we keep a reference private viewZones: ViewZones; private contentWidgets: ViewContentWidgets; private overlayWidgets: ViewOverlayWidgets; private viewCursors: ViewCursors; private viewParts: ViewPart[]; private readonly _textAreaHandler: TextAreaHandler; private readonly pointerHandler: PointerHandler; private readonly outgoingEvents: ViewOutgoingEvents; // Dom nodes private linesContent: FastDomNode<HTMLElement>; public domNode: FastDomNode<HTMLElement>; private overflowGuardContainer: FastDomNode<HTMLElement>; // Actual mutable state private _isDisposed: boolean; private _renderAnimationFrame: IDisposable; constructor( commandService: ICommandService, configuration: Configuration, themeService: IThemeService, model: IViewModel, cursor: Cursor, execCoreEditorCommandFunc: ExecCoreEditorCommandFunc ) { super(); this._isDisposed = false; this._cursor = cursor; this._renderAnimationFrame = null; this.outgoingEvents = new ViewOutgoingEvents(model); let viewController = new ViewController(configuration, model, execCoreEditorCommandFunc, this.outgoingEvents, commandService); // The event dispatcher will always go through _renderOnce before dispatching any events this.eventDispatcher = 
new ViewEventDispatcher((callback: () => void) => this._renderOnce(callback)); // Ensure the view is the first event handler in order to update the layout this.eventDispatcher.addEventHandler(this); // The view context is passed on to most classes (basically to reduce param. counts in ctors) this._context = new ViewContext(configuration, themeService.getTheme(), model, this.eventDispatcher); this._register(themeService.onThemeChange(theme => { this._context.theme = theme; this.eventDispatcher.emit(new viewEvents.ViewThemeChangedEvent()); this.render(true, false); })); this.viewParts = []; // Keyboard handler this._textAreaHandler = new TextAreaHandler(this._context, viewController, this.createTextAreaHandlerHelper()); this.viewParts.push(this._textAreaHandler); this.createViewParts(); this._setLayout(); // Pointer handler this.pointerHandler = new PointerHandler(this._context, viewController, this.createPointerHandlerHelper()); this._register(model.addEventListener((events: viewEvents.ViewEvent[]) => { this.eventDispatcher.emitMany(events); })); this._register(this._cursor.addEventListener((events: viewEvents.ViewEvent[]) => { this.eventDispatcher.emitMany(events); })); } private createViewParts(): void { // These two dom nodes must be constructed up front, since references are needed in the layout provider (scrolling & co.) 
this.linesContent = createFastDomNode(document.createElement('div')); this.linesContent.setClassName('lines-content' + ' monaco-editor-background'); this.linesContent.setPosition('absolute'); this.domNode = createFastDomNode(document.createElement('div')); this.domNode.setClassName(this.getEditorClassName()); this.overflowGuardContainer = createFastDomNode(document.createElement('div')); PartFingerprints.write(this.overflowGuardContainer, PartFingerprint.OverflowGuard); this.overflowGuardContainer.setClassName('overflow-guard'); this._scrollbar = new EditorScrollbar(this._context, this.linesContent, this.domNode, this.overflowGuardContainer); this.viewParts.push(this._scrollbar); // View Lines this.viewLines = new ViewLines(this._context, this.linesContent); // View Zones this.viewZones = new ViewZones(this._context); this.viewParts.push(this.viewZones); // Decorations overview ruler let decorationsOverviewRuler = new DecorationsOverviewRuler(this._context); this.viewParts.push(decorationsOverviewRuler); let scrollDecoration = new ScrollDecorationViewPart(this._context); this.viewParts.push(scrollDecoration); let contentViewOverlays = new ContentViewOverlays(this._context); this.viewParts.push(contentViewOverlays); contentViewOverlays.addDynamicOverlay(new CurrentLineHighlightOverlay(this._context)); contentViewOverlays.addDynamicOverlay(new SelectionsOverlay(this._context)); contentViewOverlays.addDynamicOverlay(new DecorationsOverlay(this._context)); contentViewOverlays.addDynamicOverlay(new IndentGuidesOverlay(this._context)); let marginViewOverlays = new MarginViewOverlays(this._context); this.viewParts.push(marginViewOverlays); marginViewOverlays.addDynamicOverlay(new CurrentLineMarginHighlightOverlay(this._context)); marginViewOverlays.addDynamicOverlay(new GlyphMarginOverlay(this._context)); marginViewOverlays.addDynamicOverlay(new MarginViewLineDecorationsOverlay(this._context)); marginViewOverlays.addDynamicOverlay(new 
LinesDecorationsOverlay(this._context)); marginViewOverlays.addDynamicOverlay(new LineNumbersOverlay(this._context)); let margin = new Margin(this._context); margin.getDomNode().appendChild(this.viewZones.marginDomNode); margin.getDomNode().appendChild(marginViewOverlays.getDomNode()); this.viewParts.push(margin); // Content widgets this.contentWidgets = new ViewContentWidgets(this._context, this.domNode); this.viewParts.push(this.contentWidgets); this.viewCursors = new ViewCursors(this._context); this.viewParts.push(this.viewCursors); // Overlay widgets this.overlayWidgets = new ViewOverlayWidgets(this._context); this.viewParts.push(this.overlayWidgets); let rulers = new Rulers(this._context); this.viewParts.push(rulers); let minimap = new Minimap(this._context); this.viewParts.push(minimap); // -------------- Wire dom nodes up if (decorationsOverviewRuler) { let overviewRulerData = this._scrollbar.getOverviewRulerLayoutInfo(); overviewRulerData.parent.insertBefore(decorationsOverviewRuler.getDomNode(), overviewRulerData.insertBefore); } this.linesContent.appendChild(contentViewOverlays.getDomNode()); this.linesContent.appendChild(rulers.domNode); this.linesContent.appendChild(this.viewZones.domNode); this.linesContent.appendChild(this.viewLines.getDomNode()); this.linesContent.appendChild(this.contentWidgets.domNode); this.linesContent.appendChild(this.viewCursors.getDomNode()); this.overflowGuardContainer.appendChild(margin.getDomNode()); this.overflowGuardContainer.appendChild(this._scrollbar.getDomNode()); this.overflowGuardContainer.appendChild(scrollDecoration.getDomNode()); this.overflowGuardContainer.appendChild(this._textAreaHandler.textArea); this.overflowGuardContainer.appendChild(this._textAreaHandler.textAreaCover); this.overflowGuardContainer.appendChild(this.overlayWidgets.getDomNode()); this.overflowGuardContainer.appendChild(minimap.getDomNode()); this.domNode.appendChild(this.overflowGuardContainer); 
this.domNode.appendChild(this.contentWidgets.overflowingContentWidgetsDomNode); } private _flushAccumulatedAndRenderNow(): void { this._renderNow(); } private createPointerHandlerHelper(): IPointerHandlerHelper { return { viewDomNode: this.domNode.domNode, linesContentDomNode: this.linesContent.domNode, focusTextArea: () => { this.focus(); }, getLastViewCursorsRenderData: () => { return this.viewCursors.getLastRenderData() || []; }, shouldSuppressMouseDownOnViewZone: (viewZoneId: number) => { return this.viewZones.shouldSuppressMouseDownOnViewZone(viewZoneId); }, shouldSuppressMouseDownOnWidget: (widgetId: string) => { return this.contentWidgets.shouldSuppressMouseDownOnWidget(widgetId); }, getPositionFromDOMInfo: (spanNode: HTMLElement, offset: number) => { this._flushAccumulatedAndRenderNow(); return this.viewLines.getPositionFromDOMInfo(spanNode, offset); }, visibleRangeForPosition2: (lineNumber: number, column: number) => { this._flushAccumulatedAndRenderNow(); let visibleRanges = this.viewLines.visibleRangesForRange2(new Range(lineNumber, column, lineNumber, column)); if (!visibleRanges) { return null; } return visibleRanges[0]; }, getLineWidth: (lineNumber: number) => { this._flushAccumulatedAndRenderNow(); return this.viewLines.getLineWidth(lineNumber); } }; } private createTextAreaHandlerHelper(): ITextAreaHandlerHelper { return { visibleRangeForPositionRelativeToEditor: (lineNumber: number, column: number) => { this._flushAccumulatedAndRenderNow(); let visibleRanges = this.viewLines.visibleRangesForRange2(new Range(lineNumber, column, lineNumber, column)); if (!visibleRanges) { return null; } return visibleRanges[0]; } }; } private _setLayout(): void { const layoutInfo = this._context.configuration.editor.layoutInfo; this.domNode.setWidth(layoutInfo.width); this.domNode.setHeight(layoutInfo.height); this.overflowGuardContainer.setWidth(layoutInfo.width); this.overflowGuardContainer.setHeight(layoutInfo.height); <|fim▁hole|> 
this.linesContent.setHeight(1000000); } private getEditorClassName() { let focused = this._textAreaHandler.isFocused() ? ' focused' : ''; return this._context.configuration.editor.editorClassName + ' ' + getThemeTypeSelector(this._context.theme.type) + focused; } // --- begin event handlers public onConfigurationChanged(e: viewEvents.ViewConfigurationChangedEvent): boolean { if (e.editorClassName) { this.domNode.setClassName(this.getEditorClassName()); } if (e.layoutInfo) { this._setLayout(); } return false; } public onFocusChanged(e: viewEvents.ViewFocusChangedEvent): boolean { this.domNode.setClassName(this.getEditorClassName()); if (e.isFocused) { this.outgoingEvents.emitViewFocusGained(); } else { this.outgoingEvents.emitViewFocusLost(); } return false; } public onScrollChanged(e: viewEvents.ViewScrollChangedEvent): boolean { this.outgoingEvents.emitScrollChanged(e); return false; } public onThemeChanged(e: viewEvents.ViewThemeChangedEvent): boolean { this.domNode.setClassName(this.getEditorClassName()); return false; } // --- end event handlers public dispose(): void { this._isDisposed = true; if (this._renderAnimationFrame !== null) { this._renderAnimationFrame.dispose(); this._renderAnimationFrame = null; } this.eventDispatcher.removeEventHandler(this); this.outgoingEvents.dispose(); this.pointerHandler.dispose(); this.viewLines.dispose(); // Destroy view parts for (let i = 0, len = this.viewParts.length; i < len; i++) { this.viewParts[i].dispose(); } this.viewParts = []; super.dispose(); } private _renderOnce(callback: () => any): any { let r = safeInvokeNoArg(callback); this._scheduleRender(); return r; } private _scheduleRender(): void { if (this._renderAnimationFrame === null) { this._renderAnimationFrame = dom.runAtThisOrScheduleAtNextAnimationFrame(this._onRenderScheduled.bind(this), 100); } } private _onRenderScheduled(): void { this._renderAnimationFrame = null; this._flushAccumulatedAndRenderNow(); } private _renderNow(): void { safeInvokeNoArg(() 
=> this._actualRender()); } private _getViewPartsToRender(): ViewPart[] { let result: ViewPart[] = [], resultLen = 0; for (let i = 0, len = this.viewParts.length; i < len; i++) { let viewPart = this.viewParts[i]; if (viewPart.shouldRender()) { result[resultLen++] = viewPart; } } return result; } private _actualRender(): void { if (!dom.isInDOM(this.domNode.domNode)) { return; } let viewPartsToRender = this._getViewPartsToRender(); if (!this.viewLines.shouldRender() && viewPartsToRender.length === 0) { // Nothing to render return; } const partialViewportData = this._context.viewLayout.getLinesViewportData(); this._context.model.setViewport(partialViewportData.startLineNumber, partialViewportData.endLineNumber, partialViewportData.centeredLineNumber); let viewportData = new ViewportData( this._cursor.getViewSelections(), partialViewportData, this._context.viewLayout.getWhitespaceViewportData(), this._context.model ); if (this.contentWidgets.shouldRender()) { // Give the content widgets a chance to set their max width before a possible synchronous layout this.contentWidgets.onBeforeRender(viewportData); } if (this.viewLines.shouldRender()) { this.viewLines.renderText(viewportData); this.viewLines.onDidRender(); // Rendering of viewLines might cause scroll events to occur, so collect view parts to render again viewPartsToRender = this._getViewPartsToRender(); } let renderingContext = new RenderingContext(this._context.viewLayout, viewportData, this.viewLines); // Render the rest of the parts for (let i = 0, len = viewPartsToRender.length; i < len; i++) { let viewPart = viewPartsToRender[i]; viewPart.prepareRender(renderingContext); } for (let i = 0, len = viewPartsToRender.length; i < len; i++) { let viewPart = viewPartsToRender[i]; viewPart.render(renderingContext); viewPart.onDidRender(); } } // --- BEGIN CodeEditor helpers public delegateVerticalScrollbarMouseDown(browserEvent: IMouseEvent): void { this._scrollbar.delegateVerticalScrollbarMouseDown(browserEvent); } 
public getOffsetForColumn(modelLineNumber: number, modelColumn: number): number { let modelPosition = this._context.model.validateModelPosition({ lineNumber: modelLineNumber, column: modelColumn }); let viewPosition = this._context.model.coordinatesConverter.convertModelPositionToViewPosition(modelPosition); this._flushAccumulatedAndRenderNow(); let visibleRanges = this.viewLines.visibleRangesForRange2(new Range(viewPosition.lineNumber, viewPosition.column, viewPosition.lineNumber, viewPosition.column)); if (!visibleRanges) { return -1; } return visibleRanges[0].left; } public getTargetAtClientPoint(clientX: number, clientY: number): editorBrowser.IMouseTarget { return this.pointerHandler.getTargetAtClientPoint(clientX, clientY); } public getInternalEventBus(): ViewOutgoingEvents { return this.outgoingEvents; } public createOverviewRuler(cssClassName: string, minimumHeight: number, maximumHeight: number): OverviewRuler { return new OverviewRuler(this._context, cssClassName, minimumHeight, maximumHeight); } public change(callback: (changeAccessor: editorBrowser.IViewZoneChangeAccessor) => any): boolean { let zonesHaveChanged = false; this._renderOnce(() => { let changeAccessor: editorBrowser.IViewZoneChangeAccessor = { addZone: (zone: editorBrowser.IViewZone): number => { zonesHaveChanged = true; return this.viewZones.addZone(zone); }, removeZone: (id: number): void => { if (!id) { return; } zonesHaveChanged = this.viewZones.removeZone(id) || zonesHaveChanged; }, layoutZone: (id: number): void => { if (!id) { return; } zonesHaveChanged = this.viewZones.layoutZone(id) || zonesHaveChanged; } }; safeInvoke1Arg(callback, changeAccessor); // Invalidate changeAccessor changeAccessor.addZone = null; changeAccessor.removeZone = null; if (zonesHaveChanged) { this._context.viewLayout.onHeightMaybeChanged(); this._context.privateViewEventBus.emit(new viewEvents.ViewZonesChangedEvent()); } }); return zonesHaveChanged; } public render(now: boolean, everything: boolean): void { 
if (everything) { // Force everything to render... this.viewLines.forceShouldRender(); for (let i = 0, len = this.viewParts.length; i < len; i++) { let viewPart = this.viewParts[i]; viewPart.forceShouldRender(); } } if (now) { this._flushAccumulatedAndRenderNow(); } else { this._scheduleRender(); } } public setAriaActiveDescendant(id: string): void { this._textAreaHandler.setAriaActiveDescendant(id); } public focus(): void { this._textAreaHandler.focusTextArea(); } public isFocused(): boolean { return this._textAreaHandler.isFocused(); } public addContentWidget(widgetData: IContentWidgetData): void { this.contentWidgets.addWidget(widgetData.widget); this.layoutContentWidget(widgetData); this._scheduleRender(); } public layoutContentWidget(widgetData: IContentWidgetData): void { let newPosition = widgetData.position ? widgetData.position.position : null; let newPreference = widgetData.position ? widgetData.position.preference : null; this.contentWidgets.setWidgetPosition(widgetData.widget, newPosition, newPreference); this._scheduleRender(); } public removeContentWidget(widgetData: IContentWidgetData): void { this.contentWidgets.removeWidget(widgetData.widget); this._scheduleRender(); } public addOverlayWidget(widgetData: IOverlayWidgetData): void { this.overlayWidgets.addWidget(widgetData.widget); this.layoutOverlayWidget(widgetData); this._scheduleRender(); } public layoutOverlayWidget(widgetData: IOverlayWidgetData): void { let newPreference = widgetData.position ? 
widgetData.position.preference : null; let shouldRender = this.overlayWidgets.setWidgetPosition(widgetData.widget, newPreference); if (shouldRender) { this._scheduleRender(); } } public removeOverlayWidget(widgetData: IOverlayWidgetData): void { this.overlayWidgets.removeWidget(widgetData.widget); this._scheduleRender(); } // --- END CodeEditor helpers } function safeInvokeNoArg(func: Function): any { try { return func(); } catch (e) { onUnexpectedError(e); } } function safeInvoke1Arg(func: Function, arg1: any): any { try { return func(arg1); } catch (e) { onUnexpectedError(e); } }<|fim▁end|>
this.linesContent.setWidth(1000000);
<|file_name|>main.cpp<|end_file_name|><|fim▁begin|>/// bedrock/main.cpp /// ================= /// Process entry point for Bedrock server. /// #include <dlfcn.h> #include <iostream> #include <signal.h> #include <sys/resource.h> #include <sys/stat.h> #include <bedrockVersion.h> #include <BedrockServer.h> #include <BedrockPlugin.h> #include <plugins/Cache.h> #include <plugins/DB.h> #include <plugins/Jobs.h> #include <plugins/MySQL.h> #include <libstuff/libstuff.h> #include <sqlitecluster/SQLite.h> ///////////////////////////////////////////////////////////////////////////// ///////////////////////////////////////////////////////////////////////////// void RetrySystem(const string& command) { // We might be waiting for some threads to unlink, so retry a few times int numRetries = 3; SINFO("Trying to run '" << command << "' up to " << numRetries << " times..."); while (numRetries--) { // Try it and see if it works int returnCode = system(command.c_str()); if (returnCode) { // Didn't work SWARN("'" << command << "' failed with return code " << returnCode << ", waiting 5s and retrying " << numRetries << " more times"); this_thread::sleep_for(chrono::seconds(5)); } else { // Done! 
SINFO("Successfully ran '" << command << "'"); return; } } // Didn't work -- fatal error SERROR("Failed to run '" << command << "', aborting."); } ///////////////////////////////////////////////////////////////////////////// ///////////////////////////////////////////////////////////////////////////// void VacuumDB(const string& db) { RetrySystem("sqlite3 " + db + " 'VACUUM;'"); } ///////////////////////////////////////////////////////////////////////////// ///////////////////////////////////////////////////////////////////////////// #define BACKUP_DIR "/var/tmp/" void BackupDB(const string& dbPath) { const string& dbFile = string(basename((char*)dbPath.c_str())); SINFO("Starting " << dbFile << " database backup."); SASSERT(SFileCopy(dbPath, BACKUP_DIR + dbFile)); SINFO("Finished " << dbFile << " database backup."); const string& dbWalPath = dbPath + "-wal"; SINFO("Checking for existence of " << dbWalPath); if (SFileExists(dbWalPath)) { SALERT("WAL file exists for " << dbFile << ". Backing up"); SASSERT(SFileCopy(dbWalPath, BACKUP_DIR + string(basename((char*)dbWalPath.c_str())))); SINFO("Finished " << dbFile << "-wal database backup."); } const string& dbShmPath = dbPath + "-shm"; SINFO("Checking for existence of " << dbShmPath); if (SFileExists(dbShmPath)) { SALERT("SHM file exists for " << dbFile << ". Backing up"); SASSERT(SFileCopy(dbShmPath, BACKUP_DIR + string(basename((char*)dbShmPath.c_str())))); SINFO("Finished " << dbFile << "-shm database backup."); } } set<string> loadPlugins(SData& args) { list<string> plugins = SParseList(args["-plugins"]); // We'll return the names of the plugins we've loaded, which don't necessarily match the file names we're passed. // Those are stored here. set <string> postProcessedNames; // Register all of our built-in plugins. 
BedrockPlugin::g_registeredPluginList.emplace(make_pair("DB", [](BedrockServer& s){return new BedrockPlugin_DB(s);})); BedrockPlugin::g_registeredPluginList.emplace(make_pair("JOBS", [](BedrockServer& s){return new BedrockPlugin_Jobs(s);})); BedrockPlugin::g_registeredPluginList.emplace(make_pair("CACHE", [](BedrockServer& s){return new BedrockPlugin_Cache(s);})); BedrockPlugin::g_registeredPluginList.emplace(make_pair("MYSQL", [](BedrockServer& s){return new BedrockPlugin_MySQL(s);})); for (string pluginName : plugins) { // If it's one of our standard plugins, just move on to the next one. if (BedrockPlugin::g_registeredPluginList.find(SToUpper(pluginName)) != BedrockPlugin::g_registeredPluginList.end()) { postProcessedNames.emplace(SToUpper(pluginName)); continue; } // Any non-standard plugin is loaded from a shared library. If a name is passed without a trailing '.so', we // will add it, and look for a file with that name. A file should be passed with either a complete absolute // path, or the file should exist in a place that dlopen() can find it (like, /usr/lib). // We look for the 'base name' of the plugin. I.e., the filename excluding a path or extension. We'll look for // a symbol based on this name to call to instantiate our plugin. size_t slash = pluginName.rfind('/'); size_t dot = pluginName.find('.', slash); string name = pluginName.substr(slash + 1, dot - slash - 1); string symbolName = "BEDROCK_PLUGIN_REGISTER_" + SToUpper(name); // Save the base name of the plugin. if(postProcessedNames.find(SToUpper(name)) != postProcessedNames.end()) { SWARN("Duplicate entry for plugin " << name << ", skipping."); continue; } postProcessedNames.insert(SToUpper(name)); // Add the file extension if it's missing. if (!SEndsWith(pluginName, ".so")) { pluginName += ".so"; } // Open the library. 
void* lib = dlopen(pluginName.c_str(), RTLD_NOW);<|fim▁hole|> SWARN("Error loading bedrock plugin " << pluginName << ": " << dlerror()); } else { void* sym = dlsym(lib, symbolName.c_str()); if (!sym) { SWARN("Couldn't find symbol " << symbolName); } else { // Call the plugin registration function with the same name. BedrockPlugin::g_registeredPluginList.emplace(make_pair(SToUpper(name), (BedrockPlugin*(*)(BedrockServer&))sym)); } } } return postProcessedNames; } ///////////////////////////////////////////////////////////////////////////// int main(int argc, char* argv[]) { // Process the command line SData args = SParseCommandLine(argc, argv); if (args.empty()) { // It's valid to run bedrock with no parameters provided, but unusual // -- let's provide some help just in case cout << "Protip: check syslog for details, or run 'bedrock -?' for help" << endl; } // Initialize the sqlite library before any other code has a chance to do anything with it. // Set the logging callback for sqlite errors. SASSERT(sqlite3_config(SQLITE_CONFIG_LOG, SQLite::_sqliteLogCallback, 0) == SQLITE_OK); // Enable memory-mapped files. int64_t mmapSizeGB = args.isSet("-mmapSizeGB") ? stoll(args["-mmapSizeGB"]) : 0; if (mmapSizeGB) { SINFO("Enabling Memory-Mapped I/O with " << mmapSizeGB << " GB."); const int64_t GB = 1024 * 1024 * 1024; SASSERT(sqlite3_config(SQLITE_CONFIG_MMAP_SIZE, mmapSizeGB * GB, 16 * 1024 * GB) == SQLITE_OK); // Max is 16TB } // Disable a mutex around `malloc`, which is *EXTREMELY IMPORTANT* for multi-threaded performance. Without this // setting, all reads are essentially single-threaded as they'll all fight with each other for this mutex. SASSERT(sqlite3_config(SQLITE_CONFIG_MEMSTATUS, 0) == SQLITE_OK); sqlite3_initialize(); SASSERT(sqlite3_threadsafe()); // Disabled by default, but lets really beat it in. This way checkpointing does not need to wait on locks // created in this thread. 
SASSERT(sqlite3_enable_shared_cache(0) == SQLITE_OK); // Fork if requested if (args.isSet("-fork")) { // Do the fork int pid = fork(); SASSERT(pid >= 0); if (pid > 0) { // Successful fork -- write the pidfile (if requested) and exit if (args.isSet("-pidfile")) SASSERT(SFileSave(args["-pidfile"], SToStr(pid))); return 0; } // Daemonize // **NOTE: See http://www-theorie.physik.unizh.ch/~dpotter/howto/daemonize umask(0); SASSERT(setsid() >= 0); SASSERT(chdir("/") >= 0); if (!freopen("/dev/null", "r", stdin) || !freopen("/dev/null", "w", stdout) || !freopen("/dev/null", "w", stderr) ) { cout << "Couldn't daemonize." << endl; return -1; } } if (args.isSet("-version")) { // Just output the version cout << VERSION << endl; return 0; } if (args.isSet("-h") || args.isSet("-?") || args.isSet("-help")) { // Ouput very basic documentation cout << "Usage:" << endl; cout << "------" << endl; cout << "bedrock [-? | -h | -help]" << endl; cout << "bedrock -version" << endl; cout << "bedrock [-clean] [-v] [-db <filename>] [-serverHost <host:port>] [-nodeHost <host:port>] [-nodeName " "<name>] [-peerList <list>] [-priority <value>] [-plugins <list>] [-cacheSize <kb>] [-workerThreads <#>] " "[-versionOverride <version>]" << endl; cout << endl; cout << "Common Commands:" << endl; cout << "----------------" << endl; cout << "-?, -h, -help Outputs instructions and exits" << endl; cout << "-version Outputs version and exits" << endl; cout << "-v Enables verbose logging" << endl; cout << "-q Enables quiet logging" << endl; cout << "-clean Recreate a new database from scratch" << endl; cout << "-enableMultiWrite Enable multi-write mode (default: true)" << endl; cout << "-versionOverride <version> Pretends to be a different version when talking to peers" << endl; cout << "-db <filename> Use a database with the given name (default 'bedrock.db')" << endl; cout << "-serverHost <host:port> Listen on this host:port for cluster connections (default 'localhost:8888')" << endl; cout << "-nodeName 
<name> Name this specfic node in the cluster as indicated (defaults to '" << SGetHostName() << "')" << endl; cout << "-nodeHost <host:port> Listen on this host:port for connections from other nodes" << endl; cout << "-peerList <list> See below" << endl; cout << "-priority <value> See '-peerList Details' below (defaults to 100)" << endl; cout << "-plugins <list> Enable these plugins (defaults to 'db,jobs,cache,mysql')" << endl; cout << "-cacheSize <kb> number of KB to allocate for a page cache (defaults to 1GB)" << endl; cout << "-workerThreads <#> Number of worker threads to start (min 1, defaults to # of cores)" << endl; cout << "-queryLog <filename> Set the query log filename (default 'queryLog.csv', SIGUSR2/SIGQUIT to " "enable/disable)" << endl; cout << "-maxJournalSize <#commits> Number of commits to retain in the historical journal (default 1000000)" << endl; cout << "-synchronous <value> Set the PRAGMA schema.synchronous " "(defaults see https://sqlite.org/pragma.html#pragma_synchronous)" << endl; cout << endl; cout << "Quick Start Tips:" << endl; cout << "-----------------" << endl; cout << "In a hurry? Just run 'bedrock -clean' the first time, and it'll create a new database called " "'bedrock.db', then use all the defaults listed above. (After the first time, leave out the '-clean' " "to reuse the same database.) Once running, you can verify it's working using NetCat to manualy send " "a Ping request as follows:" << endl; cout << endl; cout << "$ bedrock -clean &" << endl; cout << "$ nc local 8888" << endl; cout << "Ping" << endl; cout << endl; cout << "200 OK" << endl; cout << endl; cout << "-peerList Details:" << endl; cout << "------------------" << endl; cout << "The -peerList parameter enables you to configure multiple Bedrock nodes into a redundant cluster. " "Bedrock supports any number of nodes: simply start each node with a comma-separated list of the " "'-nodeHost' of all other nodes. You can safely send any command to any node. 
Some best practices:" << endl; cout << endl; cout << "- Put each Bedrock node on a different server." << endl; cout << endl; cout << "- Assign each node a different priority (greater than 0). The highest priority node will be the " "'leader', which will coordinate distributed transactions." << endl; cout << endl; return 1; } // Start libstuff. Generally, we want to initialize libstuff immediately on any new thread, but we wait until after // the `fork` above has completed, as we can get strange behaviors from signal handlers across forked processes. SInitialize("main", (args.isSet("-overrideProcessName") ? args["-overrideProcessName"].c_str() : 0)); SLogLevel(LOG_INFO); if (args.isSet("-v")) { // Verbose logging SINFO("Enabling verbose logging"); SLogLevel(LOG_DEBUG); } else if (args.isSet("-q")) { // Quiet logging SLogLevel(LOG_WARNING); } // Set the defaults #define SETDEFAULT(_NAME_, _VAL_) \ do { \ if (!args.isSet(_NAME_)) \ args[_NAME_] = _VAL_; \ } while (false) SETDEFAULT("-db", "bedrock.db"); SETDEFAULT("-serverHost", "localhost:8888"); SETDEFAULT("-nodeHost", "localhost:8889"); SETDEFAULT("-commandPortPrivate", "localhost:8890"); SETDEFAULT("-controlPort", "localhost:9999"); SETDEFAULT("-nodeName", SGetHostName()); SETDEFAULT("-cacheSize", SToStr(1024 * 1024)); // 1024 * 1024KB = 1GB. SETDEFAULT("-plugins", "db,jobs,cache,mysql"); SETDEFAULT("-priority", "100"); SETDEFAULT("-maxJournalSize", "1000000"); SETDEFAULT("-queryLog", "queryLog.csv"); SETDEFAULT("-enableMultiWrite", "true"); args["-plugins"] = SComposeList(loadPlugins(args)); // Reset the database if requested if (args.isSet("-clean")) { // Remove it SDEBUG("Resetting database"); string db = args["-db"]; unlink(db.c_str()); } else if (args.isSet("-bootstrap")) { // Allow for bootstraping a node with no database file in place. 
SINFO("Loading in bootstrap mode, skipping check for database existance."); } else { // Otherwise verify the database exists SDEBUG("Verifying database exists"); SASSERT(SFileExists(args["-db"])); } // Set our soft limit to the same as our hard limit to allow for more file handles. struct rlimit limits; if (!getrlimit(RLIMIT_NOFILE, &limits)) { limits.rlim_cur = limits.rlim_max; if (setrlimit(RLIMIT_NOFILE, &limits)) { SERROR("Couldn't set FD limit"); } } else { SERROR("Couldn't get FD limit"); } // Log stack traces if we have unhandled exceptions. set_terminate(STerminateHandler); // Create our BedrockServer object so we can keep it for the life of the // program. SINFO("Starting bedrock server"); BedrockServer* _server = new BedrockServer(args); BedrockServer& server = *_server; // Keep going until someone kills it (either via TERM or Control^C) while (!(SGetSignal(SIGTERM) || SGetSignal(SIGINT))) { if (SGetSignals()) { // Log and clear any outstanding signals. SALERT("Uncaught signals (" << SGetSignalDescription() << "), ignoring."); SClearSignals(); } // Counters for seeing how long we spend in postPoll. chrono::steady_clock::duration pollCounter(0); chrono::steady_clock::duration postPollCounter(0); chrono::steady_clock::time_point start = chrono::steady_clock::now(); uint64_t nextActivity = STimeNow(); while (!server.shutdownComplete()) { if (server.shouldBackup() && server.isDetached()) { BackupDB(args["-db"]); server.setDetach(false); } // Wait and process fd_map fdm; server.prePoll(fdm); const uint64_t now = STimeNow(); auto timeBeforePoll = chrono::steady_clock::now(); S_poll(fdm, max(nextActivity, now) - now); nextActivity = STimeNow() + STIME_US_PER_S; // 1s max period auto timeAfterPoll = chrono::steady_clock::now(); server.postPoll(fdm, nextActivity); auto timeAfterPostPoll = chrono::steady_clock::now(); pollCounter += timeAfterPoll - timeBeforePoll; postPollCounter += timeAfterPostPoll - timeAfterPoll; // Every 10s, log and reset. 
if (timeAfterPostPoll > (start + 10s)) { SINFO("[performance] main poll loop timing: " << chrono::duration_cast<chrono::milliseconds>(timeAfterPostPoll - start).count() << " ms elapsed. " << chrono::duration_cast<chrono::milliseconds>(pollCounter).count() << " ms in poll. " << chrono::duration_cast<chrono::milliseconds>(postPollCounter).count() << " ms in postPoll."); pollCounter = chrono::microseconds::zero(); postPollCounter = chrono::microseconds::zero(); start = timeAfterPostPoll; } } if (server.shutdownWhileDetached) { // We need to actually shut down here. break; } } SINFO("Deleting BedrockServer"); delete _server; SINFO("BedrockServer deleted"); // Finished with our signal handler. SStopSignalThread(); // All done SINFO("Graceful process shutdown complete"); return 0; }<|fim▁end|>
if(!lib) {
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from tablelist import *<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#pylint: disable=C0301, C0103, W0212, W0401 <|fim▁hole|> :platform: Unix :synopsis: RADICAL-Pilot is a distributed Pilot-Job framework. .. moduleauthor:: Ole Weidner <[email protected]> """ __copyright__ = "Copyright 2013-2014, http://radical.rutgers.edu" __license__ = "MIT" # ------------------------------------------------------------------------------ # Scheduler name constant from types import * from states import * from logentry import * from scheduler import * # ------------------------------------------------------------------------------ # from url import Url from exceptions import * from session import Session from context import Context from unit_manager import UnitManager from compute_unit import ComputeUnit from compute_unit_description import ComputeUnitDescription from pilot_manager import PilotManager from compute_pilot import ComputePilot from compute_pilot_description import ComputePilotDescription from resource_config import ResourceConfig from staging_directives import COPY, LINK, MOVE, TRANSFER, SKIP_FAILED, CREATE_PARENTS # ------------------------------------------------------------------------------ # from utils.logger import logger import os import radical.utils as ru import radical.utils.logger as rul pwd = os.path.dirname (__file__) root = "%s/.." % pwd version, version_detail, version_branch, sdist_name, sdist_path = ru.get_version ([root, pwd]) # FIXME: the logger init will require a 'classical' ini based config, which is # different from the json based config we use now. May need updating once the # radical configuration system has changed to json _logger = rul.logger.getLogger ('radical.pilot') _logger.info ('radical.pilot version: %s' % version_detail) # ------------------------------------------------------------------------------<|fim▁end|>
""" .. module:: pilot
<|file_name|>_policy_tracked_resources_operations.py<|end_file_name|><|fim▁begin|># coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar import warnings from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] class PolicyTrackedResourcesOperations: """PolicyTrackedResourcesOperations async operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~azure.mgmt.policyinsights.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. 
:param deserializer: An object model deserializer.<|fim▁hole|> models = _models def __init__(self, client, config, serializer, deserializer) -> None: self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config def list_query_results_for_management_group( self, management_group_name: str, query_options: Optional["_models.QueryOptions"] = None, **kwargs: Any ) -> AsyncIterable["_models.PolicyTrackedResourcesQueryResults"]: """Queries policy tracked resources under the management group. :param management_group_name: Management group name. :type management_group_name: str :param query_options: Parameter group. :type query_options: ~azure.mgmt.policyinsights.models.QueryOptions :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either PolicyTrackedResourcesQueryResults or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.policyinsights.models.PolicyTrackedResourcesQueryResults] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyTrackedResourcesQueryResults"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) _top = None _filter = None if query_options is not None: _top = query_options.top _filter = query_options.filter management_groups_namespace = "Microsoft.Management" policy_tracked_resources_resource = "default" api_version = "2018-07-01-preview" accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.list_query_results_for_management_group.metadata['url'] # type: ignore path_format_arguments = { 'managementGroupsNamespace': 
self._serialize.url("management_groups_namespace", management_groups_namespace, 'str'), 'managementGroupName': self._serialize.url("management_group_name", management_group_name, 'str'), 'policyTrackedResourcesResource': self._serialize.url("policy_tracked_resources_resource", policy_tracked_resources_resource, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] if _top is not None: query_parameters['$top'] = self._serialize.query("top", _top, 'int', minimum=0) if _filter is not None: query_parameters['$filter'] = self._serialize.query("filter", _filter, 'str') query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') request = self._client.post(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] request = self._client.get(url, query_parameters, header_parameters) return request async def extract_data(pipeline_response): deserialized = self._deserialize('PolicyTrackedResourcesQueryResults', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: error = self._deserialize.failsafe_deserialize(_models.QueryFailure, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response return AsyncItemPaged( get_next, extract_data ) list_query_results_for_management_group.metadata = {'url': 
'/providers/{managementGroupsNamespace}/managementGroups/{managementGroupName}/providers/Microsoft.PolicyInsights/policyTrackedResources/{policyTrackedResourcesResource}/queryResults'} # type: ignore def list_query_results_for_subscription( self, query_options: Optional["_models.QueryOptions"] = None, **kwargs: Any ) -> AsyncIterable["_models.PolicyTrackedResourcesQueryResults"]: """Queries policy tracked resources under the subscription. :param query_options: Parameter group. :type query_options: ~azure.mgmt.policyinsights.models.QueryOptions :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either PolicyTrackedResourcesQueryResults or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.policyinsights.models.PolicyTrackedResourcesQueryResults] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyTrackedResourcesQueryResults"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) _top = None _filter = None if query_options is not None: _top = query_options.top _filter = query_options.filter policy_tracked_resources_resource = "default" api_version = "2018-07-01-preview" accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.list_query_results_for_subscription.metadata['url'] # type: ignore path_format_arguments = { 'policyTrackedResourcesResource': self._serialize.url("policy_tracked_resources_resource", policy_tracked_resources_resource, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, 
**path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] if _top is not None: query_parameters['$top'] = self._serialize.query("top", _top, 'int', minimum=0) if _filter is not None: query_parameters['$filter'] = self._serialize.query("filter", _filter, 'str') query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') request = self._client.post(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] request = self._client.get(url, query_parameters, header_parameters) return request async def extract_data(pipeline_response): deserialized = self._deserialize('PolicyTrackedResourcesQueryResults', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: error = self._deserialize.failsafe_deserialize(_models.QueryFailure, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response return AsyncItemPaged( get_next, extract_data ) list_query_results_for_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.PolicyInsights/policyTrackedResources/{policyTrackedResourcesResource}/queryResults'} # type: ignore def list_query_results_for_resource_group( self, resource_group_name: str, query_options: Optional["_models.QueryOptions"] = None, **kwargs: Any ) -> AsyncIterable["_models.PolicyTrackedResourcesQueryResults"]: """Queries policy tracked resources under the resource group. :param resource_group_name: Resource group name. 
:type resource_group_name: str :param query_options: Parameter group. :type query_options: ~azure.mgmt.policyinsights.models.QueryOptions :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either PolicyTrackedResourcesQueryResults or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.policyinsights.models.PolicyTrackedResourcesQueryResults] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyTrackedResourcesQueryResults"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) _top = None _filter = None if query_options is not None: _top = query_options.top _filter = query_options.filter policy_tracked_resources_resource = "default" api_version = "2018-07-01-preview" accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.list_query_results_for_resource_group.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'policyTrackedResourcesResource': self._serialize.url("policy_tracked_resources_resource", policy_tracked_resources_resource, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] if _top is not None: query_parameters['$top'] = self._serialize.query("top", _top, 'int', minimum=0) if _filter is not None: query_parameters['$filter'] = self._serialize.query("filter", _filter, 'str') query_parameters['api-version'] = 
self._serialize.query("api_version", api_version, 'str') request = self._client.post(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] request = self._client.get(url, query_parameters, header_parameters) return request async def extract_data(pipeline_response): deserialized = self._deserialize('PolicyTrackedResourcesQueryResults', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: error = self._deserialize.failsafe_deserialize(_models.QueryFailure, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response return AsyncItemPaged( get_next, extract_data ) list_query_results_for_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.PolicyInsights/policyTrackedResources/{policyTrackedResourcesResource}/queryResults'} # type: ignore def list_query_results_for_resource( self, resource_id: str, query_options: Optional["_models.QueryOptions"] = None, **kwargs: Any ) -> AsyncIterable["_models.PolicyTrackedResourcesQueryResults"]: """Queries policy tracked resources under the resource. :param resource_id: Resource ID. :type resource_id: str :param query_options: Parameter group. 
:type query_options: ~azure.mgmt.policyinsights.models.QueryOptions :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either PolicyTrackedResourcesQueryResults or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.policyinsights.models.PolicyTrackedResourcesQueryResults] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.PolicyTrackedResourcesQueryResults"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) _top = None _filter = None if query_options is not None: _top = query_options.top _filter = query_options.filter policy_tracked_resources_resource = "default" api_version = "2018-07-01-preview" accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.list_query_results_for_resource.metadata['url'] # type: ignore path_format_arguments = { 'resourceId': self._serialize.url("resource_id", resource_id, 'str', skip_quote=True), 'policyTrackedResourcesResource': self._serialize.url("policy_tracked_resources_resource", policy_tracked_resources_resource, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] if _top is not None: query_parameters['$top'] = self._serialize.query("top", _top, 'int', minimum=0) if _filter is not None: query_parameters['$filter'] = self._serialize.query("filter", _filter, 'str') query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') request = self._client.post(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] 
request = self._client.get(url, query_parameters, header_parameters) return request async def extract_data(pipeline_response): deserialized = self._deserialize('PolicyTrackedResourcesQueryResults', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: error = self._deserialize.failsafe_deserialize(_models.QueryFailure, response) map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat) return pipeline_response return AsyncItemPaged( get_next, extract_data ) list_query_results_for_resource.metadata = {'url': '/{resourceId}/providers/Microsoft.PolicyInsights/policyTrackedResources/{policyTrackedResourcesResource}/queryResults'} # type: ignore<|fim▁end|>
"""
<|file_name|>test.js<|end_file_name|><|fim▁begin|>'use strict'; var spawn = require('child_process').spawn; var font2svg = require('../'); var fs = require('graceful-fs'); var noop = require('nop'); var rimraf = require('rimraf'); var test = require('tape'); var xml2js = require('xml2js'); var parseXML = xml2js.parseString; var fontPath = 'test/SourceHanSansJP-Normal.otf'; var fontBuffer = fs.readFileSync(fontPath); var pkg = require('../package.json'); rimraf.sync('test/tmp'); test('font2svg()', function(t) { t.plan(22); font2svg(fontBuffer, {include: 'Hello,☆世界★(^_^)b!'}, function(err, buf) { t.error(err, 'should create a font buffer when `include` option is a string.'); parseXML(buf.toString(), function(err, result) { t.error(err, 'should create a valid SVG buffer.'); var glyphs = result.svg.font[0].glyph; var unicodes = glyphs.map(function(glyph) { return glyph.$.unicode; }); t.deepEqual( unicodes, [ String.fromCharCode('57344'), '!', '(', ')', ',', 'H', '^', '_', 'b', 'e', 'l', 'o', '★', '☆', '世', '界' ], 'should create glyphs including private use area automatically.' 
); t.strictEqual(glyphs[0].$.d, undefined, 'should place `.notdef` at the first glyph'); }); }); font2svg(fontBuffer, { include: ['\u0000', '\ufffe', '\uffff'], encoding: 'utf8' }, function(err, str) { t.error(err, 'should create a font buffer when `include` option is an array.'); parseXML(str, function(err, result) { t.error(err, 'should create a valid SVG string when the encoding is utf8.'); var glyphs = result.svg.font[0].glyph; t.equal(glyphs.length, 1, 'should ignore glyphs which are not included in CMap.'); }); }); font2svg(fontBuffer, {encoding: 'base64'}, function(err, str) { t.error(err, 'should create a font buffer even if `include` option is not specified.'); parseXML(new Buffer(str, 'base64').toString(), function(err, result) { t.error(err, 'should encode the result according to `encoding` option.'); t.equal( result.svg.font[0].glyph.length, 1, 'should create a SVG including at least one glyph.' ); }); }); font2svg(fontBuffer, null, function(err) { t.error(err, 'should not throw errors even if `include` option is falsy value.'); }); font2svg(fontBuffer, {include: 1}, function(err) { t.error(err, 'should not throw errors even if `include` option is not an array or a string.'); }); font2svg(fontBuffer, {include: 'a', maxBuffer: 1}, function(err) { t.equal(err.message, 'stdout maxBuffer exceeded.', 'should pass an error of child_process.'); }); font2svg(fontBuffer, { include: 'foo', fontFaceAttr: { 'font-weight': 'bold', 'underline-position': '-100' } }, function(err, buf) { t.error(err, 'should accept `fontFaceAttr` option.'); parseXML(buf.toString(), function(err, result) { t.error(err, 'should create a valid SVG buffer when `fontFaceAttr` option is enabled.'); var fontFace = result.svg.font[0]['font-face'][0]; t.equal( fontFace.$['font-weight'], 'bold', 'should change the property of the `font-face` element, using `fontFaceAttr` option.' 
); t.equal( fontFace.$['underline-position'], '-100', 'should change the property of the `font-face` element, using `fontFaceAttr` option.' ); }); }); t.throws( font2svg.bind(null, new Buffer('foo'), noop), /out/, 'should throw an error when the buffer doesn\'t represent a font.' ); t.throws( font2svg.bind(null, 'foo', {include: 'a'}, noop), /is not a buffer/, 'should throw an error when the first argument is not a buffer.' ); t.throws( font2svg.bind(null, fontBuffer, {include: 'a'}, [noop]), /TypeError/, 'should throw a type error when the last argument is not a function.' ); t.throws( font2svg.bind(null, fontBuffer, {fontFaceAttr: 'bold'}, noop), /TypeError/, 'should throw a type error when the `fontFaceAttr` is not an object.' ); t.throws( font2svg.bind(null, fontBuffer, { fontFaceAttr: {foo: 'bar'} }, noop), /foo is not a valid attribute name/, 'should throw an error when the `fontFaceAttr` has an invalid property..' ); }); test('"font2svg" command inside a TTY context', function(t) { t.plan(20); var cmd = function(args) { var tmpCp = spawn('node', [pkg.bin].concat(args), { stdio: [process.stdin, null, null] }); tmpCp.stdout.setEncoding('utf8'); tmpCp.stderr.setEncoding('utf8'); return tmpCp; }; cmd([fontPath]) .stdout.on('data', function(data) { t.ok(/<\/svg>/.test(data), 'should print font data to stdout.'); }); cmd([fontPath, 'test/tmp/foo.svg']).on('close', function() { fs.exists('test/tmp/foo.svg', function(result) { t.ok(result, 'should create a font file.'); }); }); <|fim▁hole|> }); cmd([fontPath, '--in', '123']) .stdout.on('data', function(data) { t.ok(/<\/svg>/.test(data), 'should use --in flag as an alias of --include.'); }); cmd([fontPath, '-i', 'あ']) .stdout.on('data', function(data) { t.ok(/<\/svg>/.test(data), 'should use -i flag as an alias of --include.'); }); cmd([fontPath, '-g', '亜']) .stdout.on('data', function(data) { t.ok(/<\/svg>/.test(data), 'should use -g flag as an alias of --include.'); }); cmd([fontPath, '--font-weight', 'bold']) 
.stdout.on('data', function(data) { t.ok( /font-weight="bold"/.test(data), 'should set the property of font-face element, using property name flag.' ); }); cmd(['--help']) .stdout.on('data', function(data) { t.ok(/Usage/.test(data), 'should print usage information with --help flag.'); }); cmd(['-h']) .stdout.on('data', function(data) { t.ok(/Usage/.test(data), 'should use -h flag as an alias of --help.'); }); cmd(['--version']) .stdout.on('data', function(data) { t.equal(data, pkg.version + '\n', 'should print version with --version flag.'); }); cmd(['-v']) .stdout.on('data', function(data) { t.equal(data, pkg.version + '\n', 'should use -v as an alias of --version.'); }); cmd([]) .stdout.on('data', function(data) { t.ok(/Usage/.test(data), 'should print usage information when it takes no arguments.'); }); var unsupportedErr = ''; cmd(['cli.js']) .on('close', function(code) { t.notEqual(code, 0, 'should fail when it cannot parse the input.'); t.ok( /Unsupported/.test(unsupportedErr), 'should print `Unsupported OpenType` error message to stderr.' ); }) .stderr.on('data', function(data) { unsupportedErr += data; }); var invalidAttrErr = ''; cmd([fontPath, '--font-eight', 'bold', '--font-smile']) .on('close', function(code) { t.notEqual(code, 0, 'should fail when it takes invalid flags.'); t.ok( /font-eight is not a valid attribute name/.test(invalidAttrErr), 'should print `invalid attribute` error message to stderr.' 
); }) .stderr.on('data', function(data) { invalidAttrErr += data; }); var enoentErr = ''; cmd(['foo']) .on('close', function(code) { t.notEqual(code, 0, 'should fail when the file doesn\'t exist.'); t.ok(/ENOENT/.test(enoentErr), 'should print ENOENT error message to stderr.'); }) .stderr.on('data', function(data) { enoentErr += data; }); var eisdirErr = ''; cmd([fontPath, 'node_modules']) .on('close', function(code) { t.notEqual(code, 0, 'should fail when a directory exists in the destination path.'); t.ok(/EISDIR/.test(eisdirErr), 'should print EISDIR error message to stderr.'); }) .stderr.on('data', function(data) { eisdirErr += data; }); }); test('"font2svg" command outside a TTY context', function(t) { t.plan(4); var cmd = function(args) { var tmpCp = spawn('node', [pkg.bin].concat(args), { stdio: ['pipe', null, null] }); tmpCp.stdout.setEncoding('utf8'); tmpCp.stderr.setEncoding('utf8'); return tmpCp; }; var cp = cmd([]); cp.stdout.on('data', function(data) { t.ok(/<\/svg>/.test(data), 'should parse stdin and print SVG data.'); }); cp.stdin.end(fontBuffer); cmd(['test/tmp/bar.svg', '--include', 'ア']) .on('close', function() { fs.exists('test/tmp/bar.svg', function(result) { t.ok(result, 'should write a SVG file.'); }); }) .stdin.end(fontBuffer); var err = ''; var cpErr = cmd([]); cpErr.on('close', function(code) { t.notEqual(code, 0, 'should fail when stdin receives unsupported file buffer.'); t.ok( /Unsupported/.test(err), 'should print an error when stdin receives unsupported file buffer.' ); }); cpErr.stderr.on('data', function(output) { err += output; }); cpErr.stdin.end(new Buffer('invalid data')); });<|fim▁end|>
cmd([fontPath, '--include', 'abc']) .stdout.on('data', function(data) { t.ok(/<\/svg>/.test(data), 'should accept --include flag.');
<|file_name|>package.py<|end_file_name|><|fim▁begin|># Copyright 2013-2021 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. #<|fim▁hole|> class RIlluminahumanmethylation450kannoIlmn12Hg19(RPackage): """Annotation for Illumina's 450k methylation arrays Manifests and annotation for Illumina's 450k array data.""" # This package is available via bioconductor but there is no available git # repository. homepage = "https://bioconductor.org/packages/release/data/annotation/html/IlluminaHumanMethylation450kanno.ilmn12.hg19.html" url = "https://bioconductor.org/packages/release/data/annotation/src/contrib/IlluminaHumanMethylation450kanno.ilmn12.hg19_0.6.0.tar.gz" version('0.6.0', sha256='249b8fd62add3c95b5047b597cff0868d26a98862a47cebd656edcd175a73b15') depends_on('[email protected]:', type=('build', 'run')) depends_on('[email protected]:', type=('build', 'run'))<|fim▁end|>
# SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import *
<|file_name|>test_flake8.py<|end_file_name|><|fim▁begin|># -*- coding: utf8 -*- from __future__ import unicode_literals import unittest import os import sys <|fim▁hole|> unicode = str if sys.version_info[:2] == (2, 6): # Monkeypatch to make tests work on 2.6 def assert_less(first, second, msg=None): assert first > second unittest.TestCase.assertLess = assert_less class TestCodeComplexity(unittest.TestCase): def test_flake8_conformance(self): flake8style = engine.get_style_guide( ignore=['E501'], max_complexity=6 ) directory = 'flask_rollbar' self.assertEqual(os.path.isdir(directory), True, "Invalid test directory '%s'. You need to update test_flake8.py" % directory) # Get all the files to check files = [] for dirpath, dirnames, filenames in os.walk(directory): for filename in [f for f in filenames if f.endswith(".py")]: files += [os.path.join(dirpath, filename)] result = flake8style.check_files(files) self.assertEqual(result.total_errors, 0, "Code found to be too complex or failing PEP8") if __name__ == '__main__': unittest.main()<|fim▁end|>
from flake8.api import legacy as engine if sys.version_info[0] == 3:
<|file_name|>flattened_product.py<|end_file_name|><|fim▁begin|># coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from .resource import Resource class FlattenedProduct(Resource): """FlattenedProduct :param id: Resource Id<|fim▁hole|> :type tags: dict :param location: Resource Location :type location: str :param name: Resource Name :type name: str :param pname: :type pname: str :param flattened_product_type: :type flattened_product_type: str :param provisioning_state_values: Possible values include: 'Succeeded', 'Failed', 'canceled', 'Accepted', 'Creating', 'Created', 'Updating', 'Updated', 'Deleting', 'Deleted', 'OK' :type provisioning_state_values: str :param provisioning_state: :type provisioning_state: str """ _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'location': {'key': 'location', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'pname': {'key': 'properties.pname', 'type': 'str'}, 'flattened_product_type': {'key': 'properties.type', 'type': 'str'}, 'provisioning_state_values': {'key': 'properties.provisioningStateValues', 'type': 'str'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, } def __init__(self, id=None, type=None, tags=None, location=None, name=None, pname=None, flattened_product_type=None, provisioning_state_values=None, provisioning_state=None, **kwargs): super(FlattenedProduct, self).__init__(id=id, type=type, tags=tags, location=location, name=name, **kwargs) self.pname = pname 
self.flattened_product_type = flattened_product_type self.provisioning_state_values = provisioning_state_values self.provisioning_state = provisioning_state<|fim▁end|>
:type id: str :param type: Resource Type :type type: str :param tags: